Coding datasets

Datasets for high quality small LM model pre-training. 3 items.
pjslauta/hover-dyn-dns

import os
import json
import datetime
import time
import argparse
import urllib3
from totp import totp # Import the totp function from totp.py
import certifi
from http.cookies import SimpleCookie
import logging
from logging.handlers import TimedRotatingFileHandler
import platform
import socket
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(dname)
#step 1: visit https://www.hover.com/signin
#step 2: extract cookies from response
#step 3: save cookies to cookies.json
#step 4: post https://www.hover.com/signin/auth.json with cookies and form encoded username and password
#step 5: if response is 200, extract cookies from response and save to cookies.json
#step 6: post https://www.hover.com/signin/auth2.json with cookies and form encoded 2fa code
#step 7: if response is 200, extract cookies from response and save to cookies.json
#step 8: get https://www.hover.com/control_panel/dns/{dnsid} with cookies and form encoded ipaddress
#step 9: if response is 200, extract cookies from response and save to cookies.json
CONFIG_FILE = "./config.json"
IP_FILE = "./IP"
LOG_FILE = "./hover-update.log"
COOKIES_FILE = "./cookie.json"
SIGNIN_URL = "https://www.hover.com/signin"
AUTH1URL = "https://www.hover.com/signin/auth.json"
AUTH2URL = "https://www.hover.com/signin/auth2.json"
DNS_UPDATE_URL = "https://www.hover.com/api/control_panel/dns/"
DOMAIN_CHECK_URL = "https://www.hover.com/api/control_panel/domains"
def get_log_retention_days():
if os.path.isfile(CONFIG_FILE):
try:
with open(CONFIG_FILE, 'r') as file:
config = json.load(file)
return int(config.get('logRetentionMaxDays', 7))
except Exception:
return 7
return 7
def remove_endpoint_logs():
for fname in ["hover-signin.log", "hover-auth.log", "hover-auth2.log", "hover-dns.log"]:
try:
os.remove(fname)
except FileNotFoundError:
pass
def get_log_level():
if os.path.isfile(CONFIG_FILE):
try:
with open(CONFIG_FILE, 'r') as file:
config = json.load(file)
return config.get('loglevel', 'INFO').upper()
except Exception:
return 'INFO'
return 'INFO'
def get_use_endpoint_logs():
if os.path.isfile(CONFIG_FILE):
try:
with open(CONFIG_FILE, 'r') as file:
config = json.load(file)
return config.get('useEndpointLogs', False)
except Exception:
return False
return False
# Configure main logging with daily rotation
def setup_main_logging():
global console
log_retention_days = get_log_retention_days()
    log_level = get_log_level()
    if log_level not in {'CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'}:
        log_level = 'INFO'
handler = TimedRotatingFileHandler(LOG_FILE, when='midnight', backupCount=log_retention_days)
handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
root_logger = logging.getLogger()
root_logger.handlers = []
root_logger.addHandler(handler)
# Also log to console
    console = logging.StreamHandler()
    console.setLevel(log_level)
    formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
    console.setFormatter(formatter)
    root_logger.addHandler(console)
    root_logger.setLevel(log_level)
setup_main_logging()
def load_config(config_file):
"""
Loads the configuration from a JSON file.
"""
try:
with open(config_file, 'r') as file:
return json.load(file)
except json.JSONDecodeError as e:
logging.error(f"Error loading config file: {e}")
exit(1)
def get_external_ip(http):
"""
Retrieves the external IP address using the ipify API.
"""
response = http.request("GET", "https://api.ipify.org")
    if response.status != 200:
logging.error(f"Error retrieving external IP: {response.status}")
exit(1)
return response.data.decode('utf-8')
def get_dns_ip(domain):
"""
Retrieves the IP address for a given domain using socket.gethostbyname (cross-platform).
"""
try:
return socket.gethostbyname(domain)
except Exception as e:
logging.error(f"DNS lookup failed for {domain}: {e}")
return None
### Returns SimpleCookie object representing the cookies
def init_session(http):
"""
Initializes a session with Hover to retrieve cookies and headers.
Returns (cookies, headers) tuple.
"""
response = http.request("GET","https://www.hover.com/signin")
logging.info(f"Init session response status code: {response.status}")
    if response.status != 200:
logging.error(f"Error initializing session: {response.status}")
exit(1)
logging.debug(f"Init session headers: {response.headers}")
if 'set-cookie' not in response.headers.keys():
logging.error("No Set-Cookie header found in response.")
return None, None
logging.debug(f"Init session cookies: {response.headers['Set-Cookie']}")
# Extract headers for reuse, excluding cookies
session_headers = {k: v for k, v in response.headers.items() if k.lower() not in ['set-cookie', 'cookie']}
return SimpleCookie(response.headers['Set-Cookie']), session_headers
def login(http, username, password, cookies):
"""
Logs in to Hover with the provided username and password.
"""
login_payload = {
"username": username,
"password": password,
"remember": "true",
"token": None
}
cookie_header = "; ".join([f"{k}={v.value}" for k, v in cookies.items()])
# Prepare headers for POST
headers = dict(session_headers) if session_headers else {}
body = json.dumps(login_payload).encode('utf-8')
update_headers_case_insensitive(headers, {
"cookie": cookie_header,
"content-type": "application/json;charset=UTF-8",
"content-length": str(len(body)),
"accept-encoding": "identity",
"user-agent": user_agent
})
filter_allowed_headers(headers)
logging.debug(f"Cookie header: {cookie_header}")
response=None
response = http.request(
"POST",
AUTH1URL,
body=body,
headers=headers
)
logging.debug(f"Login URL: {AUTH1URL}")
logging.debug(f"Login Headers: {headers}")
logging.debug(f"Payload: {body}")
logging.info(f"Login response status code: {response.status}")
logging.debug(f"Cookie: {cookies}")
return response
def login2fa(http, totp_code, cookies):
"""
Performs 2FA login with the provided TOTP code.
"""
login_payload = {
"code": totp_code
}
# Prepare headers for POST
headers = dict(session_headers) if session_headers else {}
body = json.dumps(login_payload).encode('utf-8')
update_headers_case_insensitive(headers, {
"cookie": "; ".join([f"{k}={v.value}" for k, v in cookies.items()]),
"content-type": "application/json;charset=UTF-8",
"content-length": str(len(body)),
"accept-encoding": "identity",
"user-agent": user_agent
})
filter_allowed_headers(headers)
response = http.request("POST", AUTH2URL, body=body, headers=headers)
logging.debug(f"Payload: {body}")
logging.info(f"Login 2FA response status code: {response.status}")
logging.debug(f"Login 2FA response content: {response.data}")
logging.debug(f"Cookies: {cookies}")
return response
def update_dns_record(http, dnsid, ipaddress, cookies, session_headers, domain_name="default_domain"):
"""
Updates the DNS record with the provided IP address.
"""
# Build the JSON payload as required
payload = {
"domain": {
"id": f"domain-{domain_name}",
"dns_records": [
{
"id": f"{dnsid}"
}
]
},
"fields": {
"content": ipaddress
}
}
encoded_data = json.dumps(payload).encode('utf-8')
cookie_header = "; ".join([f"{k}={v.value}" for k, v in cookies.items()])
# Prepare headers for POST
headers = dict(session_headers)
update_headers_case_insensitive(headers, {
"cookie": cookie_header,
"content-type": "application/json;charset=UTF-8",
"content-length": str(len(encoded_data)),
"accept-encoding": "identity",
"user-agent": user_agent
})
    filter_allowed_headers(headers)
    logging.debug(f"Cookie header: {cookie_header}")
response = http.request("PUT", DNS_UPDATE_URL, body=encoded_data, headers=headers)
logging.debug(f"DNS URL: {DNS_UPDATE_URL}")
logging.info(f"DNS update response status code: {response.status}")
logging.debug(f"DNS update response content: {response.data}")
return response
def save_cookies(cookies):
"""
Saves cookies (as a key-value dictionary) to a JSON file.
"""
cookies_dict = {key: morsel.value for key, morsel in cookies.items()}
expires_dict = {key: morsel['expires'] for key, morsel in cookies.items() if morsel['expires']}
with open(COOKIES_FILE, 'w') as file:
json.dump({'cookies': cookies_dict, 'expires': expires_dict}, file)
logging.info(f"Cookies saved to {COOKIES_FILE}")
def load_cookies():
"""
Loads cookies from a JSON file as a SimpleCookie object and checks for expiration.
"""
if os.path.isfile(COOKIES_FILE):
with open(COOKIES_FILE, 'r') as file:
data = json.load(file)
cookies_dict = data.get('cookies', {})
expires_dict = data.get('expires', {})
cookie = SimpleCookie()
for key, value in cookies_dict.items():
cookie[key] = value
if key in expires_dict:
cookie[key]['expires'] = expires_dict[key]
# Check expiration
        now = datetime.datetime.now(datetime.timezone.utc)
for key, morsel in cookie.items():
expires = morsel['expires']
if expires:
try:
                    expires_dt = datetime.datetime.strptime(
                        expires, '%a, %d-%b-%Y %H:%M:%S GMT'
                    ).replace(tzinfo=datetime.timezone.utc)
                    if expires_dt < now:
logging.info(f"Cookie {key} expired at {expires_dt}, will not use cached cookies.")
return None
except Exception:
continue
logging.info(f"Loaded cookies from {COOKIES_FILE}")
return cookie
return None
def cookies_valid(http, cookies):
"""
Checks if the cookies are still valid by making a request to an authenticated endpoint.
"""
response = http.request("GET",DOMAIN_CHECK_URL, headers={"Cookie": "; ".join([f"{k}={v}" for k, v in cookies.items()])})
if response.status == 200:
logging.info("Cookies are valid.")
return True
else:
logging.info("Cookies are invalid.")
return False
def get_dns_entries(http, cookies):
"""
Retrieves the DNS entries for the account using urllib3 and logs details.
"""
headers = {"Cookie": "; ".join([f"{k}={v.value}" for k, v in cookies.items()]), "user-agent": user_agent}
remove_headers_case_insensitive(headers, ["transfer-encoding"])
url = DOMAIN_CHECK_URL
response = http.request("GET", url, headers=headers)
if response.status == 200:
try:
dns_entries = json.loads(response.data)
for entry in dns_entries.get('domains', []):
logging.info('================')
logging.info(f"Domain: {entry['name']}, DNSID: {entry['id']}")
sub_url = f"https://www.hover.com/api/control_panel/domains/{entry['name']}"
sub_response = http.request("GET", sub_url, headers=headers)
if sub_response.status == 200:
logging.info("Subdomains:")
subdomains = json.loads(sub_response.data)
for e in subdomains['domain']['dns']:
logging.info(f"Domain: {e['name']}, DNSID: {e['id']}, IP: {e.get('content', 'N/A')}")
else:
logging.info(f"No subdomains found for {sub_url}")
except Exception as ex:
logging.error(f"Failed to parse DNS entries: {ex}")
exit(4)
else:
logging.error(f"Failed to retrieve DNS entries. Status code: {response.status}, Response text: {response.data}")
exit(4)
def doLogin(http, cookies=None):
if cookies is None:
cookies = load_cookies()
session_headers = None
if not cookies:
logging.info("Cookies are invalid or not found. Initializing session and logging in...")
cookies, session_headers = init_session(http)
if not cookies:
logging.error("Failed to initialize session. Exiting...")
return None, None
if session_headers is None:
session_headers = {}
login_response = login(http, config.get('username'), config.get('password'), cookies)
    set_cookie_headers = (
        login_response.headers.get_all('Set-Cookie')
        if hasattr(login_response.headers, 'get_all')
        else login_response.headers.get('Set-Cookie', '').split(',')
        if login_response.headers.get('Set-Cookie')
        else []
    )
for set_cookie in set_cookie_headers:
if set_cookie.strip():
cookies.load(set_cookie)
save_cookies(cookies)
try:
login_response_json = login_response.json()
login_success = login_response_json.get('succeeded')
if login_response_json.get('status') == 'need_2fa':
logging.info("2FA required. Performing 2FA login.")
totp_code = totp(config.get('totp_secret'))
logging.info(f"CODE: {totp_code}")
login2fa_response = login2fa(http, totp_code, cookies)
            set_cookie2_headers = (
                login2fa_response.headers.get_all('Set-Cookie')
                if hasattr(login2fa_response.headers, 'get_all')
                else login2fa_response.headers.get('Set-Cookie', '').split(',')
                if login2fa_response.headers.get('Set-Cookie')
                else []
            )
for set_cookie2 in set_cookie2_headers:
if set_cookie2.strip():
cookies.load(set_cookie2)
save_cookies(cookies)
login2fa_response_json = login2fa_response.json()
login_success = login2fa_response_json.get('succeeded')
else:
set_cookie = login_response.headers.get('Set-Cookie')
if set_cookie:
cookies.load(set_cookie)
except json.JSONDecodeError:
logging.error(f"Login response is not in JSON format. Status code: {login_response.status}, Response text: {login_response.body.decode()}")
return None, None
if 'hoverauth' not in cookies.keys():
logging.error("Hover authentication cookie not found. Login failed.")
return None, None
return cookies, session_headers
def get_domains(http, cookies):
"""
Retrieves the domains for the account using urllib3 and logs details.
"""
if "hoverauth" not in cookies.keys():
logging.error("Hover authentication cookie not found. Running login...")
        cookies, _ = doLogin(http, cookies)
        if not cookies:
            logging.error("Login failed. Exiting...")
            exit(1)
headers = {"Cookie": "; ".join([f"{k}={v.value}" for k, v in cookies.items()]), "user-agent": user_agent}
filter_allowed_headers(headers)
url = DOMAIN_CHECK_URL
response = http.request("GET", url, headers=headers)
if response.status == 200:
try:
domains = json.loads(response.data)
for domain in domains.get('domains', []):
print(f"Domain: {domain['name']}, DNSID: {domain['id']}\n")
except Exception as ex:
logging.error(f"Failed to parse domains: {ex}")
exit(4)
else:
logging.error(f"Failed to retrieve domains. Status code: {response.status}, Response text: {response.data}")
exit(4)
# --- Refactored HoverSession class ---
class HoverSession:
def __init__(self, config, http):
self.config = config
self.http = http
self.cookies = None
self.session_headers = {}
self.user_agent = config.get('userAgent', 'Chromium')
def load_cookies(self):
if os.path.isfile(COOKIES_FILE):
with open(COOKIES_FILE, 'r') as file:
data = json.load(file)
cookies_dict = data.get('cookies', {})
expires_dict = data.get('expires', {})
cookie = SimpleCookie()
for key, value in cookies_dict.items():
cookie[key] = value
if key in expires_dict:
cookie[key]['expires'] = expires_dict[key]
            now = datetime.datetime.now(datetime.timezone.utc)
for key, morsel in cookie.items():
expires = morsel['expires']
if expires:
try:
                        expires_dt = datetime.datetime.strptime(
                            expires, '%a, %d-%b-%Y %H:%M:%S GMT'
                        ).replace(tzinfo=datetime.timezone.utc)
                        if expires_dt < now:
logging.info(f"Cookie {key} expired at {expires_dt}, will not use cached cookies.")
return None
except Exception:
continue
logging.info(f"Loaded cookies from {COOKIES_FILE}")
return cookie
return None
def save_cookies(self):
cookies_dict = {key: morsel.value for key, morsel in self.cookies.items()}
expires_dict = {key: morsel['expires'] for key, morsel in self.cookies.items() if morsel['expires']}
with open(COOKIES_FILE, 'w') as file:
json.dump({'cookies': cookies_dict, 'expires': expires_dict}, file)
logging.info(f"Cookies saved to {COOKIES_FILE}")
def init_session(self):
response = self.http.request("GET", SIGIN_URL)
logging.info(f"Init session response status code: {response.status}")
if response.status != 200:
logging.error(f"Error initializing session: {response.status}")
exit(1)
if 'set-cookie' not in response.headers.keys():
logging.error("No Set-Cookie header found in response.")
return None, None
self.session_headers = {k: v for k, v in response.headers.items() if k.lower() not in ['set-cookie', 'cookie']}
self.cookies = SimpleCookie(response.headers['Set-Cookie'])
return self.cookies, self.session_headers
def login(self):
login_payload = {
"username": self.config.get('username'),
"password": self.config.get('password'),
"remember": "true",
"token": None
}
cookie_header = "; ".join([f"{k}={v.value}" for k, v in self.cookies.items()])
        headers = dict(self.session_headers)
        headers["Cookie"] = cookie_header
        headers["user-agent"] = self.user_agent
body = json.dumps(login_payload).encode('utf-8')
response = self.http.request("POST", AUTH1URL, body=body, headers=headers)
logging.debug(f"Login URL: {AUTH1URL}")
logging.debug(f"Login Headers: {headers}")
logging.debug(f"Payload: {body}")
logging.info(f"Login response status code: {response.status}")
logging.debug(f"Cookie: {self.cookies}")
self._update_cookies_from_response(response)
return response
def login2fa(self, totp_code):
login_payload = {"code": totp_code}
        headers = dict(self.session_headers)
        headers["Cookie"] = "; ".join([f"{k}={v.value}" for k, v in self.cookies.items()])
        headers["user-agent"] = self.user_agent
body = json.dumps(login_payload).encode('utf-8')
response = self.http.request("POST", AUTH2URL, body=body, headers=headers)
logging.info(f"Login 2FA response status code: {response.status}")
logging.debug(f"Login 2FA response content: {response.data}")
logging.debug(f"Cookies: {self.cookies}")
self._update_cookies_from_response(response)
return response
def update_dns_record(self, dnsid, ipaddress, domain_name="default_domain"):
payload = {
"domain": {
"id": f"domain-{domain_name}",
"dns_records": [
{"id": f"{dnsid}"}
]
},
"fields": {"content": ipaddress}
}
encoded_data = json.dumps(payload).encode('utf-8')
cookie_header = "; ".join([f"{k}={v.value}" for k, v in self.cookies.items()])
        headers = dict(self.session_headers)
        headers["Cookie"] = cookie_header
        headers["user-agent"] = self.user_agent
response = self.http.request("PUT", DNS_UPDATE_URL, body=encoded_data, headers=headers)
logging.debug(f"DNS URL: {DNS_UPDATE_URL}")
logging.info(f"DNS update response status code: {response.status}")
logging.debug(f"DNS update response content: {response.data}")
self._update_cookies_from_response(response)
return response
def _update_cookies_from_response(self, response):
        set_cookie_headers = (
            response.headers.get_all('Set-Cookie')
            if hasattr(response.headers, 'get_all')
            else response.headers.get('Set-Cookie', '').split(',')
            if response.headers.get('Set-Cookie')
            else []
        )
for set_cookie in set_cookie_headers:
if set_cookie.strip():
self.cookies.load(set_cookie)
self.save_cookies()
def cookies_valid(self):
headers = {"Cookie": "; ".join([f"{k}={v.value}" for k, v in self.cookies.items()]), "user-agent": self.user_agent}
response = self.http.request("GET", DOMAIN_CHECK_URL, headers=headers)
if response.status == 200:
logging.info("Cookies are valid.")
return True
else:
logging.info("Cookies are invalid.")
return False
def get_dns_entries(self):
headers = {"Cookie": "; ".join([f"{k}={v.value}" for k, v in self.cookies.items()]), "user-agent": self.user_agent}
url = DOMAIN_CHECK_URL
response = self.http.request("GET", url, headers=headers)
if response.status == 200:
try:
dns_entries = json.loads(response.data)
for entry in dns_entries.get('domains', []):
logging.info('================')
logging.info(f"Domain: {entry['name']}, DNSID: {entry['id']}")
sub_url = f"https://www.hover.com/api/control_panel/domains/{entry['name']}"
sub_response = self.http.request("GET", sub_url, headers=headers)
if sub_response.status == 200:
logging.info("Subdomains:")
subdomains = json.loads(sub_response.data)
for e in subdomains['domain']['dns']:
logging.info(f"Domain: {e['name']}, DNSID: {e['id']}")
else:
logging.info(f"No subdomains found for {sub_url}")
except Exception as ex:
logging.error(f"Failed to parse DNS entries: {ex}")
exit(4)
else:
logging.error(f"Failed to retrieve DNS entries. Status code: {response.status}, Response text: {response.data}")
exit(4)
def get_domains(self):
headers = {"Cookie": "; ".join([f"{k}={v.value}" for k, v in self.cookies.items()]), "user-agent": self.user_agent}
url = DOMAIN_CHECK_URL
response = self.http.request("GET", url, headers=headers)
if response.status == 200:
try:
domains = json.loads(response.data)
for domain in domains.get('domains', []):
print(f"Domain: {domain['domain_name']}, DNSID: {domain['id']}")
except Exception as ex:
logging.error(f"Failed to parse domains: {ex}")
exit(4)
else:
logging.error(f"Failed to retrieve domains. Status code: {response.status}, Response text: {response.data}")
exit(4)
class HoverUpdater:
def __init__(self, config, http, logger=logging, ip_file=IP_FILE, cookies_file=COOKIES_FILE):
self.config = config
self.http = http
self.logger = logger
self.ip_file = ip_file
self.cookies_file = cookies_file
self.session_headers = {}
self.user_agent = config.get('userAgent', 'Chromium')
def get_external_ip(self):
response = self.http.request("GET", "https://api.ipify.org")
if response.status != 200:
raise RuntimeError(f"Error retrieving external IP: {response.status}")
return response.data.decode('utf-8')
def get_dns_ip(self, domain):
try:
return socket.gethostbyname(domain)
except Exception as e:
self.logger.error(f"DNS lookup failed for {domain}: {e}")
return None
def load_saved_ip(self):
if os.path.isfile(self.ip_file):
with open(self.ip_file, 'r') as file:
return file.read().strip()
return ""
def save_ip(self, ipaddress):
with open(self.ip_file, 'w') as file:
file.write(ipaddress)
def should_update(self, ipaddress, srcdomain):
saved_ip = self.load_saved_ip()
dns_ip = self.get_dns_ip(srcdomain) if srcdomain else None
        external_ip = self.get_external_ip()
        self.logger.info(f"Saved IP: {saved_ip}, Discovered IP: {ipaddress}, DNS IP: {dns_ip}, External IP: {external_ip}")
        if dns_ip == saved_ip and saved_ip == external_ip:
            return False, "DNS, saved, and external IPs all match. No update needed."
        return True, "Discovered IP does not match DNS lookup. Updating..."
def run_once(self, args, cookies, session_headers):
config = self.config
ipaddress = config.get('ipaddress')
srcdomain = config.get('srcdomain')
discoverip = config.get('discoverip')
dnsid = config.get('dnsid')
username = config.get('username')
password = config.get('password')
totp_secret = config.get('totp_secret')
domain_name = config.get('nakedDomain', 'default_domain')
# Discover IP address if required
if discoverip == "true":
ipaddress = self.get_external_ip()
self.logger.info(f"IP DISCOVERED: {ipaddress}")
elif srcdomain:
ipaddress = self.get_dns_ip(srcdomain)
self.logger.info(f"SOURCE DOMAIN: {srcdomain}")
self.logger.info(f"IP: {ipaddress}")
# Check if update is needed
should_update, msg = self.should_update(ipaddress, srcdomain)
self.logger.info(msg)
if not should_update:
return False
# Check for required configuration values
if not dnsid and not getattr(args, 'getDNSID', False):
raise ValueError("Missing DNS_ID.")
if not ipaddress and not getattr(args, 'getDNSID', False):
raise ValueError("Missing IP ADDRESS.")
if not username:
raise ValueError("Missing USERNAME.")
if not password:
raise ValueError("Missing PASSWORD.")
if not totp_secret:
raise ValueError("Missing TOTP_SECRET.")
if not domain_name:
raise ValueError("Missing DOMAIN_NAME.")
# Generate TOTP code
totp_code = totp(totp_secret)
# Perform login and update cookies
        cookies, session_headers = doLogin(self.http, cookies)
if not cookies:
self.logger.error("Login failed or cookies not initialized.")
return False
# Ensure session_headers is a dict
if session_headers is None:
session_headers = {}
# Update DNS record
update_response = update_dns_record(self.http, dnsid, ipaddress, cookies, session_headers, domain_name=domain_name)
try:
update_response_json = update_response.json()
update_success = update_response_json.get('succeeded')
except json.JSONDecodeError:
self.logger.error(f"Update response is not in JSON format. Status code: {update_response.status}, Response text: {update_response.data.decode()}")
return False
self.logger.info(f"Update response: {update_response_json}")
if not update_success:
self.logger.error("Setting failure!")
return False
else:
self.logger.info("Setting success!")
self.save_ip(ipaddress)
return True
def main():
# Parse command line arguments
parser = argparse.ArgumentParser(description='Hover DNS Update Script')
    parser.add_argument('--loglevel', default=None, help='Set log level (DEBUG, INFO, WARNING, ERROR, CRITICAL). Overrides the config.json log level; defaults to the config value or INFO.')
parser.add_argument('--mitm', action='store_true', help='Enable mitmproxy for HTTP/HTTPS requests')
parser.add_argument('--nocerts', action='store_true', help='Disable certificate verification for mitmproxy')
parser.add_argument('--getDNSID', '--getdnsid', action='store_true', help='Get DNS IDs for all domains in the account')
parser.add_argument('--interval', type=int, help='Run interval in seconds. Overrides config value runInterval.')
parser.add_argument('--getDomains', '--getdomains', action='store_true', help='Get all domains in the account')
parser.add_argument('--config', default='config.json', help='Path to configuration file (default: config.json)')
args = parser.parse_args()
getdnsid = getattr(args, 'getDNSID', False)
getdomains = getattr(args, 'getDomains', False)
global CONFIG_FILE
CONFIG_FILE = args.config
    if not os.path.exists(IP_FILE):
        logging.info(f"IP file not found: {IP_FILE}")
logging.info("Creating IP file...")
with open(IP_FILE, 'w') as file:
file.write('')
# Load configuration
global config
config = {}
if os.path.isfile(CONFIG_FILE):
config = load_config(CONFIG_FILE)
logging.info(f"Using configuration from {CONFIG_FILE}")
else:
logging.error(f"Configuration file {CONFIG_FILE} not found. Exiting...")
exit(1)
# Extract configuration values
dnsid = config.get('dnsid')
username = config.get('username')
password = config.get('password')
discoverip = config.get('discoverip')
srcdomain = config.get('srcdomain')
ipaddress = config.get('ipaddress')
totp_secret = config.get('totp_secret')
config_loglevel = config.get('loglevel', 'INFO').upper()
logRetentionMaxDays = config.get('logRetentionMaxDays', 7)
domain_name = config.get('nakedDomain', 'default_domain')
global user_agent
user_agent = config.get('userAgent', 'Chromium')
global session_headers
session_headers = {}
# Set log level from command line or config
loglevel_str = args.loglevel.upper() if args.loglevel else config_loglevel
valid_levels = {'CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'NONE'}
if loglevel_str == 'NONE':
logging.info("Log level set to NONE. All logging is disabled.")
logging.disable(logging.CRITICAL)
else:
        if loglevel_str not in valid_levels:
            logging.warning(f"Invalid loglevel '{loglevel_str}', defaulting to INFO.")
            loglevel_str = 'INFO'
loglevel = getattr(logging, loglevel_str, logging.INFO)
logging.getLogger().setLevel(loglevel)
console.setLevel(loglevel)
logging.info(f"Log level set to {logging.getLevelName(loglevel)}")
if loglevel == logging.DEBUG:
logging.getLogger('urllib3').setLevel(logging.DEBUG)
# Remove per-endpoint logs at startup
remove_endpoint_logs()
if args.mitm and not args.nocerts:
        if platform.system() == 'Windows':
            # Use the mitmproxy CA cert path for Windows
            ca_certs_path = os.path.expandvars(r"%USERPROFILE%\.mitmproxy\mitmproxy-ca-cert.pem")
        else:
            # Use the default mitmproxy CA cert path for Linux/macOS
            ca_certs_path = os.path.expanduser("~/.mitmproxy/mitmproxy-ca-cert.pem")
logging.info(f"Using mitmproxy CA cert at: {ca_certs_path}")
http = urllib3.ProxyManager(
proxy_url="http://127.0.0.1:8080",
cert_reqs="CERT_REQUIRED",
ca_certs=ca_certs_path
)
elif args.mitm and args.nocerts:
http = urllib3.ProxyManager(
proxy_url="http://127.0.0.1:8080",
cert_reqs="CERT_NONE"
)
else:
http = urllib3.PoolManager(
cert_reqs='CERT_REQUIRED',
ca_certs=certifi.where()
)
if getdnsid:
logging.info("Getting DNS IDs for all domains in the account...")
        cookies, _ = doLogin(http)
        if not cookies:
            logging.error("Login failed. Exiting...")
            exit(1)
        get_dns_entries(http, cookies)
exit(0)
if getdomains:
logging.info("Getting all domains in the account...")
        cookies, _ = doLogin(http)
        if not cookies:
            logging.error("Login failed. Exiting...")
            exit(1)
        get_domains(http, cookies)
exit(0)
cookies = None
session_headers = None
updater = HoverUpdater(config, http, logger=logging, ip_file=IP_FILE, cookies_file=COOKIES_FILE)
def run_once():
updater.run_once(args, cookies, session_headers)
# Determine run interval: use command line if specified, else config
    if args.interval is not None:
        run_interval = args.interval
    else:
        run_interval = int(config.get('runInterval', 0))
if run_interval > 0:
logging.info(f"Running in interval mode: every {run_interval} seconds.")
while True:
run_once()
logging.info(f"Sleeping for {run_interval} seconds before next run.")
time.sleep(run_interval)
else:
logging.info("No interval specified, running once and exiting.")
run_once()
def remove_headers_case_insensitive(headers, keys_to_remove):
keys_lower = {k.lower() for k in keys_to_remove}
to_delete = [k for k in list(headers.keys()) if k.lower() in keys_lower]
for k in to_delete:
headers.pop(k, None)
def update_headers_case_insensitive(headers, new_headers):
for new_key in new_headers:
to_remove = [k for k in list(headers.keys()) if k.lower() == new_key.lower()]
for k in to_remove:
headers.pop(k)
headers.update(new_headers)
def filter_allowed_headers(headers):
allowed = {
'accept',
'accept-language',
'content-type',
'origin',
'priority',
'referer',
'sec-ch-ua',
'sec-ch-ua-mobile',
'sec-ch-ua-platform',
'sec-fetch-dest',
'sec-fetch-mode',
'sec-fetch-site',
'user-agent',
'uzlc',
'cookie',
'host',
'content-length',
'x-request-id',
'x-requested-with',
'hoverauth'
}
to_delete = [k for k in list(headers.keys()) if k.lower() not in allowed]
for k in to_delete:
headers.pop(k, None)
if __name__ == "__main__":
main()

{
"dnsid": "",
"username": "",
"password": "",
"discoverip": "true",
"srcdomain": "this.example.com",
"ipaddress": "192.168.1.1",
"totp_secret": "xxxx xxxx xxxx xxxx xxxx xxxx xx",
"logLevel": "info",
"nakedDomain": "example.com",
"logRetentionMaxDays": "7",
"userAgent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3"
}

import base64
import hmac
import hashlib
import struct
import time
def totp(secret, time_step=30, digits=6):
# Decode base32 secret
secret = secret.upper()
secret = secret.replace(' ','')
missing_padding = len(secret) % 8
if missing_padding:
secret += '=' * (8 - missing_padding)
key = base64.b32decode(secret, casefold=True)
# Get current time step
current_time = int(time.time() // time_step)
# Pack time into byte array (big-endian)
time_bytes = struct.pack(">Q", current_time)
# Generate HMAC-SHA1
hmac_result = hmac.new(key, time_bytes, hashlib.sha1).digest()
# Extract dynamic binary code
offset = hmac_result[-1] & 0x0F
binary = struct.unpack(">I", hmac_result[offset:offset + 4])[0] & 0x7FFFFFFF
# Compute TOTP value
otp = binary % (10 ** digits)
return f"{otp:0{digits}d}"<|endoftext|># Hover DNS Update Script
This project is a Python script for updating DNS records on Hover. It supports optional logging, TOTP-based 2FA, and the use of `mitmproxy` for debugging HTTP/HTTPS requests.
## Dependencies
- Python 3.x
- `urllib3` and `certifi` libraries
- `mitmproxy` (optional, for debugging)
## Setup Instructions
### 1. Clone the Repository
```sh
git clone <repository_url>
cd <repository_directory>
```
### 2. Install Python Dependencies
Install the required Python libraries using `pip`:
```sh
pip install urllib3 certifi
```
### 3. Install `mitmproxy` (Optional)
`mitmproxy` is used for debugging HTTP/HTTPS requests. To install `mitmproxy`, run:
```sh
pip install mitmproxy
```
### 4. Create Configuration Files
Create the following configuration files:
#### `config.json`
Create a `config.json` file in the project directory with the following content:
```json
{
"dnsid": "<your_dns_id>",
"username": "<your_hover_username>",
"password": "<your_hover_password>",
"discoverip": "true",
"srcdomain": "this.example.com",
"ipaddress": "192.168.1.1",
"totp_secret": "<your_totp_secret>",
"logLevel": "info",
"nakedDomain": "example.com",
"logRetentionMaxDays": "7",
"userAgent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3"
}
```
- `dnsid`: The DNS record ID to update.
- `username`: Your Hover account username.
- `password`: Your Hover account password.
- `discoverip`: Set to `"true"` to auto-discover your public IP.
- `srcdomain`: The subdomain to update (e.g., `"this.example.com"`).
- `ipaddress`: The IP address to set (overrides `discoverip` if provided).
- `totp_secret`: Your TOTP secret for 2FA (if enabled on your Hover account).
- `loglevel`: Logging level (`"info"`, `"debug"`, etc.).
- `nakedDomain`: The root domain (e.g., `"example.com"`).
- `logRetentionMaxDays`: How many days to keep log files.
- `userAgent`: The User-Agent string to use for HTTP requests.
Replace the placeholders with your actual values.
#### `IP`
Create an empty `IP` file in the project directory:
```sh
type nul > IP
```
*(On Linux/macOS: `touch IP`)*
## Usage Instructions
### Running the Script
To run the script, use the following commands:
#### Install prerequisites:
```sh
pip install -r requirements.txt
```
#### Run the script:
```sh
python hover-update.py [--loglevel LEVEL] [--mitm] [--nocerts] [--getDNSID] [--getDomains] [--interval SECONDS]
```
### Command Line Arguments
- `--loglevel LEVEL`: Set log level (DEBUG, INFO, WARNING, ERROR, CRITICAL). Overrides the config.json log level; defaults to the config value (or INFO if unset).
- `--mitm`: Route HTTP/HTTPS requests through `mitmproxy` for debugging.
- `--nocerts`: Disable certificate verification for mitmproxy.
- `--getDNSID`: Get DNS IDs for all domains in the account.
- `--getDomains`: Get all domains in the account.
- `--interval SECONDS`: Run at a specified interval (in seconds). Overrides config value `runInterval`.
- `--config PATH`: Path to configuration file (default: `config.json`).
### Example
To run the script with a custom config file:
```sh
python hover-update.py --config myconfig.json
```
To run the script with logging enabled:
```sh
python hover-update.py --loglevel INFO
```
To run the script with `mitmproxy` enabled:
```sh
python hover-update.py --mitm
```
To run the script with both logging and `mitmproxy` enabled:
```sh
python hover-update.py --loglevel DEBUG --mitm
```
To get DNS IDs for all domains:
```sh
python hover-update.py --getDNSID
```
To get all domains:
```sh
python hover-update.py --getDomains
```
To run the script every 10 minutes:
```sh
python hover-update.py --interval 600
```
## Using `mitmproxy` for Debugging
`mitmproxy` is a powerful tool for debugging HTTP/HTTPS requests. Follow these steps to use `mitmproxy` with this script:
The script runs unchanged on Windows and Linux, and it looks for the mitmproxy CA certificate in the default location for each platform.
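For reference, this is roughly how the script builds its HTTP client when `--mitm` is set (a condensed sketch of the logic in `main()`, not a drop-in snippet):

```python
import os
import platform

import urllib3

# Pick the default mitmproxy CA certificate path for the current platform.
if platform.system() == "Windows":
    ca_certs_path = os.path.expandvars(r"%USERPROFILE%\.mitmproxy\mitmproxy-ca-cert.pem")
else:
    ca_certs_path = os.path.expanduser("~/.mitmproxy/mitmproxy-ca-cert.pem")

# Route every request through mitmproxy on localhost:8080 and trust its CA,
# so TLS interception does not trip certificate verification.
http = urllib3.ProxyManager(
    proxy_url="http://127.0.0.1:8080",
    cert_reqs="CERT_REQUIRED",
    ca_certs=ca_certs_path,
)
```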
### 1. Start `mitmproxy`
Start `mitmproxy` in your terminal:
```sh
mitmproxy
```
This will start `mitmproxy` and listen on port 8080 by default.
### 2. Run the Script with `mitmproxy` Enabled
Run the script with the `--mitm` flag to route HTTP/HTTPS requests through `mitmproxy`:
```sh
python hover-update.py --mitm
```
### 3. Inspect Requests and Responses
In the `mitmproxy` interface, you can inspect the HTTP/HTTPS requests and responses being made by the script. This is useful for debugging and understanding the interactions with the Hover API.
## File Structure
- `hover-update.py`: Main script for updating DNS records on Hover.
- `totp.py`: Contains the TOTP generation function.
- `config.json`: Configuration file with user credentials and settings.
- `IP`: File to store the last known IP address (automatically generated when the IP is resolved).
- `hover-update.log`: Log file (created automatically if logging is enabled).
## License
This project is licensed under the MIT License. See the `LICENSE` file for details.
## Contributing
Contributions are welcome! Please open an issue or submit a pull request on GitHub.
## Contact
For any questions or issues, please contact [[email protected]](mailto:[email protected]).

AbrarShakhi/solar-system-simulation

try:
    import pygame  # noqa: F401 -- verify pygame is installed before launching
except ImportError:
    print("pygame is not installed.")
    print("please install pygame.")
    from sys import exit
    exit(1)
if __name__ == "__main__":
import src.main as start
start.main()
# Solar System Simulation with Pygame
This Python application simulates our solar system using the Pygame library. It includes the following celestial bodies:
1. **Sun**
2. **Earth**
3. **Mars**
4. **Venus**
5. **Mercury**
You can add more planets, moons, and satellites. Additionally, you can customize their initial velocity, mass, and distance to create your own interactive solar system model.
Feel free to explore and experiment with different parameters! 🌏🪐☀️
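
Adding a body is a one-liner in `src/galaxy.py`: append another `Obj` inside `get_objects()`. A hypothetical Jupiter might look like this (the mass, distance, and orbital speed are real-world values; the draw radius and color are arbitrary picks):

```python
objects.append(
    Obj(
        "jupiter",
        Point(-5.204 * PHYSICS.AU, 0),  # mean distance from the Sun
        20,                             # draw radius in pixels (cosmetic)
        1.898 * 10**27,                 # mass in kg
        COLOR.MOON,                     # reuse an existing palette color
        Point(0, 13.07 * 1000),         # mean orbital speed in m/s
        False,                          # not a star
    )
)
```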
---
# How to run
* Install `python 3.9` or later.
* Install `pygame` via `pip`.
* Run the `runme.py` file from the project root.
* `python runme.py` or `python3 runme.py`
---
### Demo






---
> **_NOTE:_** If you have any suggestions, feel free to open a pull request or an issue here.

class COLOR:
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
SUN = (255, 242, 110)
EARTH = (67, 158, 142)
MARS = (246, 137, 1)
VENUS = (200, 100, 80)
MOON = (200, 200, 200)
class FONT:
SIZE = 20
class WINDOW:
WIDTH = 1200
MAIN_WIDTH = WIDTH + 300
HEIGHT = 1000
class PHYSICS:
G = 6.67428e-11
AU = 149.6e6 * 1000
class FACTOR:
INIT_LEVEL = 300
TIMESTEP = 3600 * 24
DELTA_DIV = 0
def get_scale(level):
# if (level < 2):
# level = 1
return level / PHYSICS.AU

from sys import exit
try:
import math
import pygame
from src.constant import COLOR, FACTOR, FONT, PHYSICS, WINDOW
except ImportError:
print("please run runme.py")
exit(1)
class Point:
def __init__(self, x, y) -> None:
self.x = x
self.y = y
def scalify(self, scale) -> tuple:
x = self.x * scale + WINDOW.WIDTH / 2
y = self.y * scale + WINDOW.HEIGHT / 2
return x, y
class Object:
def __init__(
self,
name: str,
center: Point,
radius,
mass,
color: COLOR,
init_velocity: Point,
is_star: bool,
) -> None:
self.center = center
self.radius = radius
self.mass = mass
self.color = color
self.name = name
self.orbit: list[Point] = []
self.is_star = is_star
self.dist_to_near_star = 0
self.velocity = init_velocity
    def draw(self, frame: pygame.Surface, font: pygame.font.Font, nth, scale) -> None:
x, y = self.center.scalify(scale)
if len(self.orbit) > 1:
lines = []
for ce in self.orbit:
lines.append(ce.scalify(scale))
pygame.draw.lines(frame, COLOR.WHITE, False, lines, 1)
pygame.draw.circle(frame, self.color, (x, y), self.radius)
text = font.render(self.name, False, COLOR.WHITE, COLOR.BLACK)
frame.blit(text, (x, y))
text = font.render(
f"{self.name}: {round(self.dist_to_near_star / PHYSICS.AU, 8)} AU",
False,
COLOR.WHITE,
COLOR.BLACK,
)
frame.blit(text, (WINDOW.WIDTH, (FONT.SIZE + 6) * nth))
def attraction(self, relative_obj) -> Point:
dist_point = Point(
relative_obj.center.x - self.center.x, relative_obj.center.y - self.center.y
)
d = math.sqrt(dist_point.x**2 + dist_point.y**2)
if relative_obj.is_star:
self.dist_to_near_star = d
F = PHYSICS.G * (self.mass * relative_obj.mass) / (d**2) # F = G(Mm)/(r^2)
theta = math.atan2(dist_point.y, dist_point.x) # tanA = y/x
Fx = F * math.cos(theta) # x = rcosA
Fy = F * math.sin(theta) # y = rsinA
return Point(Fx, Fy)
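    # Sanity check (illustrative): for Earth relative to the Sun, d = 1 AU
    # (about 1.496e11 m), so F = G * M_sun * M_earth / d**2
    # ~= 6.674e-11 * 1.989e30 * 5.974e24 / (1.496e11)**2 ~= 3.54e22 N.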
def move(self, objects, dt, scale) -> None:
# dt = 1
F = Point(0, 0)
for obj in objects:
if obj != self:
f = self.attraction(obj)
F.x += f.x
F.y += f.y
# F = ma,
# a = F/m
self.velocity.x += F.x / self.mass * dt * FACTOR.TIMESTEP
self.velocity.y += F.y / self.mass * dt * FACTOR.TIMESTEP
self.center.x += self.velocity.x * dt * FACTOR.TIMESTEP
self.center.y += self.velocity.y * dt * FACTOR.TIMESTEP
self.orbit.append(Point(self.center.x, self.center.y))
length = len(self.orbit)
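        # Trim the trail to at most min(pi * orbit radius in pixels, 1000) points.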
if length > math.pi * (self.dist_to_near_star * scale) or length > 1000:
self.orbit.pop(0)

from random import randint
from sys import exit
try:
import pygame
from src.constant import COLOR, PHYSICS, WINDOW
from src.object import Object as Obj
from src.object import Point
except ImportError:
print("please run runme.py")
exit(1)
def get_pos():
return Point(randint(0, WINDOW.MAIN_WIDTH), randint(0, WINDOW.HEIGHT))
def load_star():
number_of_star = int(min((WINDOW.WIDTH, WINDOW.HEIGHT)) // randint(25, 50))
star_container = []
while number_of_star > 0:
number_of_star -= 1
star_container.append(get_pos())
return star_container
def fill_stars(win, star_container: list[Point]):
for star in star_container:
pygame.draw.circle(win, COLOR.MOON, (star.x, star.y), 3)
if randint(1, 100) % 10 == 0:
number_of_star = len(star_container)
if number_of_star > 0:
dice = randint(1, number_of_star) % number_of_star
star_container.pop(dice)
star_container.append(get_pos())
def get_objects() -> list[Obj]:
SUN_MASS = 1.98892 * 10**30
EARTH_MASS = 5.9742 * 10**24
MARS_MASS = 6.39 * 10**23
VENUS_MASS = 4.8685 * 10**24
MERCURY_MASS = 3.30 * 10**23
objects: list[Obj] = []
objects.append(Obj("sun", Point(0, 0), 30, SUN_MASS, COLOR.SUN, Point(0, 0), True))
objects.append(
Obj(
"earth",
Point(-1 * PHYSICS.AU, 0),
16,
EARTH_MASS,
COLOR.EARTH,
Point(0, 29.783 * 1000),
False,
)
)
objects.append(
Obj(
"mars",
Point(-1.524 * PHYSICS.AU, 0),
12,
MARS_MASS,
COLOR.MARS,
Point(0, 24.077 * 1000),
False,
)
)
objects.append(
Obj(
"venus",
Point(0.723 * PHYSICS.AU, 0),
14,
VENUS_MASS,
COLOR.VENUS,
Point(0, -35.02 * 1000),
False,
)
)
objects.append(
Obj(
"mercury",
Point(0.387 * PHYSICS.AU, 0),
8,
MERCURY_MASS,
COLOR.MOON,
Point(0, -47.4 * 1000),
False,
)
)
return objects

from sys import exit
try:
import pygame
from src.constant import COLOR, FACTOR, FONT, WINDOW
from src.galaxy import fill_stars, get_objects, load_star
from src.object import Object as Obj
except ImportError:
print("please run runme.py")
exit(1)
class Main:
def __init__(self, objects: list[Obj]) -> None:
pygame.init()
self.active = True
self.pause = False
self.FRAME_RATE = 120
fonts = pygame.font.get_fonts()
self.font = pygame.font.SysFont(fonts[0], FONT.SIZE, False, False)
self.objects = objects
self.stars = load_star()
self.zoom_level = FACTOR.INIT_LEVEL
self.FRAME = pygame.display.set_mode((WINDOW.MAIN_WIDTH, WINDOW.HEIGHT))
pygame.display.set_caption("Solar System Simulation")
self.clock = pygame.time.Clock()
self.dt = 1
def quit(self) -> None:
pygame.quit()
exit()
def event(self) -> None:
for e in pygame.event.get():
if e.type == pygame.QUIT:
self.active = False
self.quit()
elif e.type == pygame.MOUSEBUTTONDOWN:
self.pause = not self.pause
elif e.type == pygame.KEYDOWN:
if e.key == pygame.K_SPACE:
self.pause = not self.pause
# elif e.key == pygame.K_UP or e.key == pygame.K_RIGHT:
# self.zoom_level += 1
# elif e.key == pygame.K_DOWN or e.key == pygame.K_LEFT:
# self.zoom_level -= 1
def update(self) -> None:
self.clock.tick(self.FRAME_RATE)
if FACTOR.DELTA_DIV != 0:
self.dt = self.clock.get_time() / FACTOR.DELTA_DIV
else:
self.dt = 1
pygame.display.update()
def calc(self) -> None:
max_dist_to_star = -1
for obj in self.objects:
obj.move(self.objects, self.dt, FACTOR.get_scale(self.zoom_level))
if obj.dist_to_near_star > max_dist_to_star:
max_dist_to_star = obj.dist_to_near_star
self.zoom_level = ((min(WINDOW.WIDTH, WINDOW.HEIGHT) - 10) / 2) / FACTOR.get_scale(
max_dist_to_star
)
if self.zoom_level > FACTOR.INIT_LEVEL:
self.zoom_level = FACTOR.INIT_LEVEL
def draw(self) -> None:
self.FRAME.fill(COLOR.BLACK)
fill_stars(self.FRAME, self.stars) # TODO
text = self.font.render("distance to nearest star.", False, COLOR.WHITE, None)
self.FRAME.blit(text, (WINDOW.WIDTH, 0))
for i in range(0, len(self.objects)):
self.objects[i].draw(self.FRAME, self.font, i + 1, FACTOR.get_scale(self.zoom_level))
text = self.font.render("press space to pause.", False, COLOR.WHITE, None)
self.FRAME.blit(text, (WINDOW.WIDTH, WINDOW.HEIGHT - FONT.SIZE - 10))
def run(self) -> None:
while self.active:
self.event()
self.update()
if not self.pause:
self.calc()
self.draw()
self.collision()
def collision(self) -> None:
for o1 in self.objects:
for o2 in self.objects:
if o2 == o1:
continue
else:
if o2.center.x == o1.center.x and o2.center.y == o1.center.y:
print(f"{o1.name} collided with {o2.name}")
self.pause = True
break
def main() -> None:
main = Main(get_objects())
main.run()

Rudra-Hatte/Sugarcane-node-detection

import os
import matplotlib.pyplot as plt
import pandas as pd
import torch
import yaml
from ultralytics import YOLO
def setup_dataset_config():
"""
Create a YAML configuration file for the custom dataset.
Assumes the following directory structure:
- images/train/: Training images
- labels/train/: Training labels (YOLO format)
"""
# Use the absolute paths provided in required_dirs
base_dir = "D:\\sugarcane_training"
data_yaml = {
"path": base_dir, # Base directory
"train": os.path.join(base_dir, "images", "train"), # Path to training images
"val": os.path.join(base_dir, "images", "train"), # Using training images for validation
"nc": 0, # Number of classes (will be updated)
"names": [], # Class names (will be updated)
}
# Count the number of unique classes by checking the labels
class_ids = set()
labels_dir = os.path.join(base_dir, "labels", "train")
if os.path.exists(labels_dir):
for label_file in os.listdir(labels_dir):
if label_file.endswith(".txt"):
with open(os.path.join(labels_dir, label_file)) as f:
for line in f:
parts = line.strip().split()
if parts:
class_ids.add(int(parts[0]))
data_yaml["nc"] = len(class_ids) if class_ids else 1
data_yaml["names"] = [f"class_{i}" for i in range(data_yaml["nc"])]
# Save the YAML configuration
config_path = os.path.join(base_dir, "dataset_config.yaml")
with open(config_path, "w") as f:
yaml.dump(data_yaml, f, sort_keys=False)
print(f"Created dataset configuration with {data_yaml['nc']} classes")
return config_path
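
# For reference, the generated dataset_config.yaml ends up looking like this
# (nc and names depend on the class IDs found in the label files):
#
#   path: D:\sugarcane_training
#   train: D:\sugarcane_training\images\train
#   val: D:\sugarcane_training\images\train
#   nc: 1
#   names:
#   - class_0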
def visualize_training_results(results_dir):
"""
Generate detailed visualizations of training results
"""
# Path to results CSV file
results_csv = os.path.join(results_dir, "results.csv")
if not os.path.exists(results_csv):
print(f"Results file not found at {results_csv}")
return
# Load training results
results = pd.read_csv(results_csv)
# Create output directory for graphs
graphs_dir = os.path.join(results_dir, "graphs")
os.makedirs(graphs_dir, exist_ok=True)
# List of metrics to plot
metrics = [
("train/box_loss", "val/box_loss", "Box Loss"),
("train/cls_loss", "val/cls_loss", "Classification Loss"),
("train/dfl_loss", "val/dfl_loss", "Distribution Focal Loss"),
("metrics/precision", None, "Precision"),
("metrics/recall", None, "Recall"),
("metrics/mAP50", None, "[email protected]"),
("metrics/mAP50-95", None, "[email protected]:0.95"),
]
# Create plots for each metric
for train_metric, val_metric, title in metrics:
plt.figure(figsize=(10, 6))
if train_metric in results.columns:
plt.plot(results["epoch"], results[train_metric], label=f"Training {title}")
if val_metric and val_metric in results.columns:
plt.plot(results["epoch"], results[val_metric], label=f"Validation {title}")
plt.xlabel("Epochs")
plt.ylabel(title)
plt.title(f"{title} vs. Epochs")
plt.legend()
plt.grid(True)
plt.savefig(
os.path.join(
graphs_dir,
f"{title.replace('@', 'at').replace(':', '-').replace(' ', '_').lower()}.png",
)
)
plt.close()
print(f"Training visualization graphs saved to {graphs_dir}")
return graphs_dir
def print_detailed_results(results_dir):
"""
Print detailed summary of training results
"""
results_csv = os.path.join(results_dir, "results.csv")
if not os.path.exists(results_csv):
print("Results file not found")
return
results = pd.read_csv(results_csv)
# Get the last row (final epoch results)
final_results = results.iloc[-1]
print("\n" + "=" * 50)
print("TRAINING RESULTS SUMMARY")
print("=" * 50)
print(f"\nTotal Training Epochs: {int(final_results['epoch'])}")
print(f"Final Learning Rate: {final_results['lr']:.6f}")
# Training losses
print("\nFinal Loss Values:")
print(f" Box Loss: {final_results['train/box_loss']:.4f}")
print(f" Classification Loss: {final_results['train/cls_loss']:.4f}")
print(f" Distribution Focal Loss: {final_results['train/dfl_loss']:.4f}")
# Metrics
print("\nFinal Performance Metrics:")
if "metrics/precision" in final_results:
print(f" Precision: {final_results['metrics/precision']:.4f}")
if "metrics/recall" in final_results:
print(f" Recall: {final_results['metrics/recall']:.4f}")
if "metrics/mAP50" in final_results:
print(f" [email protected]: {final_results['metrics/mAP50']:.4f}")
if "metrics/mAP50-95" in final_results:
print(f" [email protected]:0.95: {final_results['metrics/mAP50-95']:.4f}")
# Training speed
if "train/time" in final_results:
print(f"\nAverage Training Time per Epoch: {final_results['train/time']:.2f} seconds")
print("\nBest Performance:")
# Find best mAP
if "metrics/mAP50-95" in results.columns:
best_map = results["metrics/mAP50-95"].max()
best_epoch = results.loc[results["metrics/mAP50-95"].idxmax(), "epoch"]
print(f" Best [email protected]:0.95: {best_map:.4f} (Epoch {int(best_epoch)})")
print("\nModel Size and Speed:")
# These values typically aren't in results.csv but can be calculated if needed
print(" These values can be found in the model summary in the YOLO training output")
print("=" * 50)
def train_new_model():
"""
Train a new YOLOv8 model from scratch using a custom dataset.
"""
# Create dataset configuration
dataset_config = setup_dataset_config()
# Initialize a new model
model = YOLO("yolov8n.yaml") # Start with YOLOv8 nano architecture
# Automatically determine if GPU is available
device = "cuda:0" if torch.cuda.is_available() else "cpu"
print(f"Using device: {device}")
    # If training on CPU with limited resources, use a smaller batch size
    batch_size = 16 if torch.cuda.is_available() else 8
print(f"Using batch size: {batch_size}")
# Train the model
model.train(
data=dataset_config,
epochs=100,
imgsz=640,
batch=batch_size,
name="new_model_training",
verbose=True,
patience=50,
save=True,
device=device, # Automatically use available device
)
# Get the results directory
results_dir = os.path.join("runs", "detect", "new_model_training")
print(f"\nTraining completed. Model saved to {results_dir}")
# Generate and display detailed results
print_detailed_results(results_dir)
visualize_training_results(results_dir)
# Validate the model on the training set to get additional metrics
print("\nRunning validation on training set...")
model = YOLO(os.path.join(results_dir, "weights", "best.pt"))
model.val(data=dataset_config, device=device)
print("\nTraining process completed successfully!")
print(f"Final model weights: {os.path.join(results_dir, 'weights', 'best.pt')}")
print(f"Detailed graphs available in: {os.path.join(results_dir, 'graphs')}")
if __name__ == "__main__":
# Check if required directories exist
required_dirs = [
"D:\\sugarcane_training\\images\\train",
"D:\\sugarcane_training\\labels\\train",
]
for dir_path in required_dirs:
if not os.path.exists(dir_path):
print(
f"Error: Directory {dir_path} not found. Please create the required directory structure."
)
exit(1)
# Install required packages if not already installed
try:
import matplotlib
import pandas
import ultralytics
except ImportError:
print("Installing required packages...")
os.system("pip install ultralytics pyyaml matplotlib pandas")
# Print PyTorch CUDA information
print(f"PyTorch version: {torch.__version__}")
print(f"CUDA available: {torch.cuda.is_available()}")
print(f"CUDA device count: {torch.cuda.device_count()}")
if torch.cuda.is_available():
print(f"CUDA device name: {torch.cuda.get_device_name(0)}")
else:
print("Training will use CPU. This may be slow for large datasets.")
train_new_model()

import csv
import os
import sqlite3
import time
from datetime import datetime
import cv2
from ultralytics import YOLO
class SugarcaneNodeDetector:
def __init__(self, model_path):
"""
Initialize the sugarcane node detector
Args:
model_path: Path to trained YOLO model weights
"""
if model_path and os.path.exists(model_path):
self.model = YOLO(model_path)
print(f"Loaded model from {model_path}")
else:
try:
if os.path.exists("sugarcane_model/best_model.pt"):
self.model = YOLO("sugarcane_model/best_model.pt")
print("Loaded model from sugarcane_model/best_model.pt")
else:
latest_run = max(
[os.path.join("runs/detect", d) for d in os.listdir("runs/detect")],
key=os.path.getmtime,
)
weights_path = os.path.join(latest_run, "weights/best.pt")
self.model = YOLO(weights_path)
print(f"Loaded model from {weights_path}")
            except Exception:
print("No trained model found. Please provide a valid model path.")
self.model = None
# Initialize database connection
self.db_conn = sqlite3.connect("sugarcane_detections.db")
self.cursor = self.db_conn.cursor()
# Create the detections table if it doesn't exist
self.cursor.execute("""
CREATE TABLE IF NOT EXISTS detections (
id INTEGER PRIMARY KEY AUTOINCREMENT,
frame INTEGER, -- Frame number where detection occurred
timestamp TEXT, -- Timestamp of the detection event
node_id INTEGER, -- ID of the node (e.g., specific location or identifier for the detection)
confidence REAL, -- Confidence score of the detection
x1 REAL, -- Top-left corner X coordinate of the bounding box
y1 REAL, -- Top-left corner Y coordinate of the bounding box
x2 REAL, -- Bottom-right corner X coordinate of the bounding box
y2 REAL, -- Bottom-right corner Y coordinate of the bounding box
class_id INTEGER, -- ID of the detected class (e.g., sugarcane node or other class)
cut INTEGER DEFAULT 0 -- Indicator for when the bounding box is about to exit (0 by default)
)
""")
self.db_conn.commit()
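    # Rows are only inserted when a detection is about to exit the frame
    # (cut = 1), so exit events can be pulled with an illustrative query like:
    #   SELECT frame, timestamp, node_id, confidence
    #   FROM detections WHERE cut = 1 ORDER BY frame;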
def __del__(self):
"""Ensure database connection is closed properly"""
if hasattr(self, "db_conn"):
self.db_conn.close()
def detect_image(self, image_path, conf_threshold=0.25, save_output=True, output_dir="results"):
if self.model is None:
print("No model loaded. Cannot perform detection.")
return []
if not os.path.exists(image_path):
print(f"Image not found: {image_path}")
return []
if save_output and not os.path.exists(output_dir):
os.makedirs(output_dir)
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
results = self.model.predict(image_path, conf=conf_threshold)
detections = []
        for result in results:
boxes = result.boxes
for i, box in enumerate(boxes):
x1, y1, x2, y2 = box.xyxy[0].tolist()
confidence = box.conf[0].item()
class_id = int(box.cls[0].item())
detections.append(
{
"id": i,
"timestamp": timestamp,
"confidence": confidence,
"bbox": [x1, y1, x2, y2],
"class_id": class_id,
"image_path": image_path,
}
)
if save_output and detections:
result_img = results[0].plot()
output_path = os.path.join(output_dir, f"output_{os.path.basename(image_path)}")
cv2.imwrite(output_path, result_img)
print(f"Detection results saved to {output_path}")
print(f"Detected {len(detections)} nodes in {os.path.basename(image_path)}")
for d in detections:
print(f" Node {d['id']}: Confidence={d['confidence']:.2f}")
return detections
def detect_video(
self,
video_path,
conf_threshold=0.25,
save_output=True,
output_dir="results",
export_csv=True,
show_progress=True,
):
if self.model is None:
print("No model loaded. Cannot perform detection.")
return []
if not os.path.exists(video_path):
print(f"Video not found: {video_path}")
return []
if not os.path.exists(output_dir):
os.makedirs(output_dir)
cap = cv2.VideoCapture(video_path)
fps = cap.get(cv2.CAP_PROP_FPS)
width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
print(f"Video properties: {width}x{height}, {fps} FPS, {total_frames} frames")
output_video = None
if save_output:
fourcc = cv2.VideoWriter_fourcc(*"mp4v")
output_path = os.path.join(output_dir, f"output_{os.path.basename(video_path)}")
output_video = cv2.VideoWriter(output_path, fourcc, fps, (width, height))
csv_path = None
csv_file = None
if export_csv:
csv_path = os.path.join(output_dir, f"timestamps_{os.path.basename(video_path)}.csv")
csv_file = open(csv_path, "w", newline="")
csv_writer = csv.writer(csv_file)
csv_writer.writerow(
["frame", "timestamp", "node_id", "confidence", "x1", "y1", "x2", "y2", "cut"]
)
detections = []
active_tracks = {} # Track objects across frames
frame_count = 0
start_time = time.time()
detected_frames = 0
exit_margin = 20 # Pixels threshold to consider a box about to exit frame
while cap.isOpened():
ret, frame = cap.read()
if not ret:
break
frame_time = frame_count / fps
hours = int(frame_time // 3600)
minutes = int((frame_time % 3600) // 60)
seconds = frame_time % 60
timestamp = f"{hours:02d}:{minutes:02d}:{seconds:06.3f}"
results = self.model.predict(frame, conf=conf_threshold)
frame_detections = []
# Mark currently active tracks as not seen in this frame
for track_id in active_tracks:
active_tracks[track_id]["seen"] = False
for result in results:
boxes = result.boxes
if len(boxes) > 0:
detected_frames += 1
for i, box in enumerate(boxes):
x1, y1, x2, y2 = box.xyxy[0].tolist()
confidence = box.conf[0].item()
class_id = int(box.cls[0].item())
# Calculate if bounding box is about to exit frame
about_to_exit = (
x1 < exit_margin
or y1 < exit_margin
or x2 > (width - exit_margin)
or y2 > (height - exit_margin)
)
cut_value = 1 if about_to_exit else 0
                    # Create a simple tracking ID for this detection,
                    # keyed on detection index and class id (not a true tracker)
                    track_id = f"{i}_{class_id}"
# Check if we've seen this object before
if track_id in active_tracks:
active_tracks[track_id]["seen"] = True
active_tracks[track_id]["bbox"] = [x1, y1, x2, y2]
active_tracks[track_id]["cut"] = cut_value
else:
# New detection
active_tracks[track_id] = {
"seen": True,
"bbox": [x1, y1, x2, y2],
"confidence": confidence,
"class_id": class_id,
"cut": cut_value,
"first_frame": frame_count,
}
detection = {
"frame": frame_count,
"timestamp": timestamp,
"node_id": i,
"confidence": confidence,
"bbox": [x1, y1, x2, y2],
"class_id": class_id,
"cut": cut_value,
}
frame_detections.append(detection)
# Insert into SQLite database
if about_to_exit:
self.cursor.execute(
"""
INSERT INTO detections (frame, timestamp, node_id, confidence, x1, y1, x2, y2, class_id, cut)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
frame_count,
timestamp,
i,
confidence,
x1,
y1,
x2,
y2,
class_id,
cut_value,
),
)
self.db_conn.commit()
                    # Only write a CSV row when the box is about to exit the frame
if csv_file:
if about_to_exit:
csv_writer.writerow(
[frame_count, timestamp, i, confidence, x1, y1, x2, y2, cut_value]
)
detections.extend(frame_detections)
# Identify tracks that disappeared (objects that exited the frame)
for track_id, track in list(active_tracks.items()):
if not track["seen"]:
# Object disappeared, remove from tracking
del active_tracks[track_id]
if save_output:
result_frame = results[0].plot() if len(frame_detections) > 0 else frame
cv2.putText(
result_frame,
f"Frame: {frame_count} | Time: {timestamp}",
(10, 30),
cv2.FONT_HERSHEY_SIMPLEX,
0.8,
(0, 255, 0),
2,
)
cv2.putText(
result_frame,
f"Nodes: {len(frame_detections)} | About to Exit: {sum(1 for d in frame_detections if d['cut'] == 1)}",
(10, 70),
cv2.FONT_HERSHEY_SIMPLEX,
0.8,
(0, 255, 0),
2,
)
# Highlight boxes about to exit
for detection in frame_detections:
if detection["cut"] == 1:
bbox = detection["bbox"]
cv2.rectangle(
result_frame,
(int(bbox[0]), int(bbox[1])),
(int(bbox[2]), int(bbox[3])),
(0, 0, 255),
3,
) # Red color for exit boxes
output_video.write(result_frame)
frame_count += 1
if show_progress and frame_count % 30 == 0:
elapsed = time.time() - start_time
fps_live = frame_count / elapsed if elapsed > 0 else 0
eta = (total_frames - frame_count) / fps_live if fps_live > 0 else 0
print(
f"Progress: {frame_count}/{total_frames} ({frame_count / total_frames * 100:.1f}%) | "
+ f"Speed: {fps_live:.1f} FPS | ETA: {eta:.1f} seconds"
)
cap.release()
if output_video:
output_video.release()
if csv_file:
csv_file.close()
print("\nDetection completed:")
print(f"- Processed {frame_count} frames in {time.time() - start_time:.1f} seconds")
print(
f"- Found nodes in {detected_frames} frames ({detected_frames / frame_count * 100:.1f}%)"
)
print(f"- Total detections: {len(detections)}")
if save_output:
print(f"- Output video saved to: {output_path}")
if csv_path:
print(f"- CSV file saved to: {csv_path}")
print("- Data saved to SQLite database: sugarcane_detections.db")
return detections
def detect_image_batch(model_path, image_dir, conf_threshold=0.25, output_dir="results_2"):
detector = SugarcaneNodeDetector(model_path)
if not os.path.exists(image_dir):
print(f"Image directory not found: {image_dir}")
return
image_files = [
f for f in os.listdir(image_dir) if f.lower().endswith((".jpg", ".jpeg", ".png"))
]
if not image_files:
print(f"No images found in {image_dir}")
return
print(f"Processing {len(image_files)} images...")
all_detections = []
for img_file in image_files:
img_path = os.path.join(image_dir, img_file)
print(f"\nProcessing {img_file}...")
detections = detector.detect_image(img_path, conf_threshold, output_dir=output_dir)
all_detections.extend(detections)
csv_path = os.path.join(output_dir, "batch_detection_results.csv")
with open(csv_path, "w", newline="") as f:
writer = csv.writer(f)
writer.writerow(["image", "timestamp", "node_id", "confidence", "x1", "y1", "x2", "y2"])
for d in all_detections:
bbox = d["bbox"]
writer.writerow(
[
os.path.basename(d["image_path"]),
d["timestamp"],
d["id"],
d["confidence"],
bbox[0],
bbox[1],
bbox[2],
bbox[3],
]
)
print(
f"\nBatch processing complete. Found {len(all_detections)} nodes in {len(image_files)} images."
)
print(f"Results saved to {csv_path}")
def create_database():
"""Create the SQLite database with the required schema"""
# Create a connection to the SQLite database
conn = sqlite3.connect("sugarcane_detections.db")
cursor = conn.cursor()
# SQL command to create the 'detections' table if it doesn't exist already
cursor.execute("""
CREATE TABLE IF NOT EXISTS detections (
id INTEGER PRIMARY KEY AUTOINCREMENT,
frame INTEGER, -- Frame number where detection occurred
timestamp TEXT, -- Timestamp of the detection event
node_id INTEGER, -- ID of the node (e.g., specific location or identifier for the detection)
confidence REAL, -- Confidence score of the detection
x1 REAL, -- Top-left corner X coordinate of the bounding box
y1 REAL, -- Top-left corner Y coordinate of the bounding box
x2 REAL, -- Bottom-right corner X coordinate of the bounding box
y2 REAL, -- Bottom-right corner Y coordinate of the bounding box
class_id INTEGER, -- ID of the detected class (e.g., sugarcane node or other class)
cut INTEGER DEFAULT 0 -- Indicator for when the bounding box is about to exit (0 by default)
)
""")
# Commit the changes and close the connection
conn.commit()
conn.close()
print("Database and table 'detections' created successfully")
if __name__ == "__main__":
import argparse
# Initialize database
create_database()
parser = argparse.ArgumentParser(description="Sugarcane Node Detection")
parser.add_argument(
"--mode",
type=str,
required=True,
choices=["image", "video", "batch"],
help="Detection mode: image, video, or batch",
)
parser.add_argument(
"--input", type=str, required=True, help="Input file (image/video) or directory (batch)"
)
parser.add_argument("--model", type=str, default=None, help="Path to trained model weights")
parser.add_argument("--conf", type=float, default=0.25, help="Detection confidence threshold")
parser.add_argument(
"--output", type=str, default="results", help="Output directory for results"
)
args = parser.parse_args()
if args.mode == "image":
detector = SugarcaneNodeDetector(args.model)
detector.detect_image(args.input, args.conf, output_dir=args.output)
elif args.mode == "video":
detector = SugarcaneNodeDetector(args.model)
detections = detector.detect_video(args.input, args.conf, output_dir=args.output)
# You can add visualization method if needed
elif args.mode == "batch":
detect_image_batch(args.model, args.input, args.conf, output_dir=args.output)
print("\nTo run this script:")
    print(
        "1. For video processing: python sugarcane_detector.py --mode video --input D:\\sugarcane_training\\test_7.mp4 --model D:\\sugarcane_training\\runs_1\\detect\\train\\weights\\best.pt"
    )
print(
"2. For image processing: python sugarcane_detector.py --mode image --input your_image.jpg --model path/to/model.pt"
)
print(
"3. For batch processing: python sugarcane_detector.py --mode batch --input your_images_folder --model path/to/model.pt"
)
<|endoftext|>
# 🌿 Sugarcane Node Detection using YOLOv8
This project aims to detect **nodes on sugarcane stalks** using a custom-trained YOLOv8 model. The system is capable of detecting nodes in both images and videos, and it can be extended to real-time detection for agricultural automation.
---
## 📁 Files Included
```
📦 Sugarcane-Node-Detection
├── train.py # Training script for YOLOv8 using custom sugarcane dataset
├── detect.py # Detection script for images, videos, or live feed
├── best.pt # Trained model weights (YOLOv8 format)
└── test.mp4 # Output video showing detection results
```
## 🧠 Project Overview
- **Model Used:** YOLOv8 (You Only Look Once - Ultralytics)
- **Objective:** Detect nodes on sugarcane using object detection
- **Frameworks:** Python, OpenCV, Ultralytics YOLOv8
- **Training Data:** Custom-annotated dataset (in YOLO format)
- **Output:** Bounding boxes over sugarcane nodes in images or videos
---
## 🔧 Setup Instructions
### 1. ✅ Install Requirements
Install Python dependencies using pip:
```bash
pip install ultralytics opencv-python
```
Alternatively, clone and set up Ultralytics from their official repo:
```bash
git clone https://github.com/ultralytics/ultralytics.git
cd ultralytics
pip install -e .
```
---
### 2. 🏋️♂️ Training the Model
To train the model on your custom sugarcane dataset:
```bash
python train.py
```
Make sure the dataset follows the YOLO format:
```
/dataset
├── images/
│ ├── train/
│ └── val/
└── labels/
├── train/
└── val/
```
Update dataset path and configuration inside `train.py`.
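For reference, here is a minimal sketch of what `train.py` might contain. The starting checkpoint `yolov8n.pt`, the config path `dataset/data.yaml`, and the hyperparameters are assumptions, not the author's exact settings:
```python
from ultralytics import YOLO

# Fine-tune a pretrained YOLOv8 checkpoint on the custom sugarcane dataset.
model = YOLO("yolov8n.pt")
model.train(
    data="dataset/data.yaml",  # assumed dataset config with train/val paths and class names
    epochs=100,
    imgsz=640,
)
```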
---
### 3. 🔍 Running Detection
Use the `detect.py` script for inference on images, videos, or webcam:
#### a. On an image:
```bash
python detect.py --source path/to/image.jpg --weights best.pt
```
#### b. On a video:
```bash
python detect.py --source path/to/video.mp4 --weights best.pt
```
#### c. On live webcam:
```bash
python detect.py --source 0 --weights best.pt
```
Detected nodes will be shown with bounding boxes and saved to the `runs/detect` folder by default.
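The same inference is also available directly through the Ultralytics Python API if you prefer not to go through `detect.py` (a sketch; the source path is a placeholder):
```python
from ultralytics import YOLO

model = YOLO("best.pt")
# save=True writes the annotated results under runs/detect by default.
model.predict("path/to/image.jpg", conf=0.25, save=True)
```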
---
## 🎥 Output Demo
The file `test.mp4` showcases a sample output of the trained model detecting sugarcane nodes in real-time video.
---
## 📊 Model Performance
- **Accuracy:** High performance on custom annotated dataset
- **Robustness:** Tested on various lighting and background conditions
- **Use-case:** Can be integrated into smart farming systems, yield estimation, and automated crop inspection.
---
## 👨🔬 Author
Developed by **Rudra Hatte**
Third Year – Artificial Intelligence & Data Science
Savitribai Phule Pune University (SPPU)
---
## 📬 Contact
Feel free to reach out for any queries or contributions:
- 📧 Email: [email protected]
- 🌐 GitHub: [github.com/rudrahatte](https://github.com/rudrahatte)
---
## ⭐ Credits
- [Ultralytics YOLOv8](https://github.com/ultralytics/ultralytics)
- OpenCV for video/image handling
- Custom dataset manually annotated for sugarcane node detection
---
## ❤️ Support
If this project helped you, consider giving it a ⭐ on GitHub!
<|endoftext|>
|
de-Sitter/Pytorch-mechine-learning
|
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
import torch.optim as optim
import torchvision
import torchvision.transforms as transforms
from torch.utils.data import DataLoader
transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5,), (0.5,))])
train_dataset = torchvision.datasets.MNIST(
root="./data", train=True, download=True, transform=transform
)
test_dataset = torchvision.datasets.MNIST(
root="./data", train=False, download=True, transform=transform
)
batch_size = 100
train_loader = DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True)
test_loader = DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=False)
class CNN(nn.Module):
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(in_channels=1, out_channels=16, kernel_size=5)
self.conv2 = nn.Conv2d(in_channels=16, out_channels=32, kernel_size=5)
self.pool = nn.MaxPool2d(kernel_size=2, stride=2)
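        # After two 5x5 convs and two 2x2 max-pools: 28 -> 24 -> 12 -> 8 -> 4, so 32 * 4 * 4 features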
self.fc1 = nn.Linear(32 * 4 * 4, 120)
self.fc2 = nn.Linear(120, 84)
self.fc3 = nn.Linear(84, 10)
self.relu = nn.ReLU()
def forward(self, x):
x = self.pool(self.relu(self.conv1(x)))
x = self.pool(self.relu(self.conv2(x)))
x = x.view(-1, 32 * 4 * 4)
x = self.relu(self.fc1(x))
x = self.relu(self.fc2(x))
x = self.fc3(x)
return x
device = torch.device("cpu")
model = CNN().to(device)
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(), lr=0.001)
num_epochs = 5
for epoch in range(num_epochs):
model.train()
running_loss = 0.0
for i, (images, labels) in enumerate(train_loader):
images, labels = images.to(device), labels.to(device)
outputs = model(images)
loss = criterion(outputs, labels)
optimizer.zero_grad()
loss.backward()
optimizer.step()
running_loss += loss.item()
if (i + 1) % 100 == 0:
print(
f"Epoch [{epoch + 1}/{num_epochs}], Step [{i + 1}/{len(train_loader)}], Loss: {running_loss / 100:.4f}"
)
running_loss = 0.0
print("Training finished!")
model.eval()
correct = 0
total = 0
with torch.no_grad():
for images, labels in test_loader:
images, labels = images.to(device), labels.to(device)
outputs = model(images)
_, predicted = torch.max(outputs.data, 1)
total += labels.size(0)
correct += (predicted == labels).sum().item()
accuracy = 100 * correct / total
print(f"Test Accuracy: {accuracy:.2f}%")
def visualize_predictions(model, test_loader, num_images=5):
model.eval()
images, labels = next(iter(test_loader))
images, labels = images.to(device), labels.to(device)
outputs = model(images)
_, predicted = torch.max(outputs, 1)
plt.figure(figsize=(10, 2))
for i in range(num_images):
plt.subplot(1, num_images, i + 1)
plt.imshow(images[i][0].cpu().numpy(), cmap="gray")
plt.title(f"Pred: {predicted[i].item()}\nTrue: {labels[i].item()}")
plt.axis("off")
plt.show()
visualize_predictions(model, test_loader)
# Save the model
torch.save(model.state_dict(), "mnist_cnn.pth")
<|endoftext|># PyTorch-machine-learning
Python code for training a convolutional neural network with PyTorch to recognize handwritten digits (MNIST).
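A minimal sketch for reusing the saved weights later (it assumes the `CNN` class definition from the training script is in scope):
```python
import torch

model = CNN()  # the CNN class defined in the training script
model.load_state_dict(torch.load("mnist_cnn.pth"))
model.eval()  # switch to inference mode before predicting
```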
<|endoftext|>
|
Artyom-Murzinov/Artyom-Murzinov
|
from math import *
from graphic_explan.dictonary import dictonary
from mathematics.calculators import angle_decimal, cutting_mode
from mathematics.dxf import TextEngraving
from mathematics.price import PriceCalculator
from mathematics.UFG import UFG
user = {}
class User:
def __init__(self, user_id):
self.user_id = user_id
self.number_dict = {"number": -1}
self.conditions = {"cycle_calc": []}
self.variable = {}
self.document = "% \nO0001 \n"
self.workpiece = {
"profile": None,
"alloy": None,
"size": None,
"length": None,
"wall_thickness": 0.0,
}
self.text_engraving = {
"fonts": 0,
"text_content": None,
"text_position": (0, 0),
"text_alignments": 1,
"text_size": 3,
"angle": 0,
"shag": 1,
"oblique": 15,
"text_circle": 0,
}
def data_generator(user_id, cicle, argument):
"""Функция получает от пользователя необходимые данные для дальнейшей работы с ними"""
"""И отправляет результат пользователю"""
if cicle == "engraving":
user[user_id].number_dict["number"] += 1
number = user[user_id].number_dict["number"]
if number < len(dictonary[cicle]) + 1:
            # number == 0 is the initial call, which only requests the first prompt
            if 0 < number < len(dictonary[cicle]):
                user[user_id].text_engraving[list(user[user_id].text_engraving)[number - 1]] = (
                    argument
                )
if number <= len(dictonary[cicle]) - 1:
return dictonary[cicle][number][0], dictonary[cicle][number][1]
elif number == len(dictonary[cicle]):
graving = TextEngraving(user[user_id].text_engraving)
with open("O0001.nc", "w") as file:
file.write(str(graving))
return "Проверьте результат, Перенесите этот файл на флешку 👆", 2
else:
return "Цикл завершен, выберите следующий цикл! 👍", None
if cicle in list(dictonary["part_price"]):
user[user_id].number_dict["number"] += 1
number = user[user_id].number_dict["number"]
if number < len(dictonary["part_price"][cicle]) + 1:
if number == 0:
user[user_id].workpiece[list(user[user_id].workpiece)[number]] = cicle
elif number > 0:
user[user_id].workpiece[list(user[user_id].workpiece)[number]] = argument
if number <= len(dictonary["part_price"][cicle]) - 1:
return dictonary["part_price"][cicle][number][0], dictonary["part_price"][cicle][
number
][1]
elif number == len(dictonary["part_price"][cicle]):
price = PriceCalculator(
user[user_id].workpiece["profile"],
user[user_id].workpiece["alloy"],
user[user_id].workpiece["size"],
user[user_id].workpiece["length"],
user[user_id].workpiece["wall_thickness"],
)
return f"{price}", None
else:
return "Цикл завершен, выберите следующий цикл! 👍", None
else:
        try:
            float(argument)
        except ValueError:
            return "Введите число! 🤯", None
user[user_id].number_dict["number"] += 1
number = user[user_id].number_dict["number"]
if number > 0:
user[user_id].variable.update({f"#{number}": float(argument)})
if len(dictonary[cicle]) - 1 >= user[user_id].number_dict["number"]:
if len(dictonary[cicle][number]) == 2:
return dictonary[cicle][number][0], dictonary[cicle][number][1]
else:
return dictonary[cicle][number], None
elif len(dictonary[cicle]) == user[user_id].number_dict["number"]:
if cicle == "cutting_mode":
den = cutting_mode(
user[user_id].variable["#1"],
user[user_id].variable["#2"],
user[user_id].variable["#3"],
user[user_id].variable["#4"],
)
return f"{den}", None
elif cicle == "degrees_decimal":
den = angle_decimal(
user[user_id].variable["#1"],
user[user_id].variable["#2"],
user[user_id].variable["#3"],
user[user_id].variable["#4"],
)
return f"{den}", None
elif cicle == "UFG":
den = UFG(float(user[user_id].variable["#1"]), float(user[user_id].variable["#2"]))
return f"{den}", None
elif (
(cicle == "round_pocket")
or (cicle == "rectangular_pocket")
or (cicle == "radius_pocket")
or (cicle == "milling _plane")
or (cicle == "conical_thread")
or (cicle == "finishing_rectangular_pocket")
or (cicle == "finishing_round_pocket")
or (cicle == "thread_milling")
):
for key, value in user[user_id].variable.items():
user[user_id].document += f"{key} = {value} \n"
if cicle == "round_pocket":
action = "KK"
elif cicle == "rectangular_pocket":
action = "PK"
elif cicle == "radius_pocket":
action = "RK"
elif cicle == "milling _plane":
action = "FP"
elif cicle == "conical_thread":
action = "KR"
elif cicle == "finishing_rectangular_pocket":
action = "SHPK"
elif cicle == "finishing_round_pocket":
action = "SHKK"
elif cicle == "thread_milling":
action = "FR"
with open(f"telegram_bot/file_document/{action}.txt", encoding="utf-8") as pk:
info = pk.read()
new_code = open("O0001.nc", "w")
new_code.write(user[user_id].document)
with open("O0001.nc", "a") as new_code:
new_code.write(info)
user[user_id].document
return "Перенесите этот файл на флешку 👆", 1
else:
return "Цикл завершен, выберите следующий цикл! 👍", None
<|endoftext|>import os
from dotenv import load_dotenv
load_dotenv()
API_KEY = os.getenv("API_KEY")
BOT_TOKEN = os.getenv("BOT_TOKEN")
<|endoftext|># Security Policy
## Supported Versions
Use this section to tell people about which versions of your project are
currently being supported with security updates.
| Version | Supported |
| ------- | ------------------ |
| 5.1.x | :white_check_mark: |
| 5.0.x | :x: |
| 4.0.x | :white_check_mark: |
| < 4.0 | :x: |
## Reporting a Vulnerability
Use this section to tell people how to report a vulnerability.
Tell them where to go, how often they can expect to get an update on a
reported vulnerability, what to expect if the vulnerability is accepted or
declined, etc.
<|endoftext|>[core]
support_dirs =
'~/Users/artemmurzinov/Documents/шрифты',
'~/shx_fonts',
'~/shp_fonts',
'~/lff_fonts'<|endoftext|>import asyncio
import os
from time import sleep
from aiogram import Bot, Dispatcher
from dotenv import load_dotenv
from handlers import different_types, questions
load_dotenv()
TOKEN = os.getenv("TOKEN")
async def main():
bot = Bot(token=TOKEN)
dp = Dispatcher()
dp.include_routers(questions.router, different_types.router)
await bot.delete_webhook(drop_pending_updates=True)
await dp.start_polling(bot)
if __name__ == "__main__":
while True:
try:
print("Бот запущен!!!")
asyncio.run(main())
        except Exception:
            # Restart polling after a short pause instead of crashing the bot.
            sleep(0.3)
<|endoftext|># <p align="center">MacroGcode</p>
##### - A Telegram bot that helps CNC operators, setup technicians, and programmers working with Fanuc controls write part programs. It bundles the milling cycles most in demand, so dimensions can be corrected and changed directly on the Fanuc control without going back to a phone or computer. A calculator module is also included: today it can compute cutting modes, convert minutes and seconds to decimal degrees, calculate workpiece weight and cost, and track the USD and CNY exchange rates.
### Workpiece cost calculator:
##### - Calculates the cost of a workpiece: it scrapes the price of the selected metal from a website, computes the average price per tonne from the retrieved data, derives the weight from the workpiece dimensions and profile, and sends the result to the user.
### Exchange rates:
##### - The USD and CNY rates against the ruble come from the Central Bank of Russia API: when the user presses the exchange-rate button, the bot sends `response = requests.get("https://www.cbr-xml-daily.ru/daily_json.js")`, receives a JSON file, reads the values at `data['Valute']['USD']['Value']` and `data['Valute']['CNY']['Value']`, and sends the rates back to the user.
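In code this boils down to the following (mirroring the `exchange_rate` helper in `mathematics/calculators.py`):
```python
import requests

# Daily exchange rates published by the Central Bank of Russia.
data = requests.get("https://www.cbr-xml-daily.ru/daily_json.js").json()
usd = data["Valute"]["USD"]["Value"]
cny = data["Valute"]["CNY"]["Value"]
```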
### Cycles:
##### - The machining cycles emit MACRO code from data the user enters. The bot sends prompts as JPEG files and text, and validates the input to rule out mistakes when milling the part. After substituting the data it sends the G-code to the user; the program can then be loaded onto the control from a USB stick using a smartphone with Telegram installed, or opened on a computer to check the machining time, estimate the workpiece cost, and so understand the cost of the part.
Text engraving is supported for GOST and drawing-style fonts, both outlined and single-stroke.
### Calculators:
##### - Angle conversion from minutes to a decimal value: the bot asks for the degrees, minutes, and seconds and returns the angle as a decimal value, e.g. 13.2534 degrees.
##### - Calculates the rotation of the UFG universal milling head: the bot takes the angle around the X axis and the angle around the Z axis and returns the A and C axis angles the head must be turned to.
##### - Cutting-mode calculation: the bot takes the cutting speed (printed on the tool manufacturer's box), the tool or part diameter, the number of teeth, and the feed per tooth per revolution, and returns the spindle speed and the feed in mm/min (see the worked example below).
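A worked example of the cutting-mode formulas the bot applies (the input values are illustrative):
```python
# speed [m/min] from the tool box, diameter [mm], number of teeth, feed per tooth [mm]
speed, diameter, teeth, feed_per_tooth = 200, 10, 4, 0.05
n = (1000 * speed) / (3.14 * diameter)  # spindle speed, rpm
f = teeth * feed_per_tooth * n          # feed, mm/min
print(int(n), int(f))                   # -> 6369 1273
```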
<|endoftext|>from aiogram import F, Router
from aiogram.types import CallbackQuery, FSInputFile, Message
from logic import User, data_generator, user
router = Router()
@router.message(F.text)
async def message_with_text(message: Message):
argument = message.text
text, jpg = data_generator(
user[message.from_user.id].user_id,
user[message.from_user.id].conditions["cycle_calc"],
argument,
)
print(message.from_user.full_name, message.from_user.id, message.text)
if jpg is None:
await message.answer(text)
elif jpg == 1:
await message.answer_document(FSInputFile("O0001.nc"), caption=text)
elif jpg == 2:
await message.answer_photo(FSInputFile("your.png"))
await message.answer_document(FSInputFile("O0001.nc"), caption=text)
else:
await message.answer_photo(FSInputFile(jpg), caption=text)
@router.callback_query(F.data)
async def send_random_value(callback: CallbackQuery):
print(callback.from_user.full_name, callback.from_user.id, callback.data)
user[callback.from_user.id] = User(callback.from_user.id)
user[callback.from_user.id].conditions["cycle_calc"] = callback.data
text, jpg = data_generator(callback.from_user.id, callback.data, argument=0)
if jpg is not None:
await callback.message.answer_photo(FSInputFile(jpg), caption=text)
else:
await callback.message.answer(text=text)
<|endoftext|>from aiogram import F, Router
from aiogram.filters import Command
from aiogram.types import Message
from keyboards.inline import calculator, cycles, metal_profile
from keyboards.reply import cycles_calculator, get_yes_no_kb
from mathematics.calculators import exchange_rate
router = Router()
@router.message(Command("start"))
async def cmd_start(message: Message):
await message.answer("Нажмите на необходимую Вам кнопку👇", reply_markup=get_yes_no_kb())
@router.message(F.text == "Курсы USD, CNY")
async def answer_yes(message: Message):
dollar, yuan = exchange_rate()
await message.answer(f"Доллар = {dollar}, Юань = {yuan}")
@router.message(F.text == "Циклы и Калькуляторы")
async def answer_no(message: Message):
await message.answer("Нажмите на необходимую Вам кнопку👇", reply_markup=cycles_calculator())
@router.message(F.text == "Циклы")
async def answer_no(message: Message):
await message.answer("Выберите Цикл👇", reply_markup=cycles())
@router.message(F.text == "Калькуляторы")
async def answer_no(message: Message):
await message.answer("Выберите Калькулятор👇", reply_markup=calculator())
@router.message(F.text == "Назад")
async def answer_no(message: Message):
await message.answer("Нажмите на необходимую Вам кнопку👇", reply_markup=get_yes_no_kb())
@router.message(F.text == "Помощь")
async def answer_no(message: Message):
await message.answer(
"Если бот не работает отправьте команду '/start'", reply_markup=get_yes_no_kb()
)
@router.message(F.text == "Расчет заготовки")
async def answer_no(message: Message):
await message.answer("Выберите профиль👇", reply_markup=metal_profile())
<|endoftext|>import configparser
import math
import ezdxf
from ezdxf.addons.drawing import matplotlib
from ezdxf.addons.text2path import explode
from ezdxf.enums import TextEntityAlignment
from ezdxf.fonts import fonts
config = configparser.ConfigParser()
config.read("settings.ini")
ezdxf.options.support_dirs = config["core"]["support_dirs"]
fonts.build_system_font_cache()
class TextEngraving:
def __init__(self, dictory):
list_shrift = [
"ГОСТ тип А.ttf",
"T-FlexA.ttf",
"AmericanTypewriter.ttc",
"PTSans.ttc",
"GOST 26.008-85.ttf",
]
self.fonts = list_shrift[int(dictory["fonts"])]
        self.text_position = (
            float(dictory["text_position"].split(",")[0]),
            float(dictory["text_position"].split(",")[1]),
        )  # position in the coordinate system
if int(dictory["text_alignments"]) == 1:
self.location = TextEntityAlignment.BOTTOM_LEFT
if int(dictory["text_alignments"]) == 2:
self.location = TextEntityAlignment.TOP_LEFT
if int(dictory["text_alignments"]) == 3:
self.location = TextEntityAlignment.TOP_CENTER
if int(dictory["text_alignments"]) == 4:
self.location = TextEntityAlignment.TOP_RIGHT
if int(dictory["text_alignments"]) == 5:
self.location = TextEntityAlignment.BOTTOM_RIGHT
if int(dictory["text_alignments"]) == 6:
self.location = TextEntityAlignment.BOTTOM_CENTER
self.doc = ezdxf.new("AC1032", setup=True)
self.msp = self.doc.modelspace()
        self.text_circle = dictory["text_circle"]  # straight text or text along a circle
        self.text_content = dictory["text_content"]  # text to engrave
        self.text_size = float(dictory["text_size"])  # character height
        self.shag = float(dictory["shag"])  # spacing between characters
        self.angle = float(dictory["angle"])  # rotation angle
        self.oblique = float(dictory["oblique"])  # oblique (slant) angle
        # FONT = fonts.FontFace(family=self.fonts)
        # Circle radius
        if dictory["text_circle"] == 1:
            self.radius = dictory["radius"]  # radius
self.doc.styles.new("myStandard", dxfattribs={"font": self.fonts})
def __str__(self):
return TextEngraving.generating_code(self)
    @staticmethod
    def func_grav(dxf_poli, text):
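        """Explode a text entity into polylines/splines and emit G-code moves for each."""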
engraving = explode(dxf_poli, kind=4)
for coordinates in engraving:
if coordinates.dxftype() == "LWPOLYLINE":
text += f"G0 X{round(coordinates[0][0], 4)} Y{round(coordinates[0][1], 4)}\n"
text += "G0 Z#1\n"
text += "G1 Z-[#2]\n"
for coordinat in coordinates:
text += f"G1 X{round(coordinat[0], 4)} Y{round(coordinat[1], 4)}\n"
text += "G0 Z#1\n"
if coordinates.dxftype() == "SPLINE":
text += (
f"G0 X{coordinates.control_points[0][0]} Y{coordinates.control_points[0][1]}\n"
)
text += "G0 Z#1\n"
text += "G1 Z-[#2]\n"
for coordinat in coordinates.control_points:
text += f"G1 X{coordinat[0]} Y{coordinat[1]}\n"
text += "G0 Z#1\n"
return text
def generating_code(self):
text = "% \nT1M6 \nS3000M3 \nF600 \nD1H1 \nG43 \n"
text += "#1 = 2 (Bezopasnji pod'ezd) \n"
text += "#2 = 0.1 (glubina frezerowaniya) \n"
if self.text_circle == 0:
dxf_poli = self.msp.add_text(
self.text_content,
rotation=self.angle,
dxfattribs={
"height": self.text_size,
"style": "myStandard",
"width": self.shag,
"oblique": 15,
},
).set_placement(self.text_position)
text = TextEngraving.func_grav(dxf_poli, text)
if self.text_circle == 1:
num_chars = len(self.text_content)
            # Angle between characters
if self.shag == 0:
angle_step = 360 / num_chars
else:
angle_step = self.shag
            # Place the text along the circle
            for char in self.text_content:
                x = self.text_position[0] + self.radius * math.cos(
                    math.radians(self.angle)
                )  # x coordinate on the circle
                y = self.text_position[1] + self.radius * math.sin(
                    math.radians(self.angle)
                )  # y coordinate on the circle
self.msp.add_text(
char,
rotation=self.angle - 90,
dxfattribs={"height": self.text_size, "style": "myStandard"},
).set_placement((x, y), align=TextEntityAlignment.CENTER)
self.angle -= angle_step
for dxf_poli in self.msp:
if dxf_poli.dxftype() == "TEXT":
text = TextEngraving.func_grav(dxf_poli, text)
matplotlib.qsave(self.doc.modelspace(), "your.png")
text += "G0 Z200 \nM30 \n%"
return text
<|endoftext|>from math import *
import numpy
from scipy import interpolate
class UFG:
"""Этот класс выдает уголы под которыми необходимо повернуть фрезерную поворотную голову UFG"""
"""спомощью модуля scipy, строится сплайн по точкам и возвращает знаечение которое находится между точек"""
"""Список табличный"""
def __init__(self, cornerX, cornerZ):
self.cornerX = cornerX
self.cornerZ = cornerZ
self.w_list = []
self.a3g_list = []
self.a2g_list = []
self.a1g_list = []
self.a_list = [] # --- X
self.a1l_list = [] # --- A
self.a2l_list = []
self.a3l_list = [] # --- C
def __str__(self):
return UFG.calculation_angles(self)
    @staticmethod
    def _parse_angle(token, strip_chars=1):
        """Convert a 'DD°MM' table token into a decimal angle (minutes -> decimal fraction)."""
        value = float(f"{token.split('°')[0]}.{token.split('°')[-1][:-strip_chars]}")
        text = str(value)
        fraction = text.split(".")[-1]
        # Two-digit fractions are whole minutes; single digits are tens of minutes.
        divisor = 60 if len(fraction) == 2 else 6
        converted = str(int(fraction) / divisor)
        return float(f"{text.split('.')[0]}.{converted.split('.')[-1]}")
    def making_list(self):
        """Fill the lookup lists with data from the table file."""
        # Table column index -> target list; column 7 carries a two-character suffix.
        columns = [
            (0, self.a3l_list),  # C axis
            (1, self.a2l_list),
            (2, self.a1l_list),  # A axis
            (3, self.a_list),  # requested angle X
            (4, self.a1g_list),
            (5, self.a2g_list),
            (6, self.a3g_list),
            (7, self.w_list),
        ]
        with open("telegram_bot/file_document/UFG.txt", encoding="utf8") as word:
            for line in word:
                tokens = line.split(" ")
                for index, target in columns:
                    target.append(UFG._parse_angle(tokens[index], 2 if index == 7 else 1))
def projecting_spline(self, list_a, list_b, list_c, corner):
"""Создаю сплайн исходя из данных, затем находим нужное число исходя из сплайна"""
list_a = numpy.array(list_a)
list_b = numpy.array(list_b)
list_c = numpy.array(list_c)
Cheights_smooth = interpolate.splrep(list_c, list_a)
Aheights_smooth = interpolate.splrep(list_c, list_b)
Cheights = interpolate.splev(corner, Cheights_smooth)
Aheights = interpolate.splev(corner, Aheights_smooth)
return Cheights, Aheights
def calculation_angles(self):
"""Основной код запрашивает углы плоскости, и возвращает уже результат нужного
поворота головы"""
UFG.making_list(self)
# cornerX = float(input('Введите угол вокруг оси X: '))
# cornerZ = float(input('Введите угол вокруг оси Z: '))
variable = 180
if -90 <= self.cornerX <= 90 and -90 <= self.cornerZ <= 90:
if self.cornerX < 0:
self.cornerX *= -1
variable = 0
corner1, corner2 = UFG.projecting_spline(
self, self.a3l_list, self.a1l_list, self.a_list, self.cornerX
)
return f"C = {round(corner1.item() - variable - self.cornerZ, 3)}, A = {round(corner2.item(), 3)}"
else:
return "Угол вокруг оси 'X' и 'Z' должен быть не более 90° и не менее -90°"
<|endoftext|>from math import *
import requests
def exchange_rate():
"""Функция берет из цетробанка валюту доллар и юань"""
response = requests.get("https://www.cbr-xml-daily.ru/daily_json.js")
data = response.json()
return data["Valute"]["USD"]["Value"], data["Valute"]["CNY"]["Value"]
def cutting_mode(speed, diameter, number_teeth, tooth_pitch):
"""Функция считает режимы резания"""
n = (1000 * float(speed)) / (3.14 * float(diameter))
f = float(number_teeth) * float(tooth_pitch) * n
return f"Число оборотов = {int(n)}об/мин, Подача = {int(f)}мм/мин 👍"
def angle_decimal(corner, minute, second, radius):
"""Минуты-секунды в десятичное значение и считает координаты радиуса и угла"""
ugol = int(corner) + int(minute) / 60 + int(second) / 3600
osX = float(radius) * cos(radians(ugol))
osY = float(radius) * sin(radians(ugol))
return f"Угол равен = {round(ugol, 4)}, Ось X = {round(osX, 4)}, Ось Y = {round(osY, 4)} 👍"
<|endoftext|>import os
import re
from math import *
import requests
from bs4 import BeautifulSoup
from dotenv import load_dotenv
load_dotenv()
token = os.getenv("token")
headers = {"Authorization": f"OAuth {token}"}
params = {"path": "dictionary_data/metal_densities.json"}
class PriceCalculator:
"""Класс считает вес и цену заготовки"""
def __init__(self, profile, alloy, size, length, wall_thickness):
        self.alloy_density = []  # alloy density
        self.alloy = alloy  # alloy grade
        self.profile = profile  # profile
        self.size = float(size)  # size
        self.length = float(length)  # length
        self.wall_thickness = float(wall_thickness)  # wall thickness
def __str__(self):
return PriceCalculator.workpiece_cost(self)
def alloy_search(self, alloy_grade, alloy):
"""Рекурсивная функция осуществяет поиск марку стали и записывает в виде списка марку и плотность"""
if isinstance(alloy_grade, dict):
for key, value in alloy_grade.items():
PriceCalculator.alloy_search(self, alloy_grade[key], alloy)
if key.lower() == alloy.lower():
self.alloy_density = key, value
return self.alloy_density
return None
def alloy_price_search(self):
"""Достаю из интернета стоимость сплава и высчитываю среднюю стоимость, плотность сплава"""
price = 0
number = 0
patterns = r'[0-9][^a-z="<>/]*'
yandex_url = "https://cloud-api.yandex.net/v1/disk/resources/download"
emetal_url = f"https://e-metall.ru/{self.profile}/?item_steel_mark={self.alloy}"
        # fetch the JSON file from Yandex Disk and pass it to the recursive search
yandex_disk = requests.get(yandex_url, headers=headers, params=params)
href = yandex_disk.json()["href"]
yandex_response = requests.get(href)
yandex_response.encoding = "utf-8"
self.alloy_grade = yandex_response.json()
PriceCalculator.alloy_search(self, self.alloy_grade, self.alloy)
metal_response = requests.get(emetal_url)
bs = BeautifulSoup(metal_response.content, "html5lib")
for tag in bs.find_all("td"):
if "Цена, руб с НДС" in str(tag):
try:
if "шт" not in str(tag):
if "т" in str(tag) or "кг" in str(tag):
price += float(re.findall(patterns, str(tag))[0][:-3].replace(" ", ""))
number += 1
                except (ValueError, IndexError):
                    pass
        try:
            self.new_price = round(price / number / 1000, 2)
        except ZeroDivisionError:
            self.new_price = 0
def workpiece_cost(self):
"""Делаю расчет веса заготовки, средняя стоимость заготовки, средняя стоимость сплава за 1 кг"""
PriceCalculator.alloy_price_search(self)
try:
if self.profile.lower() == "круг":
body_weight = (
pi * self.size**2 / 4 * self.length * self.alloy_density[1][0]
) / 1000000
elif self.profile.lower() == "квадрат":
body_weight = (self.size**2 * self.length * self.alloy_density[1][0]) / 1000000
elif self.profile.lower() == "шестигранник":
body_weight = (
((self.size / 2) ** 2 / sqrt(3)) * 6 * self.length * self.alloy_density[1][0]
) / 1000000
elif self.profile.lower() == "труба":
body_weight = (
(
(pi * self.size**2 / 4)
- (pi * (self.size - self.wall_thickness * 2) ** 2 / 4)
)
* self.length
* self.alloy_density[1][0]
) / 1000000
elif self.profile.lower() == "лист":
body_weight = (
self.size * self.wall_thickness * self.length * self.alloy_density[1][0]
) / 1000000
return f"Средняя цена за кг={self.new_price}руб, Масса заготовки={round(body_weight, 4)}кг, Стоимость заготовки={round(body_weight * self.new_price, 2)}руб"
        except Exception:
            return "Такого нет, в скором появится, извините!😊"
# alloy='40х'
# profile = "круг"
# size = 25  # size
# wall_thickness = None
# length = 300  # workpiece length
# density = PriceCalculator(profile, alloy, size, length, wall_thickness)
# print(density)
<|endoftext|>from aiogram.utils.keyboard import (
InlineKeyboardBuilder,
InlineKeyboardMarkup,
ReplyKeyboardBuilder,
ReplyKeyboardMarkup,
)
def cycles() -> InlineKeyboardMarkup:
kb = InlineKeyboardBuilder()
kb.button(text="Круглый карман", callback_data="round_pocket")
kb.button(text="Радиусный карман", callback_data="radius_pocket")
kb.button(text="Прямоугольный карман", callback_data="rectangular_pocket")
kb.button(text="Фрезерование плоскости", callback_data="milling _plane")
kb.button(text="Коническая резьба", callback_data="conical_thread")
kb.adjust(1)
return kb.as_markup(resize_keyboard=True)
def cycles_calculator() -> ReplyKeyboardMarkup:
kb = ReplyKeyboardBuilder()
kb.button(text="Циклы")
kb.button(text="Калькуляторы")
kb.button(text="Назад")
kb.button(text="Помощь")
kb.adjust(2)
# kb.adjust(1)
return kb.as_markup(resize_keyboard=True)
def calculator() -> InlineKeyboardMarkup:
kb = InlineKeyboardBuilder()
kb.button(text="Расчет режимов резания", callback_data="cutting_mode")
kb.button(text="Превод в десятичный угол", callback_data="degrees_decimal")
kb.adjust(1)
return kb.as_markup(resize_keyboard=True)
def get_yes_no_kb() -> ReplyKeyboardMarkup:
kb = ReplyKeyboardBuilder()
kb.button(text="Курсы USD, CNY")
kb.button(text="Расчет заготовки")
kb.button(text="Циклы и Калькуляторы")
kb.button(text="Помощь")
kb.adjust(2)
return kb.as_markup(resize_keyboard=True)
def metal_profile() -> InlineKeyboardMarkup:
kb = InlineKeyboardBuilder()
kb.button(text="Круг", callback_data="Круг")
kb.button(text="Квадрат", callback_data="Квадрат")
kb.button(text="Труба", callback_data="Труба")
kb.button(text="Лист", callback_data="Лист")
kb.button(text="Шестигранник", callback_data="Шестигранник")
kb.adjust(2)
return kb.as_markup(resize_keyboard=True)
<|endoftext|>from aiogram.utils.keyboard import ReplyKeyboardBuilder, ReplyKeyboardMarkup
def cycles_calculator() -> ReplyKeyboardMarkup:
"""Клавиатура меню циклов и калькулятора"""
kb = ReplyKeyboardBuilder()
kb.button(text="Циклы")
kb.button(text="Калькуляторы")
kb.button(text="Назад")
kb.button(text="Помощь")
kb.adjust(2)
# kb.adjust(1)
return kb.as_markup(resize_keyboard=True)
def get_yes_no_kb() -> ReplyKeyboardMarkup:
"""клавиатура основного меню"""
kb = ReplyKeyboardBuilder()
kb.button(text="Курсы USD, CNY")
kb.button(text="Расчет заготовки")
kb.button(text="Циклы и Калькуляторы")
kb.button(text="Помощь")
kb.adjust(2)
return kb.as_markup(resize_keyboard=True)
<|endoftext|>from aiogram.utils.keyboard import InlineKeyboardBuilder, InlineKeyboardMarkup
def cycles() -> InlineKeyboardMarkup:
"""клавиатура Фрезерные циклы карманов"""
kb = InlineKeyboardBuilder()
kb.button(text="Круглый карман", callback_data="round_pocket")
kb.button(text="Чистовой круглый карман", callback_data="finishing_round_pocket")
kb.button(text="Радиусный карман", callback_data="radius_pocket")
kb.button(text="Прямоугольный карман", callback_data="rectangular_pocket")
kb.button(text="Чистовой прямоугольный карман", callback_data="finishing_rectangular_pocket")
kb.button(text="Фрезерование плоскости", callback_data="milling _plane")
kb.button(text="Фрезерование внутренней резьбы", callback_data="thread_milling")
kb.button(text="Коническая резьба", callback_data="conical_thread")
# kb.button(text="Гравировка текста", callback_data="engraving")
kb.adjust(1)
return kb.as_markup(resize_keyboard=True)
def calculator() -> InlineKeyboardMarkup:
"""Клавиатура калькуляторов"""
kb = InlineKeyboardBuilder()
kb.button(text="Расчет режимов резания", callback_data="cutting_mode")
kb.button(text="Превод в десятичный угол", callback_data="degrees_decimal")
kb.button(text="Фрезерная поворотная UFG", callback_data="UFG")
kb.adjust(1)
return kb.as_markup(resize_keyboard=True)
def metal_profile() -> InlineKeyboardMarkup:
"""Клавиатура металлопрофиля"""
kb = InlineKeyboardBuilder()
kb.button(text="Круг", callback_data="Круг")
kb.button(text="Квадрат", callback_data="Квадрат")
kb.button(text="Труба", callback_data="Труба")
kb.button(text="Лист", callback_data="Лист")
kb.button(text="Шестигранник", callback_data="Шестигранник")
kb.adjust(2)
return kb.as_markup(resize_keyboard=True)
<|endoftext|>name: Pylint
on: [push]
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.8", "3.9", "3.10"]
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install pylint
- name: Analysing the code with pylint
run: |
pylint $(git ls-files '*.py')
<|endoftext|>
|
Shkolik/Foamcut
| "\n__title__ = \"Create Exit path\"\n__author__ = \"Andrew Shkolik & Andrei Bezborodov\"\n__license_(...TRUNCATED) |
Birobizhan/movie_recommender
| "import re\n\nimport requests\nfrom environs import Env\n\nwith open(\"list_films/proteiner_csv/Фэ(...TRUNCATED) |
TRAILab/JDT3D
| "WORK_DIR=${PWD}\nPROJECT=jdt3d\nDOCKER_IMAGE=${PROJECT}:eccv_2024\nDOCKER_FILE=docker/Dockerfile\n\(...TRUNCATED) |
koreo-dev/koreo-core
| "[project]\nname = \"koreo-core\"\nversion = \"0.1.17\"\ndescription = \"Type-safe and testable KRM (...TRUNCATED) |
Itz-Agasta/Iem-Python-Weekly-Assignments
| "# Python Weekly Assignments\nThis repository contains solutions to weekly Python assignments for co(...TRUNCATED) |