def request(self, message, message_type):
    if message_type == MULTIPART:
        raise Exception("Unsupported request type")
    super(Requestor, self).send(message, message_type)
Send a request message of the given type.
Args:
- message: the message to publish
- message_type: the type of message being sent
def run_as_admin(command, cwd=None, environ=None):
    if isinstance(command, str):
        command = shlex.split(command)
    if os.name == 'nt':
        return _run_as_admin_windows(command, cwd, environ)
    elif os.name == 'posix':
        command = ['sudo', '-E'] + list(command)
        sys.exit(subprocess.call(command))
    else:
        raise RuntimeError('Unsupported os: {!r}'.format(os.name))
Runs a command as an admin in the specified *cwd* and *environ*. On Windows, this creates a temporary directory where this information is stored so that the elevated process can launch the proper subprocess.
def add_body_part(self, key, data, mime_type, size=None):
    if isinstance(data, str):
        size = len(data)
    if hasattr(data, "fileno"):
        size = os.fstat(data.fileno())[stat.ST_SIZE]
    if size is None:
        # TODO: support chunked transfer if some of the body is of unknown size.
        raise UnknownSize('Each part of the body must have a known size.')
    if 'Content-Length' in self.headers:
        content_length = int(self.headers['Content-Length'])
    else:
        content_length = 0
    # Each part is preceded by the MIME boundary string.
    boundary_string = '\r\n--%s\r\n' % (MIME_BOUNDARY,)
    self._body_parts.append(boundary_string)
    content_length += len(boundary_string) + size
    # Include the disposition and mime type of this part.
    cd = 'Content-Disposition: form-data; name="%s"' % key
    mt = mime_type
    if hasattr(data, "fileno"):
        cd += '; filename="%s"' % data.name.split('/')[-1]
        mt = mimetypes.guess_type(data.name)[0] or 'application/octet-stream'
    cd += '\r\n'
    type_string = 'Content-Type: %s\r\n\r\n' % (mt,)
    self._body_parts.append(cd)
    self._body_parts.append(type_string)
    content_length += len(type_string) + len(cd)
    self._body_parts.append(data)
    self.headers['Content-Length'] = str(content_length)
Adds data to the HTTP request body. If more than one part is added, this is assumed to be a MIME multipart request. This method is designed to create MIME 1.0 requests as specified in RFC 1341.
Args:
  key: str The name used in the Content-Disposition header for this part.
  data: str or a file-like object containing a part of the request body.
  mime_type: str The MIME type describing the data.
  size: int Required if the data is a file-like object. If the data is a string, the size is calculated automatically and this parameter is ignored.
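A hedged usage sketch (the HttpRequest class, MIME_BOUNDARY constant, and UnknownSize error come from the surrounding code; the file name is hypothetical):

request = HttpRequest()
request.add_body_part('note', 'hello world', 'text/plain')
f = open('photo.jpg', 'rb')
request.add_body_part('photo', f, 'image/jpeg')  # size is read via os.fstat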
def _copy(self):
    copied_uri = Uri(self.uri.scheme, self.uri.host, self.uri.port,
                     self.uri.path, self.uri.query.copy())
    new_request = HttpRequest(uri=copied_uri, method=self.method,
                              headers=self.headers.copy())
    new_request._body_parts = self._body_parts[:]
    return new_request
Creates a deep copy of this request.
def _get_relative_path(self):
    param_string = self._get_query_string()
    if self.path is None:
        path = '/'
    else:
        path = self.path
    if param_string:
        return '?'.join([path, param_string])
    else:
        return path
Returns the path with the query parameters escaped and appended.
def modify_request(self, http_request=None):
    if http_request is None:
        http_request = HttpRequest()
    if http_request.uri is None:
        http_request.uri = Uri()
    # Determine the correct scheme.
    if self.scheme:
        http_request.uri.scheme = self.scheme
    if self.port:
        http_request.uri.port = self.port
    if self.host:
        http_request.uri.host = self.host
    # Set the relative uri path.
    if self.path:
        http_request.uri.path = self.path
    if self.query:
        http_request.uri.query = self.query.copy()
    return http_request
Sets HTTP request components based on the URI.
def parse_uri(uri_string):
    parts = urlparse.urlparse(uri_string)
    uri = Uri()
    if parts[0]:
        uri.scheme = parts[0]
    if parts[1]:
        host_parts = parts[1].split(':')
        if host_parts[0]:
            uri.host = host_parts[0]
        if len(host_parts) > 1:
            uri.port = int(host_parts[1])
    if parts[2]:
        uri.path = parts[2]
    if parts[4]:
        param_pairs = parts[4].split('&')
        for pair in param_pairs:
            pair_parts = pair.split('=')
            if len(pair_parts) > 1:
                uri.query[urllib.unquote_plus(pair_parts[0])] = (
                    urllib.unquote_plus(pair_parts[1]))
            elif len(pair_parts) == 1:
                uri.query[urllib.unquote_plus(pair_parts[0])] = None
    return uri
Creates a Uri object which corresponds to the URI string. This method can accept partial URIs, but it will leave missing members of the Uri unset.
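A minimal usage sketch (assuming the Uri class and the Python 2 urlparse/urllib modules used above):

uri = parse_uri('http://www.example.com:8080/path?foo=bar')
# uri.scheme == 'http', uri.host == 'www.example.com', uri.port == 8080
# uri.path == '/path', uri.query == {'foo': 'bar'}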
def _get_connection(self, uri, headers=None):
    connection = None
    if uri.scheme == 'https':
        if not uri.port:
            connection = httplib.HTTPSConnection(uri.host)
        else:
            connection = httplib.HTTPSConnection(uri.host, int(uri.port))
    else:
        if not uri.port:
            connection = httplib.HTTPConnection(uri.host)
        else:
            connection = httplib.HTTPConnection(uri.host, int(uri.port))
    return connection
Opens a socket connection to the server to set up an HTTP request. Args: uri: The full URL for the request as a Uri object. headers: A dict of string pairs containing the HTTP headers for the request.
def next(self):
    " Move on to the next character in the text. "
    char = self.char
    if char == '\n':
        self.lineno += 1
        self.colno = 0
    else:
        self.colno += 1
    self.index += 1
    return self.char
Move on to the next character in the text.
def readline(self):
    " Reads a full line from the scanner and returns it. "
    start = end = self.index
    while end < len(self.text):
        if self.text[end] == '\n':
            end += 1
            break
        end += 1
    result = self.text[start:end]
    self.index = end
    if result.endswith('\n'):
        self.colno = 0
        self.lineno += 1
    else:
        self.colno += end - start
    return result
Reads a full line from the scanner and returns it.
def match(self, regex, flags=0):
    if isinstance(regex, str):
        regex = re.compile(regex, flags)
    match = regex.match(self.text, self.index)
    if not match:
        return None
    start, end = match.start(), match.end()
    lines = self.text.count('\n', start, end)
    self.index = end
    if lines:
        self.colno = end - self.text.rfind('\n', start, end) - 1
        self.lineno += lines
    else:
        self.colno += end - start
    return match
Matches the specified *regex* from the current character of the *scanner* and returns the result. The scanner's column and line numbers are updated accordingly.
# Arguments
regex (str, Pattern): The regex to match.
flags (int): The flags to use when compiling the pattern.
def getmatch(self, regex, group=0, flags=0):
    match = self.match(regex, flags)
    if match:
        return match.group(group)
    return None
The same as #Scanner.match(), but returns the captured group rather than the regex match object, or None if the pattern didn't match.
def restore(self, cursor):
    " Moves the scanner back (or forward) to the specified cursor location. "
    if not isinstance(cursor, Cursor):
        raise TypeError('expected Cursor object', type(cursor))
    self.index, self.lineno, self.colno = cursor
Moves the scanner back (or forward) to the specified cursor location.
def update(self):
    self.rules_map = {}
    self.skippable_rules = []
    for rule in self.rules:
        if not isinstance(rule, Rule):
            raise TypeError('item must be Rule instance', type(rule))
        self.rules_map.setdefault(rule.name, []).append(rule)
        if rule.skip:
            self.skippable_rules.append(rule)
Updates the #rules_map dictionary and #skippable_rules list based on the #rules list. Must be called after #rules or any of its items have been modified. The same rule name may appear multiple times. # Raises TypeError: if an item in the `rules` list is not a rule.
def expect(self, *names):
    if not names:
        return
    if not self.token or self.token.type not in names:
        raise UnexpectedTokenError(names, self.token)
Checks if the current #token#s type name matches with any of the specified *names*. This is useful for asserting multiple valid token types at a specific point in the parsing process. # Arguments names (str): One or more token type names. If zero are passed, nothing happens. # Raises UnexpectedTokenError: If the current #token#s type name does not match with any of the specified *names*.
def accept(self, *names, **kwargs):
    return self.next(*names, as_accept=True, **kwargs)
Extracts a token of one of the specified rule names and doesn't error if unsuccessful. Skippable tokens might still be skipped by this method. # Arguments names (str): One or more token names that are accepted. kwargs: Additional keyword arguments for #next(). # Raises ValueError: if a rule with the specified name doesn't exist.
def append(self, event, help=""):
    if isinstance(event, str):
        self._events[event] = HookList(is_waterfall=self.is_waterfall)
        self._help[event] = (help, getframeinfo(stack()[1][0]))
        if not help:
            logger.warning("Great, don't say anything about your hooks and "
                           "wait for plugin creators to figure it out.")
    elif isinstance(event, Iterable):
        # Deprecated: this form gives no way to pass a help string.
        # TODO: Remove this.
        for name in event:
            self.append(name)
    else:
        raise TypeError("Invalid event name!")
Creates a new event. `event` may be an iterable or a string.
Args:
  event (str): Name of the event to declare
Kwargs:
  help (str): Help string for the event
Raises:
  TypeError
**Please** describe the event and its calling arguments in the help string.
def hook(self, function, event, dependencies):
    # Hook all events (recursively).
    if event is None:
        for e in self._events.keys():
            self.hook(function, e, dependencies)
        return
    # Hook multiple, but specific events (recursively).
    if not isinstance(event, str) and isinstance(event, Iterable):
        for e in event:
            self.hook(function, e, dependencies)
        return
    # Hook a single event.
    event_list = self._events.get(event, None)
    if event_list is None:
        raise NameError(
            "Invalid key provided '%s'. Valid options: %s"
            % (event, ", ".join(self._events.keys()))
        )
    return event_list.hook(function, dependencies)
Tries to attach the hook to the event.
Args:
  function (func): Function that will be called when the event is called
Kwargs:
  dependencies (str): String or Iterable with modules whose hooks should be called before this one
Raises:
  NameError
Note that the dependencies are module-wide: if `parent.foo` and `parent.bar` are both subscribed to the `example` event and `child` lists `parent` as a dependency, **both** `foo` and `bar` must be called in order for the dependency to be resolved.
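A hedged usage sketch (the hook-manager instance and handler names are hypothetical):

hooks.append("on_message", help="Called with (author, text) when a message arrives.")
def handler(author, text):
    print(author, text)
hooks.hook(handler, "on_message", None)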
def call(path, *args, encoding="utf-8", show_command=False):
    returncode = 0
    output = None
    try:
        # 2015-10-10 zrong
        # On Windows, universal_newlines=True caused encoding errors when the
        # output contained Chinese text, because check_output's read of stdout
        # is not given an encoding argument. We therefore no longer pass
        # universal_newlines=True; instead we take the raw bytes and decode
        # them ourselves.
        arg_list = get_args(path, *args)
        if show_command:
            print('git call args:', arg_list)
        output = subprocess.check_output(arg_list, stderr=subprocess.STDOUT)
        output = output.decode(encoding=encoding)
    except subprocess.CalledProcessError as err:
        returncode = err.returncode
        output = err.output.decode(encoding=encoding)
    return returncode, output
Call a git command using subprocess.check_output.
:param str path: Path to the git repository.
:param \*args: Additional arguments for git.
:returns: The return code and the output of the call.
:rtype: int
:rtype: string Output returned by git, or the error message if the call failed.
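A short usage sketch (the repository path is hypothetical):

code, output = call('/path/to/repo', 'status', '--short')
if code == 0:
    print(output)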
def get_args(path, *args, work_tree=True, bare=False):
    base = ['git']
    if path:
        base.append('-C')
        base.append(path)
        if bare:
            base.append('--bare')
            base.append("--git-dir=" + path)
        else:
            base.append("--git-dir=" + os.path.join(path, ".git"))
        if work_tree:
            base.append("--work-tree=" + path)
    for arg in args:
        base.append(arg)
    return base
Build the git argument list that subprocess can execute.
:param str path: Path to the git repository.
:param \*args: Additional arguments for git.
:param bare: Whether to treat the repository as a bare repository.
def get_branches(path):
    code, output = call(path, 'branch', '--list')
    if code > 0:
        return None
    branches = output.split('\n')
    newbr = [None]
    for br in branches:
        if br:
            if br[0] == '*':
                newbr[0] = br[2:]
            else:
                newbr.append(br[2:])
    return newbr
Get a list of all branch names.
:param str path: Path to the git repository.
:return: List of branch names; the current branch is the first item.
:rtype: list
def clone(giturl, gitpath):
    gitArgs = ['git', 'clone', giturl, gitpath]
    slog.info(' '.join(gitArgs))
    return subprocess.call(gitArgs)
Clone a git repository.
:param str giturl: URL of the git repository.
:param str gitpath: Local path to clone into.
def get_hash(path, cut=0):
    code, output = call(path, 'rev-parse', 'HEAD')
    if code > 0:
        return None
    # The output may end with a line break.
    sha1 = output.strip()
    if cut > 0:
        sha1 = sha1[:cut]
    return sha1
Get the sha1 value of the repository's HEAD.
:param str path: Path to the git repository.
:param int cut: Length of the sha1 value to keep; 0 means no truncation.
:returns: The (possibly truncated) sha1 value.
:rtype: str
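A short usage sketch (the repository path is hypothetical):

sha1 = get_hash('/path/to/repo', cut=7)  # e.g. 'a1b2c3d', or None on failure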
def update_submodules(path, init=True, update=True):
    succ = None
    if init:
        arg = get_args(path, 'submodule', 'init', work_tree=False)
        slog.info(' '.join(arg))
        succ = subprocess.call(arg)
        if succ > 0:
            slog.error('git execute error!')
            return succ
    if update:
        arg = get_args(path, "submodule", "update", work_tree=False)
        slog.info(' '.join(arg))
        succ = subprocess.call(arg)
        if succ > 0:
            slog.error('git execute error!')
            return succ
    return succ
Update submodules.
:param str path: Path to the git repository.
:param bool init: Whether to initialize the submodules.
:param bool update: Whether to update the submodules.
def print_message(self, message, verbosity_needed=1):
    if self.args.verbosity >= verbosity_needed:
        print(message)
Prints the message, if verbosity is high enough.
def error(self, message, code=1):
    sys.stderr.write(message)
    sys.exit(code)
Prints the error, and exits with the given code.
def parse_db_settings(self, settings):
    if settings == 'DJANGO_SETTINGS_MODULE':
        django_settings = os.environ.get('DJANGO_SETTINGS_MODULE')
        self.print_message("Getting settings file from DJANGO_SETTINGS_MODULE=%s"
                           % django_settings)
        path_pieces = django_settings.split('.')
        path_pieces[-1] = '%s.py' % path_pieces[-1]
        settings = os.path.join(*path_pieces)
    self.print_message("Parsing settings from settings file '%s'" % settings)
    parser = DatabaseSettingsParser()
    with open(settings) as settings_file:
        settings_ast = ast.parse(settings_file.read())
        parser.visit(settings_ast)
    try:
        return parser.database_settings['default']
    except KeyError as e:
        self.error("Missing key or value for: %s\nSettings must be of the form: %s"
                   % (e, self.settings_format))
Parse out database settings from filename or DJANGO_SETTINGS_MODULE.
def initialize_db_args(self, settings, db_key):
    self.print_message("Initializing database settings for %s" % db_key,
                       verbosity_needed=2)
    db_member = self.databases[db_key]
    db_name = settings.get('NAME')
    if db_name and not db_member['name']:
        db_member['name'] = db_name
    db_member['password'] = settings.get('PASSWORD')
    args = []
    for key in ['USER', 'HOST', 'PORT']:
        value = settings.get(key)
        if value:
            self.print_message("Adding parameter %s" % key.lower(),
                               verbosity_needed=2)
            args.append('--%s=%s' % (key.lower(), value))
    db_member['args'] = args
Initialize connection arguments for postgres commands.
def download_file(self, url, filename):
    self.print_message("Downloading to file '%s' from URL '%s'" % (filename, url))
    try:
        db_file = urllib2.urlopen(url)
        with open(filename, 'wb') as output:
            output.write(db_file.read())
        db_file.close()
    except Exception as e:
        self.error(str(e))
    self.print_message("File downloaded")
Download file from url to filename.
def unzip_file_if_necessary(self, source_file):
    if source_file.endswith(".gz"):
        self.print_message("Decompressing '%s'" % source_file)
        subprocess.check_call(["gunzip", "--force", source_file])
        source_file = source_file[:-len(".gz")]
    return source_file
Unzip file if zipped.
def download_file_from_url(self, source_app, url):
    if source_app:
        source_name = source_app
    else:
        source_name = urlparse.urlparse(url).netloc.replace('.', '_')
    filename = self.create_file_name(source_name)
    self.download_file(url, filename)
    return filename
Download file from source app or url, and return local filename.
def dump_database(self):
    db_file = self.create_file_name(self.databases['source']['name'])
    self.print_message("Dumping postgres database '%s' to file '%s'"
                       % (self.databases['source']['name'], db_file))
    self.export_pgpassword('source')
    args = [
        "pg_dump",
        "-Fc",
        "--no-acl",
        "--no-owner",
        "--dbname=%s" % self.databases['source']['name'],
        "--file=%s" % db_file,
    ]
    args.extend(self.databases['source']['args'])
    subprocess.check_call(args)
    return db_file
Create dumpfile from postgres database, and return filename.
def drop_database(self):
    self.print_message("Dropping database '%s'"
                       % self.databases['destination']['name'])
    self.export_pgpassword('destination')
    args = [
        "dropdb",
        "--if-exists",
        self.databases['destination']['name'],
    ]
    args.extend(self.databases['destination']['args'])
    subprocess.check_call(args)
Drop postgres database.
def create_database(self):
    self.print_message("Creating database '%s'"
                       % self.databases['destination']['name'])
    self.export_pgpassword('destination')
    args = [
        "createdb",
        self.databases['destination']['name'],
    ]
    args.extend(self.databases['destination']['args'])
    for arg in self.databases['destination']['args']:
        if arg[:7] == '--user=':
            args.append('--owner=%s' % arg[7:])
    subprocess.check_call(args)
Create postgres database.
def replace_postgres_db(self, file_url):
    self.print_message("Replacing postgres database")
    if file_url:
        self.print_message("Sourcing data from online backup file '%s'" % file_url)
        source_file = self.download_file_from_url(self.args.source_app, file_url)
    elif self.databases['source']['name']:
        self.print_message("Sourcing data from database '%s'"
                           % self.databases['source']['name'])
        source_file = self.dump_database()
    else:
        self.print_message("Sourcing data from local backup file %s" % self.args.file)
        source_file = self.args.file
    self.drop_database()
    self.create_database()
    source_file = self.unzip_file_if_necessary(source_file)
    self.print_message("Importing '%s' into database '%s'"
                       % (source_file, self.databases['destination']['name']))
    args = [
        "pg_restore",
        "--no-acl",
        "--no-owner",
        "--dbname=%s" % self.databases['destination']['name'],
        source_file,
    ]
    args.extend(self.databases['destination']['args'])
    subprocess.check_call(args)
Replace postgres database with database from specified source.
def get_file_url_for_heroku_app(self, source_app):
    self.print_message("Getting backup url for Heroku app '%s'" % source_app)
    args = [
        "heroku",
        "pg:backups:url",
        "--app=%s" % source_app,
    ]
    if self.args.use_pgbackups:
        args = [
            "heroku",
            "pgbackups:url",
            "--app=%s" % source_app,
        ]
    return subprocess.check_output(args).strip().decode('ascii')
Get latest backup URL from heroku pg:backups (or pgbackups).
def capture_heroku_database(self):
    self.print_message("Capturing database backup for app '%s'"
                       % self.args.source_app)
    args = [
        "heroku",
        "pg:backups:capture",
        "--app=%s" % self.args.source_app,
    ]
    if self.args.use_pgbackups:
        args = [
            "heroku",
            "pgbackups:capture",
            "--app=%s" % self.args.source_app,
            "--expire",
        ]
    subprocess.check_call(args)
Capture Heroku database backup.
def reset_heroku_database(self):
    self.print_message("Resetting database for app '%s'"
                       % self.args.destination_app)
    args = [
        "heroku",
        "pg:reset",
        "--app=%s" % self.args.destination_app,
        "DATABASE_URL",
    ]
    subprocess.check_call(args)
Reset Heroku database.
def replace_heroku_db(self, file_url):
    self.print_message("Replacing database for Heroku app '%s'"
                       % self.args.destination_app)
    self.reset_heroku_database()
    if file_url:
        self.print_message("Restoring from URL '%s'" % file_url)
        args = [
            "heroku",
            "pg:backups:restore",
            file_url,
            "--app=%s" % self.args.destination_app,
            "DATABASE",
            "--confirm", self.args.destination_app,
        ]
        if self.args.use_pgbackups:
            args = [
                "heroku",
                "pgbackups:restore",
                "--app=%s" % self.args.destination_app,
                "DATABASE_URL",
                "--confirm", self.args.destination_app,
                file_url,
            ]
        subprocess.check_call(args)
    else:
        # TODO: perhaps add support for file -> heroku by piping to pg:psql.
        self.print_message("Pushing data from database '%s'"
                           % self.databases['source']['name'])
        self.print_message("NOTE: Any postgres authentication settings you passed "
                           "to paragres will be ignored.\nIf desired, you can "
                           "export PG* variables.\nYou will be prompted for your "
                           "psql password.")
        args = [
            "heroku",
            "pg:push",
            self.databases['source']['name'],
            "DATABASE_URL",
            "--app=%s" % self.args.destination_app,
        ]
        subprocess.check_call(args)
Replace Heroku database with database from specified source.
def run(self):
    self.print_message("\nBeginning database replacement process.\n")
    if self.args.source_settings:
        settings = self.parse_db_settings(self.args.source_settings)
        self.initialize_db_args(settings, 'source')
    if self.args.settings:
        settings = self.parse_db_settings(self.args.settings)
        self.initialize_db_args(settings, 'destination')
    if self.args.capture:
        self.capture_heroku_database()
    file_url = self.args.url
    if self.args.source_app:
        self.print_message("Sourcing data from backup for Heroku app '%s'"
                           % self.args.source_app)
        file_url = self.get_file_url_for_heroku_app(self.args.source_app)
    if self.args.destination_app:
        self.replace_heroku_db(file_url)
    elif self.databases['destination']['name']:
        self.replace_postgres_db(file_url)
    self.print_message("\nDone.\n\nDon't forget to update the Django Site "
                       "entry if necessary!")
Replace a database with the data from the specified source.
def import_task_modules():
    top_level_modules = settings.INSTALLED_APPS
    module_names = []
    for module in top_level_modules:
        # Import the package.
        mod = import_module(module)
        # Find all modules in the package path.
        for loader, module_name, is_pkg in pkgutil.walk_packages(mod.__path__):
            if not module_name.startswith("__"):
                # If the module is not __init__, add it to the registry.
                submod_name = "{0}.{1}".format(module, module_name)
                module_names.append(submod_name)
    # Once everything is imported, the metaclass will register them automatically.
    modules = map(import_module, module_names)
    return modules
Import all installed apps and add modules to registry
def find_in_registry(category=None, namespace=None, name=None):
    selected_registry = registry
    if category is not None:
        selected_registry = [re for re in selected_registry
                             if re.category == category]
    if namespace is not None:
        selected_registry = [re for re in selected_registry
                             if re.namespace == namespace]
    if name is not None:
        selected_registry = [re for re in selected_registry
                             if re.name == name]
    if len(selected_registry) > 0:
        return [sr.cls for sr in selected_registry]
    return None
Find a given category/namespace/name combination in the registry.
category - string, see utils.inputs.registrycategories
namespace - module namespace, see settings.NAMESPACE
name - lowercase name of module
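A hedged usage sketch (the registry contents and the category/name strings are assumptions):

matches = find_in_registry(category="input", name="csvinput")
if matches is not None:
    cls = matches[0]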
def list(self, service_rec=None, host_rec=None, hostfilter=None):
    return self.send.service_list(service_rec, host_rec, hostfilter)
List a specific service or all services :param service_rec: t_services.id :param host_rec: t_hosts.id :param hostfilter: Valid hostfilter or None :return: [(svc.t_services.id, svc.t_services.f_hosts_id, svc.t_hosts.f_ipaddr, svc.t_hosts.f_hostname, svc.t_services.f_proto, svc.t_services.f_number, svc.t_services.f_status, svc.t_services.f_name, svc.t_services.f_banner), ...]
def info(self, svc_rec=None, ipaddr=None, proto=None, port=None):
    return self.send.service_info(svc_rec, ipaddr, proto, port)
Information about a service. :param svc_rec: t_services.id :param ipaddr: IP Address :param proto: Protocol (tcp, udp, info) :param port: Port (0-65535) :return: [ service_id, host_id, ipv4, ipv6, hostname, proto, number, status, name, banner ]
def add(self, ipaddr=None, proto=None, port=None, fields=None):
    return self.send.service_add(ipaddr, proto, port, fields)
Add a service record :param ipaddr: IP Address :param proto: Protocol (tcp, udp, info) :param port: Port (0-65535) :param fields: Extra fields :return: (True/False, t_services.id or response message)
def delete(self, svc_rec=None, ipaddr=None, proto=None, port=None):
    return self.send.service_del(svc_rec, ipaddr, proto, port)
Delete a t_services record :param svc_rec: t_services.id :param ipaddr: IP Address or t_hosts.id :param proto: Protocol (tcp, udp, info) :param port: Port (0-65535) :return: [True, Response Message]
def report_list(self, service_id=None, service_port=None, hostfilter=None):
    return self.send.service_report_list(service_id, service_port, hostfilter)
Returns a list of ports with IPs, banners and vulnerabilities (warning, slow!)
:param service_id: t_services.id
:param service_port: Port (tcp/#, udp/#, info/#)
:param hostfilter: Valid hostfilter or None
:return: {'port': [t_hosts.f_ipaddr, t_services.f_banner, (t_vulndata.f_vulnid, t_vulndata.f_title, t_vulndata.f_severity, t_vulndata.f_cvss_score), ...], ...}
def vulns_list(self, service_id=None, service_port=None, hostfilter=None):
    return self.send.service_vulns_list(service_id, service_port, hostfilter)
List of vulnerabilities for a service :param service_id: t_services.id :param service_port: tcp/#, udp/# or info/# :param hostfilter: Valid hostfilter or None :return: t_services.rows.as_list()
def connect(nodes):
    '''
    Connect a list of nodes.

    Connected nodes have an ``output`` member which is the following node in
    the line. The last node's ``output`` is a :class:`Queue` for easy
    plumbing.
    '''
    for a, b in zip(nodes[:-1], nodes[1:]):
        a.output = b
    b.output = queues.Queue()
Connect a list of nodes. Connected nodes have an ``output`` member which is the following node in the line. The last node's ``output`` is a :class:`Queue` for easy plumbing.
def render_layout(layout_name, content, **context):
    layout_block = "content"
    if ":" in layout_name:
        layout_name, layout_block = layout_name.split(":")
    tpl = '{%% extends "%s" %%}{%% block %s %%}%s{%% endblock %%}' \
          % (layout_name, layout_block, content)
    return render_template_string(tpl, **context)
Uses a jinja template to wrap the content inside a layout. Wraps the content inside a block and adds the extend statement before rendering it with jinja. The block name can be specified in the layout_name after the filename separated by a colon. The default block name is "content".
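A minimal sketch of a call, assuming a Flask request context and a layout.html template that defines a "content" block:

html = render_layout("layout.html:content", "Hello {{ name }}!", name="World")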
def parse_template(app, filename):
    if not hasattr(parse_template, "cache"):
        parse_template.cache = {}
    if filename not in parse_template.cache:
        source = get_template_source(app, filename)
        parse_template.cache[filename] = app.jinja_env.parse(source,
                                                             filename=filename)
    return parse_template.cache[filename]
Parses the given template using the jinja environment of the given app and returns the AST. ASTs are cached in parse_template.cache
def jinja_node_to_python(node):
    if isinstance(node, nodes.Const):
        return node.value
    if isinstance(node, nodes.Neg):
        return -jinja_node_to_python(node.node)
    if isinstance(node, nodes.Name):
        return node.name
    if isinstance(node, (nodes.List, nodes.Tuple)):
        value = []
        for i in node.items:
            value.append(jinja_node_to_python(i))
        return value
    if isinstance(node, nodes.Dict):
        value = {}
        for pair in node.items:
            value[pair.key.value] = jinja_node_to_python(pair.value)
        return value
    if isinstance(node, nodes.Call):
        if not isinstance(node.node, nodes.Name) or \
                node.node.name not in ("_", "translate", "gettext"):
            raise FormDefinitionError("Cannot convert function calls from jinja "
                                      "to python other than translation calls")
        return lazy_translate(jinja_node_to_python(node.args[0]))
    raise Exception("Cannot convert jinja nodes to python")
Converts a Jinja2 node to its python equivalent
def groups(self):
    group_list = []
    all_groups = self.get('memberof')
    for group_dn in all_groups:
        if self.__ldap_group_ou__ in group_dn:
            group_list.append(group_dn)
    return group_list
Get the list of Groups (by dn) that the bound CSH LDAP member object is in.
def in_group(self, group, dn=False):
    if dn:
        return group in self.groups()
    return group.check_member(self)
Get whether or not the bound CSH LDAP member object is part of a group. Arguments: group -- the CSHGroup object (or distinguished name) of the group to check membership for
def feedback_results_to_measurements_frame(feedback_result):
    '''
    Extract measured data from `FeedbackResults` instance into
    `pandas.DataFrame`.
    '''
    index = pd.Index(feedback_result.time * 1e-3, name='seconds')
    df_feedback = pd.DataFrame(np.column_stack([feedback_result.V_fb,
                                                feedback_result.V_hv,
                                                feedback_result.fb_resistor,
                                                feedback_result.hv_resistor]),
                               columns=['V_fb', 'V_hv',
                                        'fb_resistor', 'hv_resistor'],
                               index=index)
    df_feedback.insert(0, 'frequency', feedback_result.frequency)
    return df_feedback
Extract measured data from `FeedbackResults` instance into `pandas.DataFrame`.
def feedback_results_to_impedance_frame(feedback_result):
    '''
    Extract computed impedance data from `FeedbackResults` instance into
    `pandas.DataFrame`.
    '''
    index = pd.Index(feedback_result.time * 1e-3, name='seconds')
    df_feedback = pd.DataFrame(np.column_stack([feedback_result.V_actuation()
                                                .filled(np.NaN),
                                                feedback_result.capacitance()
                                                .filled(np.NaN),
                                                feedback_result.Z_device()
                                                .filled(np.NaN)]),
                               columns=['V_actuation', 'capacitance',
                                        'impedance'],
                               index=index)
    df_feedback.insert(0, 'frequency', feedback_result.frequency)
    df_feedback.insert(1, 'voltage', feedback_result.voltage)
    return df_feedback
Extract computed impedance data from `FeedbackResults` instance into `pandas.DataFrame`.
def get_firmwares():
    '''
    Return `dmf_control_board` compiled Arduino hex file paths.

    This function may be used to locate firmware binaries that are available
    for flashing to [Arduino Mega2560][1] boards.

    [1]: http://arduino.cc/en/Main/arduinoBoardMega2560
    '''
    return OrderedDict([(board_dir.name,
                         [f.abspath() for f in board_dir.walkfiles('*.hex')])
                        for board_dir in
                        package_path().joinpath('firmware').dirs()])
Return `dmf_control_board` compiled Arduino hex file paths. This function may be used to locate firmware binaries that are available for flashing to [Arduino Mega2560][1] boards. [1]: http://arduino.cc/en/Main/arduinoBoardMega2560
def _upgrade(self):
    logging.debug('[FeedbackResults]._upgrade()')
    if hasattr(self, 'version'):
        version = Version.fromstring(self.version)
    else:
        version = Version(0)
    logging.debug('[FeedbackResults] version=%s, class_version=%s'
                  % (str(version), self.class_version))
    if version > Version.fromstring(self.class_version):
        logging.debug('[FeedbackResults] version>class_version')
        raise FutureVersionError(Version.fromstring(self.class_version), version)
    elif version < Version.fromstring(self.class_version):
        if version < Version(0, 1):
            self.calibration = FeedbackCalibration()
        if version < Version(0, 2):
            # Flag invalid data points.
            self.version = str(Version(0, 2))
            self.fb_resistor[self.V_fb > 5] = -1
            self.hv_resistor[self.V_hv > 5] = -1
        if version < Version(0, 3):
            self.attempt = 0
        if version < Version(0, 4):
            del self.sampling_time_ms
            del self.delay_between_samples_ms
            self.voltage = self.options.voltage
            del self.options
            del self.attempt
        if version < Version(0, 5):
            self.area = 0
            self.version = str(Version(0, 5))
        if version < Version(0, 6):
            self.amplifier_gain = None
            self.vgnd_hv = None
            self.vgnd_fb = None
            self.version = str(Version(0, 6))
        logging.info('[FeedbackResults] upgrade to version %s' % self.version)
    else:
        # Else the versions are equal and don't need to be upgraded.
        pass
Upgrade the serialized object if necessary. Raises: FutureVersionError: file was written by a future version of the software.
def capacitance(self, filter_order=None, window_size=None, tol=0.05):
    '''
    Compute the capacitance of the DMF device _(i.e., dielectric and
    droplet)_ based on the computed impedance value.

    Note: this assumes the impedance is a purely capacitive load.
    TODO: Is this assumption ok?
    '''
    C = np.ma.masked_invalid(1.0 / (2.0 * math.pi * self.frequency *
                                    self.Z_device(filter_order=filter_order,
                                                  window_size=window_size,
                                                  tol=tol)))
    C.fill_value = np.nan
    C.data[C.mask] = C.fill_value
    return C
Compute the capacitance of the DMF device _(i.e., dielectric and droplet)_ based on the computed impedance value. Note: this assumes impedance is purely capacitive load. TODO: Is this assumption ok?
def _upgrade(self):
    logging.debug("[FeedbackResultsSeries]._upgrade()")
    version = Version.fromstring(self.version)
    logging.debug('[FeedbackResultsSeries] version=%s, class_version=%s',
                  str(version), self.class_version)
    if version > Version.fromstring(self.class_version):
        logging.debug('[FeedbackResultsSeries] version>class_version')
        raise FutureVersionError(Version.fromstring(self.class_version), version)
    elif version < Version.fromstring(self.class_version):
        if version < Version(0, 1):
            self.time = [None] * len(self.data)
            self.version = str(Version(0, 1))
Upgrade the serialized object if necessary. Raises: FutureVersionError: file was written by a future version of the software.
def c_drop(self, frequency):
    '''
    Capacitance of an electrode covered in liquid, normalized per unit
    area (i.e., units are F/mm^2).
    '''
    try:
        return np.interp(frequency,
                         self._c_drop['frequency'],
                         self._c_drop['capacitance'])
    except:
        pass
    return self._c_drop
Capacitance of an electrode covered in liquid, normalized per unit area (i.e., units are F/mm^2).
def c_filler(self, frequency):
    '''
    Capacitance of an electrode covered in filler media (e.g., air or oil),
    normalized per unit area (i.e., units are F/mm^2).
    '''
    try:
        return np.interp(frequency,
                         self._c_filler['frequency'],
                         self._c_filler['capacitance'])
    except:
        pass
    return self._c_filler
Capacitance of an electrode covered in filler media (e.g., air or oil), normalized per unit area (i.e., units are F/mm^2).
def _upgrade(self):
    logging.debug("[FeedbackCalibration]._upgrade()")
    version = Version.fromstring(self.version)
    logging.debug('[FeedbackCalibration] version=%s, class_version=%s',
                  str(version), self.class_version)
    if version > Version.fromstring(self.class_version):
        logging.debug('[FeedbackCalibration] version>class_version')
        raise FutureVersionError(Version.fromstring(self.class_version), version)
    elif version < Version.fromstring(self.class_version):
        if version < Version(0, 1):
            self._c_filler = None
            self._c_drop = None
            self.version = str(Version(0, 1))
        if version < Version(0, 2):
            self.hw_version = Version(1)
            self.version = str(Version(0, 2))
            logging.info('[FeedbackCalibration] upgrade to version %s',
                         self.version)
        if version < Version(0, 3):
            self.version = str(Version(0, 3))
            logging.info('[FeedbackCalibration] upgrade to version %s',
                         self.version)
Upgrade the serialized object if necessary. Raises: FutureVersionError: file was written by a future version of the software.
def force_to_voltage(self, force, frequency):
    '''
    Convert a force in uN/mm to voltage.

    Parameters
    ----------
    force : float
        Force in **uN/mm**.
    frequency : float
        Actuation frequency.

    Returns
    -------
    float
        Actuation voltage to apply :data:`force` at an actuation frequency
        of :data:`frequency`.
    '''
    c_drop = self.calibration.c_drop(frequency)
    # If c_filler hasn't been set, assume c_filler = 0.
    if self.calibration._c_filler:
        c_filler = self.calibration.c_filler(frequency)
    else:
        c_filler = 0
    return np.sqrt(force * 1e-9 / (0.5 * (c_drop - c_filler)))
Convert a force in uN/mm to voltage.

Parameters
----------
force : float
    Force in **uN/mm**.
frequency : float
    Actuation frequency.

Returns
-------
float
    Actuation voltage to apply :data:`force` at an actuation frequency of :data:`frequency`.
def persistent_write(self, address, byte, refresh_config=False):
    '''
    Write a single byte to an address in persistent memory.

    Parameters
    ----------
    address : int
        Address in persistent memory (e.g., EEPROM).
    byte : int
        Value to write to address.
    refresh_config : bool, optional
        If ``True``, :meth:`load_config()` is called afterward to refresh
        the configuration settings.
    '''
    self._persistent_write(address, byte)
    if refresh_config:
        self.load_config(False)
Write a single byte to an address in persistent memory.

Parameters
----------
address : int
    Address in persistent memory (e.g., EEPROM).
byte : int
    Value to write to address.
refresh_config : bool, optional
    If ``True``, :meth:`load_config()` is called afterward to refresh the configuration settings.
def persistent_write_multibyte(self, address, data, refresh_config=False):
    '''
    Write multiple bytes to an address in persistent memory.

    Parameters
    ----------
    address : int
        Address in persistent memory (e.g., EEPROM).
    data : numpy.array
        Data to write.
    refresh_config : bool, optional
        If ``True``, :meth:`load_config()` is called afterward to refresh
        the configuration settings.
    '''
    for i, byte in enumerate(data.view(np.uint8)):
        self.persistent_write(address + i, int(byte))
    if refresh_config:
        self.load_config(False)
Write multiple bytes to an address in persistent memory.

Parameters
----------
address : int
    Address in persistent memory (e.g., EEPROM).
data : numpy.array
    Data to write.
refresh_config : bool, optional
    If ``True``, :meth:`load_config()` is called afterward to refresh the configuration settings.
def i2c_write(self, address, data):
    '''
    Parameters
    ----------
    address : int
        Address of I2C device.
    data : array-like
        Array of bytes to send to device.
    '''
    data_ = uint8_tVector()
    for i in range(0, len(data)):
        data_.append(int(data[i]))
    Base.i2c_write(self, address, data_)
Parameters
----------
address : int
    Address of I2C device.
data : array-like
    Array of bytes to send to device.
def read_all_series_channel_values(self, f, channel):
    '''
    Return all values for the specified channel of the type corresponding
    to the function `f`, where `f` is either `self.series_resistance` or
    `self.series_capacitance`.
    '''
    values = []
    channel_max_param_count = [3, 5]
    for i in range(channel_max_param_count[channel]):
        try:
            values.append(f(channel, i))
        except RuntimeError:
            break
    return values
Return all values for the specified channel of the type corresponding to the function `f`, where `f` is either `self.series_resistance` or `self.series_capacitance`.
def _get_files_modified():
    cmd = "git diff-index --cached --name-only --diff-filter=ACMRTUXB HEAD"
    _, files_modified, _ = run(cmd)
    extensions = [re.escape(ext) for ext in list(SUPPORTED_FILES) + [".rst"]]
    test = "(?:{0})$".format("|".join(extensions))
    return list(filter(lambda f: re.search(test, f), files_modified))
Get the list of staged files that match the supported extensions (Python, Jinja2, or reStructuredText).
def _get_git_author():
    _, stdout, _ = run("git var GIT_AUTHOR_IDENT")
    git_author = stdout[0]
    return git_author[:git_author.find(">") + 1]
Return the git author from the git variables.
def _get_component(filename, default="global"):
    if hasattr(filename, "decode"):
        filename = filename.decode()
    parts = filename.split(os.path.sep)
    if len(parts) >= 3:
        if parts[1] in "modules legacy ext".split():
            return parts[2]
    if len(parts) >= 2:
        if parts[1] in "base celery utils".split():
            return parts[1]
    if len(parts) >= 1:
        if parts[0] in "grunt docs".split():
            return parts[0]
    return default
Get component name from filename.
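A few worked examples following the branches above (the paths are hypothetical):

_get_component("invenio/modules/search/views.py")  # -> "search"
_get_component("invenio/base/wrappers.py")         # -> "base"
_get_component("docs/index.rst")                   # -> "docs"
_get_component("setup.py")                         # -> "global"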
def _prepare_commit_msg(tmp_file, author, files_modified=None, template=None):
    files_modified = files_modified or []
    template = template or "{component}:\n\nSigned-off-by: {author}\n{extra}"
    if hasattr(template, "decode"):
        template = template.decode()
    # `open` here is expected to accept an encoding (e.g. codecs.open).
    with open(tmp_file, "r", "utf-8") as fh:
        contents = fh.readlines()
        msg = filter(lambda x: not (x.startswith("#") or x.isspace()), contents)
        if len(list(msg)):
            return
    component = "unknown"
    components = _get_components(files_modified)
    if len(components) == 1:
        component = components[0]
    elif len(components) > 1:
        component = "/".join(components)
        contents.append(
            "# WARNING: Multiple components detected - consider splitting "
            "commit.\r\n"
        )
    with open(tmp_file, "w", "utf-8") as fh:
        fh.write(template.format(component=component,
                                 author=author,
                                 extra="".join(contents)))
Prepare the commit message in tmp_file. It will build the commit message prefilling the component line, as well as the signature using the git author and the modified files. The file remains untouched if it is not empty.
def _check_message(message, options):
    options = options or dict()
    options.update(get_options())
    options.update(_read_local_kwalitee_configuration())
    errors = check_message(message, **options)
    if errors:
        for error in errors:
            print(error, file=sys.stderr)
        return False
    return True
Check the message and print any errors.
def prepare_commit_msg_hook(argv):
    options = get_options()
    # Check if the repo has a local configuration file.
    options.update(_read_local_kwalitee_configuration())
    _prepare_commit_msg(argv[1],
                        _get_git_author(),
                        _get_files_modified(),
                        options.get('template'))
    return 0
Hook: prepare a commit message.
def commit_msg_hook(argv):
    with open(argv[1], "r", "utf-8") as fh:
        message = "\n".join(filter(lambda x: not x.startswith("#"),
                                   fh.readlines()))
    options = {"allow_empty": True}
    if not _check_message(message, options):
        click.echo(
            "Aborting commit due to commit message errors (override with "
            "'git commit --no-verify').",
            file=sys.stderr)
        raise click.Abort
    return 0
Hook: for checking commit message (prevent commit).
def post_commit_hook(argv):
    _, stdout, _ = run("git log -1 --format=%B HEAD")
    message = "\n".join(stdout)
    options = {"allow_empty": True}
    if not _check_message(message, options):
        click.echo(
            "Commit message errors (fix with 'git commit --amend').",
            file=sys.stderr)
        return 1  # It should not fail with exit.
    return 0
Hook: for checking commit message.
def _read_local_kwalitee_configuration(directory="."):
    filepath = os.path.abspath(os.path.join(directory, '.kwalitee.yml'))
    data = {}
    if os.path.exists(filepath):
        with open(filepath, 'r') as file_read:
            data = yaml.load(file_read.read())
    return data
Check if the repo has a ``.kwalitee.yml`` file and load it.
def _pre_commit(files, options):
    errors = []
    tmpdir = mkdtemp()
    files_to_check = []
    try:
        for (file_, content) in files:
            # Write the staged version of the file to a temporary directory.
            dirname, filename = os.path.split(os.path.abspath(file_))
            prefix = os.path.commonprefix([dirname, tmpdir])
            dirname = os.path.relpath(dirname, start=prefix)
            dirname = os.path.join(tmpdir, dirname)
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            filename = os.path.join(dirname, filename)
            with open(filename, "wb") as fh:
                fh.write(content)
            files_to_check.append((file_, filename))
        for (file_, filename) in files_to_check:
            errors += list(map(lambda x: "{0}: {1}".format(file_, x),
                               check_file(filename, **options) or []))
    finally:
        shutil.rmtree(tmpdir, ignore_errors=True)
    return errors
Run the checks on the staged version of the files, which may differ from the ones on disk. Equivalent to doing a git stash, running the checks, then git stash pop.
def pre_commit_hook(argv):
    options = get_options()
    # Check if the repo has a local configuration file.
    options.update(_read_local_kwalitee_configuration())
    files = []
    for filename in _get_files_modified():
        # Get the staged version of the file and write it to the temp dir
        # with its full path to avoid overwriting files with the same name.
        _, stdout, _ = run("git show :{0}".format(filename), raw_output=True)
        files.append((filename, stdout))
    errors = _pre_commit(files, options)
    for error in errors:
        if hasattr(error, "decode"):
            error = error.decode()
        click.echo(error, file=sys.stderr)
    if errors:
        click.echo(
            "Aborting commit due to kwalitee errors (override with "
            "'git commit --no-verify').",
            file=sys.stderr)
        raise click.Abort
    return 0
Hook: checking the staged files.
def run(command, raw_output=False):
    p = Popen(command.split(), stdout=PIPE, stderr=PIPE)
    (stdout, stderr) = p.communicate()
    # On Python 3, subprocess.Popen returns bytes objects.
    if not raw_output:
        return (
            p.returncode,
            [line.rstrip() for line in stdout.decode("utf-8").splitlines()],
            [line.rstrip() for line in stderr.decode("utf-8").splitlines()]
        )
    else:
        return (p.returncode, stdout, stderr)
Run a command using subprocess. :param command: command line to be run :type command: str :param raw_output: does not attempt to convert the output as unicode :type raw_output: bool :return: error code, output (``stdout``) and error (``stderr``) :rtype: tuple
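A short usage sketch:

code, stdout, stderr = run("git status --short")
if code != 0:
    print("\n".join(stderr))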
def mpl_weight2qt(weight):
    try:
        weight = weights_mpl2qt[weight]
    except KeyError:
        try:
            weight = float(weight) / 10
        except (ValueError, TypeError):
            weight = QtGui.QFont.Normal
        else:
            try:
                weight = min(filter(lambda w: w >= weight, weights_qt2mpl),
                             key=lambda w: abs(w - weight))
            except ValueError:
                weight = QtGui.QFont.Normal
    return weight
Convert a weight from matplotlib definition to a Qt weight

Parameters
----------
weight: int or string
    Either an integer between 1 and 1000 or a string out of :attr:`weights_mpl2qt`

Returns
-------
int
    One type of the PyQt5.QtGui.QFont.Weight
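A hedged example (assuming weights_mpl2qt maps the standard matplotlib weight names):

mpl_weight2qt('bold')      # -> QtGui.QFont.Bold
mpl_weight2qt(400)         # -> closest Qt weight to 40, i.e. QtGui.QFont.Normal
mpl_weight2qt('nonsense')  # -> QtGui.QFont.Normal (fallback)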
def artist_to_qfont(artist):
    size = int(artist.get_size())
    weight = mpl_weight2qt(artist.get_weight())
    italic = artist.get_style() == 'italic'
    for family in artist.get_family():
        if family in ['sans-serif', 'cursive', 'monospace', 'serif']:
            for name in mpl.rcParams['font.' + family]:
                font = QtGui.QFont(name, size, weight, italic)
                if font.exactMatch():
                    break
        else:
            font = QtGui.QFont(family, size, weight, italic)
    return font
Convert a :class:`matplotlib.text.Text` artist to a QFont object

Parameters
----------
artist: matplotlib.text.Text
    The text artist, e.g. an axes title

Returns
-------
PyQt5.QtGui.QFont
    The QFont object
def choose_font(self, font=None):
    fmt_widget = self.parent()
    if font is None:
        if self.current_font:
            font, ok = QFontDialog.getFont(
                self.current_font, fmt_widget,
                'Select %s font' % self.fmto_name,
                QFontDialog.DontUseNativeDialog)
        else:
            font, ok = QFontDialog.getFont(fmt_widget)
        if not ok:
            return
    self.current_font = font
    properties = self.load_properties()
    properties.update(self.qfont_to_artist_props(font))
    fmt_widget.set_obj(properties)
    self.refresh()
Choose a font for the label through a dialog
def refresh(self):
    font = self.current_font
    # Refresh btn_bold.
    self.btn_bold.blockSignals(True)
    self.btn_bold.setChecked(font.weight() > 50)
    self.btn_bold.blockSignals(False)
    # Refresh btn_italic.
    self.btn_italic.blockSignals(True)
    self.btn_italic.setChecked(font.italic())
    self.btn_italic.blockSignals(False)
    # Refresh the font size.
    self.spin_box.blockSignals(True)
    self.spin_box.setValue(font.pointSize())
    self.spin_box.blockSignals(False)
Refresh the widgets from the current font
def init_app(self, app):
    app.config.setdefault('PRETTIFY', False)
    if app.config['PRETTIFY']:
        app.after_request(self._prettify_response)
Initializes a Flask object `app`: binds the HTML prettifying with app.after_request. :param app: The Flask application object.
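A minimal wiring sketch (the extension class name Prettify is an assumption):

from flask import Flask
app = Flask(__name__)
app.config['PRETTIFY'] = True
Prettify().init_app(app)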
def _prettify_response(self, response):
    if response.content_type == 'text/html; charset=utf-8':
        ugly = response.get_data(as_text=True)
        soup = BeautifulSoup(ugly, 'html.parser')
        pretty = soup.prettify(formatter='html')
        response.direct_passthrough = False
        response.set_data(pretty)
    return response
Prettify the HTML response. :param response: A Flask Response object.
async def _call(self, params):
    if self._session.closed:
        raise SabnzbdApiException('Session already closed')
    p = {**self._default_params, **params}
    try:
        async with timeout(self._timeout, loop=self._session.loop):
            async with self._session.get(self._api_url, params=p) as resp:
                data = await resp.json()
                if data.get('status', True) is False:
                    self._handle_error(data, params)
                else:
                    return data
    except aiohttp.ClientError:
        raise SabnzbdApiException('Unable to communicate with Sabnzbd API')
    except asyncio.TimeoutError:
        raise SabnzbdApiException('SABnzbd API request timed out')
Call the SABnzbd API
async def refresh_data(self):
    queue = await self.get_queue()
    history = await self.get_history()
    totals = {}
    for k in history:
        if k[-4:] == 'size':
            totals[k] = self._convert_size(history.get(k))
    self.queue = {**totals, **queue}
Refresh the cached SABnzbd queue data
def _convert_size(self, size_str):
    suffix = size_str[-1]
    if suffix == 'K':
        multiplier = 1.0 / (1024.0 * 1024.0)
    elif suffix == 'M':
        multiplier = 1.0 / 1024.0
    elif suffix == 'T':
        multiplier = 1024.0
    else:
        multiplier = 1
    try:
        val = float(size_str.split(' ')[0])
        return val * multiplier
    except ValueError:
        return 0.0
Convert units to GB
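A couple of worked conversions, assuming inputs of the form "<value> <suffix>": '512 M' yields 512 * (1/1024) = 0.5 GB, '2 T' yields 2 * 1024 = 2048 GB, and an unparseable value falls back to 0.0.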
def _handle_error(self, data, params):
    error = data.get('error', 'API call failed')
    mode = params.get('mode')
    raise SabnzbdApiException(error, mode=mode)
Handle an error response from the SABnzbd API
def __generate_key(self, config):
    cwd = config.get('ssh_path', self._install_directory())
    if config.is_affirmative('create', default="yes"):
        if not os.path.exists(cwd):
            os.makedirs(cwd)
        if not os.path.exists(os.path.join(cwd, config.get('keyname'))):
            command = "ssh-keygen -t %(type)s -f %(keyname)s -N " % config.to_dict()
            lib.call(command, cwd=cwd, output_log_level=logging.DEBUG)
    if not config.has('ssh_path'):
        config.set('ssh_path', cwd)
    config.set('ssh_key_path',
               os.path.join(config.get('ssh_path'), config.get('keyname')))
Generate the ssh key and store the resulting key path in the config.
def __install_ssh_config(self, config):
    if not config.is_affirmative('use_global_ssh', default="no"):
        ssh_config_injection = self._build_ssh_config(config)
        if not os.path.exists(ssh_config_path):
            if self.injections.in_noninjected_file(ssh_config_path,
                                                   "Host %s" % config.get('host')):
                if config.is_affirmative('override'):
                    self.injections.inject(ssh_config_path, ssh_config_injection)
            else:
                self.injections.inject(ssh_config_path, ssh_config_injection)
        else:
            self.injections.inject(ssh_config_path, ssh_config_injection)
        self.injections.commit()
Install the ssh configuration
def _build_ssh_config(self, config):
    ssh_config_injection = ssh_config_template % {
        'host': config.get('host'),
        'hostname': config.get('hostname'),
        'ssh_key_path': config.get('ssh_key_path'),
        'user': config.get('user')
    }
    if config.has('port'):
        ssh_config_injection += " Port {0}\n".format(config.get('port'))
    return ssh_config_injection
build the ssh injection configuration
def extract_followups(task):
    callbacks = task.request.callbacks
    errbacks = task.request.errbacks
    task.request.callbacks = None
    return {'link': callbacks, 'link_error': errbacks}
Retrieve callbacks and errbacks from the provided task instance, and disable the task's callbacks.
def gen_procfile(ctx, wsgi, dev):
    if wsgi is None:
        if os.path.exists("wsgi.py"):
            wsgi = "wsgi.py"
        elif os.path.exists("app.py"):
            wsgi = "app.py"
        else:
            wsgi = "app.py"
            ctx.invoke(gen_apppy)

    def write_procfile(filename, server_process, debug):
        processes = [server_process] + current_app.processes
        procfile = []
        for name, cmd in procfile_processes(processes, debug).iteritems():
            procfile.append("%s: %s" % (name, cmd))
        with open(filename, "w") as f:
            f.write("\n".join(procfile))

    write_procfile("Procfile", ("web", ["gunicorn", wsgi]), False)
    if dev:
        write_procfile("Procfile.dev", ("web", ["frasco", "serve"]), True)
Generates Procfiles which can be used with honcho or foreman.
def add(self, host, filename, data, f_type, f_other_type=None, f_text=''):
    return self.send.evidence_add(host, filename, data, f_type,
                                  f_other_type, f_text)
Add evidence :param host: db.t_hosts.id :param filename: Filename :param data: Content of file :param f_type: Evidence type :param f_other_type: If f_type is 'Other' what type it is :param f_text: Text information about the evidence :return: (True/False, response message)
def utc_mktime(utc_tuple):
    if len(utc_tuple) == 6:
        utc_tuple += (0, 0, 0)
    return time.mktime(utc_tuple) - time.mktime((1970, 1, 1, 0, 0, 0, 0, 0, 0))
Returns the number of seconds elapsed since the epoch. Note that no timezones are taken into consideration. The utc tuple must be: (year, month, day, hour, minute, second).
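A quick worked example: utc_mktime((1970, 1, 1, 0, 1, 0)) returns 60.0, since one minute elapsed after the epoch; the two mktime calls cancel out the local timezone offset.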
def f(self):
    if self.data.minute == 0:
        return self.g()
    return u'%s:%s' % (self.g(), self.i())
Time, in 12-hour hours and minutes, with minutes left off if they're zero. Examples: '1', '1:30', '2:05', '2' Proprietary extension.
def g(self):
    "Hour, 12-hour format without leading zeros; i.e. '1' to '12'"
    if self.data.hour == 0:
        return 12
    if self.data.hour > 12:
        return self.data.hour - 12
    return self.data.hour
Hour, 12-hour format without leading zeros; i.e. '1' to '12'
def P(self):
    if self.data.minute == 0 and self.data.hour == 0:
        return _('midnight')
    if self.data.minute == 0 and self.data.hour == 12:
        return _('noon')
    return u'%s %s' % (self.f(), self.a())
Time, in 12-hour hours, minutes and 'a.m.'/'p.m.', with minutes left off if they're zero and the strings 'midnight' and 'noon' if appropriate. Examples: '1 a.m.', '1:30 p.m.', 'midnight', 'noon', '12:30 p.m.' Proprietary extension.