import os
import uuid
from osipkd.tools import row2dict, xls_reader
from datetime import datetime
from sqlalchemy import not_, func
from pyramid.view import (
view_config,
)
from pyramid.httpexceptions import (
HTTPFound,
)
import colander
from deform import (
Form,
widget,
ValidationFailure,
)
from osipkd.models import (
DBSession,
)
from osipkd.models import Route
from datatables import ColumnDT, DataTables
from osipkd.views.base_view import BaseViews
SESS_ADD_FAILED = 'Add routes failed'
SESS_EDIT_FAILED = 'Edit routes failed'
def deferred_source_type(node, kw):
values = kw.get('perm_choice', [])
return widget.SelectWidget(values=values)
PERM_CHOICE = ((None,'None'),
('view', 'View'),
('read', 'Read'),
('add', 'Add'),
('edit', 'Edit'),
('delete', 'Delete'),)
class AddSchema(colander.Schema):
kode = colander.SchemaNode(
colander.String())
nama = colander.SchemaNode(
colander.String())
path = colander.SchemaNode(
colander.String())
factory = colander.SchemaNode(
colander.String(),
missing=colander.drop)
perm_name = colander.SchemaNode(
colander.String(),
missing=colander.drop,
widget=widget.SelectWidget(values=PERM_CHOICE))
disabled = colander.SchemaNode(
colander.Boolean())
class EditSchema(AddSchema):
id = colander.SchemaNode(colander.String(),
missing=colander.drop,
widget=widget.HiddenWidget(readonly=True))
class view_routes(BaseViews):
########
# List #
########
@view_config(route_name='routes', renderer='templates/routes/list.pt',
permission='read')
def view_list(self):
return dict(a={})
##########
# Action #
##########
@view_config(route_name='routes-act', renderer='json',
permission='read')
def gaji_routes_act(self):
ses = self.request.session
req = self.request
params = req.params
url_dict = req.matchdict
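        # 'grid' serves the server-side DataTables payload; 'headof' returns
        # id/name pairs for an autocomplete lookup.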
if url_dict['act']=='grid':
columns = []
columns.append(ColumnDT('id'))
columns.append(ColumnDT('kode'))
columns.append(ColumnDT('nama'))
columns.append(ColumnDT('path'))
columns.append(ColumnDT('factory'))
columns.append(ColumnDT('perm_name'))
columns.append(ColumnDT('disabled'))
query = DBSession.query(Route)
rowTable = DataTables(req, Route, query, columns)
return rowTable.output_result()
elif url_dict['act']=='headof':
term = 'term' in params and params['term'] or ''
rows = DBSession.query(Route.id, Route.nama
).filter(
Route.nama.ilike('%%%s%%' % term),
Route.perm_name != None).\
order_by(Route.path).all()
r = []
for k in rows:
d={}
d['id'] = k[0]
d['value'] = k[1]
r.append(d)
return r
#######
# Add #
#######
def form_validator(self, form, value):
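        # NOTE: this validator currently performs no checks; the matching Route
        # row is looked up below but not used.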
if 'id' in form.request.matchdict:
uid = form.request.matchdict['id']
q = DBSession.query(Route).filter_by(id=uid)
routes = q.first()
else:
routes = None
def get_form(self, class_form, row=None):
schema = class_form(validator=self.form_validator)
schema = schema.bind(perm_choice=PERM_CHOICE)
schema.request = self.request
if row:
schema.deserialize(row)
return Form(schema, buttons=('simpan','batal'))
def save(self, values, user, row=None):
if not row:
row = Route()
row.created = datetime.now()
row.create_uid = user.id
row.from_dict(values)
row.updated = datetime.now()
row.update_uid = user.id
row.disabled = 'disabled' in values and values['disabled'] and 1 or 0
DBSession.add(row)
DBSession.flush()
return row
def save_request(self, values, row=None):
if 'id' in self.request.matchdict:
values['id'] = self.request.matchdict['id']
row = self.save(values, self.request.user, row)
        self.request.session.flash('routes has been saved.')
def routes_list(self):
return HTTPFound(location=self.request.route_url('routes'))
def session_failed(self, session_name):
r = dict(form=self.session[session_name])
del self.session[session_name]
return r
@view_config(route_name='routes-add', renderer='templates/routes/add.pt',
permission='add')
def view_routes_add(self):
req = self.request
ses = self.session
form = self.get_form(AddSchema)
if req.POST:
if 'simpan' in req.POST:
controls = req.POST.items()
try:
c = form.validate(controls)
except ValidationFailure, e:
req.session[SESS_ADD_FAILED] = e.render()
return HTTPFound(location=req.route_url('routes-add'))
self.save_request(dict(controls))
return self.routes_list()
elif SESS_ADD_FAILED in req.session:
return self.session_failed(SESS_ADD_FAILED)
return dict(form=form.render())
########
# Edit #
########
def query_id(self):
return DBSession.query(Route).filter_by(id=self.request.matchdict['id'])
    def id_not_found(self):
        msg = 'routes ID %s Not Found.' % self.request.matchdict['id']
        self.request.session.flash(msg, 'error')
        return self.routes_list()
@view_config(route_name='routes-edit', renderer='templates/routes/edit.pt',
permission='edit')
def view_routes_edit(self):
request = self.request
row = self.query_id().first()
        if not row:
            return self.id_not_found()
form = self.get_form(EditSchema)
if request.POST:
if 'simpan' in request.POST:
controls = request.POST.items()
print controls
try:
c = form.validate(controls)
except ValidationFailure, e:
request.session[SESS_EDIT_FAILED] = e.render()
return HTTPFound(location=request.route_url('routes-edit',
id=row.id))
self.save_request(dict(controls), row)
return self.routes_list()
elif SESS_EDIT_FAILED in request.session:
return self.session_failed(SESS_EDIT_FAILED)
values = row.to_dict()
return dict(form=form.render(appstruct=values))
##########
# Delete #
##########
@view_config(route_name='routes-delete', renderer='templates/routes/delete.pt',
permission='delete')
def view_routes_delete(self):
request = self.request
q = self.query_id()
row = q.first()
if not row:
            return self.id_not_found()
form = Form(colander.Schema(), buttons=('hapus','batal'))
if request.POST:
if 'hapus' in request.POST:
                msg = 'routes ID %d %s has been deleted.' % (row.id, row.nama)
try:
q.delete()
DBSession.flush()
except:
                    msg = 'routes ID %d %s could not be deleted.' % (row.id, row.nama)
request.session.flash(msg)
return self.routes_list()
return dict(row=row,
form=form.render())
#!/usr/bin/env python
# coding=utf-8
# [% VIM_TAGS %]
#
# Author: Hari Sekhon
# Date: [% DATE # 2008-10-20 16:18:55 +0100 (Mon, 20 Oct 2008) %]
#
# [% URL %]
#
# [% LICENSE %]
#
# [% MESSAGE %]
#
# [% LINKEDIN %]
#
"""
TODO
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
#import logging
import os
#import re
import sys
#import time
import traceback
#try:
# from bs4 import BeautifulSoup
#except ImportError:
# print(traceback.format_exc(), end='')
# sys.exit(4)
srcdir = os.path.abspath(os.path.dirname(__file__))
libdir = os.path.join(srcdir, 'pylib')
sys.path.append(libdir)
try:
# pylint: disable=wrong-import-position
from harisekhon.utils import log
#from harisekhon.utils import CriticalError, UnknownError
from harisekhon.utils import validate_host, validate_port, validate_user, validate_password
from harisekhon.utils import isStr
from harisekhon import CLI
from harisekhon import RestNagiosPlugin
except ImportError as _:
print(traceback.format_exc(), end='')
sys.exit(4)
__author__ = 'Hari Sekhon'
__version__ = '0.1'
class [% NAME %](RestNagiosPlugin):
def __init__(self):
# Python 2.x
super([% NAME %], self).__init__()
# Python 3.x
# super().__init__()
#self.host = None
#self.port = None
#self.user = None
#self.password = None
#self.protocol = 'http'
#self.request = RequestHandler()
        self.name = ''  # TODO: set the check's display name (left blank in this template)
self.default_port = 80
self.path = '/'
#self.auth = False
self.json = True
self.msg = 'Msg not defined yet'
def add_options(self):
super([% NAME %], self).add_options()
# TODO: fill in hostoption name and default port
#self.add_hostoption(name='', default_host='localhost', default_port=80)
#self.add_useroption(name='', default_user='admin')
#self.add_opt('-S', '--ssl', action='store_true', help='Use SSL')
#self.add_opt('-f', '--file', dest='file', metavar='<file>',
# help='Input file')
def process_options(self):
super([% NAME %], self).process_options()
#self.no_args()
#self.host = self.get_opt('host')
#self.port = self.get_opt('port')
#self.user = self.get_opt('user')
#self.password = self.get_opt('password')
#validate_host(self.host)
#validate_port(self.port)
#validate_user(self.user)
#validate_password(self.password)
#if self.get_opt('ssl'):
# self.protocol = 'https'
#filename = self.get_opt('file')
#if not filename:
# self.usage('--file not defined')
# def run(self):
# url = '{protocol}://{host}:{port}/...'.format(protocol=self.protocol, host=self.host, port=self.port)
# start_time = time.time()
# req = self.request.get(url)
# query_time = time.time() - start_time
# soup = BeautifulSoup(req.content, 'html.parser')
# if log.isEnabledFor(logging.DEBUG):
# log.debug("BeautifulSoup prettified:\n{0}\n{1}".format(soup.prettify(), '='*80))
# # TODO: XXX: soup.find() can return None - do not chain calls - must test each call 'is not None'
# # link = soup.find('p')[3]
# # link = soup.find('th', text='Uptime:')
# # link = soup.find_next_sibling('th', text='Uptime:')
#
# # link = soup.find('th', text=re.compile('Uptime:?', re.I))
# # if link is None:
# # raise UnknownError('failed to find tag')
# # link = link.find_next_sibling()
# # if link is None:
# # raise UnknownError('failed to find tag (next sibling tag not found)')
# # _ = link.get_text()
# # shorter to just catch NoneType attribute error when tag not found and returns None
# try:
# uptime = soup.find('th', text=re.compile('Uptime:?', re.I)).find_next_sibling().get_text()
# version = soup.find('th', text=re.compile('Version:?', re.I)).find_next_sibling().get_text()
# except (AttributeError, TypeError):
# #raise UnknownError('failed to find parse output')
# qquit('UNKNOWN', 'failed to parse output')
# if not _ or not isStr(_) or not re.search(r'...', _):
# #raise UnknownError('format not recognized: {0}'.format(_))
# qquit('UNKNOWN', 'format not recognized: {0}'.format(_))
# self.msg += ' | query_time={0:f}s'.format(query_time)
def parse_json(self, json_data):
_ = json_data['beans'][0]
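        # TODO: extract the relevant fields from the first bean and build
        # self.msg / perfdata here; this template stub does nothing with `_` yet.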
if __name__ == '__main__':
[% NAME %]().main()
import inspect, platform, re, tempfile, skia, math
from enum import Enum
from subprocess import run
from pathlib import Path
from coldtype.geometry import Rect, Point
from coldtype.color import normalize_color
from coldtype.animation import Timeable, Frame
from coldtype.animation.timeline import Timeline
from coldtype.text.reader import normalize_font_prefix, Font
from coldtype.pens.datpen import DATPen, DATPenSet
from coldtype.pens.svgpen import SVGPen
from coldtype.pens.skiapen import SkiaPen
try:
import drawBot as db
import AppKit
except ImportError:
db = None
class Keylayer(Enum):
Default = 0
Cmd = 1
Editing = 2
class Action(Enum):
Initial = "initial"
Resave = "resave"
RenderAll = "render_all"
RenderWorkarea = "render_workarea"
RenderIndices = "render_indices"
Release = "release"
PreviewStoryboard = "preview_storyboard"
PreviewStoryboardReload = "preview_storyboard_reload"
PreviewPlay = "preview_play"
PreviewIndices = "preview_indices"
PreviewStoryboardNext = "preview_storyboard_next"
PreviewStoryboardPrev = "preview_storyboard_prev"
RenderedPlay = "rendered_play"
ArbitraryTyping = "arbitrary_typing"
ArbitraryCommand = "arbitrary_command"
SaveControllers = "save_controllers"
ClearControllers = "clear_controllers"
ResetControllers = "reset_controllers"
RestartRenderer = "restart_renderer"
ToggleMultiplex = "toggle_multiplex"
Kill = "kill"
class RenderPass():
def __init__(self, render, suffix, args):
self.render = render
self.fn = self.render.func
self.args = args
self.suffix = suffix
self.path = None
self.single_layer = None
self.output_path = None
def __repr__(self):
return f"<RenderPass:f{self.output_path}/>"
class renderable():
def __init__(self,
rect=(1080, 1080),
bg="whitesmoke",
fmt="png",
name=None,
rasterizer=None,
prefix=None,
dst=None,
custom_folder=None,
postfn=None,
watch=[],
watch_restarts=[],
layers=[],
solo=False,
rstate=False,
preview_only=False,
direct_draw=False,
clip=False,
style="default",
viewBox=True):
"""Base configuration for a renderable function"""
self.rect = Rect(rect)
self.bg = normalize_color(bg)
self.fmt = fmt
self.prefix = prefix
self.dst = Path(dst).expanduser().resolve() if dst else None
self.custom_folder = custom_folder
self.postfn = postfn
self.last_passes = []
self.style = style
self.watch = []
for w in watch:
self.add_watchee(w)
self.watch_restarts = []
for w in watch_restarts:
self.watch_restarts.append(self.add_watchee(w))
self.name = name
self.rasterizer = rasterizer
self.self_rasterizing = False
self.layers = layers
self.hidden = solo == -1
self.solo = solo
self.preview_only = preview_only
self.rstate = rstate
self.clip = clip
self.viewBox = viewBox
self.direct_draw = direct_draw
if not rasterizer:
if self.fmt == "svg":
self.rasterizer = "svg"
elif self.fmt == "pickle":
self.rasterizer = "pickle"
else:
self.rasterizer = "skia"
def add_watchee(self, w):
try:
pw = Path(w).expanduser().resolve()
if not pw.exists():
print(w, "<<< does not exist (cannot be watched)")
else:
self.watch.append(pw)
return pw
except TypeError:
if isinstance(w, Font):
self.watch.append(w)
else:
raise Exception("Can only watch path strings, Paths, and Fonts")
def __call__(self, func):
self.func = func
if not self.name:
self.name = self.func.__name__
return self
def folder(self, filepath):
return ""
def layer_folder(self, filepath, layer):
return ""
def pass_suffix(self):
return self.name
def passes(self, action, layers, renderer_state, indices=[]):
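        # One RenderPass per output; subclasses (glyph, fontpreview, iconset,
        # animation) override this to fan out into multiple passes.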
return [RenderPass(self, self.pass_suffix(), [self.rect])]
def package(self, filepath, output_folder):
pass
def run(self, render_pass, renderer_state):
if self.rstate:
return render_pass.fn(*render_pass.args, renderer_state)
else:
return render_pass.fn(*render_pass.args)
def runpost(self, result, render_pass):
if self.postfn:
return self.postfn(self, result)
else:
return result
def draw_preview(self, scale, canvas:skia.Canvas, rect, result, render_pass):
sr = self.rect.scale(scale, "mnx", "mxx")
SkiaPen.CompositeToCanvas(result, sr, canvas, scale, style=self.style)
def hide(self):
self.hidden = True
return self
def show(self):
self.hidden = False
return self
class skia_direct(renderable):
def __init__(self, rect=(1080, 1080), **kwargs):
super().__init__(rect=rect, direct_draw=True, **kwargs)
def run(self, render_pass, renderer_state, canvas):
if self.rstate:
return render_pass.fn(*render_pass.args, renderer_state, canvas)
else:
return render_pass.fn(*render_pass.args, canvas)
class drawbot_script(renderable):
def __init__(self, rect=(1080, 1080), scale=1, **kwargs):
if not db:
raise Exception("DrawBot not installed!")
super().__init__(rect=Rect(rect).scale(scale), rasterizer="drawbot", **kwargs)
self.self_rasterizing = True
def run(self, render_pass, renderer_state):
use_pool = True
if use_pool:
pool = AppKit.NSAutoreleasePool.alloc().init()
try:
db.newDrawing()
if renderer_state.previewing:
ps = renderer_state.preview_scale
db.size(self.rect.w*ps, self.rect.h*ps)
db.scale(ps, ps)
DATPen().rect(self.rect).f(self.bg).db_drawPath()
else:
db.size(self.rect.w, self.rect.h)
render_pass.fn(*render_pass.args)
result = None
if renderer_state.previewing:
previews = (render_pass.output_path.parent / "_previews")
previews.mkdir(exist_ok=True, parents=True)
preview_frame = previews / render_pass.output_path.name
db.saveImage(str(preview_frame))
result = preview_frame
else:
render_pass.output_path.parent.mkdir(exist_ok=True, parents=True)
db.saveImage(str(render_pass.output_path))
result = render_pass.output_path
db.endDrawing()
finally:
if use_pool:
del pool
return result
class svgicon(renderable):
def __init__(self, **kwargs):
super().__init__(fmt="svg", **kwargs)
def folder(self, filepath):
return filepath.stem
class glyph(renderable):
def __init__(self, glyphName, width=500, **kwargs):
r = Rect(kwargs.get("rect", Rect(1000, 1000)))
kwargs.pop("rect", None)
self.width = width
self.body = r.take(750, "mdy").take(self.width, "mdx")
self.glyphName = glyphName
super().__init__(rect=r, **kwargs)
def passes(self, action, layers, renderer_state, indices=[]):
return [RenderPass(self, self.glyphName, [])]
class fontpreview(renderable):
def __init__(self, font_dir, font_re, rect=(1200, 150), limit=25, **kwargs):
super().__init__(rect=rect, **kwargs)
self.dir = normalize_font_prefix(font_dir)
self.re = font_re
self.matches = []
for font in self.dir.iterdir():
if re.search(self.re, str(font)):
if len(self.matches) < limit:
self.matches.append(font)
self.matches.sort()
def passes(self, action, layers, renderer_state, indices=[]):
return [RenderPass(self, "{:s}".format(m.name), [self.rect, m]) for m in self.matches]
class iconset(renderable):
valid_sizes = [16, 32, 64, 128, 256, 512, 1024]
def __init__(self, sizes=[128, 1024], **kwargs):
super().__init__(**kwargs)
self.sizes = sizes
def folder(self, filepath):
return f"{filepath.stem}_source"
def passes(self, action, layers, renderer_state, indices=[]): # TODO could use the indices here
sizes = self.sizes
if action == Action.RenderAll:
sizes = self.valid_sizes
return [RenderPass(self, str(size), [self.rect, size]) for size in sizes]
def package(self, filepath, output_folder):
# inspired by https://retifrav.github.io/blog/2018/10/09/macos-convert-png-to-icns/
iconset = output_folder.parent / f"{filepath.stem}.iconset"
iconset.mkdir(parents=True, exist_ok=True)
system = platform.system()
if system == "Darwin":
for png in output_folder.glob("*.png"):
d = int(png.stem.split("_")[1])
for x in [1, 2]:
if x == 2 and d == 16:
continue
elif x == 1:
fn = f"icon_{d}x{d}.png"
elif x == 2:
fn = f"icon_{int(d/2)}x{int(d/2)}@2x.png"
print(fn)
run(["sips", "-z", str(d), str(d), str(png), "--out", str(iconset / fn)])
run(["iconutil", "-c", "icns", str(iconset)])
if True: # can be done windows or mac
from PIL import Image
output = output_folder.parent / f"{filepath.stem}.ico"
largest = list(output_folder.glob("*_1024.png"))[0]
img = Image.open(str(largest))
icon_sizes = [(x, x) for x in self.valid_sizes]
img.save(str(output), sizes=icon_sizes)
class animation(renderable, Timeable):
def __init__(self, rect=(1080, 1080), duration=10, storyboard=[0], timeline:Timeline=None, **kwargs):
super().__init__(**kwargs)
self.rect = Rect(rect)
self.r = self.rect
self.start = 0
self.end = duration
#self.duration = duration
self.storyboard = storyboard
if timeline:
self.timeline = timeline
self.t = timeline
self.start = timeline.start
self.end = timeline.end
#self.duration = timeline.duration
if self.storyboard != [0] and timeline.storyboard == [0]:
pass
else:
self.storyboard = timeline.storyboard.copy()
else:
self.timeline = Timeline(30)
def __call__(self, func):
res = super().__call__(func)
self.prefix = self.name + "_"
return res
def folder(self, filepath):
return filepath.stem + "/" + self.name # TODO necessary?
def layer_folder(self, filepath, layer):
return layer
def all_frames(self):
return list(range(0, self.duration))
def active_frames(self, action, layers, renderer_state, indices):
frames = self.storyboard.copy()
for fidx, frame in enumerate(frames):
frames[fidx] = (frame + renderer_state.frame_index_offset) % self.duration
if action == Action.RenderAll:
frames = self.all_frames()
elif action in [Action.PreviewIndices, Action.RenderIndices]:
frames = indices
elif action in [Action.RenderWorkarea]:
if self.timeline:
try:
frames = self.workarea()
except:
frames = self.all_frames()
#if hasattr(self.timeline, "find_workarea"):
# frames = self.timeline.find_workarea()
return frames
def workarea(self):
return list(self.timeline.workareas[0])
def pass_suffix(self, index):
return "{:04d}".format(index)
def passes(self, action, layers, renderer_state, indices=[]):
frames = self.active_frames(action, layers, renderer_state, indices)
return [RenderPass(self, self.pass_suffix(i), [Frame(i, self, layers)]) for i in frames]
def package(self, filepath, output_folder):
pass
def make_gif(self, passes):
import imageio
path = str(self.output_folder) + "_animation.gif"
with imageio.get_writer(path, mode="I") as writer:
for p in passes:
if p.render == self:
image = imageio.imread(str(p.output_path))
writer.append_data(image)
print(">>> wrote gif to", path)
def contactsheet(self, gx, sl=slice(0, None, None)):
try:
sliced = True
start, stop, step = sl.indices(self.duration)
duration = (stop - start) // step
except AttributeError: # indices storyboard
duration = len(sl)
sliced = False
ar = self.rect
gy = math.ceil(duration / gx)
@renderable(rect=(ar.w*gx, ar.h*gy), bg=self.bg, name=self.name + "_contactsheet")
def contactsheet(r:Rect):
_pngs = list(sorted(self.output_folder.glob("*.png")))
if sliced:
pngs = _pngs[sl]
else:
pngs = [p for i, p in enumerate(_pngs) if i in sl]
dps = DATPenSet()
dps += DATPen().rect(r).f(self.bg)
for idx, g in enumerate(r.grid(columns=gx, rows=gy)):
if idx < len(pngs):
dps += DATPen().rect(g).f(None).img(pngs[idx], g, pattern=False)
return dps
return contactsheet
class drawbot_animation(drawbot_script, animation):
def passes(self, action, layers, renderer_state, indices=[]):
if action in [
Action.RenderAll,
Action.RenderIndices,
Action.RenderWorkarea]:
frames = super().active_frames(action, layers, renderer_state, indices)
passes = []
if len(layers) > 0:
for layer in layers:
for i in frames:
p = RenderPass(self, "{:04d}".format(i), [Frame(i, self, [layer])])
p.single_layer = layer
passes.append(p)
else:
for i in frames:
p = RenderPass(self, "{:04d}".format(i), [Frame(i, self, [])])
#p.single_layer = layer
passes.append(p)
return passes
else:
            return super().passes(action, layers, renderer_state, indices)
import re
pattern = 'xy+?'
if re.search(pattern, "xyz"):
    print('Match found: an x followed by at least one y')
else:
    print('No match')
# Match found: an x followed by at least one y
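# A minimal follow-on sketch (not in the original) to make the match explicit:
# 'xy+?' matches an 'x' followed by one or more 'y' characters, non-greedily,
# so against "xyz" it matches the substring 'xy'.
m = re.search('xy+?', "xyz")
if m:
    print('Matched substring:', m.group())  # -> Matched substring: xy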
from typing import Tuple
from torch.utils.data import DataLoader
from .base_dataset import BaseDataset
def build_dataset_helper(dataset_name: str, dataset_cfg: dict, is_train: bool, transform=None) -> BaseDataset:
    # The dataset class is resolved by name via eval(), so the class named by
    # `dataset_name` must already be imported into this module's namespace.
    return eval(f'{dataset_name}(dataset_cfg, is_train, transform=transform)')
def build_dataloader_helper(dataset_name, dataset_cfg: dict, batch_size, transform=None,
shuffle=True) -> Tuple[DataLoader, DataLoader]:
train_dataset = build_dataset_helper(dataset_name, dataset_cfg, is_train=True, transform=transform)
val_dataset = build_dataset_helper(dataset_name, dataset_cfg, is_train=False)
train_dataloader = DataLoader(train_dataset, batch_size=batch_size, shuffle=shuffle)
val_dataloader = DataLoader(val_dataset, batch_size=1, shuffle=False)
return train_dataloader, val_dataloader
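# A hedged usage sketch (not part of the original module). The dataset class name
# and config keys below are placeholders; the named class must be imported into
# this module for the eval()-based lookup above to find it.
#
#     train_loader, val_loader = build_dataloader_helper(
#         "MyDataset", {"root": "./data"}, batch_size=32)
#     for images, targets in train_loader:
#         ...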
from ray import tune
from ray.tune.registry import register_env
from ray.rllib.env.wrappers.pettingzoo_env import PettingZooEnv
from pettingzoo.sisl import waterworld_v0
# Based on code from github.com/parametersharingmadrl/parametersharingmadrl
if __name__ == "__main__":
# RDQN - Rainbow DQN
# ADQN - Apex DQN
register_env("waterworld", lambda _: PettingZooEnv(waterworld_v0.env()))
tune.run(
"APEX_DDPG",
stop={"episodes_total": 60000},
checkpoint_freq=10,
config={
            # Environment specific.
"env": "waterworld",
# General
"num_gpus": 1,
"num_workers": 2,
"num_envs_per_worker": 8,
"learning_starts": 1000,
"buffer_size": int(1e5),
"compress_observations": True,
"rollout_fragment_length": 20,
"train_batch_size": 512,
"gamma": .99,
"n_step": 3,
"lr": .0001,
"prioritized_replay_alpha": 0.5,
"final_prioritized_replay_beta": 1.0,
"target_network_update_freq": 50000,
"timesteps_per_iteration": 25000,
# Method specific.
"multiagent": {
# We only have one policy (calling it "shared").
# Class, obs/act-spaces, and config will be derived
# automatically.
"policies": {"shared_policy"},
# Always use "shared" policy.
"policy_mapping_fn": (
lambda agent_id, episode, **kwargs: "shared_policy"),
},
},
)
# Copyright 2020 Board of Trustees of the University of Illinois.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import imp
import os
import marshal
import struct
import sys
import types
from cStringIO import StringIO
from compiler import ast, parse, walk, syntax
#from compiler import pyassem, misc, future, symbols
from compiler import misc, future, symbols
from compiler.consts import SC_LOCAL, SC_GLOBAL, SC_FREE, SC_CELL
from compiler.consts import CO_VARARGS, CO_VARKEYWORDS, CO_NEWLOCALS,\
CO_NESTED, CO_GENERATOR, CO_GENERATOR_ALLOWED, CO_FUTURE_DIVISION
#from compiler.pyassem import TupleArg
TupleArg = None
# XXX The version-specific code can go, since this code only works with 2.x.
# Do we have Python 1.x or Python 2.x?
try:
VERSION = sys.version_info[0]
except AttributeError:
VERSION = 1
callfunc_opcode_info = {
# (Have *args, Have **args) : opcode
(0,0) : "CALL_FUNCTION",
(1,0) : "CALL_FUNCTION_VAR",
(0,1) : "CALL_FUNCTION_KW",
(1,1) : "CALL_FUNCTION_VAR_KW",
}
LOOP = 1
EXCEPT = 2
TRY_FINALLY = 3
END_FINALLY = 4
def compileFile(filename, display=0):
f = open(filename, 'U')
buf = f.read()
f.close()
mod = Module(buf, filename)
try:
mod.compile(display)
except SyntaxError:
raise
else:
f = open(filename + "c", "wb")
mod.dump(f)
f.close()
def compile(source, filename, mode, flags=None, dont_inherit=None):
"""Replacement for builtin compile() function"""
if flags is not None or dont_inherit is not None:
raise RuntimeError, "not implemented yet"
if mode == "single":
gen = Interactive(source, filename)
elif mode == "exec":
gen = Module(source, filename)
elif mode == "eval":
gen = Expression(source, filename)
else:
raise ValueError("compile() 3rd arg must be 'exec' or "
"'eval' or 'single'")
gen.compile()
return gen.code
class AbstractCompileMode:
mode = None # defined by subclass
def __init__(self, source, filename):
self.source = source
self.filename = filename
self.code = None
def _get_tree(self):
tree = parse(self.source, self.mode)
misc.set_filename(self.filename, tree)
syntax.check(tree)
return tree
def compile(self):
pass # implemented by subclass
def getCode(self):
return self.code
class Expression(AbstractCompileMode):
mode = "eval"
def compile(self):
tree = self._get_tree()
gen = ExpressionCodeGenerator(tree)
self.code = gen.getCode()
class Interactive(AbstractCompileMode):
mode = "single"
def compile(self):
tree = self._get_tree()
gen = InteractiveCodeGenerator(tree)
self.code = gen.getCode()
class Module(AbstractCompileMode):
mode = "exec"
def compile(self, display=0):
tree = self._get_tree()
gen = ModuleCodeGenerator(tree)
if display:
import pprint
print pprint.pprint(tree)
self.code = gen.getCode()
def dump(self, f):
f.write(self.getPycHeader())
marshal.dump(self.code, f)
#MAGIC = imp.get_magic()
MAGIC = None
def getPycHeader(self):
        # compile.c uses marshal to write a long directly, without
# calling the interface that would also generate a 1-byte code
# to indicate the type of the value. simplest way to get the
# same effect is to call marshal and then skip the code.
mtime = os.path.getmtime(self.filename)
mtime = struct.pack('<i', mtime)
return self.MAGIC + mtime
class LocalNameFinder:
"""Find local names in scope"""
def __init__(self, names=()):
self.names = misc.Set()
self.globals = misc.Set()
for name in names:
self.names.add(name)
# XXX list comprehensions and for loops
def getLocals(self):
for elt in self.globals.elements():
if self.names.has_elt(elt):
self.names.remove(elt)
return self.names
def visitDict(self, node):
pass
def visitGlobal(self, node):
for name in node.names:
self.globals.add(name)
def visitFunction(self, node):
self.names.add(node.name)
def visitLambda(self, node):
pass
def visitImport(self, node):
for name, alias in node.names:
self.names.add(alias or name)
def visitFrom(self, node):
for name, alias in node.names:
self.names.add(alias or name)
def visitClass(self, node):
self.names.add(node.name)
def visitAssName(self, node):
self.names.add(node.name)
def is_constant_false(node):
if isinstance(node, ast.Const):
if not node.value:
return 1
return 0
class CodeGenerator:
"""Defines basic code generator for Python bytecode
This class is an abstract base class. Concrete subclasses must
define an __init__() that defines self.graph and then calls the
__init__() defined in this class.
The concrete class must also define the class attributes
NameFinder, FunctionGen, and ClassGen. These attributes can be
defined in the initClass() method, which is a hook for
initializing these methods after all the classes have been
defined.
"""
optimized = 0 # is namespace access optimized?
__initialized = None
class_name = None # provide default for instance variable
def __init__(self):
if self.__initialized is None:
self.initClass()
self.__class__.__initialized = 1
self.checkClass()
self.locals = misc.Stack()
self.setups = misc.Stack()
self.curStack = 0
self.maxStack = 0
self.last_lineno = None
self._setupGraphDelegation()
self._div_op = "BINARY_DIVIDE"
# XXX set flags based on future features
futures = self.get_module().futures
for feature in futures:
if feature == "division":
self.graph.setFlag(CO_FUTURE_DIVISION)
self._div_op = "BINARY_TRUE_DIVIDE"
elif feature == "generators":
self.graph.setFlag(CO_GENERATOR_ALLOWED)
def initClass(self):
"""This method is called once for each class"""
def checkClass(self):
"""Verify that class is constructed correctly"""
try:
assert hasattr(self, 'graph')
assert getattr(self, 'NameFinder')
assert getattr(self, 'FunctionGen')
assert getattr(self, 'ClassGen')
except AssertionError, msg:
intro = "Bad class construction for %s" % self.__class__.__name__
raise AssertionError, intro
def _setupGraphDelegation(self):
self.emit = self.graph.emit
self.newBlock = self.graph.newBlock
self.startBlock = self.graph.startBlock
self.nextBlock = self.graph.nextBlock
self.setDocstring = self.graph.setDocstring
def getCode(self):
"""Return a code object"""
return self.graph.getCode()
def mangle(self, name):
if self.class_name is not None:
return misc.mangle(name, self.class_name)
else:
return name
def parseSymbols(self, tree):
s = symbols.SymbolVisitor()
walk(tree, s)
return s.scopes
def get_module(self):
raise RuntimeError, "should be implemented by subclasses"
# Next five methods handle name access
def isLocalName(self, name):
return self.locals.top().has_elt(name)
def storeName(self, name):
self._nameOp('STORE', name)
def loadName(self, name):
self._nameOp('LOAD', name)
def delName(self, name):
self._nameOp('DELETE', name)
def _nameOp(self, prefix, name):
name = self.mangle(name)
scope = self.scope.check_name(name)
if scope == SC_LOCAL:
if not self.optimized:
self.emit(prefix + '_NAME', name)
else:
self.emit(prefix + '_FAST', name)
elif scope == SC_GLOBAL:
if not self.optimized:
self.emit(prefix + '_NAME', name)
else:
self.emit(prefix + '_GLOBAL', name)
elif scope == SC_FREE or scope == SC_CELL:
self.emit(prefix + '_DEREF', name)
else:
raise RuntimeError, "unsupported scope for var %s: %d" % \
(name, scope)
def _implicitNameOp(self, prefix, name):
"""Emit name ops for names generated implicitly by for loops
The interpreter generates names that start with a period or
dollar sign. The symbol table ignores these names because
they aren't present in the program text.
"""
if self.optimized:
self.emit(prefix + '_FAST', name)
else:
self.emit(prefix + '_NAME', name)
# The set_lineno() function and the explicit emit() calls for
# SET_LINENO below are only used to generate the line number table.
# As of Python 2.3, the interpreter does not have a SET_LINENO
# instruction. pyassem treats SET_LINENO opcodes as a special case.
def set_lineno(self, node, force=False):
"""Emit SET_LINENO if necessary.
The instruction is considered necessary if the node has a
lineno attribute and it is different than the last lineno
emitted.
Returns true if SET_LINENO was emitted.
There are no rules for when an AST node should have a lineno
attribute. The transformer and AST code need to be reviewed
and a consistent policy implemented and documented. Until
then, this method works around missing line numbers.
"""
lineno = getattr(node, 'lineno', None)
if lineno is not None and (lineno != self.last_lineno
or force):
self.emit('SET_LINENO', lineno)
self.last_lineno = lineno
return True
return False
    # The first few visitor methods handle nodes that generate new
# code objects. They use class attributes to determine what
# specialized code generators to use.
NameFinder = LocalNameFinder
FunctionGen = None
ClassGen = None
def visitModule(self, node):
self.scopes = self.parseSymbols(node)
self.scope = self.scopes[node]
self.emit('SET_LINENO', 0)
if node.doc:
self.emit('LOAD_CONST', node.doc)
self.storeName('__doc__')
lnf = walk(node.node, self.NameFinder(), verbose=0)
self.locals.push(lnf.getLocals())
self.visit(node.node)
self.emit('LOAD_CONST', None)
self.emit('RETURN_VALUE')
def visitExpression(self, node):
self.set_lineno(node)
self.scopes = self.parseSymbols(node)
self.scope = self.scopes[node]
self.visit(node.node)
self.emit('RETURN_VALUE')
def visitFunction(self, node):
self._visitFuncOrLambda(node, isLambda=0)
if node.doc:
self.setDocstring(node.doc)
self.storeName(node.name)
def visitLambda(self, node):
self._visitFuncOrLambda(node, isLambda=1)
def _visitFuncOrLambda(self, node, isLambda=0):
gen = self.FunctionGen(node, self.scopes, isLambda,
self.class_name, self.get_module())
walk(node.code, gen)
gen.finish()
self.set_lineno(node)
for default in node.defaults:
self.visit(default)
frees = gen.scope.get_free_vars()
if frees:
for name in frees:
self.emit('LOAD_CLOSURE', name)
self.emit('LOAD_CONST', gen)
self.emit('MAKE_CLOSURE', len(node.defaults))
else:
self.emit('LOAD_CONST', gen)
self.emit('MAKE_FUNCTION', len(node.defaults))
def visitClass(self, node):
gen = self.ClassGen(node, self.scopes,
self.get_module())
walk(node.code, gen)
gen.finish()
self.set_lineno(node)
self.emit('LOAD_CONST', node.name)
for base in node.bases:
self.visit(base)
self.emit('BUILD_TUPLE', len(node.bases))
frees = gen.scope.get_free_vars()
for name in frees:
self.emit('LOAD_CLOSURE', name)
self.emit('LOAD_CONST', gen)
if frees:
self.emit('MAKE_CLOSURE', 0)
else:
self.emit('MAKE_FUNCTION', 0)
self.emit('CALL_FUNCTION', 0)
self.emit('BUILD_CLASS')
self.storeName(node.name)
# The rest are standard visitor methods
# The next few implement control-flow statements
def visitIf(self, node):
end = self.newBlock()
numtests = len(node.tests)
for i in range(numtests):
test, suite = node.tests[i]
if is_constant_false(test):
# XXX will need to check generator stuff here
continue
self.set_lineno(test)
self.visit(test)
nextTest = self.newBlock()
self.emit('JUMP_IF_FALSE', nextTest)
self.nextBlock()
self.emit('POP_TOP')
self.visit(suite)
self.emit('JUMP_FORWARD', end)
self.startBlock(nextTest)
self.emit('POP_TOP')
if node.else_:
self.visit(node.else_)
self.nextBlock(end)
def visitWhile(self, node):
self.set_lineno(node)
loop = self.newBlock()
else_ = self.newBlock()
after = self.newBlock()
self.emit('SETUP_LOOP', after)
self.nextBlock(loop)
self.setups.push((LOOP, loop))
self.set_lineno(node, force=True)
self.visit(node.test)
self.emit('JUMP_IF_FALSE', else_ or after)
self.nextBlock()
self.emit('POP_TOP')
self.visit(node.body)
self.emit('JUMP_ABSOLUTE', loop)
self.startBlock(else_) # or just the POPs if not else clause
self.emit('POP_TOP')
self.emit('POP_BLOCK')
self.setups.pop()
if node.else_:
self.visit(node.else_)
self.nextBlock(after)
def visitFor(self, node):
start = self.newBlock()
anchor = self.newBlock()
after = self.newBlock()
self.setups.push((LOOP, start))
self.set_lineno(node)
self.emit('SETUP_LOOP', after)
self.visit(node.list)
self.emit('GET_ITER')
self.nextBlock(start)
self.set_lineno(node, force=1)
self.emit('FOR_ITER', anchor)
self.visit(node.assign)
self.visit(node.body)
self.emit('JUMP_ABSOLUTE', start)
self.nextBlock(anchor)
self.emit('POP_BLOCK')
self.setups.pop()
if node.else_:
self.visit(node.else_)
self.nextBlock(after)
def visitBreak(self, node):
if not self.setups:
raise SyntaxError, "'break' outside loop (%s, %d)" % \
(node.filename, node.lineno)
self.set_lineno(node)
self.emit('BREAK_LOOP')
def visitContinue(self, node):
if not self.setups:
raise SyntaxError, "'continue' outside loop (%s, %d)" % \
(node.filename, node.lineno)
kind, block = self.setups.top()
if kind == LOOP:
self.set_lineno(node)
self.emit('JUMP_ABSOLUTE', block)
self.nextBlock()
elif kind == EXCEPT or kind == TRY_FINALLY:
self.set_lineno(node)
# find the block that starts the loop
top = len(self.setups)
while top > 0:
top = top - 1
kind, loop_block = self.setups[top]
if kind == LOOP:
break
if kind != LOOP:
raise SyntaxError, "'continue' outside loop (%s, %d)" % \
(node.filename, node.lineno)
self.emit('CONTINUE_LOOP', loop_block)
self.nextBlock()
elif kind == END_FINALLY:
msg = "'continue' not allowed inside 'finally' clause (%s, %d)"
raise SyntaxError, msg % (node.filename, node.lineno)
def visitTest(self, node, jump):
end = self.newBlock()
for child in node.nodes[:-1]:
self.visit(child)
self.emit(jump, end)
self.nextBlock()
self.emit('POP_TOP')
self.visit(node.nodes[-1])
self.nextBlock(end)
def visitAnd(self, node):
self.visitTest(node, 'JUMP_IF_FALSE')
def visitOr(self, node):
self.visitTest(node, 'JUMP_IF_TRUE')
def visitCompare(self, node):
self.visit(node.expr)
cleanup = self.newBlock()
for op, code in node.ops[:-1]:
self.visit(code)
self.emit('DUP_TOP')
self.emit('ROT_THREE')
self.emit('COMPARE_OP', op)
self.emit('JUMP_IF_FALSE', cleanup)
self.nextBlock()
self.emit('POP_TOP')
# now do the last comparison
if node.ops:
op, code = node.ops[-1]
self.visit(code)
self.emit('COMPARE_OP', op)
if len(node.ops) > 1:
end = self.newBlock()
self.emit('JUMP_FORWARD', end)
self.startBlock(cleanup)
self.emit('ROT_TWO')
self.emit('POP_TOP')
self.nextBlock(end)
# list comprehensions
__list_count = 0
def visitListComp(self, node):
self.set_lineno(node)
# setup list
append = "$append%d" % self.__list_count
self.__list_count = self.__list_count + 1
self.emit('BUILD_LIST', 0)
self.emit('DUP_TOP')
self.emit('LOAD_ATTR', 'append')
self._implicitNameOp('STORE', append)
stack = []
for i, for_ in zip(range(len(node.quals)), node.quals):
start, anchor = self.visit(for_)
cont = None
for if_ in for_.ifs:
if cont is None:
cont = self.newBlock()
self.visit(if_, cont)
stack.insert(0, (start, cont, anchor))
self._implicitNameOp('LOAD', append)
self.visit(node.expr)
self.emit('CALL_FUNCTION', 1)
self.emit('POP_TOP')
for start, cont, anchor in stack:
if cont:
skip_one = self.newBlock()
self.emit('JUMP_FORWARD', skip_one)
self.startBlock(cont)
self.emit('POP_TOP')
self.nextBlock(skip_one)
self.emit('JUMP_ABSOLUTE', start)
self.startBlock(anchor)
self._implicitNameOp('DELETE', append)
self.__list_count = self.__list_count - 1
def visitListCompFor(self, node):
start = self.newBlock()
anchor = self.newBlock()
self.visit(node.list)
self.emit('GET_ITER')
self.nextBlock(start)
self.set_lineno(node, force=True)
self.emit('FOR_ITER', anchor)
self.nextBlock()
self.visit(node.assign)
return start, anchor
def visitListCompIf(self, node, branch):
self.set_lineno(node, force=True)
self.visit(node.test)
self.emit('JUMP_IF_FALSE', branch)
self.newBlock()
self.emit('POP_TOP')
# exception related
def visitAssert(self, node):
# XXX would be interesting to implement this via a
# transformation of the AST before this stage
end = self.newBlock()
self.set_lineno(node)
# XXX __debug__ and AssertionError appear to be special cases
# -- they are always loaded as globals even if there are local
# names. I guess this is a sort of renaming op.
self.emit('LOAD_GLOBAL', '__debug__')
self.emit('JUMP_IF_FALSE', end)
self.nextBlock()
self.emit('POP_TOP')
self.visit(node.test)
self.emit('JUMP_IF_TRUE', end)
self.nextBlock()
self.emit('POP_TOP')
self.emit('LOAD_GLOBAL', 'AssertionError')
if node.fail:
self.visit(node.fail)
self.emit('RAISE_VARARGS', 2)
else:
self.emit('RAISE_VARARGS', 1)
self.nextBlock(end)
self.emit('POP_TOP')
def visitRaise(self, node):
self.set_lineno(node)
n = 0
if node.expr1:
self.visit(node.expr1)
n = n + 1
if node.expr2:
self.visit(node.expr2)
n = n + 1
if node.expr3:
self.visit(node.expr3)
n = n + 1
self.emit('RAISE_VARARGS', n)
def visitTryExcept(self, node):
body = self.newBlock()
handlers = self.newBlock()
end = self.newBlock()
if node.else_:
lElse = self.newBlock()
else:
lElse = end
self.set_lineno(node)
self.emit('SETUP_EXCEPT', handlers)
self.nextBlock(body)
self.setups.push((EXCEPT, body))
self.visit(node.body)
self.emit('POP_BLOCK')
self.setups.pop()
self.emit('JUMP_FORWARD', lElse)
self.startBlock(handlers)
last = len(node.handlers) - 1
for i in range(len(node.handlers)):
expr, target, body = node.handlers[i]
self.set_lineno(expr)
if expr:
self.emit('DUP_TOP')
self.visit(expr)
self.emit('COMPARE_OP', 'exception match')
next = self.newBlock()
self.emit('JUMP_IF_FALSE', next)
self.nextBlock()
self.emit('POP_TOP')
self.emit('POP_TOP')
if target:
self.visit(target)
else:
self.emit('POP_TOP')
self.emit('POP_TOP')
self.visit(body)
self.emit('JUMP_FORWARD', end)
if expr:
self.nextBlock(next)
else:
self.nextBlock()
if expr: # XXX
self.emit('POP_TOP')
self.emit('END_FINALLY')
if node.else_:
self.nextBlock(lElse)
self.visit(node.else_)
self.nextBlock(end)
def visitTryFinally(self, node):
body = self.newBlock()
final = self.newBlock()
self.set_lineno(node)
self.emit('SETUP_FINALLY', final)
self.nextBlock(body)
self.setups.push((TRY_FINALLY, body))
self.visit(node.body)
self.emit('POP_BLOCK')
self.setups.pop()
self.emit('LOAD_CONST', None)
self.nextBlock(final)
self.setups.push((END_FINALLY, final))
self.visit(node.final)
self.emit('END_FINALLY')
self.setups.pop()
# misc
def visitDiscard(self, node):
self.set_lineno(node)
self.visit(node.expr)
self.emit('POP_TOP')
def visitConst(self, node):
self.emit('LOAD_CONST', node.value)
def visitKeyword(self, node):
self.emit('LOAD_CONST', node.name)
self.visit(node.expr)
def visitGlobal(self, node):
# no code to generate
pass
def visitName(self, node):
self.set_lineno(node)
self.loadName(node.name)
def visitPass(self, node):
self.set_lineno(node)
def visitImport(self, node):
self.set_lineno(node)
for name, alias in node.names:
if VERSION > 1:
self.emit('LOAD_CONST', None)
self.emit('IMPORT_NAME', name)
mod = name.split(".")[0]
if alias:
self._resolveDots(name)
self.storeName(alias)
else:
self.storeName(mod)
def visitFrom(self, node):
self.set_lineno(node)
fromlist = map(lambda (name, alias): name, node.names)
if VERSION > 1:
self.emit('LOAD_CONST', tuple(fromlist))
self.emit('IMPORT_NAME', node.modname)
for name, alias in node.names:
if VERSION > 1:
if name == '*':
self.namespace = 0
self.emit('IMPORT_STAR')
# There can only be one name w/ from ... import *
assert len(node.names) == 1
return
else:
self.emit('IMPORT_FROM', name)
self._resolveDots(name)
self.storeName(alias or name)
else:
self.emit('IMPORT_FROM', name)
self.emit('POP_TOP')
def _resolveDots(self, name):
elts = name.split(".")
if len(elts) == 1:
return
for elt in elts[1:]:
self.emit('LOAD_ATTR', elt)
def visitGetattr(self, node):
self.visit(node.expr)
self.emit('LOAD_ATTR', self.mangle(node.attrname))
# next five implement assignments
def visitAssign(self, node):
self.set_lineno(node)
self.visit(node.expr)
dups = len(node.nodes) - 1
for i in range(len(node.nodes)):
elt = node.nodes[i]
if i < dups:
self.emit('DUP_TOP')
if isinstance(elt, ast.Node):
self.visit(elt)
def visitAssName(self, node):
if node.flags == 'OP_ASSIGN':
self.storeName(node.name)
elif node.flags == 'OP_DELETE':
self.set_lineno(node)
self.delName(node.name)
else:
print "oops", node.flags
def visitAssAttr(self, node):
self.visit(node.expr)
if node.flags == 'OP_ASSIGN':
self.emit('STORE_ATTR', self.mangle(node.attrname))
elif node.flags == 'OP_DELETE':
self.emit('DELETE_ATTR', self.mangle(node.attrname))
else:
print "warning: unexpected flags:", node.flags
print node
def _visitAssSequence(self, node, op='UNPACK_SEQUENCE'):
if findOp(node) != 'OP_DELETE':
self.emit(op, len(node.nodes))
for child in node.nodes:
self.visit(child)
if VERSION > 1:
visitAssTuple = _visitAssSequence
visitAssList = _visitAssSequence
else:
def visitAssTuple(self, node):
self._visitAssSequence(node, 'UNPACK_TUPLE')
def visitAssList(self, node):
self._visitAssSequence(node, 'UNPACK_LIST')
# augmented assignment
def visitAugAssign(self, node):
self.set_lineno(node)
aug_node = wrap_aug(node.node)
self.visit(aug_node, "load")
self.visit(node.expr)
self.emit(self._augmented_opcode[node.op])
self.visit(aug_node, "store")
_augmented_opcode = {
'+=' : 'INPLACE_ADD',
'-=' : 'INPLACE_SUBTRACT',
'*=' : 'INPLACE_MULTIPLY',
'/=' : 'INPLACE_DIVIDE',
'//=': 'INPLACE_FLOOR_DIVIDE',
'%=' : 'INPLACE_MODULO',
'**=': 'INPLACE_POWER',
'>>=': 'INPLACE_RSHIFT',
'<<=': 'INPLACE_LSHIFT',
'&=' : 'INPLACE_AND',
'^=' : 'INPLACE_XOR',
'|=' : 'INPLACE_OR',
}
def visitAugName(self, node, mode):
if mode == "load":
self.loadName(node.name)
elif mode == "store":
self.storeName(node.name)
def visitAugGetattr(self, node, mode):
if mode == "load":
self.visit(node.expr)
self.emit('DUP_TOP')
self.emit('LOAD_ATTR', self.mangle(node.attrname))
elif mode == "store":
self.emit('ROT_TWO')
self.emit('STORE_ATTR', self.mangle(node.attrname))
def visitAugSlice(self, node, mode):
if mode == "load":
self.visitSlice(node, 1)
elif mode == "store":
slice = 0
if node.lower:
slice = slice | 1
if node.upper:
slice = slice | 2
if slice == 0:
self.emit('ROT_TWO')
elif slice == 3:
self.emit('ROT_FOUR')
else:
self.emit('ROT_THREE')
self.emit('STORE_SLICE+%d' % slice)
def visitAugSubscript(self, node, mode):
if len(node.subs) > 1:
raise SyntaxError, "augmented assignment to tuple is not possible"
if mode == "load":
self.visitSubscript(node, 1)
elif mode == "store":
self.emit('ROT_THREE')
self.emit('STORE_SUBSCR')
def visitExec(self, node):
self.visit(node.expr)
if node.locals is None:
self.emit('LOAD_CONST', None)
else:
self.visit(node.locals)
if node.globals is None:
self.emit('DUP_TOP')
else:
self.visit(node.globals)
self.emit('EXEC_STMT')
def visitCallFunc(self, node):
pos = 0
kw = 0
self.set_lineno(node)
self.visit(node.node)
for arg in node.args:
self.visit(arg)
if isinstance(arg, ast.Keyword):
kw = kw + 1
else:
pos = pos + 1
if node.star_args is not None:
self.visit(node.star_args)
if node.dstar_args is not None:
self.visit(node.dstar_args)
have_star = node.star_args is not None
have_dstar = node.dstar_args is not None
opcode = callfunc_opcode_info[have_star, have_dstar]
self.emit(opcode, kw << 8 | pos)
def visitPrint(self, node, newline=0):
self.set_lineno(node)
if node.dest:
self.visit(node.dest)
for child in node.nodes:
if node.dest:
self.emit('DUP_TOP')
self.visit(child)
if node.dest:
self.emit('ROT_TWO')
self.emit('PRINT_ITEM_TO')
else:
self.emit('PRINT_ITEM')
if node.dest and not newline:
self.emit('POP_TOP')
def visitPrintnl(self, node):
self.visitPrint(node, newline=1)
if node.dest:
self.emit('PRINT_NEWLINE_TO')
else:
self.emit('PRINT_NEWLINE')
def visitReturn(self, node):
self.set_lineno(node)
self.visit(node.value)
self.emit('RETURN_VALUE')
def visitYield(self, node):
self.set_lineno(node)
self.visit(node.value)
self.emit('YIELD_VALUE')
# slice and subscript stuff
def visitSlice(self, node, aug_flag=None):
# aug_flag is used by visitAugSlice
self.visit(node.expr)
slice = 0
if node.lower:
self.visit(node.lower)
slice = slice | 1
if node.upper:
self.visit(node.upper)
slice = slice | 2
if aug_flag:
if slice == 0:
self.emit('DUP_TOP')
elif slice == 3:
self.emit('DUP_TOPX', 3)
else:
self.emit('DUP_TOPX', 2)
if node.flags == 'OP_APPLY':
self.emit('SLICE+%d' % slice)
elif node.flags == 'OP_ASSIGN':
self.emit('STORE_SLICE+%d' % slice)
elif node.flags == 'OP_DELETE':
self.emit('DELETE_SLICE+%d' % slice)
else:
print "weird slice", node.flags
raise
def visitSubscript(self, node, aug_flag=None):
self.visit(node.expr)
for sub in node.subs:
self.visit(sub)
if aug_flag:
self.emit('DUP_TOPX', 2)
if len(node.subs) > 1:
self.emit('BUILD_TUPLE', len(node.subs))
if node.flags == 'OP_APPLY':
self.emit('BINARY_SUBSCR')
elif node.flags == 'OP_ASSIGN':
self.emit('STORE_SUBSCR')
elif node.flags == 'OP_DELETE':
self.emit('DELETE_SUBSCR')
# binary ops
def binaryOp(self, node, op):
self.visit(node.left)
self.visit(node.right)
self.emit(op)
def visitAdd(self, node):
return self.binaryOp(node, 'BINARY_ADD')
def visitSub(self, node):
return self.binaryOp(node, 'BINARY_SUBTRACT')
def visitMul(self, node):
return self.binaryOp(node, 'BINARY_MULTIPLY')
def visitDiv(self, node):
return self.binaryOp(node, self._div_op)
def visitFloorDiv(self, node):
return self.binaryOp(node, 'BINARY_FLOOR_DIVIDE')
def visitMod(self, node):
return self.binaryOp(node, 'BINARY_MODULO')
def visitPower(self, node):
return self.binaryOp(node, 'BINARY_POWER')
def visitLeftShift(self, node):
return self.binaryOp(node, 'BINARY_LSHIFT')
def visitRightShift(self, node):
return self.binaryOp(node, 'BINARY_RSHIFT')
# unary ops
def unaryOp(self, node, op):
self.visit(node.expr)
self.emit(op)
def visitInvert(self, node):
return self.unaryOp(node, 'UNARY_INVERT')
def visitUnarySub(self, node):
return self.unaryOp(node, 'UNARY_NEGATIVE')
def visitUnaryAdd(self, node):
return self.unaryOp(node, 'UNARY_POSITIVE')
def visitUnaryInvert(self, node):
return self.unaryOp(node, 'UNARY_INVERT')
def visitNot(self, node):
return self.unaryOp(node, 'UNARY_NOT')
def visitBackquote(self, node):
return self.unaryOp(node, 'UNARY_CONVERT')
# bit ops
def bitOp(self, nodes, op):
self.visit(nodes[0])
for node in nodes[1:]:
self.visit(node)
self.emit(op)
def visitBitand(self, node):
return self.bitOp(node.nodes, 'BINARY_AND')
def visitBitor(self, node):
return self.bitOp(node.nodes, 'BINARY_OR')
def visitBitxor(self, node):
return self.bitOp(node.nodes, 'BINARY_XOR')
# object constructors
def visitEllipsis(self, node):
self.emit('LOAD_CONST', Ellipsis)
def visitTuple(self, node):
self.set_lineno(node)
for elt in node.nodes:
self.visit(elt)
self.emit('BUILD_TUPLE', len(node.nodes))
def visitList(self, node):
self.set_lineno(node)
for elt in node.nodes:
self.visit(elt)
self.emit('BUILD_LIST', len(node.nodes))
def visitSliceobj(self, node):
for child in node.nodes:
self.visit(child)
self.emit('BUILD_SLICE', len(node.nodes))
def visitDict(self, node):
self.set_lineno(node)
self.emit('BUILD_MAP', 0)
for k, v in node.items:
self.emit('DUP_TOP')
self.visit(k)
self.visit(v)
self.emit('ROT_THREE')
self.emit('STORE_SUBSCR')
class NestedScopeMixin:
"""Defines initClass() for nested scoping (Python 2.2-compatible)"""
def initClass(self):
self.__class__.NameFinder = LocalNameFinder
self.__class__.FunctionGen = FunctionCodeGenerator
self.__class__.ClassGen = ClassCodeGenerator
class ModuleCodeGenerator(NestedScopeMixin, CodeGenerator):
__super_init = CodeGenerator.__init__
scopes = None
def __init__(self, tree):
self.graph = pyassem.PyFlowGraph("<module>", tree.filename)
self.futures = future.find_futures(tree)
self.__super_init()
walk(tree, self)
def get_module(self):
return self
class ExpressionCodeGenerator(NestedScopeMixin, CodeGenerator):
__super_init = CodeGenerator.__init__
scopes = None
futures = ()
def __init__(self, tree):
self.graph = pyassem.PyFlowGraph("<expression>", tree.filename)
self.__super_init()
walk(tree, self)
def get_module(self):
return self
class InteractiveCodeGenerator(NestedScopeMixin, CodeGenerator):
__super_init = CodeGenerator.__init__
scopes = None
futures = ()
def __init__(self, tree):
self.graph = pyassem.PyFlowGraph("<interactive>", tree.filename)
self.__super_init()
self.set_lineno(tree)
walk(tree, self)
self.emit('RETURN_VALUE')
def get_module(self):
return self
def visitDiscard(self, node):
# XXX Discard means it's an expression. Perhaps this is a bad
# name.
self.visit(node.expr)
self.emit('PRINT_EXPR')
class AbstractFunctionCode:
optimized = 1
lambdaCount = 0
def __init__(self, func, scopes, isLambda, class_name, mod):
self.class_name = class_name
self.module = mod
if isLambda:
klass = FunctionCodeGenerator
name = "<lambda.%d>" % klass.lambdaCount
klass.lambdaCount = klass.lambdaCount + 1
else:
name = func.name
args, hasTupleArg = generateArgList(func.argnames)
self.graph = pyassem.PyFlowGraph(name, func.filename, args,
optimized=1)
self.isLambda = isLambda
self.super_init()
if not isLambda and func.doc:
self.setDocstring(func.doc)
lnf = walk(func.code, self.NameFinder(args), verbose=0)
self.locals.push(lnf.getLocals())
if func.varargs:
self.graph.setFlag(CO_VARARGS)
if func.kwargs:
self.graph.setFlag(CO_VARKEYWORDS)
self.set_lineno(func)
if hasTupleArg:
self.generateArgUnpack(func.argnames)
def get_module(self):
return self.module
def finish(self):
self.graph.startExitBlock()
if not self.isLambda:
self.emit('LOAD_CONST', None)
self.emit('RETURN_VALUE')
def generateArgUnpack(self, args):
for i in range(len(args)):
arg = args[i]
if type(arg) == types.TupleType:
self.emit('LOAD_FAST', '.%d' % (i * 2))
self.unpackSequence(arg)
def unpackSequence(self, tup):
if VERSION > 1:
self.emit('UNPACK_SEQUENCE', len(tup))
else:
self.emit('UNPACK_TUPLE', len(tup))
for elt in tup:
if type(elt) == types.TupleType:
self.unpackSequence(elt)
else:
self._nameOp('STORE', elt)
unpackTuple = unpackSequence
class FunctionCodeGenerator(NestedScopeMixin, AbstractFunctionCode,
CodeGenerator):
    super_init = CodeGenerator.__init__ # called by the other __init__
scopes = None
__super_init = AbstractFunctionCode.__init__
def __init__(self, func, scopes, isLambda, class_name, mod):
self.scopes = scopes
self.scope = scopes[func]
self.__super_init(func, scopes, isLambda, class_name, mod)
self.graph.setFreeVars(self.scope.get_free_vars())
self.graph.setCellVars(self.scope.get_cell_vars())
if self.scope.generator is not None:
self.graph.setFlag(CO_GENERATOR)
class AbstractClassCode:
def __init__(self, klass, scopes, module):
self.class_name = klass.name
self.module = module
self.graph = pyassem.PyFlowGraph(klass.name, klass.filename,
optimized=0, klass=1)
self.super_init()
lnf = walk(klass.code, self.NameFinder(), verbose=0)
self.locals.push(lnf.getLocals())
self.graph.setFlag(CO_NEWLOCALS)
if klass.doc:
self.setDocstring(klass.doc)
def get_module(self):
return self.module
def finish(self):
self.graph.startExitBlock()
self.emit('LOAD_LOCALS')
self.emit('RETURN_VALUE')
class ClassCodeGenerator(NestedScopeMixin, AbstractClassCode, CodeGenerator):
super_init = CodeGenerator.__init__
scopes = None
__super_init = AbstractClassCode.__init__
def __init__(self, klass, scopes, module):
self.scopes = scopes
self.scope = scopes[klass]
self.__super_init(klass, scopes, module)
self.graph.setFreeVars(self.scope.get_free_vars())
self.graph.setCellVars(self.scope.get_cell_vars())
self.set_lineno(klass)
self.emit("LOAD_GLOBAL", "__name__")
self.storeName("__module__")
if klass.doc:
self.emit("LOAD_CONST", klass.doc)
self.storeName('__doc__')
def generateArgList(arglist):
"""Generate an arg list marking TupleArgs"""
args = []
extra = []
count = 0
for i in range(len(arglist)):
elt = arglist[i]
if type(elt) == types.StringType:
args.append(elt)
elif type(elt) == types.TupleType:
args.append(TupleArg(i * 2, elt))
extra.extend(misc.flatten(elt))
count = count + 1
else:
            raise ValueError, "unexpected argument type: %r" % elt
return args + extra, count
def findOp(node):
"""Find the op (DELETE, LOAD, STORE) in an AssTuple tree"""
v = OpFinder()
walk(node, v, verbose=0)
return v.op
class OpFinder:
def __init__(self):
self.op = None
def visitAssName(self, node):
if self.op is None:
self.op = node.flags
elif self.op != node.flags:
raise ValueError, "mixed ops in stmt"
visitAssAttr = visitAssName
visitSubscript = visitAssName
class Delegator:
"""Base class to support delegation for augmented assignment nodes
    To generate code for augmented assignments, we use the following
    wrapper classes. In visitAugAssign, the left-hand expression node
    is visited twice. The first time the visit uses the normal method
    for that node. The second time the visit uses a different method
that generates the appropriate code to perform the assignment.
These delegator classes wrap the original AST nodes in order to
support the variant visit methods.
"""
def __init__(self, obj):
self.obj = obj
def __getattr__(self, attr):
return getattr(self.obj, attr)
class AugGetattr(Delegator):
pass
class AugName(Delegator):
pass
class AugSlice(Delegator):
pass
class AugSubscript(Delegator):
pass
wrapper = {
ast.Getattr: AugGetattr,
ast.Name: AugName,
ast.Slice: AugSlice,
ast.Subscript: AugSubscript,
}
def wrap_aug(node):
return wrapper[node.__class__](node)
if __name__ == "__main__":
for file in sys.argv[1:]:
compileFile(file)
from pyrogram import filters
from pyrogram.types import Message
from megumin import megux, Config
from megumin.utils import (
check_bot_rights,
check_rights,
is_admin,
is_dev,
is_self,
sed_sticker,
get_collection,
get_string
)
from megumin.utils.decorators import input_str
@megux.on_message(filters.command("setgrouppic", prefixes=["/", "!"]))
async def set_chat_photo(_, message: Message):
chat_id = message.chat.id
user_id = message.from_user.id
reply = message.reply_to_message
if not reply:
return await message.reply_text(
"Marque uma foto ou documento para que eu possa alterar a foto do Grupo"
)
if not await check_rights(chat_id, message.from_user.id, "can_change_info"):
await message.reply("Você não tem direitos administrativos suficientes para alterar dados do grupo!")
return
file = reply.document or reply.photo
if not file:
return await message.reply_text(
"Marque uma foto ou documento para que eu possa alterar a foto do Grupo"
)
if file.file_size > 5000000:
return await message.reply("__Esse arquivo é muito grande__")
photo = await reply.download()
    await message.chat.set_photo(photo)
await message.reply_text(f"Foto alterada com sucesso no grupo <b>{message.chat.title}</b>")
@megux.on_message(filters.command("setrules", Config.TRIGGER))
async def rules_set(_, m: Message):
x = ""
if input_str(m):
x += m.text.split(None, 1)[1]
if m.reply_to_message:
x += m.reply_to_message.text
data = get_collection(f"RULES {m.chat.id}")
if x in "":
return await m.reply(await get_string(m.chat.id, "RULES_NO_ARGS"))
else:
if await check_rights(m.chat.id, m.from_user.id, "can_change_info"):
await data.drop()
await data.insert_one({"_rules": x})
await m.reply(await get_string(m.chat.id, "RULES_UPDATED"))
@megux.on_message(filters.command("clearrules", Config.TRIGGER))
async def del_rules_(_, m: Message):
if await check_rights(m.chat.id, m.from_user.id, "can_change_info"):
RULES = get_collection(f"RULES {m.chat.id}")
i = await RULES.find_one()
res = i["_rules"]
await RULES.delete_one({"_rules": res})
await m.reply(await get_string(m.chat.id, "RULES_CLEAR_SUCCESS"))
@megux.on_message(filters.command("setlog", prefixes=["/", "!"]))
async def set_log(_, m: Message):
chat_log = ""
chat_log += input_str(m)
if not "-100" in chat_log:
return await m.reply("Isso não é um grupo!")
if chat_log == f"{m.chat.id}":
return await m.reply("Você não pode definir o grupo de registro nesse grupo!")
if await check_rights(m.chat.id, m.from_user.id, "can_change_info"):
if input_str(m):
data = get_collection(f"LOGS {m.chat.id}")
to_chat = int(chat_log)
if await check_rights(to_chat, m.from_user.id, "can_promote_members"):
await data.drop()
await data.insert_one({"log_id": chat_log})
chat = await data.find_one()
await megux.send_message(chat["log_id"], (await get_string(m.chat.id, "LOGS_DEFINED")).format(m.chat.title))
await m.reply(await get_string(m.chat.id, "LOGS_DEFINED_MESSAGE"))
else:
return
| python | 3,408 |
"""Herewithin lies the ability to have ansible-runner
run a command in a synchronous manner
"""
from typing import Tuple
from ansible_runner import run_command # type: ignore[import]
from .command_base import CommandBase
class Command(CommandBase):
"""a runner wrapper"""
def run(self) -> Tuple[str, str, int]:
"""run"""
self.generate_run_command_args()
out, err, ret_code = run_command(**self._runner_args)
return out, err, ret_code
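# Hypothetical usage sketch; the constructor arguments below are assumptions,
# since CommandBase and generate_run_command_args are defined elsewhere:
#
#     cmd = Command(executable_cmd="ansible-inventory", cmdline=["--list"])
#     out, err, rc = cmd.run()   # blocks until ansible-runner's run_command returns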
| python | 481 |
import math
import random
import time
import re
from queue import Queue
import urllib.request
import urllib.error
import jieba
from bs4 import BeautifulSoup
urlSet = set()
urlList = []
doc = 0
que = Queue()
user_agents = [
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.835.163 Safari/535.1',
'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
'Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0) Gecko/20100101 Firefox/6.0'
]
ipList = ["112.85.129.100:9999", "112.85.175.4:9999", "112.87.70.92:9999"]
# proxy_support = urllib.request.ProxyHandler({"http": random.choice(ipList)})
# opener = urllib.request.build_opener(proxy_support)
# urllib.request.install_opener(opener)
def get_html(url):
req = urllib.request.Request(url=url, headers={'User-Agent': random.choice(user_agents)})
link = urllib.request.urlopen(req, timeout=1)
return link.read()
def getSave(url):
    soup = BeautifulSoup(get_html(url), 'html.parser')  # initialise BeautifulSoup with the HTML parser
    # extract hyperlinks
for a in soup.findAll('a', href=True):
u = a.get("href")
if u and ('@suda.edu.cn' not in u) and ("javascript" not in u):
if u[0:4] == "http" and "suda" not in u:
break
if u[0:4] != "http":
if u[0] == '/':
u = re.findall("http.*edu.cn", url)[0]+u
else:
site = re.findall("http.*/", url)[0]
if site[-2] == '/':
site = re.findall("http.*/", url+'/')[0]
u = site+u
if u[-1] == '/':
u = u[0:len(u)-1]
if u not in urlSet:
que.put(u)
urlSet.add(u)
    # extract the main text of the page
[script.extract() for script in soup.findAll('script')]
[style.extract() for style in soup.findAll('style')]
soup.prettify()
content = re.sub("<[^>]*>", '', soup.prettify())
content = re.sub("\s{2,}", "\n", content)
with open("{}".format(doc), "w", encoding='utf-8') as f:
f.write(content)
def search():
query = input("网站爬取完毕,请输入查询:").split() # 输入查询
queryDict = {} # 单词在查询中的出现次数
for i in query:
if i in queryDict:
queryDict[i] += 1
else:
queryDict[i] = 1
    queryDf = {i: 0 for i in queryDict}  # document frequency (df) of each query word, 0 if never seen
    fenciDict = []  # per-document word counts from the jieba segmentation
for i in range(len(urlList)):
with open("{}".format(i), "r", encoding='utf-8') as f:
s = f.read()
fenci = jieba.lcut_for_search(s)
fenciSet = set(fenci)
fenciDict.append({i: fenci.count(i) for i in fenciSet})
        # analogous to the query handling above
for word in queryDf:
if word in fenciDict[i]:
queryDf[word] += 1
                # if the keyword appears in this document, increment its df
similarList = []
for i in range(len(urlList)):
        sum_qd = 0.0  # numerator of the cosine similarity
        sum_q2 = 0.0
        sum_d2 = 0.0  # sqrt(sum_q2 * sum_d2) forms the denominator
for word in queryDict:
            w_query = 1.0 + math.log10(queryDict[word])  # tf-idf weight of word in the query
            w_doc = 0  # tf-idf weight of word in document i
if word in fenciDict[i]:
w_doc = (1.0 + math.log10(fenciDict[i][word])) * math.log10(10000.0 / queryDf[word])
sum_qd += w_query * w_doc
sum_q2 += w_query ** 2
sum_d2 += w_doc ** 2
        similar = 0.0  # cosine similarity
len_q2d2 = math.sqrt(sum_q2 * sum_d2)
if math.fabs(len_q2d2) > 1e-5:
similar = sum_qd / len_q2d2
        similarList.append((i, similar))  # (document index, cosine similarity) tuple
similarList.sort(key=lambda x: x[1], reverse=True)
for i in range(min(10,len(similarList))):
d = similarList[i][0]
print(urlList[d], similarList[i][1])
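# Worked example of the scoring above (illustrative numbers): for a query term
# that occurs once in the query, w_query = 1 + log10(1) = 1.0; if that term
# occurs 10 times in document i and its df is 100 (the code assumes a nominal
# corpus size of 10000), w_doc = (1 + log10(10)) * log10(10000 / 100) = 4.0,
# so sum_qd gains 4.0, sum_q2 gains 1.0, sum_d2 gains 16.0, and the cosine
# similarity for that single-term case is 4.0 / sqrt(1.0 * 16.0) = 1.0.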
if __name__ == "__main__":
que.put("http://www.suda.edu.cn")
#while not que.empty():
    for i in range(100):  # a bounded for loop keeps test runs short; adjust the count as needed
url = que.get()
urlList.append(url)
        #print(url)  # show which site is being visited
flag = False
        for i in range(3):  # after three timeouts the fetch is treated as failed
try:
getSave(url)
flag = True
break
except:
pass
if flag:
doc += 1
else:
#print("false") # 可体现出什么网站访问失败
pass
        # throttle the request rate; adjust as needed
time.sleep(0.2)
if doc % 10 == 0:
time.sleep(1.5)
search()
| python | 4,686 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com [email protected]
import base64
import hmac
import hashlib
from unittest import TestCase
from preggy import expect
from thumbor.url_signers.base64_hmac_sha1 import UrlSigner
try:
unicode # Python 2
except NameError:
unicode = str # Python 3
class Base64HmacSha1UrlSignerTestCase(TestCase):
def test_can_create_signer(self):
signer = UrlSigner(security_key="something")
expect(signer).to_be_instance_of(UrlSigner)
expect(signer.security_key).to_equal('something')
def test_can_sign_url(self):
signer = UrlSigner(security_key="something")
url = '10x11:12x13/-300x-300/center/middle/smart/some/image.jpg'
expected = base64.urlsafe_b64encode(
hmac.new(
                b'something', unicode(url).encode('utf-8'), hashlib.sha1  # key must be bytes on Python 3
).digest()
)
actual = signer.signature(url)
expect(actual).to_equal(expected)
| python | 1,158 |
import pyimgur
import sys
im = pyimgur.Imgur('37a3379f24710f8')
uploaded_image = im.upload_image(sys.argv[1], title="")
#print(uploaded_image.title)
print(uploaded_image.link)
#print(uploaded_image.size)
#print(uploaded_image.type) | python | 232 |
print 10**5,2
n = 10**5
for i in xrange(n):
print n-i,
| python | 56 |
# from sqlalchemy.orm import relationship
from app.db.base_class import Base
from sqlalchemy import Column, String
# from app.models.world_tag import world_tag
class Tag(Base):
"""
Tags used to search for worlds.
"""
name = Column(String(30), primary_key=True)
| python | 282 |
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
import numpy as np
import re
import nltk
import matplotlib.pyplot as plt
# In[2]:
from nltk.corpus import stopwords
from nltk.tokenize import sent_tokenize,word_tokenize
from nltk.corpus import state_union
from nltk.stem import WordNetLemmatizer
# In[3]:
lemma=WordNetLemmatizer()
stop_words = set(stopwords.words('english'))
# In[148]:
text1=state_union.raw('2005-GWBush.txt')
# In[149]:
text1
# In[6]:
words_token=word_tokenize(text1)
# In[10]:
words_token=[w.lower() for w in words_token]
words_token=[w for w in words_token if w.isalpha()]
words_token=[w for w in words_token if not w in stop_words]
words_token=[lemma.lemmatize(w) for w in words_token]
# In[230]:
from wordcloud import WordCloud
plt.figure(figsize = (20,20)) # word cloud of the tokenized speech
wc = WordCloud(min_font_size = 3, max_words = 2000 , width = 1600 , height = 800).generate(" ".join(words_token))
plt.imshow(wc,interpolation = 'bilinear')
# In[18]:
import keras
from keras.preprocessing import text
from keras.utils import np_utils
from keras.preprocessing import sequence
from keras.layers import *
import keras.backend as K
from keras.models import Sequential
# In[60]:
tokenizer = text.Tokenizer()
tokenizer.fit_on_texts(words_token)
word2id = tokenizer.word_index
# build vocabulary of unique words
word2id['PAD'] = 0
id2word = {v:k for k, v in word2id.items()}
wids=[word2id[w] for w in words_token]
vocab_size = len(word2id)
embed_size = 100
window_size = 2 # context window size
print('Vocabulary Size:', vocab_size)
print('Vocabulary Sample:', list(word2id.items())[:10])
# In[96]:
def generates(corpus,window_size,vocab_size):
context_length = window_size*2
for index, word in enumerate(corpus):
context_words = []
label_word = []
start = index - window_size
end = index + window_size + 1
context_words.append([corpus[i]
for i in range(start, end)
if 0 <= i < len(corpus)
and i != index])
label_word.append(word)
x = sequence.pad_sequences(context_words, maxlen=context_length)
y = np_utils.to_categorical(label_word, vocab_size)
#print(context_words)
yield (x, y)
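# Example of a yielded pair (illustrative): with window_size = 2, the target
# word at position t is paired with the ids of the two words on each side,
# padded to length 4, so x has shape (1, 4) and y is a one-hot row of length
# vocab_size for the target word.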
# In[97]:
i = 0
for x, y in generates(corpus=wids, window_size=window_size, vocab_size=vocab_size):
if 0 not in x[0]:
print('Context (X):', [id2word[w] for w in x[0]], '-> Target (Y):', id2word[np.argwhere(y[0])[0][0]])
if i == 10:
break
i += 1
# In[98]:
cbow = Sequential()
cbow.add(Embedding(input_dim=vocab_size, output_dim=embed_size, input_length=window_size*2))
cbow.add(Lambda(lambda x: K.mean(x, axis=1), output_shape=(embed_size,)))
cbow.add(Dense(vocab_size, activation='softmax'))
cbow.compile(loss='categorical_crossentropy', optimizer='rmsprop')
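# Shape walk-through of the CBOW model above: input (batch, 4) context ids ->
# Embedding -> (batch, 4, 100) -> mean over the context axis -> (batch, 100) ->
# Dense softmax -> (batch, vocab_size), i.e. the averaged context embedding is
# trained to predict the centre word.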
print(cbow.summary())
# In[99]:
for epoch in range(1, 6):
loss = 0.
i = 0
for x, y in generates(corpus=wids, window_size=window_size, vocab_size=vocab_size):
i += 1
loss += cbow.train_on_batch(x, y)
if i % 1000 == 0:
print('Processed {} (context, word) pairs'.format(i))
print('Epoch:', epoch, '\tLoss:', loss)
print()
# In[106]:
weights = cbow.get_weights()[0]
weights = weights[1:]
print(weights.shape)
pd.DataFrame(weights, index=list(id2word.values())[1:]).head()
# In[116]:
from sklearn.manifold import TSNE
tsne = TSNE(n_components=2, random_state=0, n_iter=10000, perplexity=3)
np.set_printoptions(suppress=True)
T = tsne.fit_transform(weights)
labels = list(id2word.values())
plt.figure(figsize=(24, 14))
plt.scatter(T[:80, 0], T[:80, 1], c='steelblue', edgecolors='k')
for label, x, y in zip(labels, T[:80, 0], T[:80, 1]):
plt.annotate(label, xy=(x+1, y+1), xytext=(0, 0), textcoords='offset points')
# In[192]:
from gensim.models import word2vec
x=[]
x.append([w for w in words_token])
# Set values for various parameters
feature_size = 100 # Word vector dimensionality
window_context = 5 # Context window size
min_word_count = 1 # Minimum word count
sample = 1e-3 # Downsample setting for frequent words
w2v_model = word2vec.Word2Vec(x, size=feature_size,
window=window_context, min_count=min_word_count,
sample=sample, iter=50)
# In[193]:
w2v_model.similarity('terror','terrorist')
# In[218]:
model2_skip = word2vec.Word2Vec(x, min_count = 1, size = 100,
                                window = 5, iter = 50, sg = 1)
# In[219]:
model2_skip.similarity('terror','terrorist')
# In[224]:
words = w2v_model.wv.index2word
wvs = w2v_model.wv[words]
tsne = TSNE(n_components=2, random_state=0, n_iter=10000, perplexity=2)
np.set_printoptions(suppress=True)
T = tsne.fit_transform(wvs)
labels = words
plt.figure(figsize=(24, 14))
plt.scatter(T[:40, 0], T[:40, 1], c='orange', edgecolors='r')
for label, x, y in zip(labels, T[:40, 0], T[:40, 1]):
plt.annotate(label, xy=(x+1, y+1), xytext=(0, 0), textcoords='offset points')
# In[222]:
words =model2_skip.wv.index2word
wvs = model2_skip.wv[words]
tsne = TSNE(n_components=2, random_state=0, n_iter=5000, perplexity=2)
np.set_printoptions(suppress=True)
T = tsne.fit_transform(wvs)
labels = words
plt.figure(figsize=(24, 14))
plt.scatter(T[:40, 0], T[:40, 1], c='orange', edgecolors='r')
for label, x, y in zip(labels, T[:40, 0], T[:40, 1]):
plt.annotate(label, xy=(x+1, y+1), xytext=(0, 0), textcoords='offset points')
# In[ ]:
| python | 5,777 |
#
# Copyright 2021 Red Hat Inc.
# SPDX-License-Identifier: Apache-2.0
#
"""Models for OCP on AWS tables."""
from django.contrib.postgres.fields import ArrayField
from django.contrib.postgres.indexes import GinIndex
from django.db import models
from django.db.models import JSONField
VIEWS = (
"reporting_ocpallcostlineitem_daily_summary",
"reporting_ocpallcostlineitem_project_daily_summary",
"reporting_ocpall_compute_summary",
"reporting_ocpall_storage_summary",
"reporting_ocpall_cost_summary",
"reporting_ocpall_cost_summary_by_account",
"reporting_ocpall_cost_summary_by_region",
"reporting_ocpall_cost_summary_by_service",
"reporting_ocpall_database_summary",
"reporting_ocpall_network_summary",
)
class OCPAllCostLineItemDailySummary(models.Model):
"""A summarized view of OCP on All infrastructure cost."""
class Meta:
"""Meta for OCPAllCostLineItemDailySummary."""
db_table = "reporting_ocpallcostlineitem_daily_summary"
managed = False
indexes = [
models.Index(fields=["usage_start"], name="ocpall_usage_idx"),
models.Index(fields=["namespace"], name="ocpall_namespace_idx"),
models.Index(fields=["node"], name="ocpall_node_idx", opclasses=["varchar_pattern_ops"]),
models.Index(fields=["resource_id"], name="ocpall_resource_idx"),
GinIndex(fields=["tags"], name="ocpall_tags_idx"),
models.Index(fields=["product_family"], name="ocpall_product_family_idx"),
models.Index(fields=["instance_type"], name="ocpall_instance_type_idx"),
# A GIN functional index named "ocpall_product_code_ilike" was created manually
# via RunSQL migration operation
# Function: (upper(product_code) gin_trgm_ops)
# A GIN functional index named "ocpall_product_family_ilike" was created manually
# via RunSQL migration operation
# Function: (upper(product_family) gin_trgm_ops)
]
id = models.IntegerField(primary_key=True)
# The infrastructure provider type
source_type = models.TextField()
# OCP Fields
cluster_id = models.CharField(max_length=50, null=True)
cluster_alias = models.CharField(max_length=256, null=True)
# Kubernetes objects by convention have a max name length of 253 chars
namespace = ArrayField(models.CharField(max_length=253, null=False))
node = models.CharField(max_length=253, null=True)
resource_id = models.CharField(max_length=253, null=True)
usage_start = models.DateField(null=False)
usage_end = models.DateField(null=False)
# Infrastructure source fields
usage_account_id = models.CharField(max_length=50, null=False)
account_alias = models.ForeignKey("AWSAccountAlias", on_delete=models.SET_NULL, null=True)
product_code = models.CharField(max_length=50, null=False)
product_family = models.CharField(max_length=150, null=True)
instance_type = models.CharField(max_length=50, null=True)
region = models.CharField(max_length=50, null=True)
availability_zone = models.CharField(max_length=50, null=True)
tags = JSONField(null=True)
usage_amount = models.DecimalField(max_digits=24, decimal_places=9, null=True)
unit = models.CharField(max_length=63, null=True)
# Cost breakdown can be done by cluster, node, project, and pod.
# Cluster and node cost can be determined by summing the AWS unblended_cost
# with a GROUP BY cluster/node.
# Project cost is a summation of pod costs with a GROUP BY project
# The cost of un-utilized resources = sum(unblended_cost) - sum(project_cost)
unblended_cost = models.DecimalField(max_digits=30, decimal_places=15, null=True)
markup_cost = models.DecimalField(max_digits=30, decimal_places=15, null=True)
currency_code = models.CharField(max_length=10, null=True)
# This is a count of the number of projects that share an AWS resource
# It is used to divide cost evenly among projects
shared_projects = models.IntegerField(null=False, default=1)
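    # e.g. a line item with unblended_cost 12.00 shared by 3 projects is
    # apportioned 4.00 to each project (illustrative figures).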
source_uuid = models.UUIDField(unique=False, null=True)
tags_hash = models.TextField(max_length=512)
# Materialized Views for UI Reporting
class OCPAllCostSummary(models.Model):
"""A MATERIALIZED VIEW specifically for UI API queries.
This table gives a daily breakdown of total cost.
"""
class Meta:
"""Meta for OCPAllCostSummary."""
db_table = "reporting_ocpall_cost_summary"
managed = False
id = models.IntegerField(primary_key=True)
usage_start = models.DateField(null=False)
usage_end = models.DateField(null=False)
cluster_id = models.CharField(max_length=50, null=True)
cluster_alias = models.CharField(max_length=256, null=True)
unblended_cost = models.DecimalField(max_digits=24, decimal_places=9, null=True)
markup_cost = models.DecimalField(max_digits=24, decimal_places=9, null=True)
currency_code = models.CharField(max_length=10)
source_uuid = models.UUIDField(unique=False, null=True)
class OCPAllCostSummaryByAccount(models.Model):
"""A MATERIALIZED VIEW specifically for UI API queries.
This table gives a daily breakdown of total cost by account.
"""
class Meta:
"""Meta for OCPAllCostSummaryByAccount."""
db_table = "reporting_ocpall_cost_summary_by_account"
managed = False
id = models.IntegerField(primary_key=True)
usage_start = models.DateField(null=False)
usage_end = models.DateField(null=False)
cluster_id = models.CharField(max_length=50, null=True)
cluster_alias = models.CharField(max_length=256, null=True)
usage_account_id = models.CharField(max_length=50, null=False)
account_alias = models.ForeignKey("AWSAccountAlias", on_delete=models.DO_NOTHING, null=True)
unblended_cost = models.DecimalField(max_digits=24, decimal_places=9, null=True)
markup_cost = models.DecimalField(max_digits=24, decimal_places=9, null=True)
currency_code = models.CharField(max_length=10)
source_uuid = models.UUIDField(unique=False, null=True)
class OCPAllCostSummaryByService(models.Model):
"""A MATERIALIZED VIEW specifically for UI API queries.
This table gives a daily breakdown of total cost by account.
"""
class Meta:
"""Meta for OCPAllCostSummaryByService."""
db_table = "reporting_ocpall_cost_summary_by_service"
managed = False
id = models.IntegerField(primary_key=True)
usage_start = models.DateField(null=False)
usage_end = models.DateField(null=False)
cluster_id = models.CharField(max_length=50, null=True)
cluster_alias = models.CharField(max_length=256, null=True)
usage_account_id = models.CharField(max_length=50, null=False)
account_alias = models.ForeignKey("AWSAccountAlias", on_delete=models.DO_NOTHING, null=True)
product_code = models.CharField(max_length=50, null=False)
product_family = models.CharField(max_length=150, null=True)
unblended_cost = models.DecimalField(max_digits=24, decimal_places=9, null=True)
markup_cost = models.DecimalField(max_digits=24, decimal_places=9, null=True)
currency_code = models.CharField(max_length=10)
source_uuid = models.UUIDField(unique=False, null=True)
class OCPAllCostSummaryByRegion(models.Model):
"""A MATERIALIZED VIEW specifically for UI API queries.
This table gives a daily breakdown of total cost by region.
"""
class Meta:
"""Meta for OCPAllCostSummaryByRegion."""
db_table = "reporting_ocpall_cost_summary_by_region"
managed = False
id = models.IntegerField(primary_key=True)
usage_start = models.DateField(null=False)
usage_end = models.DateField(null=False)
cluster_id = models.CharField(max_length=50, null=True)
cluster_alias = models.CharField(max_length=256, null=True)
usage_account_id = models.CharField(max_length=50, null=False)
account_alias = models.ForeignKey("AWSAccountAlias", on_delete=models.DO_NOTHING, null=True)
region = models.CharField(max_length=50, null=True)
availability_zone = models.CharField(max_length=50, null=True)
unblended_cost = models.DecimalField(max_digits=24, decimal_places=9, null=True)
markup_cost = models.DecimalField(max_digits=24, decimal_places=9, null=True)
currency_code = models.CharField(max_length=10)
source_uuid = models.UUIDField(unique=False, null=True)
class OCPAllComputeSummary(models.Model):
"""A summarized view of OCP on All infrastructure cost for products in the compute service category."""
class Meta:
"""Meta for OCPAllComputeSummary."""
db_table = "reporting_ocpall_compute_summary"
managed = False
id = models.IntegerField(primary_key=True)
cluster_id = models.CharField(max_length=50, null=True)
cluster_alias = models.CharField(max_length=256, null=True)
usage_account_id = models.CharField(max_length=50, null=False)
account_alias = models.ForeignKey("AWSAccountAlias", on_delete=models.DO_NOTHING, null=True)
usage_start = models.DateField(null=False)
usage_end = models.DateField(null=False)
product_code = models.CharField(max_length=50, null=False)
instance_type = models.CharField(max_length=50)
resource_id = models.CharField(max_length=253)
usage_amount = models.DecimalField(max_digits=30, decimal_places=15, null=True)
unit = models.CharField(max_length=63, null=True)
unblended_cost = models.DecimalField(max_digits=30, decimal_places=15, null=True)
markup_cost = models.DecimalField(max_digits=30, decimal_places=15, null=True)
currency_code = models.CharField(max_length=10, null=True)
source_uuid = models.UUIDField(unique=False, null=True)
class OCPAllDatabaseSummary(models.Model):
"""A summarized view of OCP on All infrastructure cost for products in the database service category."""
class Meta:
"""Meta for OCPAllDatabaseSummary."""
db_table = "reporting_ocpall_database_summary"
managed = False
id = models.IntegerField(primary_key=True)
# OCP Fields
cluster_id = models.CharField(max_length=50, null=True)
cluster_alias = models.CharField(max_length=256, null=True)
usage_account_id = models.CharField(max_length=50, null=False)
account_alias = models.ForeignKey("AWSAccountAlias", on_delete=models.DO_NOTHING, null=True)
usage_start = models.DateField(null=False)
usage_end = models.DateField(null=False)
product_code = models.CharField(max_length=50, null=False)
usage_amount = models.DecimalField(max_digits=30, decimal_places=15, null=True)
unit = models.CharField(max_length=63, null=True)
unblended_cost = models.DecimalField(max_digits=30, decimal_places=15, null=True)
markup_cost = models.DecimalField(max_digits=30, decimal_places=15, null=True)
currency_code = models.CharField(max_length=10, null=True)
source_uuid = models.UUIDField(unique=False, null=True)
class OCPAllNetworkSummary(models.Model):
"""A summarized view of OCP on All infrastructure cost for products in the network service category."""
class Meta:
"""Meta for OCPAllNetworkSummary."""
db_table = "reporting_ocpall_network_summary"
managed = False
id = models.IntegerField(primary_key=True)
cluster_id = models.CharField(max_length=50, null=True)
cluster_alias = models.CharField(max_length=256, null=True)
usage_account_id = models.CharField(max_length=50, null=False)
account_alias = models.ForeignKey("AWSAccountAlias", on_delete=models.DO_NOTHING, null=True)
usage_start = models.DateField(null=False)
usage_end = models.DateField(null=False)
product_code = models.CharField(max_length=50, null=False)
usage_amount = models.DecimalField(max_digits=30, decimal_places=15, null=True)
unit = models.CharField(max_length=63, null=True)
unblended_cost = models.DecimalField(max_digits=30, decimal_places=15, null=True)
markup_cost = models.DecimalField(max_digits=30, decimal_places=15, null=True)
currency_code = models.CharField(max_length=10, null=True)
source_uuid = models.UUIDField(unique=False, null=True)
class OCPAllStorageSummary(models.Model):
"""A summarized view of OCP on All infrastructure cost for products in the storage service category."""
class Meta:
"""Meta for OCPAllStorageSummary."""
db_table = "reporting_ocpall_storage_summary"
managed = False
id = models.IntegerField(primary_key=True)
# OCP Fields
cluster_id = models.CharField(max_length=50, null=True)
cluster_alias = models.CharField(max_length=256, null=True)
usage_account_id = models.CharField(max_length=50, null=False)
account_alias = models.ForeignKey("AWSAccountAlias", on_delete=models.DO_NOTHING, null=True)
usage_start = models.DateField(null=False)
usage_end = models.DateField(null=False)
product_family = models.CharField(max_length=150, null=True)
product_code = models.CharField(max_length=50, null=False)
usage_amount = models.DecimalField(max_digits=30, decimal_places=15, null=True)
unit = models.CharField(max_length=63, null=True)
unblended_cost = models.DecimalField(max_digits=30, decimal_places=15, null=True)
markup_cost = models.DecimalField(max_digits=30, decimal_places=15, null=True)
currency_code = models.CharField(max_length=10, null=True)
source_uuid = models.UUIDField(unique=False, null=True)
class OCPAllCostLineItemProjectDailySummary(models.Model):
"""A summarized view of OCP on AWS cost by OpenShift project."""
class Meta:
"""Meta for OCPAllCostLineItemProjectDailySummary."""
db_table = "reporting_ocpallcostlineitem_project_daily_summary"
managed = False
indexes = [
models.Index(fields=["usage_start"], name="ocpall_proj_usage_idx"),
models.Index(fields=["namespace"], name="ocpall_proj_namespace_idx"),
models.Index(fields=["node"], name="ocpall_proj_node_idx"),
models.Index(fields=["resource_id"], name="ocpall_proj_resource_idx"),
GinIndex(fields=["pod_labels"], name="ocpall_proj_pod_labels_idx"),
models.Index(fields=["product_family"], name="ocpall_proj_prod_fam_idx"),
models.Index(fields=["instance_type"], name="ocpall_proj_inst_type_idx"),
]
id = models.IntegerField(primary_key=True)
# The infrastructure provider type
source_type = models.TextField()
# OCP Fields
cluster_id = models.CharField(max_length=50, null=True)
cluster_alias = models.CharField(max_length=256, null=True)
# Whether the data comes from a pod or volume report
data_source = models.CharField(max_length=64, null=True)
# Kubernetes objects by convention have a max name length of 253 chars
namespace = models.CharField(max_length=253, null=False)
node = models.CharField(max_length=253, null=True)
pod_labels = JSONField(null=True)
resource_id = models.CharField(max_length=253, null=True)
usage_start = models.DateField(null=False)
usage_end = models.DateField(null=False)
# AWS Fields
usage_account_id = models.CharField(max_length=50, null=False)
account_alias = models.ForeignKey("AWSAccountAlias", on_delete=models.SET_NULL, null=True)
product_code = models.CharField(max_length=50, null=False)
product_family = models.CharField(max_length=150, null=True)
instance_type = models.CharField(max_length=50, null=True)
region = models.CharField(max_length=50, null=True)
availability_zone = models.CharField(max_length=50, null=True)
# Need more precision on calculated fields, otherwise there will be
# Rounding errors
usage_amount = models.DecimalField(max_digits=30, decimal_places=15, null=True)
unit = models.CharField(max_length=63, null=True)
unblended_cost = models.DecimalField(max_digits=30, decimal_places=15, null=True)
project_markup_cost = models.DecimalField(max_digits=30, decimal_places=15, null=True)
pod_cost = models.DecimalField(max_digits=30, decimal_places=15, null=True)
currency_code = models.CharField(max_length=10, null=True)
source_uuid = models.UUIDField(unique=False, null=True)
| python | 16,437 |
import json
import logging
import mimetypes
import os
import re
import subprocess
from datetime import datetime
from dateutil.parser import parse
from dateutil.tz import UTC
from pie.domain import MediaFile, ScannedFile, ScannedFileType
from pie.util import MiscUtils
class ExifHelper:
__logger = logging.getLogger('ExifHelper')
@staticmethod
def create_media_file(path_exiftool: str, index_time: datetime, scanned_file: ScannedFile, existing_media_file: MediaFile) -> MediaFile:
file_path = scanned_file.file_path
exif = ExifHelper.__get_exif_dict(path_exiftool, file_path)
error_str = ExifHelper.__get_exif(exif, "Error")
exif_file_type_str = ExifHelper.__get_exif(exif, "FileType")
if error_str:
ExifHelper.__logger.error("Error processing file. EXIF: %s", exif)
if error_str == 'File is empty' or error_str == 'File format error' or 'file is binary' in error_str:
return None
if exif_file_type_str == "TXT":
ExifHelper.__logger.error("Possibly corrupt file. EXIF: %s", exif)
return None
media_file = existing_media_file if existing_media_file else MediaFile()
media_file.parent_dir_path = scanned_file.parent_dir_path
media_file.file_path = file_path
media_file.extension = scanned_file.extension
media_file.file_type = scanned_file.file_type.name
media_file.is_raw = scanned_file.is_raw
media_file.mime = ExifHelper.__get_mime(file_path, exif)
media_file.original_size = os.path.getsize(file_path)
media_file.creation_time = scanned_file.creation_time
media_file.last_modification_time = scanned_file.last_modification_time
media_file.original_file_hash = scanned_file.hash if (scanned_file.hash is not None) else MiscUtils.generate_hash(file_path)
media_file.converted_file_hash = None
media_file.conversion_settings_hash = None
media_file.index_time = index_time
        ExifHelper.__append_dimensions(media_file, exif)
media_file.capture_date = ExifHelper.__get_capture_date(scanned_file, exif)
media_file.camera_make = ExifHelper.__get_exif(exif, "Make")
media_file.camera_model = ExifHelper.__get_exif(exif, "CameraModelName", "Model")
media_file.lens_model = ExifHelper.__get_exif(exif, "LensModel", "LensType", "LensInfo")
gps_info = ExifHelper.__get_gps_info(exif)
media_file.gps_alt = gps_info.get('altitude')
media_file.gps_lat = gps_info.get('latitude')
media_file.gps_long = gps_info.get('longitude')
exif_orientation = ExifHelper.__get_exif(exif, "Orientation", "CameraOrientation")
media_file.view_rotation = ExifHelper.__get_view_rotation(exif_orientation)
media_file.image_orientation = exif_orientation
media_file.video_duration = ExifHelper.__get_video_duration(exif)
ExifHelper.__append_video_rotation(media_file, exif)
return media_file
@staticmethod
def __get_exif_dict(path_exiftool: str, file_path: str):
json = ExifHelper.__get_json_from_exiftool(path_exiftool, file_path)[0]
exif = {}
for key, value in json.items():
key_parts = key.split(":")
modified_key = key_parts[1] if len(key_parts) > 1 else key_parts[0]
exif[modified_key] = value
return exif
@staticmethod
def __get_json_from_exiftool(path_exiftool: str, filename: str):
""" Return a json value of the exif
Get a filename and return a JSON object
Arguments:
filename {string} -- your filename
Returns:
[JSON] -- Return a JSON object
"""
#Process this function
filename = os.path.abspath(filename)
output = ExifHelper.__run_exiftool_command_line([path_exiftool, '-G', '-j', '-sort', filename])
if output:
#convert bytes to string
output = output.decode('utf-8').rstrip('\r\n')
return json.loads(output)
else:
return output
@staticmethod
def __run_exiftool_command_line(cmd):
"""Handle the command line call
keyword arguments:
cmd = a list
return
0 if error
or a string for the command line output
"""
try:
output = subprocess.Popen(cmd, **MiscUtils.subprocess_args())
output = output.stdout.read()
return output.strip()
except subprocess.CalledProcessError:
return 0
@staticmethod
def __get_mime(file_path: str, exif: dict):
exif_mime = ExifHelper.__get_exif(exif, "MIMEType")
if exif_mime:
return exif_mime
else:
mime_type = mimetypes.guess_type(file_path, False)
return mime_type[0]
@staticmethod
def __get_capture_date(scanned_file: ScannedFile, exif: dict):
# Candidates: "GPSDateTime", "DateTimeOriginal", "DateTimeDigitized", "CreateDate", "CreationDate"
date_str = None
if scanned_file.file_type == ScannedFileType.IMAGE:
date_str = ExifHelper.__get_exif(exif, "DateTimeOriginal")
if scanned_file.file_type == ScannedFileType.VIDEO:
date_str = ExifHelper.__get_exif(exif, "MediaCreateDate", "TrackCreateDate")
if date_str and not re.search(": +:|0000:00:00 00:00:00", date_str):
# Possible formats are yyyy:MM:dd HH:mm / yyyy.MM.dd HH:mm:ss / iPhoneImage: yyyy.MM.dd HH:mm:ss.FFF / iPhone 5: yyyy.MM.dd HH:mm:ss.XXZ
# iPhoneVideo: yyyy.MM.dd HH:mm:sszzz, etc. To work with the automatic parser, we modify the date part a bit.
date_str_parts = date_str.split(" ")
if len(date_str_parts) > 1:
date_str_parts[0] = date_str_parts[0].replace(':', ".")
if re.match(r"^.+\.\d{1,2}Z$", date_str_parts[1]): # Removing XX from yyyy.MM.dd HH:mm:ss.XXZ
time_str_parts = re.split(r"\.", date_str_parts[1])
date_str_parts[1] = time_str_parts[0] + "Z"
date_str = " ".join(date_str_parts)
capture_datetime = parse(date_str)
return capture_datetime.astimezone(UTC) # TODO date gets messed up when timeZone is not specified. Sometimes it is local, sometimes UTC.
@staticmethod
    def __append_dimensions(media_file: MediaFile, exif: dict):
default_crop_size_str = ExifHelper.__get_exif(exif, "DefaultCropSize")
if default_crop_size_str:
default_crop_size_str_parts = default_crop_size_str.split(" ")
width = default_crop_size_str_parts[0]
height = default_crop_size_str_parts[1]
else:
width = ExifHelper.__get_exif(exif, "ImageWidth", "ExifImageWidth")
height = ExifHelper.__get_exif(exif, "ImageHeight", "ExifImageHeight")
media_file.width = int(width)
media_file.height = int(height)
@staticmethod
def __get_gps_info(exif: dict):
gps_info = {}
altitude_str = ExifHelper.__get_exif(exif, "GPSAltitude")
if altitude_str:
altitude = float(altitude_str.split(" ")[0])
altitude_ref_str = ExifHelper.__get_exif(exif, "GPSAltitudeRef")
below_sea_level = "Below Sea Level" # "Above Sea Level" is not used for anything yet
if (below_sea_level in altitude_str or (altitude_ref_str and below_sea_level in altitude_ref_str)):
altitude = altitude * -1.0
gps_info['altitude'] = altitude
longitude = ExifHelper.__gps_coordinate_str_to_float(ExifHelper.__get_exif(exif, "GPSLongitude"))
latitude = ExifHelper.__gps_coordinate_str_to_float(ExifHelper.__get_exif(exif, "GPSLatitude"))
if (longitude and latitude):
gps_info['longitude'] = longitude
gps_info['latitude'] = latitude
return gps_info
@staticmethod
def __gps_coordinate_str_to_float(coordinate_str: str):
if coordinate_str:
# Expects string to be in a format like 77 33 25.070000 or 77 33 25.070000 N or 47 deg 36' 27.90" N
coordinate_str_parts = re.sub("deg |'|\"", "", coordinate_str).split(" ")
degrees = float(coordinate_str_parts[0])
minutes = float(coordinate_str_parts[1])
seconds = float(coordinate_str_parts[2])
float_value = (degrees + (minutes / 60) + (seconds / 3600))
if len(coordinate_str_parts) == 4:
# This means direction is also present. Exceptions are possible due to software issues.
if coordinate_str_parts[3] == "S" or coordinate_str_parts[3] == "W":
float_value = float_value * -1
return float_value
return None
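    # Worked example (illustrative): a coordinate like 47 deg 36' 27.90" N becomes
    # 47 + 36/60 + 27.90/3600 = 47.60775 degrees; a trailing "S" or "W" would
    # negate the value.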
@staticmethod
def __get_view_rotation(exif_orientation: str):
rotations = {
"Horizontal (normal)": "0",
"Mirror horizontal": "!0",
"Rotate 180": "180",
"Mirror vertical": "!180",
"Mirror horizontal and rotate 270 CW": "!270",
"Rotate 90 CW": "90",
"Mirror horizontal and rotate 90 CW": "!90",
"Rotate 270 CW": "270"
}
if (exif_orientation and exif_orientation in rotations):
return rotations[exif_orientation]
else:
# viewRoation for movies is 0 since their thumbnails don't need rotation
return "0"
@staticmethod
def __get_video_duration(exif: dict):
video_duration_str = ExifHelper.__get_exif(exif, "Duration", "MediaDuration", "TrackDuration")
if video_duration_str:
if re.match(r"^\d{1,2}:\d{2}:\d{2}$", video_duration_str): # 0:00:46
duration_parts = video_duration_str.split(":")
return ((int(duration_parts[0]) * 60 * 60) + (int(duration_parts[1]) * 60) + int(duration_parts[2])) * 1000
elif re.match(r"^\d{1,2}\.\d{1,3} s$", video_duration_str): # 14.44 s
duration_parts = re.split(r"\.| ", video_duration_str)
return (int(duration_parts[0]) * 1000) + int(duration_parts[1])
else:
raise RuntimeError('Unknown video duration format: ' + video_duration_str)
return None
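    # Examples of the two formats handled above (illustrative): "0:00:46"
    # yields 46000 ms, while "14.44 s" yields 14 * 1000 + 44 = 14044 ms
    # (the digits after the dot are added as-is rather than scaled).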
@staticmethod
def __append_video_rotation(media_file: MediaFile, exif: dict):
video_rotation_str = str(ExifHelper.__get_exif(exif, "Rotation"))
media_file.video_rotation = video_rotation_str
# TODO Following logic messes with ffmpeg. Need to explore.
# if ("90" == video_rotation_str or "270" == video_rotation_str):
# # If rotation is 90 or 270 flip recorded hight/width
# temp = media_file.width
# media_file.width = media_file.height
# media_file.height = temp
@staticmethod
def __get_exif(exif: dict, *keys):
if len(keys) < 1:
raise ValueError('get_exif() takes at least 1 key for looking up exif')
for key in keys:
if key in exif:
return exif[key]
return None
| python | 11,156 |
"""RestProfile URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
urlpatterns = [
path('admin/', admin.site.urls),
path('api/', include('Profiles_api.urls')),
]
| python | 809 |
'''
Program Documentation:
Objective:
Suppose we have two files with "txt/text/xls/xlsx/xlsm" extensions. One file contains Twitter tweets by various users; the other (which must be a text file with a "txt/text" extension) contains a set of "bad" (foul) words, i.e. racial slurs and/or abuses.
The purpose of this program is to identify and output, per "@user_handle", the abusive words/racial slurs in the tweet and the degree of profanity. The degree of profanity is the ratio of the number of profane words in a tweet to the total word count of the tweet.
Args:
1. A file in txt/text/csv/xls/xlsx/xlsm format containing Twitter handles and their tweets. Can be with/without a header.
2. A text file (txt/text format) containing the "bad words" that need to be searched for - see the requirements section of this docstring for the correct way of creating this file
Outputs:
A CSV file containing the user handles, original tweets, abuses/racial slurs, and degree of profanity
Reqs:
1. The following libraries are needed for this program:
a. pandas
b. openpyxl
2. The file containing tweets should be a single column file with the Twitter handles of the users (posting the tweets) and the tweet itself, i.e., each line has the format, "@user_handle tweet". See file "tweets.txt" for correct format.
3. The file containing the abusive/racial slurs should have one "bad" word per line. See file "search_words.txt" for correct format.
'''
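# Illustrative example of the metric (hypothetical tweet): a 12-word tweet
# containing 2 words from the search list gets "Num. Foul Words" = 2,
# "Total Word Count" = 12 and "Deg. Profanity" = 2/12 = 0.1667 (approx.).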
import re, os, sys, platform
import pandas as pd
import openpyxl
from platform import python_version
env_path = sys.executable
py_version = python_version()
print()
print("working env: %s" % (env_path))
print("python --version: %s" %(py_version))
print("re --version: %s" % (re.__version__))
print("pandas --version: %s" % (pd.__version__))
print("openpyxl --version: %s" % (openpyxl.__version__))
print()
tweet_file_name = input("give name of the tweet file: ")
print()
abuses_file_name = input("give name of the search (abuses) file: ")
print()
def get_file_path(file_name):
pwd = os.getcwd()
abs_file_path = []
for root, dir, files in os.walk(pwd):
if file_name in files:
abs_file_path.append(os.path.join(root, file_name))
return abs_file_path[0]
def get_input_file(header, file_path, file_ext):
header = header.lower()
if header == 'y':
header_option = "infer"
else:
header_option = None
if file_ext in ["txt", "text", "csv"]:
input_file = pd.read_csv(file_path, header=header_option, \
delimiter="\n")
elif file_ext == "xls":
input_file = pd.read_excel(file_path, header=header_option)
else:
        input_file = pd.read_excel(file_path, header=header_option, \
engine="openpyxl")
return input_file
tweet_file_path = get_file_path(tweet_file_name)
abuses_file_path = get_file_path(abuses_file_name)
print("path of the tweet file: %s" %(tweet_file_path))
print()
print("path of the search (abuses) file: %s" %(abuses_file_path))
print()
if not os.path.isfile(tweet_file_path):
    print("path of tweet file incorrect - check file path and try again!")
    sys.exit()
elif not os.path.isfile(abuses_file_path):
print("path of search (abuses) file incorrect - check file path and try again!")
sys.exit()
else:
if abuses_file_path.split("\\")[-1].split(".")[-1] not in ['txt', "text"]:
print("file with abuses/racial slurs must be a text file with extension txt/text - create file with correct extension and try again")
sys.exit()
else:
if os.stat(abuses_file_path).st_size > 0:
with open(abuses_file_path, "r") as f_bad_words:
list_of_abuses = []
for line in f_bad_words:
line = line.strip("\n")
list_of_abuses.append(line)
if os.stat(tweet_file_path).st_size > 0:
tweet_file_ext = tweet_file_path.split("\\")[-1].split(".")[-1]
allowed_extensions = ["txt", "csv", "xls", "xlsm", "xlsx"]
if tweet_file_ext not in allowed_extensions:
                print(f'input file extension is not allowed - allowed extensions: {allowed_extensions}\n')
print("convert file to allowed extension only and try again!")
print()
sys.exit()
else:
file_header_option = input("does the tweet file has any header? [y/n] ")
print()
input_file = get_input_file(file_header_option, tweet_file_path, tweet_file_ext)
num_cols = input_file.shape[1]
old_col0 = input_file.columns.to_list()[0]
input_file = input_file.rename(columns={old_col0: "Tweets"})
print("first 3 lines of the input file:")
print(input_file.head(3))
print()
print("tweet file has %d rows and %d columns" %(input_file.shape[0], input_file.shape[1]))
input_file["User"] = input_file["Tweets"].apply(lambda x: [x for x \
in x.split(" ") if x.startswith("@")][0])
input_file["User"] = input_file["User"].apply(lambda x: str(x))
input_file["Tweets"] = input_file["Tweets"].apply(lambda x: [x for x \
in x.split(" ") ][1:])
input_file["Tweets"] = input_file["Tweets"].apply(lambda x: " ".join(x))
def search_foul_words(x):
tmp_list = []
for y in list_of_abuses:
if re.search(y, x, re.IGNORECASE) is not None:
tmp_list.append(y)
else:
pass
another_tmp_list = []
for item in tmp_list:
if len(item) == 0:
pass
else:
another_tmp_list.append(item)
foul_words = ", ".join(another_tmp_list)
return foul_words
input_file["Foul Words"] = input_file["Tweets"].apply(search_foul_words)
input_file.insert(0, "User Handle", input_file["User"].values)
input_file = input_file.drop(["User"], axis=1)
input_file["Num. Foul Words"] = input_file["Foul Words"].apply(\
lambda x: len(re.findall(r'\w+', x)))
input_file['Total Word Count'] = input_file['Tweets'].apply(lambda x: \
len(re.findall(r'\w+', x)))
input_file["Deg. Profanity"] = input_file["Num. Foul Words"]/input_file["Total Word Count"]
print("final output after analysis:")
print(input_file)
print()
input_file.to_csv("tweets_analysis.csv", index=False) | python | 6,901 |
# web_app/classifier.py
import os
import pickle
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression # for example
MODEL_FILEPATH = os.path.join(os.path.dirname(__file__), "..", "models", "latest_model.pkl")
def train_and_save_model():
print("TRAINING THE MODEL...")
X, y = load_iris(return_X_y=True)
#print(type(X), X.shape) #> <class 'numpy.ndarray'> (150, 4)
#print(type(y), y.shape) #> <class 'numpy.ndarray'> (150,)
classifier = LogisticRegression() # for example
classifier.fit(X, y)
print("SAVING THE MODEL...")
# save our classifier model to a new filepath we specified above
# "wb" stands for: "write binary"
with open(MODEL_FILEPATH, "wb") as model_file:
pickle.dump(classifier, model_file)
return classifier
def load_model():
print("LOADING THE MODEL...")
# "rb" stands for read binary
with open(MODEL_FILEPATH, "rb") as model_file:
saved_model = pickle.load(model_file)
return saved_model
if __name__ == "__main__":
train_and_save_model()
clf = load_model()
print("CLASSIFIER:", clf)
breakpoint()
X, y = load_iris(return_X_y=True) # just to have some data to use when predicting
inputs = X[:2, :]
print(type(inputs), inputs)
result = clf.predict(inputs)
print("RESULT:", result) | python | 1,357 |
#!/usr/bin/env python3
#----------------------------------------------------------------------------------------------------------------------#
# #
# Tuplex: Blazing Fast Python Data Science #
# #
# #
# (c) 2017 - 2021, Tuplex team #
# Created by Leonhard Spiegelberg first on 1/1/2021 #
# License: Apache 2.0 #
#----------------------------------------------------------------------------------------------------------------------#
import types
import inspect
import re
# ALWAYS import cloudpickle before dill, b.c. of https://github.com/uqfoundation/dill/issues/383
import cloudpickle
import dill
import ast
import weakref
import dis
import opcode
import types
import itertools
import sys
from tuplex.utils.errors import TuplexException
from tuplex.utils.globs import get_globals
from tuplex.utils.source_vault import SourceVault, supports_lambda_closure
from tuplex.utils.common import in_jupyter_notebook, in_google_colab, is_in_interactive_mode
# only export get_source function, rest shall be private.
__all__ = ['get_source', 'get_globals', 'supports_lambda_closure']
def get_jupyter_raw_code(function_name):
# ignore here unresolved reference
history_manager = get_ipython().history_manager
hist = history_manager.get_range()
regex = r"def\s*{}\(.*\)\s*:[\t ]*\n".format(function_name)
signature = 'hist = history_manager.get_range()'
prog = re.compile(regex)
matched_cells = []
for session, lineno, inline in hist:
test_str = inline
# skip history referring to this
if signature in inline:
continue
if 'get_function_code' in inline:
continue
if prog.search(test_str):
matched_cells.append((session, lineno, inline))
return matched_cells[-1][2]
def extractFunctionByName(code, func_name, return_linenos=False):
class FunctionVisitor(ast.NodeVisitor):
def __init__(self):
self.lastStmtLineno = 0
self.funcInfo = []
def visit_FunctionDef(self, node):
print(self.lastStmtLineno)
self.generic_visit(node)
print(self.lastStmtLineno)
def visit(self, node):
funcStartLineno = -1
if hasattr(node, 'lineno'):
self.lastStmtLineno = node.lineno
if isinstance(node, ast.FunctionDef):
funcStartLineno = node.lineno
self.generic_visit(node)
if isinstance(node, ast.FunctionDef):
self.funcInfo.append({'name': node.name,
'start': funcStartLineno - 1,
'end': self.lastStmtLineno - 1})
root = ast.parse(code)
fv = FunctionVisitor()
fv.visit(root)
# find function with name
candidates = filter(lambda x: x['name'] == func_name, fv.funcInfo)
def indent(s):
return len(s) - len(s.lstrip(' \t'))
lines = code.split('\n')
# find out level
candidates = map(lambda x: {**x, 'level': indent(lines[x['start']])}, candidates)
info = sorted(candidates, key=lambda x: x['level'])[0]
func_code = '\n'.join(lines[info['start']:info['end'] + 1])
if return_linenos:
return func_code, info['start'], info['end']
else:
return func_code
def extract_function_code(function_name, raw_code):
# remove greedily up to num_tabs and num_spaces
def remove_tabs_and_spaces(line, num_tabs, num_spaces):
t = 0
s = 0
pos = 0
while pos < len(line):
c = line[pos]
if c == ' ':
s += 1
elif c == '\t':
t += 1
else:
break
pos += 1
return ' ' * max(s - num_spaces, 0) + '\t' * max(t - num_tabs, 0) + line[pos:]
# remove leading spaces / tabs
assert len(raw_code) >= 1
# let's first check whether the function starts that needs to be extracted
regex = r"[\t ]*def\s*{}\(.*\)\s*:[\t ]*\n".format(function_name)
start_idx = 0
for match in re.finditer(regex, raw_code, re.MULTILINE):
start_idx = match.start()
first_line = raw_code[start_idx:]
first_line_num_tabs = len(first_line) - len(first_line.lstrip('\t'))
first_line_num_spaces = len(first_line) - len(first_line.lstrip(' '))
func_lines = [remove_tabs_and_spaces(line, first_line_num_tabs, first_line_num_spaces) \
for line in raw_code[start_idx:].split('\n')]
# greedily remove for each line tabs/spaces
out = '\n'.join(func_lines)
return extractFunctionByName(out, function_name)
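# Rough usage sketch (the function name "clean" is hypothetical):
#
#     raw = get_jupyter_raw_code("clean")         # last notebook cell defining it
#     src = extract_function_code("clean", raw)   # dedented `def clean(...)` block
#
# get_function_code below wires these two steps together for notebook and
# non-notebook environments.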
def get_function_code(f):
""" jupyter notebook, retrieve function history """
assert isinstance(f, types.FunctionType)
function_name = f.__code__.co_name
assert isinstance(function_name, str)
if in_jupyter_notebook() or in_google_colab():
return extract_function_code(function_name, get_jupyter_raw_code(function_name))
else:
return extract_function_code(function_name, dill.source.getsource(f))
vault = SourceVault()
def get_source(f):
""" Jupyter notebook code reflection """
if isinstance(f, types.FunctionType):
# lambda function?
# use inspect module
# need to clean out lambda...
if f.__name__ == '<lambda>':
# interpreter in interactive mode or not?
# beware jupyter notebook also returns true for interactive mode!
if is_in_interactive_mode() and not in_jupyter_notebook() and not in_google_colab():
# import here, avoids also trouble with jupyter notebooks
from tuplex.utils.interactive_shell import TuplexShell
# for this to work, a dummy shell has to be instantiated
# through which all typing occurs. Thus, the history can
# be properly captured for source code lookup.
# shell is a borg object, i.e. singleton alike behaviour
shell = TuplexShell()
return shell.get_lambda_source(f)
else:
# does lambda have globals?
# if yes, then extract won't work IFF there's more than one lambda per line!
# => display warning then.
# => change hashing method...
f_globs = get_globals(f)
f_filename = f.__code__.co_filename
f_lineno = f.__code__.co_firstlineno
f_colno = f.__code__.co_firstcolno if hasattr(f.__code__, 'co_firstcolno') else None
# special case: some unknown jupyter magic has been used...
if (in_jupyter_notebook() or in_google_colab()) and (f_filename == '<timed exec>' or f_filename == '<timed eval>'):
raise TuplexException('%%time magic not supported for Tuplex code')
src_info = inspect.getsourcelines(f)
vault.extractAndPutAllLambdas(src_info,
f_filename,
f_lineno,
f_colno,
f_globs)
return vault.get(f, f_filename, f_lineno, f_colno, f_globs)
else:
# works always, because functions can be only defined on a single line!
return get_function_code(f)
else:
# TODO: for constants, create dummy source code, i.e. lambda x: 20
# when desired to retrieve a constant or so!
return '' | python | 8,234 |
from django.db import models
from django.contrib.auth.models import AbstractBaseUser, BaseUserManager, \
PermissionsMixin
class UserManager (BaseUserManager):
def create_user(self, email, password=None, **extra_fields):
"""Creates and save new user"""
if not email:
raise ValueError('User must have an email address')
user = self.model(email=self.normalize_email(email), **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, email, password):
"""Creates and saves new superuser"""
user = self.create_user(email, password)
user.is_staff = True
user.is_superuser = True
user.save(using=self._db)
return user
class User(AbstractBaseUser, PermissionsMixin):
""" Custom user model that supports using email instead of username """
email = models.EmailField(max_length=254, unique=True)
name = models.CharField(max_length=255)
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
objects = UserManager()
USERNAME_FIELD = 'email'
| python | 1,213 |
reddit_data = {
"post": {
"title": [],
"flair": [],
"created_at": [],
"body": [],
"url": [],
"score": []
},
"comment": {
"body": [],
"created_at": [],
"score": []
}
} | python | 210 |
import typing
import sys
from math import gcd
def solve(
ab: typing.Iterator[
typing.Tuple[int, int],
],
) -> typing.NoReturn:
s = {(0, 0)}
g = 0
for a, b in ab:
ns = set()
for gx, gy in s:
ns.add((
gcd(gx, a),
gcd(gy, b),
))
ns.add((
gcd(gx, b),
gcd(gy, a),
))
s = ns
g = gcd(gcd(g, a), b)
mx = 0
for gx, gy in s:
mx = max(mx, gx * gy)
print(mx // g)
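# The set `s` is a DP over states (gcd of group A, gcd of group B), assigning
# one element of each pair to each group in either order; the answer is the
# best gx * gy over reachable states divided by the gcd of all values.
# Illustrative run: the single pair (4, 6) gives states {(4, 6), (6, 4)},
# so mx = 24, g = 2 and 12 is printed.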
def main() -> typing.NoReturn:
n = int(input())
ab = map(
int,
sys.stdin.read().split(),
)
ab = zip(*[ab] * 2)
solve(ab)
main() | python | 603 |
import io
from unittest import TestCase
from followthemoney import model
from followthemoney.types import registry
from followthemoney.export.graph import NXGraphExporter
ENTITIES = [
{
'id': 'person',
'schema': 'Person',
'properties': {
'name': 'Ralph Tester',
'birthDate': '1972-05-01',
'idNumber': ['9177171', '8e839023'],
'website': 'https://ralphtester.me',
'phone': '+12025557612',
'email': '[email protected]'
}
},
{
'id': 'sanction',
'schema': 'Sanction',
'properties': {
'entity': 'person',
'program': 'Hateys'
}
},
{
'id': 'company',
'schema': 'Company',
'properties': {
'name': 'Ralph Industries, Inc.',
}
},
{
'id': 'owner',
'schema': 'Ownership',
'properties': {
'startDate': '2003-04-01',
'owner': 'person',
'asset': 'company'
}
}
]
class ExportTestCase(TestCase):
def test_nxgraph_simple(self):
sio = io.StringIO()
exporter = NXGraphExporter(sio)
for entity in ENTITIES:
proxy = model.get_proxy(entity)
exporter.write(proxy)
self.assertEqual(len(exporter.graph.nodes), 3)
self.assertEqual(len(exporter.graph.edges), 2)
exporter.finalize()
value = sio.getvalue()
assert len(value), len(value)
def test_nxgraph_full(self):
sio = io.StringIO()
edge_types = (registry.entity.name,
registry.email.name,
registry.phone.name,)
exporter = NXGraphExporter(sio, edge_types=edge_types)
for entity in ENTITIES:
proxy = model.get_proxy(entity)
exporter.write(proxy)
self.assertEqual(len(exporter.graph.nodes), 5)
self.assertEqual(len(exporter.graph.edges), 4)
| python | 1,989 |
from __future__ import unicode_literals
from django.apps import AppConfig
class RendafixaConfig(AppConfig):
name = 'rendafixa'
| python | 134 |
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim | python | 94 |
import unittest
from arangodb.api import Database
from arangodb.orm.fields import DictField
from arangodb.orm.models import CollectionModel
class DictFieldTestCase(unittest.TestCase):
def setUp(self):
self.database_name = 'test_case_dict_field_123'
self.db = Database.create(name=self.database_name)
def tearDown(self):
Database.remove(name=self.database_name)
def test_field_not_null_without_default(self):
class TestModel(CollectionModel):
dict_field = DictField(null=False)
# Init collections
TestModel.init()
# Create model
model = TestModel()
model.dict_field = {
'test_1': 'foo',
'test_2': 'bar',
}
model.save()
documents = TestModel.collection_instance.documents()
self.assertEqual(len(documents), 1)
doc1 = documents[0]
self.assertTrue(isinstance(doc1.dict_field, dict))
self.assertTrue('test_1' in doc1.dict_field)
self.assertEqual(doc1.dict_field['test_1'], 'foo')
self.assertTrue('test_2' in doc1.dict_field)
self.assertEqual(doc1.dict_field['test_2'], 'bar')
# Destroy
TestModel.destroy()
def test_field_with_special_values(self):
class TestModel(CollectionModel):
dict_field = DictField(null=False)
# Init collections
TestModel.init()
# Create model
model = TestModel()
model.dict_field = {
'number': 13,
'a_dict': {'test': 'foo'},
'a_list': [50, 60]
}
model.save()
documents = TestModel.collection_instance.documents()
self.assertEqual(len(documents), 1)
doc1 = documents[0]
self.assertTrue(isinstance(doc1.dict_field, dict))
self.assertEqual(len(doc1.dict_field.keys()), 3)
val1 = doc1.dict_field['number']
val2 = doc1.dict_field['a_dict']
val3 = doc1.dict_field['a_list']
self.assertTrue(isinstance(val1, int))
self.assertTrue(isinstance(val2, dict))
self.assertTrue(isinstance(val3, list))
# Destroy
TestModel.destroy() | python | 2,197 |
# Generated by Django 3.1.1 on 2020-09-28 14:13
from django.db import migrations
def create_cabins(apps, schema_editor):
Cabin = apps.get_model("cabins", "Cabin")
Cabin.objects.create(name="Oksen")
Cabin.objects.create(name="Bjørnen")
class Migration(migrations.Migration):
dependencies = [
("cabins", "0001_initial"),
]
    operations = [migrations.RunPython(create_cabins)]
| python | 376 |
"""Hisense TV config flow."""
import json
from json.decoder import JSONDecodeError
import logging
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components import mqtt
from homeassistant.const import CONF_MAC, CONF_NAME, CONF_PIN
from homeassistant.data_entry_flow import FlowResult
from .const import (
CONF_MQTT_IN,
CONF_MQTT_OUT,
DEFAULT_CLIENT_ID,
DEFAULT_MQTT_PREFIX,
DEFAULT_NAME,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
class HisenseTvFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Hisense TV config flow."""
VERSION = 1
task_mqtt = None
task_auth = None
def __init__(self):
"""Initialize the config flow."""
self._mac = None
self._name = None
self._mqtt_in = None
self._mqtt_out = None
self._unsubscribe_auth = None
self._unsubscribe_sourcelist = None
async def _async_pin_needed(self, message):
_LOGGER.debug("_async_pin_needed")
self._unsubscribe()
self.task_auth = False
self.hass.async_create_task(
self.hass.config_entries.flow.async_configure(flow_id=self.flow_id)
)
async def _async_pin_not_needed(self, message):
_LOGGER.debug("_async_pin_not_needed")
self._unsubscribe()
self.task_auth = True
self.hass.async_create_task(
self.hass.config_entries.flow.async_configure(flow_id=self.flow_id)
)
async def _async_authcode_response(self, message):
self._unsubscribe()
try:
payload = json.loads(message.payload)
except JSONDecodeError:
payload = {}
_LOGGER.debug("_async_authcode_respone %s", payload)
self.task_auth = payload.get("result") == 1
self.hass.async_create_task(
self.hass.config_entries.flow.async_configure(flow_id=self.flow_id)
)
def _unsubscribe(self):
if self._unsubscribe_auth is not None:
self._unsubscribe_auth()
self._unsubscribe_auth = None
if self._unsubscribe_sourcelist is not None:
self._unsubscribe_sourcelist()
self._unsubscribe_sourcelist = None
async def async_step_user(self, user_input) -> FlowResult:
if self.task_auth is True:
return self.async_show_progress_done(next_step_id="finish")
if self.task_auth is False:
self.task_auth = None
return self.async_show_progress_done(next_step_id="auth")
if user_input is None:
_LOGGER.debug("async_step_user INFO None")
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_NAME, default=DEFAULT_NAME): str,
vol.Required(CONF_MAC): str,
vol.Optional(CONF_MQTT_IN, default=DEFAULT_MQTT_PREFIX): str,
vol.Optional(CONF_MQTT_OUT, default=DEFAULT_MQTT_PREFIX): str,
}
),
)
_LOGGER.debug("async_step_user NOT task_mqtt")
self.task_mqtt = {
CONF_MAC: user_input.get(CONF_MAC),
CONF_NAME: user_input.get(CONF_NAME),
CONF_MQTT_IN: user_input.get(CONF_MQTT_IN),
CONF_MQTT_OUT: user_input.get(CONF_MQTT_OUT),
}
await self._check_authentication(client_id=DEFAULT_CLIENT_ID)
return self.async_show_progress(
step_id="user",
progress_action="progress_action",
)
async def _check_authentication(self, client_id):
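        # Probe the TV over MQTT: the two publishes below ask for its state and
        # source list. A reply on the "authentication" data topic means a PIN is
        # required (_async_pin_needed); a reply on the "sourcelist" data topic
        # means this client is already paired (_async_pin_not_needed).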
self._unsubscribe_auth = await mqtt.async_subscribe(
hass=self.hass,
topic="%s/remoteapp/mobile/%s/ui_service/data/authentication"
% (self.task_mqtt.get(CONF_MQTT_IN), client_id),
msg_callback=self._async_pin_needed,
)
self._unsubscribe_sourcelist = await mqtt.async_subscribe(
hass=self.hass,
topic="%s/remoteapp/mobile/%s/ui_service/data/sourcelist"
% (self.task_mqtt.get(CONF_MQTT_IN), client_id),
msg_callback=self._async_pin_not_needed,
)
mqtt.publish(
hass=self.hass,
topic="%s/remoteapp/tv/ui_service/%s/actions/gettvstate"
% (self.task_mqtt.get(CONF_MQTT_OUT), client_id),
payload="",
)
mqtt.publish(
hass=self.hass,
topic="%s/remoteapp/tv/ui_service/%s/actions/sourcelist"
% (self.task_mqtt.get(CONF_MQTT_OUT), client_id),
payload="",
)
async def async_step_reauth(self, user_input=None):
"""Reauth handler."""
self.task_auth = None
return await self.async_step_auth(user_input=user_input)
async def async_step_auth(self, user_input=None):
"""Auth handler."""
if self.task_auth is True:
_LOGGER.debug("async_step_auth finish")
return self.async_show_progress_done(next_step_id="finish")
if self.task_auth is False:
_LOGGER.debug("async_step_auth reauth")
return self.async_show_progress_done(next_step_id="reauth")
if user_input is None:
self.task_auth = None
_LOGGER.debug("async_step_auth show form")
return self.async_show_form(
step_id="auth",
data_schema=vol.Schema(
{
vol.Required(CONF_PIN): int,
}
),
)
else:
_LOGGER.debug("async_step_auth send authentication")
client_id = DEFAULT_CLIENT_ID
self._unsubscribe_auth = await mqtt.async_subscribe(
hass=self.hass,
topic="%s/remoteapp/mobile/%s/ui_service/data/authenticationcode"
% (self.task_mqtt.get(CONF_MQTT_IN), client_id),
msg_callback=self._async_authcode_response,
)
payload = json.dumps({"authNum": user_input.get(CONF_PIN)})
mqtt.publish(
hass=self.hass,
topic="%s/remoteapp/tv/ui_service/%s/actions/authenticationcode"
% (self.task_mqtt.get(CONF_MQTT_OUT), client_id),
payload=payload,
)
return self.async_show_progress(
step_id="auth",
progress_action="progress_action",
)
async def async_step_finish(self, user_input=None):
"""Finish config flow."""
_LOGGER.debug("async_step_finish")
        return self.async_create_entry(title=self.task_mqtt.get(CONF_NAME), data=self.task_mqtt)
async def async_step_import(self, data):
"""Handle import from YAML."""
_LOGGER.debug("async_step_import")
return self.async_create_entry(title=data[CONF_NAME], data=data)
| python | 6,943 |
from typing import Dict, Optional
from great_expectations.core import ExpectationConfiguration
from great_expectations.exceptions import InvalidExpectationConfigurationError
from great_expectations.execution_engine import ExecutionEngine
from great_expectations.expectations.expectation import TableExpectation
from great_expectations.render.renderer.renderer import renderer
from great_expectations.render.types import RenderedStringTemplateContent
from great_expectations.render.util import substitute_none_for_missing
class ExpectTableColumnsToMatchSet(TableExpectation):
metric_dependencies = ("table.columns",)
success_keys = (
"column_set",
"exact_match",
)
default_kwarg_values = {
"column_set": None,
"exact_match": True,
"result_format": "BASIC",
"include_config": True,
"catch_exceptions": False,
}
def validate_configuration(self, configuration: Optional[ExpectationConfiguration]):
"""
Validates that a configuration has been set, and sets a configuration if it has yet to be set. Ensures that
necessary configuration arguments have been provided for the validation of the expectation.
Args:
configuration (OPTIONAL[ExpectationConfiguration]): \
An optional Expectation Configuration entry that will be used to configure the expectation
Returns:
True if the configuration has been validated successfully. Otherwise, raises an exception
"""
# Setting up a configuration
super().validate_configuration(configuration)
if configuration is None:
configuration = self.configuration
# Ensuring that a proper value has been provided
try:
assert "column_set" in configuration.kwargs, "column_set is required"
assert (
isinstance(configuration.kwargs["column_set"], (list, set))
or configuration.kwargs["column_set"] is None
), "column_set must be a list, set, or None"
except AssertionError as e:
raise InvalidExpectationConfigurationError(str(e))
return True
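    # An illustrative configuration (assumed example, not taken from the library
    # docs) that passes the checks above: "column_set" is a list/set of column
    # names and "exact_match" controls whether extra columns are tolerated.
    #
    #   ExpectationConfiguration(
    #       expectation_type="expect_table_columns_to_match_set",
    #       kwargs={"column_set": ["id", "name", "created_at"], "exact_match": False},
    #   )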
@classmethod
@renderer(renderer_type="renderer.prescriptive")
def _prescriptive_renderer(
cls,
configuration=None,
result=None,
language=None,
runtime_configuration=None,
**kwargs,
):
runtime_configuration = runtime_configuration or {}
include_column_name = runtime_configuration.get("include_column_name", True)
include_column_name = (
include_column_name if include_column_name is not None else True
)
styling = runtime_configuration.get("styling")
params = substitute_none_for_missing(
configuration.kwargs, ["column_set", "exact_match"]
)
if params["column_set"] is None:
template_str = "Must specify a set or list of columns."
else:
# standardize order of the set for output
params["column_list"] = list(params["column_set"])
column_list_template_str = ", ".join(
[f"$column_list_{idx}" for idx in range(len(params["column_list"]))]
)
exact_match_str = "exactly" if params["exact_match"] is True else "at least"
template_str = f"Must have {exact_match_str} these columns (in any order): {column_list_template_str}"
for idx in range(len(params["column_list"])):
params["column_list_" + str(idx)] = params["column_list"][idx]
return [
RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": template_str,
"params": params,
"styling": styling,
},
}
)
]
def _validate(
self,
configuration: ExpectationConfiguration,
metrics: Dict,
runtime_configuration: dict = None,
execution_engine: ExecutionEngine = None,
):
# Obtaining columns and ordered list for sake of comparison
expected_column_set = self.get_success_kwargs(configuration).get("column_set")
expected_column_set = (
set(expected_column_set) if expected_column_set is not None else set()
)
actual_column_list = metrics.get("table.columns")
actual_column_set = set(actual_column_list)
exact_match = self.get_success_kwargs(configuration).get("exact_match")
if (
(expected_column_set is None) and (exact_match is not True)
) or actual_column_set == expected_column_set:
return {"success": True, "result": {"observed_value": actual_column_list}}
else:
# Convert to lists and sort to lock order for testing and output rendering
# unexpected_list contains items from the dataset columns that are not in expected_column_set
unexpected_list = sorted(list(actual_column_set - expected_column_set))
# missing_list contains items from expected_column_set that are not in the dataset columns
missing_list = sorted(list(expected_column_set - actual_column_set))
# observed_value contains items that are in the dataset columns
observed_value = sorted(actual_column_list)
mismatched = {}
if len(unexpected_list) > 0:
mismatched["unexpected"] = unexpected_list
if len(missing_list) > 0:
mismatched["missing"] = missing_list
result = {
"observed_value": observed_value,
"details": {"mismatched": mismatched},
}
return_success = {
"success": True,
"result": result,
}
return_failed = {
"success": False,
"result": result,
}
if exact_match:
return return_failed
else:
# Failed if there are items in the missing list (but OK to have unexpected_list)
if len(missing_list) > 0:
return return_failed
# Passed if there are no items in the missing list
else:
return return_success
| python | 6,490 |
"""Support for device tracking of Huawei LTE routers."""
from __future__ import annotations
from dataclasses import dataclass, field
import logging
import re
from typing import Any, cast
from stringcase import snakecase
from homeassistant.components.device_tracker.config_entry import ScannerEntity
from homeassistant.components.device_tracker.const import (
DOMAIN as DEVICE_TRACKER_DOMAIN,
SOURCE_TYPE_ROUTER,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import HuaweiLteBaseEntity, Router
from .const import (
CONF_TRACK_WIRED_CLIENTS,
DEFAULT_TRACK_WIRED_CLIENTS,
DOMAIN,
KEY_LAN_HOST_INFO,
KEY_WLAN_HOST_LIST,
UPDATE_SIGNAL,
)
_LOGGER = logging.getLogger(__name__)
_DEVICE_SCAN = f"{DEVICE_TRACKER_DOMAIN}/device_scan"
_HostType = dict[str, Any]
def _get_hosts(
router: Router, ignore_subscriptions: bool = False
) -> list[_HostType] | None:
for key in KEY_LAN_HOST_INFO, KEY_WLAN_HOST_LIST:
if not ignore_subscriptions and key not in router.subscriptions:
continue
try:
return cast(list[_HostType], router.data[key]["Hosts"]["Host"])
except KeyError:
_LOGGER.debug("%s[%s][%s] not in data", key, "Hosts", "Host")
return None
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up from config entry."""
# Grab hosts list once to examine whether the initial fetch has got some data for
# us, i.e. if wlan host list is supported. Only set up a subscription and proceed
# with adding and tracking entities if it is.
router = hass.data[DOMAIN].routers[config_entry.unique_id]
if (hosts := _get_hosts(router, True)) is None:
return
# Initialize already tracked entities
tracked: set[str] = set()
registry = entity_registry.async_get(hass)
known_entities: list[Entity] = []
track_wired_clients = router.config_entry.options.get(
CONF_TRACK_WIRED_CLIENTS, DEFAULT_TRACK_WIRED_CLIENTS
)
for entity in registry.entities.values():
if (
entity.domain == DEVICE_TRACKER_DOMAIN
and entity.config_entry_id == config_entry.entry_id
):
mac = entity.unique_id.partition("-")[2]
# Do not add known wired clients if not tracking them (any more)
skip = False
if not track_wired_clients:
for host in hosts:
if host.get("MacAddress") == mac:
skip = not _is_wireless(host)
break
if not skip:
tracked.add(entity.unique_id)
known_entities.append(HuaweiLteScannerEntity(router, mac))
async_add_entities(known_entities, True)
# Tell parent router to poll hosts list to gather new devices
router.subscriptions[KEY_LAN_HOST_INFO].add(_DEVICE_SCAN)
router.subscriptions[KEY_WLAN_HOST_LIST].add(_DEVICE_SCAN)
async def _async_maybe_add_new_entities(unique_id: str) -> None:
"""Add new entities if the update signal comes from our router."""
if config_entry.unique_id == unique_id:
async_add_new_entities(router, async_add_entities, tracked)
# Register to handle router data updates
disconnect_dispatcher = async_dispatcher_connect(
hass, UPDATE_SIGNAL, _async_maybe_add_new_entities
)
config_entry.async_on_unload(disconnect_dispatcher)
# Add new entities from initial scan
async_add_new_entities(router, async_add_entities, tracked)
def _is_wireless(host: _HostType) -> bool:
# LAN host info entries have an "InterfaceType" property, "Ethernet" / "Wireless".
# WLAN host list ones don't, but they're expected to be all wireless.
return cast(str, host.get("InterfaceType", "Wireless")) != "Ethernet"
def _is_connected(host: _HostType | None) -> bool:
# LAN host info entries have an "Active" property, "1" or "0".
# WLAN host list ones don't, but that call appears to return active hosts only.
return False if host is None else cast(str, host.get("Active", "1")) != "0"
def _is_us(host: _HostType) -> bool:
"""Try to determine if the host entry is us, the HA instance."""
# LAN host info entries have an "isLocalDevice" property, "1" / "0"; WLAN host list ones don't.
return cast(str, host.get("isLocalDevice", "0")) == "1"
@callback
def async_add_new_entities(
router: Router,
async_add_entities: AddEntitiesCallback,
tracked: set[str],
) -> None:
"""Add new entities that are not already being tracked."""
if not (hosts := _get_hosts(router)):
return
track_wired_clients = router.config_entry.options.get(
CONF_TRACK_WIRED_CLIENTS, DEFAULT_TRACK_WIRED_CLIENTS
)
new_entities: list[Entity] = []
for host in (
x
for x in hosts
if not _is_us(x)
and _is_connected(x)
and x.get("MacAddress")
and (track_wired_clients or _is_wireless(x))
):
entity = HuaweiLteScannerEntity(router, host["MacAddress"])
if entity.unique_id in tracked:
continue
tracked.add(entity.unique_id)
new_entities.append(entity)
async_add_entities(new_entities, True)
def _better_snakecase(text: str) -> str:
# Awaiting https://github.com/okunishinishi/python-stringcase/pull/18
if text == text.upper():
# All uppercase to all lowercase to get http for HTTP, not h_t_t_p
text = text.lower()
else:
# Three or more consecutive uppercase with middle part lowercased
# to get http_response for HTTPResponse, not h_t_t_p_response
text = re.sub(
r"([A-Z])([A-Z]+)([A-Z](?:[^A-Z]|$))",
lambda match: f"{match.group(1)}{match.group(2).lower()}{match.group(3)}",
text,
)
return cast(str, snakecase(text))
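# A few illustrative inputs/outputs for _better_snakecase (assumed examples,
# consistent with the comments above):
#   "MacAddress"   -> "mac_address"
#   "HTTP"         -> "http"           (not "h_t_t_p")
#   "HTTPResponse" -> "http_response"  (not "h_t_t_p_response")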
@dataclass
class HuaweiLteScannerEntity(HuaweiLteBaseEntity, ScannerEntity):
"""Huawei LTE router scanner entity."""
_mac_address: str
_ip_address: str | None = field(default=None, init=False)
_is_connected: bool = field(default=False, init=False)
_hostname: str | None = field(default=None, init=False)
_extra_state_attributes: dict[str, Any] = field(default_factory=dict, init=False)
@property
def _entity_name(self) -> str:
return self.hostname or self.mac_address
@property
def _device_unique_id(self) -> str:
return self.mac_address
@property
def source_type(self) -> str:
"""Return SOURCE_TYPE_ROUTER."""
return SOURCE_TYPE_ROUTER
@property
def ip_address(self) -> str | None:
"""Return the primary ip address of the device."""
return self._ip_address
@property
def mac_address(self) -> str:
"""Return the mac address of the device."""
return self._mac_address
@property
def hostname(self) -> str | None:
"""Return hostname of the device."""
return self._hostname
@property
def is_connected(self) -> bool:
"""Get whether the entity is connected."""
return self._is_connected
@property
def extra_state_attributes(self) -> dict[str, Any]:
"""Get additional attributes related to entity state."""
return self._extra_state_attributes
async def async_update(self) -> None:
"""Update state."""
if (hosts := _get_hosts(self.router)) is None:
self._available = False
return
self._available = True
host = next(
(x for x in hosts if x.get("MacAddress") == self._mac_address), None
)
self._is_connected = _is_connected(host)
if host is not None:
# IpAddress can contain multiple semicolon separated addresses.
# Pick one for model sanity; e.g. the dhcp component to which it is fed, parses and expects to see just one.
self._ip_address = (host.get("IpAddress") or "").split(";", 2)[0] or None
self._hostname = host.get("HostName")
self._extra_state_attributes = {
_better_snakecase(k): v
for k, v in host.items()
if k
in {
"AddressSource",
"AssociatedSsid",
"InterfaceType",
}
}
| python | 8,723 |
import torch
import torch.nn as nn
class SiameseNetwork(nn.Module):
def __init__(self):
super(SiameseNetwork, self).__init__()
        # Outputs batch X 1024 X 1 X 1
self.net = nn.Sequential(
nn.Conv2d(3, 32, kernel_size=3, stride=2),
nn.ReLU(inplace=True),
nn.BatchNorm2d(32),
# nn.Dropout2d(p=0.4),
nn.Conv2d(32, 64, kernel_size=3, stride=2),
nn.ReLU(inplace=True),
nn.BatchNorm2d(64),
# nn.Dropout2d(p=0.4),
nn.Conv2d(64, 128, kernel_size=3, stride=2),
nn.ReLU(inplace=True),
nn.BatchNorm2d(128),
# nn.Dropout2d(p=0.4),
nn.Conv2d(128, 256, kernel_size=1, stride=2),
nn.ReLU(inplace=True),
nn.BatchNorm2d(256),
# nn.Dropout2d(p=0.4),
nn.Conv2d(256, 256, kernel_size=1, stride=2),
nn.ReLU(inplace=True),
nn.BatchNorm2d(256),
# nn.Dropout2d(p=0.4),
nn.Conv2d(256, 512, kernel_size=3, stride=2),
nn.ReLU(inplace=True),
nn.BatchNorm2d(512),
# 1X1 filters to increase dimensions
nn.Conv2d(512, 1024, kernel_size=1, stride=1),
nn.ReLU(inplace=True),
nn.BatchNorm2d(1024),
)
def forward_once(self, x: torch.Tensor):
output = self.net(x)
output = torch.squeeze(output)
return output
def forward(self, input1: torch.Tensor, input2: torch.Tensor, input3: torch.Tensor = None):
output1 = self.forward_once(input1)
output2 = self.forward_once(input2)
if input3 is not None:
output3 = self.forward_once(input3)
return output1, output2, output3
return output1, output2
| python | 1,810 |
# -*- coding: utf-8 -*-
"""
@Time : 2019/04/17 10:08
@Author : Yuppie
"""
import torch
import torch.nn as nn
from Sublayers import General_Attention, Inception_Temporal_Layer, Non_local_gcn, Local_gcn
from Normalize import Switch_Norm_2D
class Prediction_Model(nn.Module):
def __init__(self, Ks, encoder_in_channel, encoder_out_channel, num_stations, switch):
super(Prediction_Model, self).__init__()
self.encoder = Prediction_Encoder(Ks, encoder_in_channel, encoder_out_channel, num_stations, switch)
self.decoder = Prediction_Decoder(encoder_out_channel, encoder_in_channel)
def forward(self, inputs, graph):
st_outputs = self.encoder(inputs, graph[0], graph[1])
predictions = self.decoder(inputs=st_outputs[:, :, -1, :].unsqueeze(-2),
key=st_outputs[:, :, :-1, :],
value=inputs[:, :, 1:, :])
return predictions
class Prediction_Encoder(nn.Module):
def __init__(self, K, in_channels, out_channels, num_stations, switch='gaussian'):
super(Prediction_Encoder, self).__init__()
self.tc_1 = Inception_Temporal_Layer(num_stations, in_channels, 4*in_channels, out_channels)
self.sa_1 = Non_local_gcn(K, out_channels, out_channels, num_stations, switch)
# self.sa_1 = Local_gcn(K, out_channels, out_channels)
self.tc_2 = Inception_Temporal_Layer(num_stations, out_channels, out_channels, out_channels)
self.sa_2 = Non_local_gcn(K, out_channels, out_channels, num_stations, switch)
# self.sa_2 = Local_gcn(K, out_channels, out_channels)
# self.conv1_1 = CausalConv1d(in_channels, out_channels, 1)
self.norm_1 = Switch_Norm_2D(out_channels)
self.norm_2 = Switch_Norm_2D(out_channels)
self.norm_3 = Switch_Norm_2D(out_channels)
self.norm_4 = Switch_Norm_2D(out_channels)
self.act = nn.LeakyReLU(inplace=True)
self.dropout = nn.Dropout(p=0.1)
self.num_stations = num_stations
self.in_channels = in_channels
def forward(self, inputs, c_graph, s_graph):
c_graph = c_graph if c_graph.dim() == 3 else c_graph.squeeze(0)
s_graph = s_graph if s_graph.dim() == 3 else s_graph.squeeze(0)
batch_size, num_stations, seq_len, temporal_in_channel = inputs.size()
assert num_stations == self.num_stations
assert temporal_in_channel == self.in_channels
# inputs = torch.cat([self.conv1_1(inputs[:, s_i].transpose(1, 2)).transpose(1, 2).unsqueeze(1)
# for s_i in range(self.num_stations)], dim=1)
temporal_feature = self.norm_1(self.tc_1(inputs))
spatial_feature = torch.cat([self.sa_1(temporal_feature[:, :, i], c_graph, s_graph).unsqueeze(2) for i in range(seq_len)], dim=2)
spatial_feature = self.act(self.norm_2(spatial_feature))
temporal_feature = self.norm_3(self.tc_2(spatial_feature))
spatial_feature = torch.cat([self.sa_2(temporal_feature[:, :, i], c_graph, s_graph).unsqueeze(2) for i in range(seq_len)], dim=2)
spatial_feature = self.act(self.norm_4(spatial_feature))
return spatial_feature
class Prediction_Decoder(nn.Module):
def __init__(self, spatial_out_channel, temporal_in_channel):
super(Prediction_Decoder, self).__init__()
self.attention = General_Attention(spatial_out_channel)
self.hidden_dim = spatial_out_channel
self.W_q = nn.Linear(spatial_out_channel, self.hidden_dim, bias=False)
self.W_k = nn.Linear(spatial_out_channel, self.hidden_dim, bias=False)
nn.init.xavier_normal_(self.W_q.weight)
nn.init.xavier_normal_(self.W_k.weight)
def forward(self, inputs, key, value):
batch_size, num_stations, new_len, spatial_out_channel = inputs.size()
batch_size, num_stations, seq_len, temporal_in_channel = value.size()
inputs = self.W_q(inputs).view(-1, new_len, self.hidden_dim)
key = self.W_k(key).view(-1, seq_len, self.hidden_dim)
value = value.view(-1, seq_len, temporal_in_channel)
outputs, _ = self.attention(inputs, key, value)
outputs = torch.cat([batch_i.unsqueeze(0) for batch_i in torch.chunk(outputs, batch_size, dim=0)], dim=0)
return outputs
| python | 4,403 |
from SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '${file}')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
r' \((?P<code>.+)\)$'
)
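    # Example of a compact-reporter line (assumed shape) that the regex above is
    # intended to match:
    #   /path/to/file.js: line 12, col 3, Error - Unexpected var. (no-var)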
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
| python | 424 |
from unittest.mock import patch
from django.core.management import call_command
from django.db.utils import OperationalError
from django.test import TestCase
class CommandTests(TestCase):
def test_wait_for_db_ready(self):
with patch('django.db.utils.ConnectionHandler.__getitem__') as gi:
gi.return_value = True
call_command("wait_for_db")
            self.assertEqual(gi.call_count, 1)
    @patch('time.sleep', return_value=True)
    def test_wait_for_db(self, ts):
        with patch('django.db.utils.ConnectionHandler.__getitem__') as gi:
            gi.side_effect = [OperationalError] * 5 + [True]
            call_command('wait_for_db')
            self.assertEqual(gi.call_count, 6)
| python | 727 |
import time
from datetime import timedelta
from django.db.backends.postgresql.operations import (
DatabaseOperations as PostgresDatabaseOperations,
)
from django.db.utils import OperationalError
from psycopg2 import errorcodes
from pytz import timezone
class DatabaseOperations(PostgresDatabaseOperations):
integer_field_ranges = {
'SmallIntegerField': (-32768, 32767),
'IntegerField': (-9223372036854775808, 9223372036854775807),
'BigIntegerField': (-9223372036854775808, 9223372036854775807),
'PositiveSmallIntegerField': (0, 32767),
'PositiveBigIntegerField': (0, 9223372036854775807),
'PositiveIntegerField': (0, 9223372036854775807),
'SmallAutoField': (-32768, 32767),
'AutoField': (-9223372036854775808, 9223372036854775807),
'BigAutoField': (-9223372036854775808, 9223372036854775807),
}
def deferrable_sql(self):
# Deferrable constraints aren't supported:
# https://github.com/cockroachdb/cockroach/issues/31632
return ''
def adapt_datetimefield_value(self, value):
"""
Add a timezone to datetimes so that psycopg2 will cast it to
TIMESTAMPTZ (as cockroach expects) rather than TIMESTAMP.
"""
# getattr() guards against F() objects which don't have tzinfo.
if value and getattr(value, 'tzinfo', '') is None and self.connection.timezone_name is not None:
connection_timezone = timezone(self.connection.timezone_name)
try:
value = connection_timezone.localize(value)
except OverflowError:
# Localizing datetime.datetime.max (used to cache a value
# forever, for example) may overflow. Subtract a day to prevent
# that.
value -= timedelta(days=1)
value = connection_timezone.localize(value)
return value
def sequence_reset_by_name_sql(self, style, sequences):
# Not implemented: https://github.com/cockroachdb/cockroach/issues/20956
return []
def sequence_reset_sql(self, style, model_list):
return []
def explain_query_prefix(self, format=None, **options):
if format:
raise ValueError("CockroachDB's EXPLAIN doesn't support any formats.")
prefix = self.explain_prefix
extra = [name for name, value in options.items() if value]
if extra:
prefix += ' (%s)' % ', '.join(extra)
return prefix
def execute_sql_flush(self, sql_list):
# Retry TRUNCATE if it fails with a serialization error.
num_retries = 10
initial_retry_delay = 0.5 # The initial retry delay, in seconds.
backoff_ = 1.5 # For each retry, the last delay is multiplied by this.
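        # With these defaults the sleep before retries 2, 3, 4, ... is roughly
        # 0.5s, 0.75s, 1.125s, 1.69s, ... (each delay is 1.5x the previous one),
        # capped at num_retries attempts overall.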
next_retry_delay = initial_retry_delay
for retry in range(1, num_retries + 1):
try:
return super().execute_sql_flush(sql_list)
except OperationalError as exc:
if (getattr(exc.__cause__, 'pgcode', '') != errorcodes.SERIALIZATION_FAILURE or
retry >= num_retries):
raise
time.sleep(next_retry_delay)
next_retry_delay *= backoff_
def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False):
# CockroachDB doesn't support resetting sequences.
return super().sql_flush(style, tables, reset_sequences=False, allow_cascade=allow_cascade)
| python | 3,515 |
# Copyright 2016 Leon Poon and Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from xml.dom import minidom
from test import res
dtsx_res = res.pydtsxplode.dtsx # @UndefinedVariable
class TestXml(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testMakeXmlNs(self):
pass
def testReadPackage(self):
f = dtsx_res['Package.dtsx']('rb')
dom = minidom.parse(f)
self.assertIs(dom.documentElement.ownerDocument, dom)
self.assertIs(dom.documentElement.parentNode, dom)
if __name__ == "__main__":
unittest.main()
| python | 1,191 |
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""CorrelationCholesky bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.bijectors import bijector
from tensorflow_probability.python.bijectors import fill_triangular
from tensorflow_probability.python.internal import prefer_static as ps
from tensorflow_probability.python.internal import tensorshape_util
__all__ = [
'CorrelationCholesky',
]
class CorrelationCholesky(bijector.AutoCompositeTensorBijector):
"""Maps unconstrained reals to Cholesky-space correlation matrices.
#### Mathematical Details
This bijector provides a change of variables from unconstrained reals to a
parameterization of the CholeskyLKJ distribution. The CholeskyLKJ distribution
[1] is a distribution on the set of Cholesky factors of positive definite
correlation matrices. The CholeskyLKJ probability density function is
obtained from the LKJ density on n x n matrices as follows:
1 = int p(A | eta) dA
= int Z(eta) * det(A) ** (eta - 1) dA
= int Z(eta) L_ii ** {(n - i - 1) + 2 * (eta - 1)} ^dL_ij (0 <= i < j < n)
where Z(eta) is the normalizer; the matrix L is the Cholesky factor of the
correlation matrix A; and ^dL_ij denotes the wedge product (or differential)
of the strictly lower triangular entries of L. The entries L_ij are
constrained such that each entry lies in [-1, 1] and the norm of each row is
1. The norm includes the diagonal; which is not included in the wedge product.
To preserve uniqueness, we further specify that the diagonal entries are
positive.
The image of unconstrained reals under the `CorrelationCholesky` bijector is
the set of correlation matrices which are positive definite. A [correlation
matrix](https://en.wikipedia.org/wiki/Correlation_and_dependence#Correlation_matrices)
can be characterized as a symmetric positive semidefinite matrix with 1s on
the main diagonal.
For a lower triangular matrix `L` to be a valid Cholesky-factor of a positive
definite correlation matrix, it is necessary and sufficient that each row of
`L` have unit Euclidean norm [1]. To see this, observe that if `L_i` is the
`i`th row of the Cholesky factor corresponding to the correlation matrix `R`,
then the `i`th diagonal entry of `R` satisfies:
1 = R_i,i = L_i . L_i = ||L_i||^2
where '.' is the dot product of vectors and `||...||` denotes the Euclidean
norm.
Furthermore, observe that `R_i,j` lies in the interval `[-1, 1]`. By the
Cauchy-Schwarz inequality:
|R_i,j| = |L_i . L_j| <= ||L_i|| ||L_j|| = 1
This is a consequence of the fact that `R` is symmetric positive definite with
1s on the main diagonal.
We choose the mapping from x in `R^{m}` to `R^{n^2}` where `m` is the
`(n - 1)`th triangular number; i.e. `m = 1 + 2 + ... + (n - 1)`.
L_ij = x_i,j / s_i (for i < j)
L_ii = 1 / s_i
where s_i = sqrt(1 + x_i,0^2 + x_i,1^2 + ... + x_(i,i-1)^2). We can check that
the required constraints on the image are satisfied.
#### Examples
```python
bijector.CorrelationCholesky().forward([2., 2., 1.])
# Result: [[ 1. , 0. , 0. ],
[ 0.70710678, 0.70710678, 0. ],
[ 0.66666667, 0.66666667, 0.33333333]]
bijector.CorrelationCholesky().inverse(
[[ 1. , 0. , 0. ],
[ 0.70710678, 0.70710678, 0. ],
[ 0.66666667, 0.66666667, 0.33333333]])
# Result: [2., 2., 1.]
```
#### References
[1] Stan Manual. Section 24.2. Cholesky LKJ Correlation Distribution.
https://mc-stan.org/docs/2_18/functions-reference/cholesky-lkj-correlation-distribution.html
[2] Daniel Lewandowski, Dorota Kurowicka, and Harry Joe,
"Generating random correlation matrices based on vines and extended
onion method," Journal of Multivariate Analysis 100 (2009), pp
1989-2001.
"""
def __init__(self, validate_args=False, name='correlation_cholesky'):
parameters = dict(locals())
with tf.name_scope(name) as name:
super(CorrelationCholesky, self).__init__(
validate_args=validate_args,
forward_min_event_ndims=1,
inverse_min_event_ndims=2,
parameters=parameters,
name=name)
@classmethod
def _parameter_properties(cls, dtype):
return dict()
def _forward_event_shape(self, input_shape):
if tensorshape_util.rank(input_shape) is None:
return input_shape
tril_shape = fill_triangular.FillTriangular().forward_event_shape(
input_shape)
n = tril_shape[-1]
if n is not None:
n += 1
return tril_shape[:-2].concatenate([n, n])
def _forward_event_shape_tensor(self, input_shape):
tril_shape = fill_triangular.FillTriangular().forward_event_shape_tensor(
input_shape)
n = tril_shape[-1] + 1
return tf.concat([tril_shape[:-2], [n, n]], axis=-1)
def _inverse_event_shape(self, input_shape):
if not input_shape.rank:
return input_shape
n = input_shape[-1]
if n is not None:
n -= 1
y_shape = input_shape[:-2].concatenate([n, n])
return fill_triangular.FillTriangular().inverse_event_shape(y_shape)
def _inverse_event_shape_tensor(self, input_shape):
n = input_shape[-1] - 1
y_shape = tf.concat([input_shape[:-2], [n, n]], axis=-1)
return fill_triangular.FillTriangular().inverse_event_shape_tensor(y_shape)
def _forward(self, x):
x = tf.convert_to_tensor(x, name='x')
batch_shape = ps.shape(x)[:-1]
# Pad zeros on the top row and right column.
y = fill_triangular.FillTriangular().forward(x)
rank = ps.rank(y)
paddings = ps.concat(
[ps.zeros([rank - 2, 2], dtype=tf.int32),
[[1, 0], [0, 1]]],
axis=0)
y = tf.pad(y, paddings)
# Set diagonal to 1s.
n = ps.shape(y)[-1]
diag = tf.ones(ps.concat([batch_shape, [n]], axis=-1), dtype=x.dtype)
y = tf.linalg.set_diag(y, diag)
# Normalize each row to have Euclidean (L2) norm 1.
y /= tf.norm(y, axis=-1)[..., tf.newaxis]
return y
def _inverse(self, y):
n = ps.shape(y)[-1]
batch_shape = ps.shape(y)[:-2]
# Extract the reciprocal of the row norms from the diagonal.
diag = tf.linalg.diag_part(y)[..., tf.newaxis]
# Set the diagonal to 0s.
y = tf.linalg.set_diag(
y, tf.zeros(ps.concat([batch_shape, [n]], axis=-1), dtype=y.dtype))
# Multiply with the norm (or divide by its reciprocal) to recover the
# unconstrained reals in the (strictly) lower triangular part.
x = y / diag
# Remove the first row and last column before inverting the FillTriangular
# transformation.
return fill_triangular.FillTriangular().inverse(x[..., 1:, :-1])
def _forward_log_det_jacobian(self, x):
# TODO(b/133442896): It should be possible to use the fallback
# implementation of _forward_log_det_jacobian in terms of
# _inverse_log_det_jacobian in the base Bijector class.
return -self._inverse_log_det_jacobian(self.forward(x))
def _inverse_log_det_jacobian(self, y):
# The inverse log det jacobian (ILDJ) of the entire mapping is the sum of
# the ILDJs of each row's mapping.
#
# To compute the ILDJ for each row's mapping, consider the forward mapping
# `f_k` restricted to the `k`th (0-indexed) row. It maps unconstrained reals
# in `R^k` to the unit disk in `R^k`. `f_k : R^k -> R^k` is:
#
# f(x_1, x_2, ... x_k) = (x_1/s, x_2/s, ..., x_k/s)
#
# where `s = norm(x_1, x_2, ..., x_k, 1)`.
#
# The change in infinitesimal `k`-dimensional volume is given by
# |det(J)|; where J is the `k x k` Jacobian matrix.
#
# Claim: |det(J)| = s^{-(k + 2)}.
#
# Proof: We compute the entries of the Jacobian matrix J:
#
# J_ij = (s^2 - x_i^2) / s^3 if i == j
# J_ij = -(x_i * x_j) / s^3 if i != j
#
# We multiply each row by s^3, which contributes a factor of s^{-3k} to
# det(J). The remaining matrix can be written as s^2 I - xx^T. By the
# matrix determinant lemma
# (https://en.wikipedia.org/wiki/Matrix_determinant_lemma),
# det(s^2 I - xx^T) = s^{2k} (1 - (x^Tx / s^2)) = s^{2k - 2}. The last
# equality follows from s^2 - x^Tx = s^2 - sum x_i^2 = 1. Hence,
# det(J) = s^{-3k} s^{2k - 2} = s^{-(k + 2)}.
#
n = ps.shape(y)[-1]
return -tf.reduce_sum(
tf.range(2, n + 2, dtype=y.dtype) * tf.math.log(tf.linalg.diag_part(y)),
axis=-1)
| python | 9,230 |
# Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
from graphlearn import pywrap_graphlearn as pywrap
from graphlearn.python.config import * # pylint: disable=wildcard-import
from graphlearn.python.graph import Graph
from graphlearn.python.values import Nodes, Edges, Layer, Layers, \
SparseNodes, SparseEdges
from graphlearn.python.errors import * # pylint: disable=wildcard-import
from graphlearn.python.decoder import Decoder
from graphlearn.python.topology import Topology
from graphlearn.python.sampler import *
# model
from graphlearn.python.model.base_encoder import *
from graphlearn.python.model.ego_graph import *
from graphlearn.python.model.ego_spec import *
from graphlearn.python.model.learning_based_model import *
from graphlearn.python.model.utils import *
# tf based model
from graphlearn.python.model.tf import aggregators
from graphlearn.python.model.tf import encoders
from graphlearn.python.model.tf import layers
from graphlearn.python.model.tf import utils
from graphlearn.python.model.tf.trainer import *
from graphlearn.python.model.tf.optimizer import *
from graphlearn.python.model.tf.loss_fn import *
from graphlearn.python.model.tf.ego_tensor import *
from graphlearn.python.model.tf.ego_flow import *
EDGE_SRC = pywrap.NodeFrom.EDGE_SRC
EDGE_DST = pywrap.NodeFrom.EDGE_DST
NODE = pywrap.NodeFrom.NODE
REPLICATE = pywrap.PaddingMode.REPLICATE
CIRCULAR = pywrap.PaddingMode.CIRCULAR
| python | 2,064 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 20 16:25:08 2020
@author: admangli
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
dataset = pd.read_csv("Social_Network_Ads.csv")
X = dataset.iloc[:, 2:-1].values
y = dataset.iloc[:, -1].values
#%%
from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
X = scaler.fit_transform(X)
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25)
#%% Fitting logistic regression to training set
from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression()
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
#%% Visualize predictions using confusion matrix
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred)
print(cm)
#%%
from matplotlib.colors import ListedColormap
X_set, y_set = X_test, y_test
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = .01),
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = .01))
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c = ListedColormap(('darkred', 'darkgreen'))(i), label = j)
plt.title('Logistic Regression (Test set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show()
| python | 1,685 |
#!/usr/bin/env python
import csv
from datetime import date, datetime, timedelta
import sys
def read_csv_data(csvfilename):
dates = {}
with open(csvfilename) as csvfile:
insightreader = csv.reader(csvfile)
for index, row in enumerate(insightreader):
# skip the first two lines
if index < 2:
continue
# the date is formatted like 08/16/2014 11:33:11
sitting_date = datetime.strptime(row[0], "%m/%d/%Y %H:%M:%S").date()
duration = int(row[1])
if sitting_date not in dates:
dates[sitting_date] = []
dates[sitting_date].append(duration)
return dates
def percent_sat_in_last_year(dates):
days_sat = 0
for i in xrange(365):
olddate = date.today() - timedelta(i)
if olddate in dates:
days_sat += 1
return days_sat / 365.0
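# Worked example: sitting on 73 of the last 365 days gives 73 / 365.0 = 0.2,
# which main() below prints as 20.0%.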
def main():
dates = read_csv_data("insight_connect_export_20140816.csv")
#import pprint
#pprint.pprint(dates, width=2)
print "Sat %.1f%% days in last year" % (100 * percent_sat_in_last_year(dates))
return 0
if __name__ == "__main__":
sys.exit(main())
| python | 1,181 |
from ._person import Person as _Person
__all__ = ["People"]
def _generate_person_uid():
import uuid as _uuid
uid = _uuid.uuid4()
return "P" + str(uid)[:7]
class People:
"""This holds a registry of individual Persons"""
def __init__(self, props=None, getHook=None):
self._getHook = getHook
self.state = {
"registry": {},
}
self._names = {}
self.load(props)
self._log = []
def addLog(self, log):
self._log.append(log)
def add(self, person: _Person):
if person is None:
return
if isinstance(person, str):
# try to find an existing person with this name
try:
return self.getByName(person)
except Exception:
return self.add(_Person({"name": person}))
if not isinstance(person, _Person):
raise TypeError("Can only add a Person to People")
existing = None
try:
existing = self.getByName(person.getName())
except Exception:
existing = None
if existing is None:
try:
existing = self.getByFuzzyName(person)
except Exception:
existing = None
if existing:
self.addLog(
f"Have fuzzy matched {person.getName()} " f"to {existing.getName()}"
)
if existing:
del self._names[existing.getName()]
existing = existing.merge(person)
self._names[existing.getName()] = existing.getID()
self.state["registry"][existing.getID()] = existing
return existing
id = person.getID()
if id:
if id in self.state["registry"]:
raise KeyError(f"Duplicate Person ID {person}")
self.state["registry"][id] = person
else:
uid = _generate_person_uid()
while uid in self.state["registry"]:
uid = _generate_person_uid()
person.state["id"] = uid
self.state["registry"][uid] = person
person._getHook = self._getHook
self._names[person.getName()] = person.getID()
return person
def getLog(self):
if len(self._log) == 0:
return None
else:
return "\n".join(self._log)
def values(self):
return self.state["registry"].values()
def get(self, id):
try:
return self.state["registry"][id]
except Exception:
raise KeyError(f"No Person with ID {id}")
def getByFuzzyName(self, person):
for (pid, p) in self.state["registry"].items():
if p.couldBe(person):
y = input(
f"Is {person.getName()} the same person "
f"as {p.getName()}? (y/n) "
)
if y and y.lower()[0] == "y":
return p
return None
def getByName(self, name):
try:
return self.get(self._names[name])
except Exception:
raise KeyError(f"No Person with name {name}")
def getAll(self):
""" Returns all the names stored in this object
Returns:
dict: Dictionary of people as name: ID pairs
"""
return {v: k for k, v in self._names.items()}
def getAllForImages(self):
""" Returns all the names stored in this object
Returns:
dict: Dictionary of people as name: ID pairs
"""
imageDict = {}
for id, name in self.getAll().items():
imageDict[id] = {}
imageDict[id]["name"] = name
imageDict[id]["filename"] = "The_Steamer_Great_Western_small.jpg"
return imageDict
def find(self, value, best_match=False):
if isinstance(value, _Person):
return self.get(value.getID())
value = value.lstrip().rstrip().lower()
results = []
shortest = None
shortest_length = None
for name in self._names.keys():
if name.lower().find(value) != -1:
if shortest is None:
shortest = name
shortest_length = len(name)
elif len(name) < shortest_length:
shortest_length = len(name)
shortest = name
results.append(self.get(self._names[name]))
if len(results) == 1:
return results[0]
elif len(results) > 1:
if best_match:
return self.get(self._names[shortest])
else:
return results
keys = "', '".join(self._names.keys())
raise KeyError(
f"No person matches '{value}'. Available people " + f"are '{keys}'"
)
def load(self, data):
if data:
for item in data:
person = _Person.load(item)
self.add(person)
def toDry(self):
return self.state
@staticmethod
def unDry(value):
people = People()
people.state = value
return people
| python | 5,205 |
from astropy.coordinates import SkyCoord, EarthLocation
#from astropy import coordinates as coord
#from astropy.coordinates.tests.utils import randomly_sample_sphere
from astropy.time import Time
from astropy import units as u
#import numpy as np
#import matplotlib.pyplot as plt
#coos = SkyCoord.from_name('M1')
# 300 times over the space of 10 hours
time = Time.now()# + np.linspace(-5, 5, 300)*u.hour
# note the use of broadcasting so that 300 times are broadcast against 1000 positions
home = EarthLocation.from_geodetic(51.023, 0.31, 100)
#aa_frame = coord.AltAz(obstime=times[:, np.newaxis], location=home)
# calculate alt-az of each object at each time.
#aa_coos = coos.transform_to(aa_frame)
newAltAzCoordinates = SkyCoord(alt = 45*u.deg, az = 100*u.deg, obstime = time, frame = 'altaz', location = home)
print(newAltAzCoordinates.icrs)
| python | 853 |
# Generated by Django 2.2.1 on 2019-08-31 12:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("core", "0010_auto_20190831_1021")]
operations = [
migrations.AddField(
model_name="activeplugin",
name="type",
field=models.CharField(default="", max_length=50, verbose_name="Type"),
preserve_default=False,
)
]
| python | 438 |
from __future__ import absolute_import
from datetime import timedelta
import operator
from rest_framework import serializers
from django.db import transaction
from sentry.api.serializers.rest_framework.base import CamelSnakeModelSerializer
from sentry.api.serializers.rest_framework.environment import EnvironmentField
from sentry.api.serializers.rest_framework.project import ProjectField
from sentry.incidents.logic import (
AlertRuleNameAlreadyUsedError,
AlertRuleTriggerLabelAlreadyUsedError,
InvalidTriggerActionError,
create_alert_rule,
create_alert_rule_trigger,
create_alert_rule_trigger_action,
update_alert_rule,
update_alert_rule_trigger,
update_alert_rule_trigger_action,
delete_alert_rule_trigger_action,
delete_alert_rule_trigger,
)
from sentry.incidents.models import (
AlertRule,
AlertRuleThresholdType,
AlertRuleTrigger,
AlertRuleTriggerAction,
)
from sentry.models.organizationmember import OrganizationMember
from sentry.models.team import Team
from sentry.models.user import User
from sentry.snuba.models import QueryAggregations
from sentry.utils.compat import zip
string_to_action_type = {
registration.slug: registration.type
for registration in AlertRuleTriggerAction.get_registered_types()
}
action_target_type_to_string = {
AlertRuleTriggerAction.TargetType.USER: "user",
AlertRuleTriggerAction.TargetType.TEAM: "team",
AlertRuleTriggerAction.TargetType.SPECIFIC: "specific",
}
string_to_action_target_type = {v: k for (k, v) in action_target_type_to_string.items()}
CRITICAL_TRIGGER_LABEL = "critical"
WARNING_TRIGGER_LABEL = "warning"
class AlertRuleTriggerActionSerializer(CamelSnakeModelSerializer):
"""
Serializer for creating/updating a trigger action. Required context:
- `trigger`: The trigger related to this action.
- `alert_rule`: The alert_rule related to this action.
- `organization`: The organization related to this action.
- `access`: An access object (from `request.access`)
"""
id = serializers.IntegerField(required=False)
type = serializers.CharField()
target_type = serializers.CharField()
class Meta:
model = AlertRuleTriggerAction
fields = ["id", "type", "target_type", "target_identifier", "integration"]
extra_kwargs = {
"target_identifier": {"required": True},
"target_display": {"required": False},
"integration": {"required": False, "allow_null": True},
}
def validate_type(self, type):
if type not in string_to_action_type:
raise serializers.ValidationError(
"Invalid type, valid values are [%s]" % ", ".join(string_to_action_type.keys())
)
return string_to_action_type[type]
def validate_target_type(self, target_type):
if target_type not in string_to_action_target_type:
raise serializers.ValidationError(
"Invalid targetType, valid values are [%s]"
% ", ".join(string_to_action_target_type.keys())
)
return string_to_action_target_type[target_type]
def validate(self, attrs):
if ("type" in attrs) != ("target_type" in attrs) != ("target_identifier" in attrs):
raise serializers.ValidationError(
"type, targetType and targetIdentifier must be passed together"
)
type = attrs.get("type")
target_type = attrs.get("target_type")
access = self.context["access"]
identifier = attrs.get("target_identifier")
if type is not None:
type_info = AlertRuleTriggerAction.get_registered_type(type)
if target_type not in type_info.supported_target_types:
allowed_target_types = ",".join(
[
action_target_type_to_string[type_name]
for type_name in type_info.supported_target_types
]
)
raise serializers.ValidationError(
{
"target_type": "Invalid target type for %s. Valid types are [%s]"
% (type_info.slug, allowed_target_types)
}
)
if attrs.get("type") == AlertRuleTriggerAction.Type.EMAIL:
if target_type == AlertRuleTriggerAction.TargetType.TEAM:
try:
team = Team.objects.get(id=identifier)
except Team.DoesNotExist:
raise serializers.ValidationError("Team does not exist")
if not access.has_team(team):
raise serializers.ValidationError("Team does not exist")
elif target_type == AlertRuleTriggerAction.TargetType.USER:
try:
user = User.objects.get(id=identifier)
except User.DoesNotExist:
raise serializers.ValidationError("User does not exist")
if not OrganizationMember.objects.filter(
organization=self.context["organization"], user=user
).exists():
raise serializers.ValidationError("User does not belong to this organization")
elif attrs.get("type") == AlertRuleTriggerAction.Type.SLACK:
if not attrs.get("integration"):
raise serializers.ValidationError(
{"integration": "Integration must be provided for slack"}
)
return attrs
def create(self, validated_data):
try:
return create_alert_rule_trigger_action(
trigger=self.context["trigger"], **validated_data
)
except InvalidTriggerActionError as e:
raise serializers.ValidationError(e.message)
def update(self, instance, validated_data):
if "id" in validated_data:
validated_data.pop("id")
try:
return update_alert_rule_trigger_action(instance, **validated_data)
except InvalidTriggerActionError as e:
raise serializers.ValidationError(e.message)
class AlertRuleTriggerSerializer(CamelSnakeModelSerializer):
"""
Serializer for creating/updating an alert rule trigger. Required context:
- `alert_rule`: The alert_rule related to this trigger.
- `organization`: The organization related to this trigger.
- `access`: An access object (from `request.access`)
"""
id = serializers.IntegerField(required=False)
# TODO: These might be slow for many projects, since it will query for each
# individually. If we find this to be a problem then we can look into batching.
excluded_projects = serializers.ListField(child=ProjectField(), required=False)
actions = serializers.ListField(required=True)
class Meta:
model = AlertRuleTrigger
fields = [
"id",
"label",
"threshold_type",
"alert_threshold",
"resolve_threshold",
"excluded_projects",
"actions",
]
extra_kwargs = {"label": {"min_length": 1, "max_length": 64}}
def validate_threshold_type(self, threshold_type):
try:
return AlertRuleThresholdType(threshold_type)
except ValueError:
raise serializers.ValidationError(
"Invalid threshold type, valid values are %s"
% [item.value for item in AlertRuleThresholdType]
)
def create(self, validated_data):
try:
actions = validated_data.pop("actions")
alert_rule_trigger = create_alert_rule_trigger(
alert_rule=self.context["alert_rule"], **validated_data
)
self._handle_actions(alert_rule_trigger, actions)
return alert_rule_trigger
except AlertRuleTriggerLabelAlreadyUsedError:
raise serializers.ValidationError("This label is already in use for this alert rule")
def update(self, instance, validated_data):
actions = validated_data.pop("actions")
if "id" in validated_data:
validated_data.pop("id")
try:
alert_rule_trigger = update_alert_rule_trigger(instance, **validated_data)
self._handle_actions(alert_rule_trigger, actions)
return alert_rule_trigger
except AlertRuleTriggerLabelAlreadyUsedError:
raise serializers.ValidationError("This label is already in use for this alert rule")
def _handle_actions(self, alert_rule_trigger, actions):
if actions is not None:
# Delete actions we don't have present in the updated data.
action_ids = [x["id"] for x in actions if "id" in x]
actions_to_delete = AlertRuleTriggerAction.objects.filter(
alert_rule_trigger=alert_rule_trigger
).exclude(id__in=action_ids)
for action in actions_to_delete:
delete_alert_rule_trigger_action(action)
for action_data in actions:
if "integration_id" in action_data:
action_data["integration"] = action_data.pop("integration_id")
if "id" in action_data:
action_instance = AlertRuleTriggerAction.objects.get(
alert_rule_trigger=alert_rule_trigger, id=action_data["id"]
)
else:
action_instance = None
action_serializer = AlertRuleTriggerActionSerializer(
context={
"alert_rule": alert_rule_trigger.alert_rule,
"trigger": alert_rule_trigger,
"organization": self.context["organization"],
"access": self.context["access"],
},
instance=action_instance,
data=action_data,
)
if action_serializer.is_valid():
action_serializer.save()
else:
raise serializers.ValidationError(action_serializer.errors)
class ObjectField(serializers.Field):
def to_internal_value(self, data):
return data
class AlertRuleSerializer(CamelSnakeModelSerializer):
"""
Serializer for creating/updating an alert rule. Required context:
- `organization`: The organization related to this alert rule.
- `access`: An access object (from `request.access`)
"""
environment = EnvironmentField(required=False, allow_null=True)
# TODO: These might be slow for many projects, since it will query for each
# individually. If we find this to be a problem then we can look into batching.
projects = serializers.ListField(child=ProjectField(), required=False)
excluded_projects = serializers.ListField(child=ProjectField(), required=False)
triggers = serializers.ListField(required=True)
class Meta:
model = AlertRule
fields = [
"name",
"query",
"time_window",
"environment",
"threshold_period",
"aggregation",
"projects",
"include_all_projects",
"excluded_projects",
"triggers",
]
extra_kwargs = {
"query": {"allow_blank": True, "required": True},
"threshold_period": {"default": 1, "min_value": 1, "max_value": 20},
"time_window": {
"min_value": 1,
"max_value": int(timedelta(days=1).total_seconds() / 60),
"required": True,
},
"aggregation": {"required": False},
"name": {"min_length": 1, "max_length": 64},
"include_all_projects": {"default": False},
}
def validate_aggregation(self, aggregation):
try:
return QueryAggregations(aggregation)
except ValueError:
raise serializers.ValidationError(
"Invalid aggregation, valid values are %s"
% [item.value for item in QueryAggregations]
)
def validate(self, data):
"""Performs validation on an alert rule's data
This includes ensuring there is either 1 or 2 triggers, which each have actions, and have proper thresholds set.
The critical trigger should both alert and resolve 'after' the warning trigger (whether that means > or < the value depends on threshold type).
"""
triggers = data.get("triggers", [])
if not triggers:
raise serializers.ValidationError("Must include at least one trigger")
if len(triggers) > 2:
raise serializers.ValidationError(
"Must send 1 or 2 triggers - A critical trigger, and an optional warning trigger"
)
for i, (trigger, expected_label) in enumerate(
zip(triggers, (CRITICAL_TRIGGER_LABEL, WARNING_TRIGGER_LABEL))
):
if trigger.get("label", None) != expected_label:
raise serializers.ValidationError(
'Trigger {} must be labeled "{}"'.format(i + 1, expected_label)
)
critical = triggers[0]
self._validate_trigger_thresholds(critical)
if len(triggers) == 2:
warning = triggers[1]
if critical["threshold_type"] != warning["threshold_type"]:
raise serializers.ValidationError(
"Must have matching threshold types (i.e. critical and warning "
"triggers must both be an upper or lower bound)"
)
self._validate_trigger_thresholds(warning)
self._validate_critical_warning_triggers(critical, warning)
# Triggers have passed checks. Check that all triggers have at least one action now.
for trigger in triggers:
actions = trigger.get("actions")
if not actions:
raise serializers.ValidationError(
'"' + trigger["label"] + '" trigger must have an action.'
)
return data
def _validate_trigger_thresholds(self, trigger):
if trigger.get("resolve_threshold") is None:
return
# Since we're comparing non-inclusive thresholds here (>, <), we need
# to modify the values when we compare. An example of why:
# Alert > 0, resolve < 1. This means that we want to alert on values
# of 1 or more, and resolve on values of 0 or less. This is valid, but
# without modifying the values, this boundary case will fail.
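        # Worked example: threshold_type ABOVE with alert_threshold=0 and
        # resolve_threshold=1. After adjustment we compare lt(0 + 1, 1 - 1) ==
        # lt(1, 0) == False, so no error is raised; comparing the raw values
        # (lt(0, 1) == True) would have rejected this valid configuration.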
if trigger["threshold_type"] == AlertRuleThresholdType.ABOVE.value:
alert_op, alert_add, resolve_add = operator.lt, 1, -1
else:
alert_op, alert_add, resolve_add = operator.gt, -1, 1
if alert_op(
trigger["alert_threshold"] + alert_add, trigger["resolve_threshold"] + resolve_add
):
raise serializers.ValidationError(
"{} alert threshold must be above resolution threshold".format(trigger["label"])
)
def _validate_critical_warning_triggers(self, critical, warning):
if critical["threshold_type"] == AlertRuleThresholdType.ABOVE.value:
alert_op = operator.lt
threshold_type = "above"
elif critical["threshold_type"] == AlertRuleThresholdType.BELOW.value:
alert_op = operator.gt
threshold_type = "below"
if alert_op(critical["alert_threshold"], warning["alert_threshold"]):
raise serializers.ValidationError(
"Critical trigger must have an alert threshold {} warning trigger".format(
threshold_type
)
)
elif alert_op(critical["resolve_threshold"], warning["resolve_threshold"]):
raise serializers.ValidationError(
"Critical trigger must have a resolution threshold {} (or equal to) "
"warning trigger".format(threshold_type)
)
def create(self, validated_data):
try:
with transaction.atomic():
triggers = validated_data.pop("triggers")
alert_rule = create_alert_rule(
organization=self.context["organization"], **validated_data
)
self._handle_triggers(alert_rule, triggers)
return alert_rule
except AlertRuleNameAlreadyUsedError:
raise serializers.ValidationError("This name is already in use for this project")
def update(self, instance, validated_data):
triggers = validated_data.pop("triggers")
if "id" in validated_data:
validated_data.pop("id")
try:
with transaction.atomic():
alert_rule = update_alert_rule(instance, **validated_data)
self._handle_triggers(alert_rule, triggers)
return alert_rule
except AlertRuleNameAlreadyUsedError:
raise serializers.ValidationError("This name is already in use for this project")
def _handle_triggers(self, alert_rule, triggers):
if triggers is not None:
# Delete triggers we don't have present in the incoming data
trigger_ids = [x["id"] for x in triggers if "id" in x]
triggers_to_delete = AlertRuleTrigger.objects.filter(alert_rule=alert_rule).exclude(
id__in=trigger_ids
)
for trigger in triggers_to_delete:
delete_alert_rule_trigger(trigger)
for trigger_data in triggers:
if "id" in trigger_data:
trigger_instance = AlertRuleTrigger.objects.get(
alert_rule=alert_rule, id=trigger_data["id"]
)
else:
trigger_instance = None
trigger_serializer = AlertRuleTriggerSerializer(
context={
"alert_rule": alert_rule,
"organization": self.context["organization"],
"access": self.context["access"],
},
instance=trigger_instance,
data=trigger_data,
)
if trigger_serializer.is_valid():
trigger_serializer.save()
else:
raise serializers.ValidationError(trigger_serializer.errors)
| python | 18,588 |
# Copyright (C) 2018 Garth N. Wells
#
# SPDX-License-Identifier: MIT
from floodsystem.stationdata import build_station_list
def run():
"""Requirements for Task 1A"""
# Build list of stations
stations = build_station_list()
# Print number of stations
print("Number of stations: {}".format(len(stations)))
# Display data from 3 stations:
for station in stations:
if station.name in [
'Bourton Dickler', 'Surfleet Sluice', 'Gaw Bridge'
]:
print(station)
if __name__ == "__main__":
print("*** Task 1A: CUED Part IA Flood Warning System ***")
run()
| python | 631 |
# Generated by Django 3.1.7 on 2021-04-04 18:38
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('records', '0019_auto_20210401_1524'),
]
operations = [
migrations.AddField(
model_name='animalhealth',
name='food_refused',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='animalhealth',
name='shed',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='animalhealth',
name='food_regurgitated',
field=models.BooleanField(default=False),
),
]
| python | 730 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
import unittest
import numpy as np
import pytest
from pytest import approx
import allel
from allel.test.tools import assert_array_equal as aeq, assert_array_almost_equal
from allel.util import ignore_invalid
from allel import GenotypeArray, HaplotypeArray, SortedIndex, AlleleCountsArray
class TestWindowUtilities(unittest.TestCase):
def test_moving_statistic(self):
f = allel.moving_statistic
values = [2, 5, 8, 16]
expect = [7, 24]
actual = f(values, statistic=np.sum, size=2)
aeq(expect, actual)
values = [2, 5, 8, 16]
expect = [7, 13, 24]
actual = f(values, statistic=np.sum, size=2, step=1)
aeq(expect, actual)
def test_windowed_statistic(self):
f = allel.windowed_statistic
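        # windowed_statistic bins the values by genomic position into windows
        # of the given size, applies `statistic` to each window's values, and
        # returns (per-window statistic, window bounds, per-window counts), as
        # exercised below.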
pos = [1, 12, 15, 27]
# boolean array, all true
b = [True, True, True, True]
expected_nnz = [1, 2, 1]
expected_windows = [[1, 10], [11, 20], [21, 27]]
expected_counts = [1, 2, 1]
actual_nnz, actual_windows, actual_counts = \
f(pos, b, np.count_nonzero, 10)
aeq(expected_nnz, actual_nnz)
aeq(expected_windows, actual_windows)
aeq(expected_counts, actual_counts)
# boolean array, not all true
b = [False, True, False, True]
expected_nnz = [0, 1, 1]
expected_windows = [[1, 10], [11, 20], [21, 27]]
expected_counts = [1, 2, 1]
actual_nnz, actual_windows, actual_counts = \
f(pos, b, np.count_nonzero, 10)
aeq(expected_windows, actual_windows)
aeq(expected_nnz, actual_nnz)
aeq(expected_counts, actual_counts)
# explicit start and stop
b = [False, True, False, True]
expected_nnz = [1, 0, 1]
expected_windows = [[5, 14], [15, 24], [25, 29]]
expected_counts = [1, 1, 1]
actual_nnz, actual_windows, actual_counts = \
f(pos, b, np.count_nonzero, 10, start=5, stop=29)
aeq(expected_windows, actual_windows)
aeq(expected_nnz, actual_nnz)
aeq(expected_counts, actual_counts)
# boolean array, bad length
b = [False, True, False]
with pytest.raises(ValueError):
f(pos, b, np.count_nonzero, 10)
# 2D, 4 variants, 2 samples
b = [[True, False],
[True, True],
[True, False],
[True, True]]
expected_nnz = [[1, 0],
[2, 1],
[1, 1]]
expected_windows = [[1, 10], [11, 20], [21, 27]]
expected_counts = [1, 2, 1]
actual_nnz, actual_windows, actual_counts = \
f(pos, b, statistic=lambda x: np.sum(x, axis=0), size=10)
aeq(expected_nnz, actual_nnz)
aeq(expected_windows, actual_windows)
aeq(expected_counts, actual_counts)
def test_per_base(self):
pos = [1, 12, 15, 27]
# boolean array, all true
b = [True, True, True, True]
# N.B., final bin includes right edge
expected_nnz = [1, 2, 1]
expected_windows = [[1, 10], [11, 20], [21, 27]]
expected_counts = [1, 2, 1]
expected_densities = [1/10, 2/10, 1/7]
expected_n_bases = [10, 10, 7]
nnz, windows, counts = allel.windowed_statistic(
pos, b, statistic=np.count_nonzero, size=10, start=1
)
densities, n_bases = allel.per_base(nnz, windows)
aeq(expected_nnz, nnz)
aeq(expected_windows, windows)
aeq(expected_counts, counts)
aeq(expected_densities, densities)
aeq(expected_n_bases, n_bases)
# boolean array, not all true
b = [False, True, False, True]
expected_densities = [0/10, 1/10, 1/7]
expected_n_bases = [10, 10, 7]
nnz, windows, counts = allel.windowed_statistic(
pos, b, statistic=np.count_nonzero, size=10, start=1
)
densities, n_bases = allel.per_base(nnz, windows)
aeq(expected_densities, densities)
aeq(expected_n_bases, n_bases)
# 2D, 4 variants, 2 samples
b = [[True, False],
[True, True],
[True, False],
[True, True]]
expected_densities = [[1/10, 0/10],
[2/10, 1/10],
[1/7, 1/7]]
expected_n_bases = [10, 10, 7]
nnz, windows, counts = allel.windowed_statistic(
pos, b, statistic=lambda x: np.sum(x, axis=0), size=10, start=1
)
densities, n_bases = allel.per_base(nnz, windows)
aeq(expected_densities, densities)
aeq(expected_n_bases, n_bases)
# include is_accessible array option
is_accessible = np.array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1, 1, 1, 1, 0, 0, 1, 1, 0, 0,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1], dtype=bool)
b = [False, True, False, True]
expected_densities = [-1, 1/6, 1/7]
expected_n_bases = [0, 6, 7]
nnz, windows, counts = allel.windowed_statistic(
pos, b, statistic=np.count_nonzero, size=10, start=1
)
densities, n_bases = allel.per_base(nnz, windows, is_accessible=is_accessible, fill=-1)
aeq(expected_densities, densities)
aeq(expected_n_bases, n_bases)
def test_equally_accessible_windows(self):
is_accessible = np.array([1, 0, 0, 1, 1, 0, 1, 0, 1])
# default options
actual = allel.equally_accessible_windows(is_accessible, size=2)
expect = np.array([[1, 4], [5, 7]])
aeq(expect, actual)
# with step
actual = allel.equally_accessible_windows(is_accessible, size=2, step=1)
expect = np.array([[1, 4], [4, 5], [5, 7], [7, 9]])
aeq(expect, actual)
# with start and stop
actual = allel.equally_accessible_windows(is_accessible, size=2, start=4, stop=5)
expect = np.array([[4, 5]])
aeq(expect, actual)
class TestDiversityDivergence(unittest.TestCase):
def test_mean_pairwise_diversity(self):
# start with simplest case, two haplotypes, one pairwise comparison
h = HaplotypeArray([[0, 0],
[1, 1],
[0, 1],
[1, 2],
[0, -1],
[-1, -1]])
ac = h.count_alleles()
expect = [0, 0, 1, 1, -1, -1]
actual = allel.mean_pairwise_difference(ac, fill=-1)
aeq(expect, actual)
# four haplotypes, 6 pairwise comparison
h = HaplotypeArray([[0, 0, 0, 0],
[0, 0, 0, 1],
[0, 0, 1, 1],
[0, 1, 1, 1],
[1, 1, 1, 1],
[0, 0, 1, 2],
[0, 1, 1, 2],
[0, 1, -1, -1],
[-1, -1, -1, -1]])
ac = h.count_alleles()
expect = [0, 3/6, 4/6, 3/6, 0, 5/6, 5/6, 1, -1]
actual = allel.mean_pairwise_difference(ac, fill=-1)
assert_array_almost_equal(expect, actual)
def test_sequence_divergence(self):
from allel import sequence_divergence
pos = [2, 4, 8]
ac1 = AlleleCountsArray([[2, 0],
[2, 0],
[2, 0]])
ac2 = AlleleCountsArray([[0, 2],
[0, 2],
[0, 2]])
# all variants
e = 3 / 7
a = sequence_divergence(pos, ac1, ac2)
assert e == a
# start/stop
e = 2 / 6
a = sequence_divergence(pos, ac1, ac2, start=0, stop=5)
assert e == a
        # start/stop, with allele numbers (an1/an2) provided explicitly
an1 = ac1.sum(axis=1)
an2 = ac2.sum(axis=1)
e = 2 / 6
a = sequence_divergence(pos, ac1, ac2, start=0, stop=5, an1=an1,
an2=an2)
assert e == a
def test_windowed_diversity(self):
# four haplotypes, 6 pairwise comparison
h = HaplotypeArray([[0, 0, 0, 0],
[0, 0, 0, 1],
[0, 0, 1, 1],
[0, 1, 1, 1],
[1, 1, 1, 1],
[0, 0, 1, 2],
[0, 1, 1, 2],
[0, 1, -1, -1],
[-1, -1, -1, -1]])
ac = h.count_alleles()
# mean pairwise diversity
# expect = [0, 3/6, 4/6, 3/6, 0, 5/6, 5/6, 1, -1]
pos = SortedIndex([2, 4, 7, 14, 15, 18, 19, 25, 27])
expect = [(7/6)/10, (13/6)/10, 1/11]
actual, _, _, _ = allel.windowed_diversity(pos, ac, size=10, start=1, stop=31)
assert_array_almost_equal(expect, actual)
def test_mean_pairwise_divergence(self):
# simplest case, two haplotypes in each population
h = HaplotypeArray([[0, 0, 0, 0],
[0, 0, 0, 1],
[0, 0, 1, 1],
[0, 1, 1, 1],
[1, 1, 1, 1],
[0, 0, 1, 2],
[0, 1, 1, 2],
[0, 1, -1, -1],
[-1, -1, -1, -1]])
h1 = h.take([0, 1], axis=1)
h2 = h.take([2, 3], axis=1)
ac1 = h1.count_alleles()
ac2 = h2.count_alleles()
expect = [0/4, 2/4, 4/4, 2/4, 0/4, 4/4, 3/4, -1, -1]
actual = allel.mean_pairwise_difference_between(ac1, ac2, fill=-1)
aeq(expect, actual)
def test_windowed_divergence(self):
# simplest case, two haplotypes in each population
h = HaplotypeArray([[0, 0, 0, 0],
[0, 0, 0, 1],
[0, 0, 1, 1],
[0, 1, 1, 1],
[1, 1, 1, 1],
[0, 0, 1, 2],
[0, 1, 1, 2],
[0, 1, -1, -1],
[-1, -1, -1, -1]])
h1 = h.take([0, 1], axis=1)
h2 = h.take([2, 3], axis=1)
ac1 = h1.count_alleles()
ac2 = h2.count_alleles()
# mean pairwise divergence
# expect = [0/4, 2/4, 4/4, 2/4, 0/4, 4/4, 3/4, -1, -1]
pos = SortedIndex([2, 4, 7, 14, 15, 18, 19, 25, 27])
expect = [(6/4)/10, (9/4)/10, 0/11]
actual, _, _, _ = allel.windowed_divergence(
pos, ac1, ac2, size=10, start=1, stop=31
)
assert_array_almost_equal(expect, actual)
def test_tajima_d(self):
from allel import tajima_d
# example with calculable value
ac = AlleleCountsArray([[1, 3],
[2, 2],
[3, 1]])
expect = approx(0.168, 0.01)
actual = tajima_d(ac)
assert expect == actual
# too few sites
ac = AlleleCountsArray([[2, 2],
[3, 1]])
assert np.nan is tajima_d(ac)
# too few segregating sites
ac = AlleleCountsArray([[4, 0],
[2, 2],
[3, 1]])
assert np.nan is tajima_d(ac)
# allow people to override if they really want to
assert approx(0.592, 0.01) == tajima_d(ac, min_sites=2)
def test_moving_tajima_d(self):
from allel import moving_tajima_d
# example with calculable value
ac = AlleleCountsArray([[1, 3],
[2, 2],
[3, 1],
[1, 3],
[2, 2]])
expect = np.array([0.168] * 3)
actual = moving_tajima_d(ac, size=3, step=1)
assert_array_almost_equal(expect, actual, decimal=3)
# too few sites
actual = moving_tajima_d(ac, size=2, step=1)
assert 4 == len(actual)
assert np.all(np.isnan(actual))
# too few segregating sites
ac = AlleleCountsArray([[4, 0],
[2, 2],
[3, 1],
[4, 0],
[2, 2]])
actual = moving_tajima_d(ac, size=3, step=1)
assert 3 == len(actual)
assert np.all(np.isnan(actual))
# allow people to override if they really want to
expect = np.array([0.592] * 3)
actual = moving_tajima_d(ac, size=3, step=1, min_sites=2)
assert_array_almost_equal(expect, actual, decimal=3)
def test_windowed_tajima_d(self):
from allel import windowed_tajima_d
pos = np.array([1, 11, 21, 31, 41])
# example with calculable value
ac = AlleleCountsArray([[1, 3],
[2, 2],
[3, 1],
[1, 3],
[2, 2]])
expect = np.array([0.168] * 3)
actual, _, _ = windowed_tajima_d(pos, ac, size=25, step=10)
assert_array_almost_equal(expect, actual, decimal=3)
# too few sites
actual, _, _ = windowed_tajima_d(pos, ac, size=15, step=10)
assert 4 == len(actual)
assert np.all(np.isnan(actual))
# too few segregating sites
ac = AlleleCountsArray([[4, 0],
[2, 2],
[3, 1],
[4, 0],
[2, 2]])
actual, _, _ = windowed_tajima_d(pos, ac, size=25, step=10)
assert 3 == len(actual)
assert np.all(np.isnan(actual))
# allow people to override if they really want to
expect = np.array([0.592] * 3)
actual, _, _ = windowed_tajima_d(pos, ac, size=25, step=10, min_sites=2)
assert_array_almost_equal(expect, actual, decimal=3)
class TestHardyWeinberg(unittest.TestCase):
def test_heterozygosity_observed(self):
# diploid
g = GenotypeArray([[[0, 0], [0, 0]],
[[1, 1], [1, 1]],
[[1, 1], [2, 2]],
[[0, 0], [0, 1]],
[[0, 0], [0, 2]],
[[1, 1], [1, 2]],
[[0, 1], [0, 1]],
[[0, 1], [1, 2]],
[[0, 0], [-1, -1]],
[[0, 1], [-1, -1]],
[[-1, -1], [-1, -1]]], dtype='i1')
expect = [0, 0, 0, .5, .5, .5, 1, 1, 0, 1, -1]
actual = allel.heterozygosity_observed(g, fill=-1)
aeq(expect, actual)
# polyploid
g = GenotypeArray([[[0, 0, 0], [0, 0, 0]],
[[1, 1, 1], [1, 1, 1]],
[[1, 1, 1], [2, 2, 2]],
[[0, 0, 0], [0, 0, 1]],
[[0, 0, 0], [0, 0, 2]],
[[1, 1, 1], [0, 1, 2]],
[[0, 0, 1], [0, 1, 1]],
[[0, 1, 1], [0, 1, 2]],
[[0, 0, 0], [-1, -1, -1]],
[[0, 0, 1], [-1, -1, -1]],
[[-1, -1, -1], [-1, -1, -1]]], dtype='i1')
expect = [0, 0, 0, .5, .5, .5, 1, 1, 0, 1, -1]
actual = allel.heterozygosity_observed(g, fill=-1)
aeq(expect, actual)
def test_heterozygosity_expected(self):
def refimpl(f, ploidy, fill=0):
"""Limited reference implementation for testing purposes."""
# check allele frequencies sum to 1
af_sum = np.sum(f, axis=1)
# assume three alleles
p = f[:, 0]
q = f[:, 1]
r = f[:, 2]
out = 1 - p**ploidy - q**ploidy - r**ploidy
with ignore_invalid():
out[(af_sum < 1) | np.isnan(af_sum)] = fill
return out
# diploid
g = GenotypeArray([[[0, 0], [0, 0]],
[[1, 1], [1, 1]],
[[1, 1], [2, 2]],
[[0, 0], [0, 1]],
[[0, 0], [0, 2]],
[[1, 1], [1, 2]],
[[0, 1], [0, 1]],
[[0, 1], [1, 2]],
[[0, 0], [-1, -1]],
[[0, 1], [-1, -1]],
[[-1, -1], [-1, -1]]], dtype='i1')
expect1 = [0, 0, 0.5, .375, .375, .375, .5, .625, 0, .5, -1]
af = g.count_alleles().to_frequencies()
expect2 = refimpl(af, ploidy=g.ploidy, fill=-1)
actual = allel.heterozygosity_expected(af, ploidy=g.ploidy, fill=-1)
assert_array_almost_equal(expect1, actual)
assert_array_almost_equal(expect2, actual)
expect3 = [0, 0, 0.5, .375, .375, .375, .5, .625, 0, .5, 0]
actual = allel.heterozygosity_expected(af, ploidy=g.ploidy, fill=0)
assert_array_almost_equal(expect3, actual)
# polyploid
g = GenotypeArray([[[0, 0, 0], [0, 0, 0]],
[[1, 1, 1], [1, 1, 1]],
[[1, 1, 1], [2, 2, 2]],
[[0, 0, 0], [0, 0, 1]],
[[0, 0, 0], [0, 0, 2]],
[[1, 1, 1], [0, 1, 2]],
[[0, 0, 1], [0, 1, 1]],
[[0, 1, 1], [0, 1, 2]],
[[0, 0, 0], [-1, -1, -1]],
[[0, 0, 1], [-1, -1, -1]],
[[-1, -1, -1], [-1, -1, -1]]], dtype='i1')
af = g.count_alleles().to_frequencies()
expect = refimpl(af, ploidy=g.ploidy, fill=-1)
actual = allel.heterozygosity_expected(af, ploidy=g.ploidy, fill=-1)
assert_array_almost_equal(expect, actual)
def test_inbreeding_coefficient(self):
# diploid
g = GenotypeArray([[[0, 0], [0, 0]],
[[1, 1], [1, 1]],
[[1, 1], [2, 2]],
[[0, 0], [0, 1]],
[[0, 0], [0, 2]],
[[1, 1], [1, 2]],
[[0, 1], [0, 1]],
[[0, 1], [1, 2]],
[[0, 0], [-1, -1]],
[[0, 1], [-1, -1]],
[[-1, -1], [-1, -1]]], dtype='i1')
# ho = np.array([0, 0, 0, .5, .5, .5, 1, 1, 0, 1, -1])
# he = np.array([0, 0, 0.5, .375, .375, .375, .5, .625, 0, .5, -1])
# expect = 1 - (ho/he)
expect = [-1, -1, 1-0, 1-(.5/.375), 1-(.5/.375), 1-(.5/.375),
1-(1/.5), 1-(1/.625), -1, 1-(1/.5), -1]
actual = allel.inbreeding_coefficient(g, fill=-1)
assert_array_almost_equal(expect, actual)
class TestDistance(unittest.TestCase):
def test_pdist(self):
from allel.stats.distance import pdist
h = HaplotypeArray([[0, 0, 0, 0],
[0, 0, 0, 1],
[0, 0, 1, 1],
[0, 1, 1, 1],
[1, 1, 1, 1],
[0, 0, 1, 2],
[0, 1, 1, 2],
[0, 1, -1, -1],
[-1, -1, -1, -1]])
import scipy.spatial
d1 = scipy.spatial.distance.pdist(h.T, 'hamming')
d2 = pdist(h, 'hamming')
aeq(d1, d2)
def test_pairwise_distance_multidim(self):
g = GenotypeArray([[[0, 0], [0, 0]],
[[1, 1], [1, 1]],
[[1, 1], [2, 2]],
[[0, 0], [0, 1]],
[[0, 0], [0, 2]],
[[1, 1], [1, 2]],
[[0, 1], [0, 1]],
[[0, 1], [1, 2]],
[[0, 0], [-1, -1]],
[[0, 1], [-1, -1]],
[[-1, -1], [-1, -1]]], dtype='i1')
gac = g.to_allele_counts()
def metric(ac1, ac2):
mpd = allel.mean_pairwise_difference_between(ac1, ac2, fill=0)
return mpd.sum()
expect = [allel.mean_pairwise_difference_between(gac[:, 0], gac[:, 1], fill=0).sum()]
actual = allel.pairwise_distance(gac, metric)
aeq(expect, actual)
def test_condensed_coords(self):
from allel import condensed_coords
assert 0 == condensed_coords(0, 1, 2)
assert 0 == condensed_coords(1, 0, 2)
assert 0 == condensed_coords(0, 1, 3)
assert 0 == condensed_coords(1, 0, 3)
assert 1 == condensed_coords(0, 2, 3)
assert 1 == condensed_coords(2, 0, 3)
assert 2 == condensed_coords(1, 2, 3)
assert 2 == condensed_coords(2, 1, 3)
with pytest.raises(ValueError):
condensed_coords(0, 0, 1)
condensed_coords(0, 1, 1)
condensed_coords(1, 0, 1)
condensed_coords(0, 0, 2)
condensed_coords(0, 2, 2)
condensed_coords(2, 0, 2)
condensed_coords(1, 1, 2)
condensed_coords(0, 0, 3)
condensed_coords(1, 1, 3)
condensed_coords(2, 2, 3)
def test_condensed_coords_within(self):
from allel import condensed_coords_within
pop = [0, 1]
n = 3
expect = [0]
actual = condensed_coords_within(pop, n)
assert expect == actual
pop = [0, 2]
n = 3
expect = [1]
actual = condensed_coords_within(pop, n)
assert expect == actual
pop = [1, 2]
n = 3
expect = [2]
actual = condensed_coords_within(pop, n)
assert expect == actual
pop = [0, 1, 3]
n = 4
expect = [0, 2, 4]
actual = condensed_coords_within(pop, n)
assert expect == actual
pop = [0, 0]
with pytest.raises(ValueError):
condensed_coords_within(pop, n)
def test_condensed_coords_between(self):
from allel import condensed_coords_between
pop1 = [0, 1]
pop2 = [2, 3]
n = 4
expect = [1, 2, 3, 4]
actual = condensed_coords_between(pop1, pop2, n)
assert expect == actual
pop1 = [0, 2]
pop2 = [1, 3]
n = 4
expect = [0, 2, 3, 5]
actual = condensed_coords_between(pop1, pop2, n)
assert expect == actual
with pytest.raises(ValueError):
condensed_coords_between(pop1, pop1, n)
class TestLinkageDisequilibrium(unittest.TestCase):
def test_rogers_huff_r(self):
gn = [[0, 1, 2],
[0, 1, 2]]
expect = 1.
actual = allel.rogers_huff_r(gn)
assert expect == actual
gn = [[0, 1, 2],
[2, 1, 0]]
expect = -1.
actual = allel.rogers_huff_r(gn)
assert expect == actual
gn = [[0, 0, 0],
[0, 0, 0]]
actual = allel.rogers_huff_r(gn)
assert np.isnan(actual)
gn = [[0, 0, 0],
[1, 1, 1]]
actual = allel.rogers_huff_r(gn)
assert np.isnan(actual)
gn = [[1, 1, 1],
[1, 1, 1]]
actual = allel.rogers_huff_r(gn)
assert np.isnan(actual)
gn = [[0, -1, 0],
[-1, 1, -1]]
actual = allel.rogers_huff_r(gn)
assert np.isnan(actual)
gn = [[0, 1, 0],
[-1, -1, -1]]
actual = allel.rogers_huff_r(gn)
assert np.isnan(actual)
gn = [[0, 1, 0, 1],
[0, 1, 1, 0]]
expect = 0
actual = allel.rogers_huff_r(gn)
assert expect == actual
gn = [[0, 1, 2, -1],
[0, 1, 2, 2]]
expect = 1.
actual = allel.rogers_huff_r(gn)
assert expect == actual
gn = [[0, 1, 2, 2],
[0, 1, 2, -1]]
expect = 1.
actual = allel.rogers_huff_r(gn)
assert expect == actual
gn = [[0, 1, 2],
[0, 1, -1]]
expect = 1.
actual = allel.rogers_huff_r(gn)
assert expect == actual
gn = [[0, 2],
[2, 0],
[0, 1]]
expect = [-1, 1, -1]
actual = allel.rogers_huff_r(gn)
assert_array_almost_equal(expect, actual)
gn = [[0, 2, 0],
[0, 2, 0],
[2, 0, 2],
[0, 2, -1]]
expect = [1, -1, 1, -1, 1, -1]
actual = allel.rogers_huff_r(gn)
assert_array_almost_equal(expect, actual)
def test_rogers_huff_r_between(self):
gna = [[0, 1, 2]]
gnb = [[0, 1, 2]]
expect = 1.
actual = allel.rogers_huff_r_between(gna, gnb)
assert expect == actual
gna = [[0, 1, 2]]
gnb = [[2, 1, 0]]
expect = -1.
actual = allel.rogers_huff_r_between(gna, gnb)
assert expect == actual
gna = [[0, 0, 0]]
gnb = [[1, 1, 1]]
actual = allel.rogers_huff_r_between(gna, gnb)
assert np.isnan(actual)
def test_locate_unlinked(self):
gn = [[0, 1, 2],
[0, 1, 2]]
expect = [True, False]
actual = allel.locate_unlinked(gn, size=2, step=2, threshold=.5)
aeq(expect, actual)
gn = [[0, 1, 1, 2],
[0, 1, 1, 2],
[1, 1, 0, 2],
[1, 1, 0, 2]]
actual = allel.locate_unlinked(gn, size=2, step=1, threshold=.5)
expect = [True, False, True, False]
aeq(expect, actual)
gn = [[0, 1, 1, 2],
[0, 1, 1, 2],
[0, 1, 1, 2],
[1, 1, 0, 2],
[1, 1, 0, 2]]
actual = allel.locate_unlinked(gn, size=2, step=1, threshold=.5)
expect = [True, False, True, True, False]
aeq(expect, actual)
actual = allel.locate_unlinked(gn, size=3, step=1, threshold=.5)
expect = [True, False, False, True, False]
aeq(expect, actual)
# test with bcolz carray
import bcolz
gnz = bcolz.carray(gn, chunklen=2)
actual = allel.locate_unlinked(gnz, size=2, step=1, threshold=.5, blen=2)
expect = [True, False, True, True, False]
aeq(expect, actual)
class TestAdmixture(unittest.TestCase):
def test_patterson_f2(self):
aca = [[0, 2],
[2, 0],
[1, 1],
[0, 0]]
acb = [[0, 2],
[0, 2],
[0, 2],
[0, 2]]
expect = [0., 1., 0., np.nan]
actual = allel.patterson_f2(aca, acb)
assert_array_almost_equal(expect, actual)
def test_patterson_f3(self):
aca = [[0, 2],
[2, 0],
[0, 2],
[0, 2],
[0, 0]]
acb = [[2, 0],
[0, 2],
[0, 2],
[0, 2],
[0, 2]]
acc = [[1, 1],
[1, 1],
[0, 2],
[2, 0],
[1, 1]]
expect_f3 = [-.5, -.5, 0., 1., np.nan]
actual_f3, actual_hzc = allel.patterson_f3(acc, aca, acb)
assert_array_almost_equal(expect_f3, actual_f3)
expect_hzc = [1., 1., 0., 0., 1.]
assert_array_almost_equal(expect_hzc, actual_hzc)
def test_patterson_d(self):
aca = [[0, 2],
[2, 0],
[2, 0],
[1, 1],
[0, 0]]
acb = [[0, 2],
[0, 2],
[0, 2],
[1, 1],
[0, 2]]
acc = [[2, 0],
[2, 0],
[0, 2],
[1, 1],
[0, 2]]
acd = [[2, 0],
[0, 2],
[2, 0],
[1, 1],
[0, 2]]
num, den = allel.patterson_d(aca, acb, acc, acd)
expect_num = [0., 1., -1., 0., np.nan]
expect_den = [0., 1., 1., 0.25, np.nan]
assert_array_almost_equal(expect_num, num)
assert_array_almost_equal(expect_den, den)
| python | 28,231 |
#!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Hierarchical Deterministic wallet function."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
start_nodes,
start_node,
assert_equal,
connect_nodes_bi,
)
import os
import shutil
class WalletHDTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 2
self.node_args = [['-usehd=0'], ['-usehd=1', '-keypool=0']]
def setup_network(self):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, self.node_args)
self.is_network_split = False
connect_nodes_bi(self.nodes, 0, 1)
    def run_test(self):
tmpdir = self.options.tmpdir
# Make sure we use hd, keep masterkeyid
masterkeyid = self.nodes[1].getwalletinfo()['hdmasterkeyid']
assert_equal(len(masterkeyid), 40)
# Import a non-HD private key in the HD wallet
non_hd_add = self.nodes[0].getnewaddress()
self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))
# This should be enough to keep the master key and the non-HD key
self.nodes[1].backupwallet(tmpdir + "/hd.bak")
#self.nodes[1].dumpwallet(tmpdir + "/hd.dump")
# Derive some HD addresses and remember the last
        # Also send funds to each address
self.nodes[0].generate(101)
hd_add = None
num_hd_adds = 300
for i in range(num_hd_adds):
hd_add = self.nodes[1].getnewaddress()
hd_info = self.nodes[1].validateaddress(hd_add)
assert_equal(hd_info["hdkeypath"], "m/0'/0'/"+str(i+1)+"'")
assert_equal(hd_info["hdmasterkeyid"], masterkeyid)
self.nodes[0].sendtoaddress(hd_add, 1)
self.nodes[0].generate(1)
self.nodes[0].sendtoaddress(non_hd_add, 1)
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
print("Restore backup ...")
self.stop_node(1)
os.remove(self.options.tmpdir + "/node1/regtest/wallet.dat")
shutil.copyfile(tmpdir + "/hd.bak", tmpdir + "/node1/regtest/wallet.dat")
self.nodes[1] = start_node(1, self.options.tmpdir, self.node_args[1])
#connect_nodes_bi(self.nodes, 0, 1)
# Assert that derivation is deterministic
hd_add_2 = None
for _ in range(num_hd_adds):
hd_add_2 = self.nodes[1].getnewaddress()
hd_info_2 = self.nodes[1].validateaddress(hd_add_2)
assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/"+str(_+1)+"'")
assert_equal(hd_info_2["hdmasterkeyid"], masterkeyid)
assert_equal(hd_add, hd_add_2)
# Needs rescan
self.stop_node(1)
self.nodes[1] = start_node(1, self.options.tmpdir, self.node_args[1] + ['-rescan'])
#connect_nodes_bi(self.nodes, 0, 1)
assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
if __name__ == '__main__':
    WalletHDTest().main()
| python | 3,260 |
# -*- coding: utf-8 -*-
import scrapy
class GithubSpider(scrapy.Spider):
name = 'github'
allowed_domains = ['github.com']
start_urls = []
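    # NOTE (assumption): start_urls and the self.scrapy_branches flag used in
    # parse() are expected to be supplied externally (e.g. via `scrapy crawl
    # github -a ...` spider arguments or a subclass); they are not defined here.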
def parse(self, response):
url = response.request.url.replace("https://github.com/", "")
if len(response.css("#raw-url")):
pieces = response.request.url.split('/')
extensions_file_url = pieces[-1].split('.')[-1]
quantity_lines = response.\
css("div.file-info::text").\
re_first(r"(\d+) lines")
size_file, unit = response.\
css("div.file-info::text").\
re(r"(\d+\.*\d*)+ (Bytes|KB|MB)")
yield {
'url': url,
'qty_lines': quantity_lines or 0,
'size_files': size_file,
'unit': unit,
'is_file': 1,
'extensions_file_url': extensions_file_url
}
else:
yield {
'url': url,
'qty_lines': 0,
'size_files': 0,
'unit': '-',
'is_file': 0,
'extensions_file_url': '-'
}
seletor_link = 'table.files.js-navigation-container tbody' \
' tr.js-navigation-item td.content a'
for a in response.css(seletor_link):
yield response.follow(a, callback=self.parse)
if response.url in self.start_urls and self.scrapy_branches:
seletor_branch = 'a.select-menu-item.js-navigation-item' \
'.js-navigation-open'
for a in response.css(seletor_branch):
if a.re_first(r'data-name="(\w+-\w+)+"') != "master":
yield response.follow(a, callback=self.parse)
| python | 1,782 |
from database import *
from emulator import *
import tensorflow as tf
import numpy as np
import sys
import time
from ale_python_interface import ALEInterface
import cv2
from scipy import misc
import gc #garbage collector
import _thread
gc.enable()
params = {
'visualize' : True,
'network_type':'nature',
'ckpt_file':None,
'steps_per_epoch': 50000,
'num_epochs': 100,
'eval_freq':50000,
'steps_per_eval':10000,
'copy_freq' : 10000,
'disp_freq':10000,
'save_interval':10000,
'db_size': 1000000,
'batch': 32,
'num_act': 0,
'input_dims' : [210, 160, 3],
'input_dims_proc' : [84, 84, 4],
'learning_interval': 1,
'eps': 1.0,
'eps_step':1000000,
'eps_min' : 0.1,
'eps_eval' : 0.05,
'discount': 0.95,
'lr': 0.0002,
'rms_decay':0.99,
'rms_eps':1e-6,
'train_start':100,
'img_scale':255.0,
'clip_delta' : 1,
'gpu_fraction' : 0.6,
'batch_accumulator':'mean',
'record_eval' : True,
'only_eval' : 'n'
}
class deep_atari:
def __init__(self,params):
print ('Initializing Module...')
self.params = params
self.gpu_config = tf.ConfigProto(gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=self.params['gpu_fraction']))
self.sess = tf.Session(config=self.gpu_config)
self.DB = database(self.params)
self.engine = emulator(rom_name='breakout.bin', vis=self.params['visualize'],windowname=self.params['network_type']+'_preview')
self.params['num_act'] = len(self.engine.legal_actions)
self.build_net()
self.training = True
def build_net(self):
print ('Building QNet and targetnet...')
self.qnet = DQN(self.params,'qnet')
self.targetnet = DQN(self.params,'targetnet')
self.sess.run(tf.initialize_all_variables())
saver_dict = {'qw1':self.qnet.w1,'qb1':self.qnet.b1,
'qw2':self.qnet.w2,'qb2':self.qnet.b2,
'qw3':self.qnet.w3,'qb3':self.qnet.b3,
'qw4':self.qnet.w4,'qb4':self.qnet.b4,
'qw5':self.qnet.w5,'qb5':self.qnet.b5,
'tw1':self.targetnet.w1,'tb1':self.targetnet.b1,
'tw2':self.targetnet.w2,'tb2':self.targetnet.b2,
'tw3':self.targetnet.w3,'tb3':self.targetnet.b3,
'tw4':self.targetnet.w4,'tb4':self.targetnet.b4,
'tw5':self.targetnet.w5,'tb5':self.targetnet.b5,
'step':self.qnet.global_step}
self.saver = tf.train.Saver(saver_dict)
#self.saver = tf.train.Saver()
self.cp_ops = [
self.targetnet.w1.assign(self.qnet.w1),self.targetnet.b1.assign(self.qnet.b1),
self.targetnet.w2.assign(self.qnet.w2),self.targetnet.b2.assign(self.qnet.b2),
self.targetnet.w3.assign(self.qnet.w3),self.targetnet.b3.assign(self.qnet.b3),
self.targetnet.w4.assign(self.qnet.w4),self.targetnet.b4.assign(self.qnet.b4),
self.targetnet.w5.assign(self.qnet.w5),self.targetnet.b5.assign(self.qnet.b5)]
self.sess.run(self.cp_ops)
if self.params['ckpt_file'] is not None:
print ('loading checkpoint : ' + self.params['ckpt_file'])
self.saver.restore(self.sess,self.params['ckpt_file'])
temp_train_cnt = self.sess.run(self.qnet.global_step)
temp_step = temp_train_cnt * self.params['learning_interval']
print ('Continue from')
print (' -> Steps : ' + str(temp_step))
print (' -> Minibatch update : ' + str(temp_train_cnt))
def start(self):
self.reset_game()
self.step = 0
self.reset_statistics('all')
self.train_cnt = self.sess.run(self.qnet.global_step)
if self.train_cnt > 0 :
self.step = self.train_cnt * self.params['learning_interval']
try:
self.log_train = open('log_training_'+self.params['network_type']+'.csv','a')
except:
self.log_train = open('log_training_'+self.params['network_type']+'.csv','w')
self.log_train.write('step,epoch,train_cnt,avg_reward,avg_q,epsilon,time\n')
try:
self.log_eval = open('log_eval_'+self.params['network_type']+'.csv','a')
except:
self.log_eval = open('log_eval_'+self.params['network_type']+'.csv','w')
self.log_eval.write('step,epoch,train_cnt,avg_reward,avg_q,epsilon,time\n')
else:
self.log_train = open('log_training_'+self.params['network_type']+'.csv','w')
self.log_train.write('step,epoch,train_cnt,avg_reward,avg_q,epsilon,time\n')
self.log_eval = open('log_eval_'+self.params['network_type']+'.csv','w')
self.log_eval.write('step,epoch,train_cnt,avg_reward,avg_q,epsilon,time\n')
self.s = time.time()
print (self.params)
print ('Start training!')
print ('Collecting replay memory for ' + str(self.params['train_start']) + ' steps')
while self.step < (self.params['steps_per_epoch'] * self.params['num_epochs'] * self.params['learning_interval'] + self.params['train_start']):
if self.training :
if self.DB.get_size() >= self.params['train_start'] : self.step += 1 ; self.steps_train += 1
else : self.step_eval += 1
if self.state_gray_old is not None and self.training:
self.DB.insert(self.state_gray_old[26:110,:],self.reward_scaled,self.action_idx,self.terminal)
if self.training and self.params['copy_freq'] > 0 and self.step % self.params['copy_freq'] == 0 and self.DB.get_size() > self.params['train_start']:
print ('&&& Copying Qnet to targetnet\n')
self.sess.run(self.cp_ops)
if self.training and self.step % self.params['learning_interval'] == 0 and self.DB.get_size() > self.params['train_start'] :
bat_s,bat_a,bat_t,bat_n,bat_r = self.DB.get_batches()
bat_a = self.get_onehot(bat_a)
if self.params['copy_freq'] > 0 :
feed_dict={self.targetnet.x: bat_n}
q_t = self.sess.run(self.targetnet.y,feed_dict=feed_dict)
else:
feed_dict={self.qnet.x: bat_n}
q_t = self.sess.run(self.qnet.y,feed_dict=feed_dict)
q_t = np.amax(q_t,axis=1)
feed_dict={self.qnet.x: bat_s, self.qnet.q_t: q_t, self.qnet.actions: bat_a, self.qnet.terminals:bat_t, self.qnet.rewards: bat_r}
_,self.train_cnt,self.cost = self.sess.run([self.qnet.rmsprop,self.qnet.global_step,self.qnet.cost],feed_dict=feed_dict)
self.total_cost_train += np.sqrt(self.cost)
self.train_cnt_for_disp += 1
if self.training :
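                # Linear epsilon annealing: decay from 1.0 towards eps_min over
                # eps_step agent steps (train_cnt minibatch updates * learning_interval).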
self.params['eps'] = max(self.params['eps_min'],1.0 - float(self.train_cnt * self.params['learning_interval'])/float(self.params['eps_step']))
else:
self.params['eps'] = 0.05
if self.DB.get_size() > self.params['train_start'] and self.step % self.params['save_interval'] == 0 and self.training:
save_idx = self.train_cnt
self.saver.save(self.sess,'ckpt/model_'+self.params['network_type']+'_'+str(save_idx))
sys.stdout.write('$$$ Model saved : %s\n\n' % ('ckpt/model_'+self.params['network_type']+'_'+str(save_idx)))
sys.stdout.flush()
if self.training and self.step > 0 and self.step % self.params['disp_freq'] == 0 and self.DB.get_size() > self.params['train_start'] :
self.write_log_train()
if self.training and self.step > 0 and self.step % self.params['eval_freq'] == 0 and self.DB.get_size() > self.params['train_start'] :
self.reset_game()
if self.step % self.params['steps_per_epoch'] == 0 : self.reset_statistics('all')
else: self.reset_statistics('eval')
self.training = False
#TODO : add video recording
continue
if self.training and self.step > 0 and self.step % self.params['steps_per_epoch'] == 0 and self.DB.get_size() > self.params['train_start']:
self.reset_game()
self.reset_statistics('all')
#self.training = False
continue
if not self.training and self.step_eval >= self.params['steps_per_eval'] :
self.write_log_eval()
self.reset_game()
self.reset_statistics('eval')
self.training = True
continue
if self.terminal :
self.reset_game()
if self.training :
self.num_epi_train += 1
self.total_reward_train += self.epi_reward_train
self.epi_reward_train = 0
else :
self.num_epi_eval += 1
self.total_reward_eval += self.epi_reward_eval
self.epi_reward_eval = 0
continue
self.action_idx,self.action, self.maxQ = self.select_action(self.state_proc)
self.state, self.reward, self.terminal = self.engine.next(self.action)
self.reward_scaled = self.reward // max(1,abs(self.reward))
if self.training : self.epi_reward_train += self.reward ; self.total_Q_train += self.maxQ
else : self.epi_reward_eval += self.reward ; self.total_Q_eval += self.maxQ
self.state_gray_old = np.copy(self.state_gray)
self.state_proc[:,:,0:3] = self.state_proc[:,:,1:4]
self.state_resized = cv2.resize(self.state,(84,110))
self.state_gray = cv2.cvtColor(self.state_resized, cv2.COLOR_BGR2GRAY)
self.state_proc[:,:,3] = self.state_gray[26:110,:]/self.params['img_scale']
def reset_game(self):
self.state_proc = np.zeros((84,84,4)); self.action = -1; self.terminal = False; self.reward = 0
self.state = self.engine.newGame()
self.state_resized = cv2.resize(self.state,(84,110))
self.state_gray = cv2.cvtColor(self.state_resized, cv2.COLOR_BGR2GRAY)
self.state_gray_old = None
self.state_proc[:,:,3] = self.state_gray[26:110,:]/self.params['img_scale']
def reset_statistics(self,mode):
if mode == 'all':
self.epi_reward_train = 0
self.epi_Q_train = 0
self.num_epi_train = 0
self.total_reward_train = 0
self.total_Q_train = 0
self.total_cost_train = 0
self.steps_train = 0
self.train_cnt_for_disp = 0
self.step_eval = 0
self.epi_reward_eval = 0
self.epi_Q_eval = 0
self.num_epi_eval = 0
self.total_reward_eval = 0
self.total_Q_eval = 0
def write_log_train(self):
sys.stdout.write('### Training (Step : %d , Minibatch update : %d , Epoch %d)\n' % (self.step,self.train_cnt,self.step//self.params['steps_per_epoch'] ))
sys.stdout.write(' Num.Episodes : %d , Avg.reward : %.3f , Avg.Q : %.3f, Avg.loss : %.3f\n' % (self.num_epi_train,float(self.total_reward_train)/max(1,self.num_epi_train),float(self.total_Q_train)/max(1,self.steps_train),self.total_cost_train/max(1,self.train_cnt_for_disp)))
sys.stdout.write(' Epsilon : %.3f , Elapsed time : %.1f\n\n' % (self.params['eps'],time.time()-self.s))
sys.stdout.flush()
self.log_train.write(str(self.step) + ',' + str(self.step//self.params['steps_per_epoch']) + ',' + str(self.train_cnt) + ',')
self.log_train.write(str(float(self.total_reward_train)/max(1,self.num_epi_train)) +','+ str(float(self.total_Q_train)/max(1,self.steps_train)) +',')
self.log_train.write(str(self.params['eps']) +','+ str(time.time()-self.s) + '\n')
self.log_train.flush()
def write_log_eval(self):
sys.stdout.write('@@@ Evaluation (Step : %d , Minibatch update : %d , Epoch %d)\n' % (self.step,self.train_cnt,self.step//self.params['steps_per_epoch'] ))
sys.stdout.write(' Num.Episodes : %d , Avg.reward : %.3f , Avg.Q : %.3f\n' % (self.num_epi_eval,float(self.total_reward_eval)/max(1,self.num_epi_eval),float(self.total_Q_eval)/max(1,self.params['steps_per_eval'])))
sys.stdout.write(' Epsilon : %.3f , Elapsed time : %.1f\n\n' % (self.params['eps'],time.time()-self.s))
sys.stdout.flush()
self.log_eval.write(str(self.step) + ',' + str(self.step//self.params['steps_per_epoch']) + ',' + str(self.train_cnt) + ',')
self.log_eval.write(str(float(self.total_reward_eval)/max(1,self.num_epi_eval)) +','+ str(float(self.total_Q_eval)/max(1,self.params['steps_per_eval'])) +',')
self.log_eval.write(str(self.params['eps']) +','+ str(time.time()-self.s) + '\n')
self.log_eval.flush()
def select_action(self,st):
if np.random.rand() > self.params['eps']:
#greedy with random tie-breaking
Q_pred = self.sess.run(self.qnet.y, feed_dict = {self.qnet.x: np.reshape(st, (1,84,84,4))})[0]
a_winner = np.argwhere(Q_pred == np.amax(Q_pred))
if len(a_winner) > 1:
act_idx = a_winner[np.random.randint(0, len(a_winner))][0]
return act_idx,self.engine.legal_actions[act_idx], np.amax(Q_pred)
else:
act_idx = a_winner[0][0]
return act_idx,self.engine.legal_actions[act_idx], np.amax(Q_pred)
else:
#random
act_idx = np.random.randint(0,len(self.engine.legal_actions))
Q_pred = self.sess.run(self.qnet.y, feed_dict = {self.qnet.x: np.reshape(st, (1,84,84,4))})[0]
return act_idx,self.engine.legal_actions[act_idx], Q_pred[act_idx]
def get_onehot(self,actions):
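        # Convert a batch of integer action indices into a (batch, num_act)
        # one-hot matrix (presumably used to select the Q-value of the taken
        # action when computing the loss).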
actions_onehot = np.zeros((self.params['batch'], self.params['num_act']))
actions = actions.astype(int)
for i in range(self.params['batch']):
actions_onehot[i,actions[i]] = 1
return actions_onehot
if __name__ == "__main__":
dict_items = params.items()
for i in range(1,len(sys.argv),2):
if sys.argv[i] == '-weight' :params['ckpt_file'] = sys.argv[i+1]
elif sys.argv[i] == '-network_type' :params['network_type'] = sys.argv[i+1]
elif sys.argv[i] == '-visualize' :
if sys.argv[i+1] == 'y' : params['visualize'] = True
elif sys.argv[i+1] == 'n' : params['visualize'] = False
else:
print ('Invalid visualization argument!!! Available arguments are')
print (' y or n')
raise ValueError()
elif sys.argv[i] == '-gpu_fraction' : params['gpu_fraction'] = float(sys.argv[i+1])
elif sys.argv[i] == '-db_size' : params['db_size'] = int(sys.argv[i+1])
elif sys.argv[i] == '-only_eval' : params['only_eval'] = sys.argv[i+1]
else :
print ('Invalid arguments!!! Available arguments are')
            print (' -weight (filename)')
            print (' -network_type (nips or nature)')
            print (' -visualize (y or n)')
            print (' -gpu_fraction (0.1~0.9)')
            print (' -db_size (integer)')
raise ValueError()
if params['network_type'] == 'nips':
from DQN_nips import *
elif params['network_type'] == 'nature':
from DQN_nature import *
params['steps_per_epoch']= 200000
params['eval_freq'] = 100000
params['steps_per_eval'] = 10000
params['copy_freq'] = 10000
params['disp_freq'] = 20000
params['save_interval'] = 20000
params['learning_interval'] = 1
params['discount'] = 0.99
params['lr'] = 0.00025
params['rms_decay'] = 0.95
params['rms_eps']=0.01
params['clip_delta'] = 1.0
params['train_start']=50000
params['batch_accumulator'] = 'sum'
params['eps_step'] = 1000000
params['num_epochs'] = 250
params['batch'] = 32
else :
print ('Invalid network type! Available network types are')
        print (' nips or nature')
raise ValueError()
if params['only_eval'] == 'y' : only_eval = True
elif params['only_eval'] == 'n' : only_eval = False
else :
print ('Invalid only_eval option! Available options are')
        print (' y or n')
raise ValueError()
if only_eval:
params['eval_freq'] = 1
params['train_start'] = 100
da = deep_atari(params)
da.start()
| python | 14,411 |
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql.expression import exists
# DB Table configuration
BASE = declarative_base()
class Pokemon(BASE):
__tablename__ = 'pokemon'
id = Column(Integer, primary_key=True)
name = Column(String(40))
base_xp = Column(Integer)
weight = Column(Integer)
height = Column(Integer)
image = Column(String)
def __repr__(self):
return "<Pokemon(name='%s', base_experience='%s', weight='%s', " \
"height='%s', sprite_url='%s'>" \
% (self.name, self.base_xp, self.weight, self.height, self.image)
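# Minimal usage sketch (illustrative; the SQLite URL and field values are
# assumptions, not part of this module):
#   engine = create_engine('sqlite:///pokemon.db')
#   BASE.metadata.create_all(engine)
#   Session = sessionmaker(bind=engine)
#   session = Session()
#   session.add(Pokemon(name='bulbasaur', base_xp=64, weight=69, height=7,
#                       image='https://example.com/bulbasaur.png'))
#   session.commit()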
| python | 749 |
import re
import os
import sys
import six
import json
from future.utils import iteritems
from liveconfigparser.LiveConfigParser import LiveConfigParser
# get ConfigParser
tmpConf = LiveConfigParser()
# URL for config file if any
configEnv = 'HARVESTER_INSTANCE_CONFIG_URL'
if configEnv in os.environ:
configURL = os.environ[configEnv]
else:
configURL = None
# read
tmpConf.read('panda_harvester.cfg', configURL)
# dummy section class
class _SectionClass:
def __init__(self):
pass
# load configmap
config_map_data = {}
if 'PANDA_HOME' in os.environ:
config_map_name = 'panda_harvester_configmap.json'
config_map_path = os.path.join(os.environ['PANDA_HOME'], 'etc/configmap', config_map_name)
if os.path.exists(config_map_path):
with open(config_map_path) as f:
config_map_data = json.load(f)
# loop over all sections
for tmpSection in tmpConf.sections():
# read section
tmpDict = getattr(tmpConf, tmpSection)
# load configmap
if tmpSection in config_map_data:
tmpDict.update(config_map_data[tmpSection])
# make section class
tmpSelf = _SectionClass()
# update module dict
sys.modules[__name__].__dict__[tmpSection] = tmpSelf
# expand all values
for tmpKey, tmpVal in iteritems(tmpDict):
# use env vars
if isinstance(tmpVal, str) and tmpVal.startswith('$'):
            tmpMatch = re.search(r'\$\{*([^\}]+)\}*', tmpVal)
envName = tmpMatch.group(1)
if envName not in os.environ:
raise KeyError('{0} in the cfg is an undefined environment variable.'.format(envName))
tmpVal = os.environ[envName]
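            # Note: the entire value is replaced by the environment variable's
            # content; values are not string-interpolated.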
# convert string to bool/int
if not isinstance(tmpVal, six.string_types):
pass
elif tmpVal == 'True':
tmpVal = True
elif tmpVal == 'False':
tmpVal = False
elif tmpVal == 'None':
tmpVal = None
        elif re.match(r'^\d+$', tmpVal):
tmpVal = int(tmpVal)
elif '\n' in tmpVal and (
re.match(r'^\W*\[.*\]\W*$', tmpVal.replace('\n', ''))
or re.match(r'^\W*\{.*\}\W*$', tmpVal.replace('\n', ''))):
tmpVal = json.loads(tmpVal)
elif '\n' in tmpVal:
tmpVal = tmpVal.split('\n')
# remove empty
tmpVal = [x.strip() for x in tmpVal if x.strip()]
# update dict
setattr(tmpSelf, tmpKey, tmpVal)
| python | 2,461 |
import random
from utils.dirs import create_dirs
from utils.config import process_config
from utils.utils import get_args, get_logger
from models.aux_model import AuxModel
from data.data_loader import get_train_val_dataloader
from data.data_loader import get_target_dataloader
import wandb
# from torchsummary import summary
def main():
args = get_args()
config = process_config(args.config)
# create the experiments dirs
create_dirs([config.cache_dir, config.model_dir,
config.log_dir, config.img_dir])
# logging to the file and stdout
logger = get_logger(config.log_dir, config.exp_name)
# Intialize wandb model
run = wandb.init(project="SemiSupervised", config=args)
run.save()
args.run_name = wandb.run.name
# fix random seed to reproduce results
random.seed(config.random_seed)
logger.info('Random seed: {:d}'.format(config.random_seed))
if config.method in ['src', 'jigsaw', 'rotate']:
model = AuxModel(config, logger)
else:
raise ValueError("Unknown method: %s" % config.method)
src_loader, val_loader = get_train_val_dataloader(config.datasets.src)
# test_loader = get_test_dataloader(config.datasets.test)
test_loader = None
tar_loader = None
if config.datasets.get('tar', None):
tar_loader = get_target_dataloader(config.datasets.tar)
if config.mode == 'train':
model.train(src_loader, tar_loader, val_loader, test_loader)
# elif config.mode == 'test':
# model.test(test_loader)
if __name__ == '__main__':
main()
| python | 1,583 |
from pathlib import Path
from typing import Optional, List, Dict, Text, Type
import tensorflow as tf
import numpy as np
import pytest
from _pytest.monkeypatch import MonkeyPatch
from _pytest.logging import LogCaptureFixture
import logging
from rasa.core.featurizers.single_state_featurizer import (
IntentTokenizerSingleStateFeaturizer,
)
from rasa.core.featurizers.tracker_featurizers import (
TrackerFeaturizer,
IntentMaxHistoryTrackerFeaturizer,
)
from rasa.shared.core.generator import TrackerWithCachedStates
from rasa.core.policies.ted_policy import PREDICTION_FEATURES, TEDPolicy
from rasa.core.policies.unexpected_intent_policy import UnexpecTEDIntentPolicy
from rasa.shared.core.constants import ACTION_UNLIKELY_INTENT_NAME, ACTION_LISTEN_NAME
from rasa.shared.core.domain import Domain
from rasa.shared.core.events import (
ActionExecuted,
UserUttered,
EntitiesAdded,
SlotSet,
ActionExecutionRejected,
ActiveLoop,
)
from rasa.shared.core.trackers import DialogueStateTracker
from rasa.shared.nlu.interpreter import RegexInterpreter
from rasa.utils.tensorflow.constants import (
IGNORE_INTENTS_LIST,
LABEL,
MASK,
SENTENCE,
IDS,
POSITIVE_SCORES_KEY,
NEGATIVE_SCORES_KEY,
RANKING_KEY,
SCORE_KEY,
THRESHOLD_KEY,
SEVERITY_KEY,
QUERY_INTENT_KEY,
NAME,
RANKING_LENGTH,
)
from rasa.shared.nlu.constants import INTENT
from rasa.shared.core.events import Event
from rasa.utils.tensorflow import model_data_utils
from tests.core.test_policies import train_trackers
from tests.core.policies.test_ted_policy import TestTEDPolicy
class TestUnexpecTEDIntentPolicy(TestTEDPolicy):
@staticmethod
def _policy_class_to_test() -> Type[TEDPolicy]:
return UnexpecTEDIntentPolicy
def create_policy(
self, featurizer: Optional[TrackerFeaturizer], priority: int
) -> UnexpecTEDIntentPolicy:
return UnexpecTEDIntentPolicy(featurizer=featurizer, priority=priority)
@pytest.fixture(scope="class")
def featurizer(self) -> TrackerFeaturizer:
featurizer = IntentMaxHistoryTrackerFeaturizer(
IntentTokenizerSingleStateFeaturizer(), max_history=self.max_history
)
return featurizer
@staticmethod
def persist_and_load_policy(trained_policy: UnexpecTEDIntentPolicy, tmp_path: Path):
trained_policy.persist(tmp_path)
return UnexpecTEDIntentPolicy.load(tmp_path)
@pytest.mark.skip
def test_normalization(
self,
trained_policy: UnexpecTEDIntentPolicy,
tracker: DialogueStateTracker,
default_domain: Domain,
monkeypatch: MonkeyPatch,
):
# No normalization is done for UnexpecTEDIntentPolicy and
# hence this test is overridden to do nothing.
assert True
def test_label_data_assembly(
self, trained_policy: UnexpecTEDIntentPolicy, default_domain: Domain
):
interpreter = RegexInterpreter()
# Construct input data
state_featurizer = trained_policy.featurizer.state_featurizer
encoded_all_labels = state_featurizer.encode_all_labels(
default_domain, interpreter
)
attribute_data, _ = model_data_utils.convert_to_data_format(encoded_all_labels)
assembled_label_data = trained_policy._assemble_label_data(
attribute_data, default_domain
)
assembled_label_data_signature = assembled_label_data.get_signature()
assert list(assembled_label_data_signature.keys()) == [
f"{LABEL}_{INTENT}",
LABEL,
]
assert assembled_label_data.num_examples == len(default_domain.intents)
assert list(assembled_label_data_signature[f"{LABEL}_{INTENT}"].keys()) == [
MASK,
SENTENCE,
]
assert list(assembled_label_data_signature[LABEL].keys()) == [IDS]
assert assembled_label_data_signature[f"{LABEL}_{INTENT}"][SENTENCE][
0
].units == len(default_domain.intents)
async def test_training_with_no_intent(
self,
featurizer: Optional[TrackerFeaturizer],
priority: int,
default_domain: Domain,
tmp_path: Path,
caplog: LogCaptureFixture,
):
stories = tmp_path / "stories.yml"
stories.write_text(
"""
version: "2.0"
stories:
- story: test path
steps:
- action: utter_greet
"""
)
policy = self.create_policy(featurizer=featurizer, priority=priority)
import tests.core.test_policies
training_trackers = await tests.core.test_policies.train_trackers(
default_domain, str(stories), augmentation_factor=20
)
with pytest.warns(UserWarning):
policy.train(training_trackers, default_domain, RegexInterpreter())
async def test_prepared_data_for_threshold_prediction(
self,
trained_policy: UnexpecTEDIntentPolicy,
default_domain: Domain,
stories_path: Path,
):
training_trackers = await train_trackers(
default_domain, stories_path, augmentation_factor=0
)
interpreter = RegexInterpreter()
training_model_data, _ = trained_policy._prepare_for_training(
training_trackers, default_domain, interpreter
)
data_for_prediction = trained_policy._prepare_data_for_prediction(
training_model_data
)
assert set(data_for_prediction.data.keys()).issubset(PREDICTION_FEATURES)
def test_similarities_collection_for_label_ids(self):
label_ids = np.array([[0, 1], [1, -1], [2, -1]])
outputs = {
"similarities": np.array(
[[[1.2, 0.3, 0.2]], [[0.5, 0.2, 1.6]], [[0.01, 0.1, 1.7]],]
)
}
label_id_similarities = UnexpecTEDIntentPolicy._collect_label_id_grouped_scores(
outputs, label_ids
)
# Should contain similarities for all label ids except padding token.
assert sorted(list(label_id_similarities.keys())) == [0, 1, 2]
# Cross-check that the collected similarities are correct for each label id.
assert label_id_similarities[0] == {
POSITIVE_SCORES_KEY: [1.2],
NEGATIVE_SCORES_KEY: [0.5, 0.01],
}
assert label_id_similarities[1] == {
POSITIVE_SCORES_KEY: [0.3, 0.2],
NEGATIVE_SCORES_KEY: [0.1],
}
assert label_id_similarities[2] == {
POSITIVE_SCORES_KEY: [1.7],
NEGATIVE_SCORES_KEY: [0.2, 1.6],
}
def test_label_quantiles_computation(self):
label_id_scores = {
0: {
POSITIVE_SCORES_KEY: [1.3, 0.2],
NEGATIVE_SCORES_KEY: [
-0.1,
-1.2,
-2.3,
-4.1,
-0.5,
0.2,
0.8,
0.9,
-3.2,
-2.7,
],
},
3: {POSITIVE_SCORES_KEY: [1.3, 0.2], NEGATIVE_SCORES_KEY: [-0.1]},
6: {POSITIVE_SCORES_KEY: [1.3, 0.2], NEGATIVE_SCORES_KEY: []},
}
expected_thresholds = {
0: [
0.2,
0.2,
0.2,
0.2,
0.2,
-0.1,
-0.1,
-0.5,
-0.5,
-1.2,
-1.2,
-1.2,
-2.3,
-2.3,
-2.7,
-2.7,
-3.2,
-3.2,
-4.1,
-4.1,
],
3: [
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
],
6: [
0.2,
0.2,
0.2,
0.2,
0.2,
0.2,
0.2,
0.2,
0.2,
0.2,
0.2,
0.2,
0.2,
0.2,
0.2,
0.2,
0.2,
0.2,
0.2,
0.2,
],
}
thresholds = UnexpecTEDIntentPolicy._compute_label_quantiles(label_id_scores)
assert sorted(list(thresholds.keys())) == sorted(
list(expected_thresholds.keys())
)
for label_id, tolerance_thresholds in thresholds.items():
assert expected_thresholds[label_id] == tolerance_thresholds
async def test_post_training_threshold_computation(
self,
trained_policy: UnexpecTEDIntentPolicy,
default_domain: Domain,
stories_path: Path,
):
training_trackers = await train_trackers(
default_domain, stories_path, augmentation_factor=0
)
interpreter = RegexInterpreter()
training_model_data, label_ids = trained_policy._prepare_for_training(
training_trackers, default_domain, interpreter
)
trained_policy.compute_label_quantiles_post_training(
training_model_data, label_ids
)
computed_thresholds = trained_policy.label_quantiles
# -1 is used for padding and hence is not expected in the keys
expected_keys = list(np.unique(label_ids))
expected_keys.remove(-1)
assert sorted(list(computed_thresholds.keys())) == sorted(expected_keys)
@pytest.mark.parametrize(
"tolerance, expected_thresholds",
[
(0.0, [0.2, -0.1, 0.2]),
(0.75, [-2.9, -0.1, -4.3]),
(0.72, [-2.7, -0.1, -4.0]),
(0.78, [-2.9, -0.1, -4.3]),
(1.0, [-4.1, -0.1, -5.5]),
],
)
def test_pick_thresholds_for_labels(
self, tolerance: float, expected_thresholds: List[float]
):
label_id_tolerance_thresholds = {
0: [
0.2,
0.2,
0.2,
0.2,
0.2,
0.2,
-0.1,
-0.1,
-0.5,
-0.5,
-1.2,
-1.2,
-2.3,
-2.3,
-2.7,
-2.9,
-3.2,
-3.2,
-4.1,
-4.1,
],
3: [
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
-0.1,
],
4: [0.2 - (index * 0.3) for index in range(20)],
}
thresholds = UnexpecTEDIntentPolicy._pick_thresholds(
label_id_tolerance_thresholds, tolerance
)
assert sorted(list(thresholds.keys())) == sorted(
list(label_id_tolerance_thresholds.keys())
)
computed_values = list(thresholds.values())
assert expected_thresholds == computed_values
@pytest.mark.parametrize(
"predicted_similarity, threshold_value, is_unlikely",
[(1.2, 0.2, False), (0.3, -0.1, False), (-1.5, 0.03, True)],
)
def test_unlikely_intent_check(
self,
trained_policy: UnexpecTEDIntentPolicy,
default_domain: Domain,
predicted_similarity: float,
threshold_value: float,
is_unlikely: bool,
tmp_path: Path,
):
loaded_policy = self.persist_and_load_policy(trained_policy, tmp_path)
# Construct dummy similarities
similarities = np.array([[0.0] * len(default_domain.intents)])
dummy_intent_index = 4
similarities[0, dummy_intent_index] = predicted_similarity
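        # The policy should flag the intent as unlikely exactly when its
        # predicted similarity falls below the label's stored threshold
        # (see the parametrized cases above).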
loaded_policy.label_thresholds[dummy_intent_index] = threshold_value
query_intent = default_domain.intents[dummy_intent_index]
unlikely_intent_prediction = loaded_policy._check_unlikely_intent(
default_domain, similarities, query_intent
)
assert is_unlikely == unlikely_intent_prediction
def test_should_check_for_intent(
self,
trained_policy: UnexpecTEDIntentPolicy,
default_domain: Domain,
tmp_path: Path,
):
loaded_policy = self.persist_and_load_policy(trained_policy, tmp_path)
intent_index = 0
assert (
loaded_policy._should_check_for_intent(
default_domain.intents[intent_index], default_domain
)
is False
)
intent_index = 4
assert loaded_policy._should_check_for_intent(
default_domain.intents[intent_index], default_domain
)
loaded_policy.config[IGNORE_INTENTS_LIST] = [
default_domain.intents[intent_index]
]
assert (
loaded_policy._should_check_for_intent(
default_domain.intents[intent_index], default_domain
)
is False
)
def test_no_action_unlikely_intent_prediction(
self,
trained_policy: UnexpecTEDIntentPolicy,
default_domain: Domain,
tmp_path: Path,
):
loaded_policy = self.persist_and_load_policy(trained_policy, tmp_path)
expected_probabilities = [0] * default_domain.num_actions
interpreter = RegexInterpreter()
tracker = DialogueStateTracker(sender_id="init", slots=default_domain.slots)
prediction = loaded_policy.predict_action_probabilities(
tracker, default_domain, interpreter
)
assert prediction.probabilities == expected_probabilities
tracker.update_with_events(
[
UserUttered(text="hello", intent={"name": "greet"}),
ActionExecuted(action_name="utter_greet"),
],
default_domain,
)
prediction = loaded_policy.predict_action_probabilities(
tracker, default_domain, interpreter
)
assert prediction.probabilities == expected_probabilities
loaded_policy.model = None
prediction = loaded_policy.predict_action_probabilities(
tracker, default_domain, interpreter
)
assert prediction.probabilities == expected_probabilities
@pytest.mark.parametrize(
"predicted_similarity, threshold_value, is_unlikely",
[(1.2, 0.2, False), (0.3, -0.1, False), (-1.5, 0.03, True)],
)
def test_action_unlikely_intent_prediction(
self,
trained_policy: UnexpecTEDIntentPolicy,
default_domain: Domain,
predicted_similarity,
threshold_value,
is_unlikely,
monkeypatch: MonkeyPatch,
tmp_path: Path,
):
loaded_policy = self.persist_and_load_policy(trained_policy, tmp_path)
similarities = np.array([[[0.0] * len(default_domain.intents)]])
dummy_intent_index = 4
similarities[0, 0, dummy_intent_index] = predicted_similarity
query_intent = default_domain.intents[dummy_intent_index]
loaded_policy.label_thresholds[dummy_intent_index] = threshold_value
interpreter = RegexInterpreter()
tracker = DialogueStateTracker(sender_id="init", slots=default_domain.slots)
tracker.update_with_events(
[UserUttered(text="hello", intent={"name": query_intent})], default_domain,
)
        # Preset the model predictions to the similarity values
        # so that we don't need to hard-code particular model predictions.
monkeypatch.setattr(
loaded_policy.model,
"run_inference",
lambda data: {"similarities": similarities},
)
prediction = loaded_policy.predict_action_probabilities(
tracker, default_domain, interpreter
)
if not is_unlikely:
assert prediction.probabilities == [0.0] * default_domain.num_actions
else:
assert (
prediction.probabilities[
default_domain.index_for_action(ACTION_UNLIKELY_INTENT_NAME)
]
== 1.0
)
# Make sure metadata is set. The exact structure
# of the metadata is tested separately and
# not as part of this test.
assert prediction.action_metadata is not None
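    # Predictions should only be made right after a user turn (possibly followed
    # by entity/slot/loop events); otherwise the policy skips, which prevents
    # `action_unlikely_intent` from being predicted in a loop.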
@pytest.mark.parametrize(
"tracker_events, should_skip",
[
([], True),
([ActionExecuted("action_listen")], True),
(
[
ActionExecuted("action_listen"),
UserUttered("hi", intent={"name": "greet"}),
],
False,
),
(
[
ActionExecuted("action_listen"),
UserUttered("hi", intent={"name": "greet"}),
EntitiesAdded([{"name": "dummy"}]),
],
False,
),
(
[
ActionExecuted("action_listen"),
UserUttered("hi", intent={"name": "greet"}),
SlotSet("name"),
],
False,
),
(
[
ActiveLoop("loop"),
ActionExecuted("action_listen"),
UserUttered("hi", intent={"name": "greet"}),
ActionExecutionRejected("loop"),
],
False,
),
(
[
ActionExecuted("action_listen"),
UserUttered("hi", intent={"name": "greet"}),
ActionExecuted("utter_greet"),
],
True,
),
],
)
def test_skip_predictions_to_prevent_loop(
self,
trained_policy: UnexpecTEDIntentPolicy,
default_domain: Domain,
caplog: LogCaptureFixture,
tracker_events: List[Event],
should_skip: bool,
tmp_path: Path,
):
caplog.set_level(logging.DEBUG)
loaded_policy = self.persist_and_load_policy(trained_policy, tmp_path)
interpreter = RegexInterpreter()
tracker = DialogueStateTracker(sender_id="init", slots=default_domain.slots)
tracker.update_with_events(tracker_events, default_domain)
prediction = loaded_policy.predict_action_probabilities(
tracker, default_domain, interpreter
)
assert (
"Skipping predictions for UnexpecTEDIntentPolicy" in caplog.text
) == should_skip
if should_skip:
assert prediction.probabilities == loaded_policy._default_predictions(
default_domain
)
@pytest.mark.parametrize(
"tracker_events_with_action, tracker_events_without_action",
[
(
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello", intent={"name": "greet"}),
ActionExecuted(ACTION_UNLIKELY_INTENT_NAME),
ActionExecuted("utter_greet"),
UserUttered(text="sad", intent={"name": "thank_you"}),
],
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello", intent={"name": "greet"}),
ActionExecuted("utter_greet"),
UserUttered(text="sad", intent={"name": "thank_you"}),
],
),
(
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello", intent={"name": "greet"}),
EntitiesAdded(entities=[{"entity": "name", "value": "Peter"},]),
ActionExecuted(ACTION_UNLIKELY_INTENT_NAME),
ActionExecuted("utter_greet"),
UserUttered(text="sad", intent={"name": "thank_you"}),
],
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello", intent={"name": "greet"}),
EntitiesAdded(entities=[{"entity": "name", "value": "Peter"},]),
ActionExecuted("utter_greet"),
UserUttered(text="sad", intent={"name": "thank_you"}),
],
),
(
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello", intent={"name": "greet"}),
ActionExecuted(ACTION_UNLIKELY_INTENT_NAME),
ActionExecuted("some_form"),
ActiveLoop("some_form"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="default", intent={"name": "default"}),
ActionExecuted(ACTION_UNLIKELY_INTENT_NAME),
UserUttered(text="sad", intent={"name": "thank_you"}),
],
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello", intent={"name": "greet"}),
ActionExecuted(ACTION_UNLIKELY_INTENT_NAME),
ActionExecuted("some_form"),
ActiveLoop("some_form"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="default", intent={"name": "default"}),
UserUttered(text="sad", intent={"name": "thank_you"}),
],
),
],
)
def test_ignore_action_unlikely_intent(
self,
trained_policy: UnexpecTEDIntentPolicy,
default_domain: Domain,
tracker_events_with_action: List[Event],
tracker_events_without_action: List[Event],
tmp_path: Path,
):
loaded_policy = self.persist_and_load_policy(trained_policy, tmp_path)
interpreter = RegexInterpreter()
tracker_with_action = DialogueStateTracker.from_events(
"test 1", evts=tracker_events_with_action
)
tracker_without_action = DialogueStateTracker.from_events(
"test 2", evts=tracker_events_without_action
)
prediction_with_action = loaded_policy.predict_action_probabilities(
tracker_with_action, default_domain, interpreter
)
prediction_without_action = loaded_policy.predict_action_probabilities(
tracker_without_action, default_domain, interpreter
)
        # If the weights didn't change, then both trackers
        # should result in the same prediction. For `UnexpecTEDIntentPolicy`, the real
        # prediction is inside the action metadata.
assert (
prediction_with_action.action_metadata
== prediction_without_action.action_metadata
)
def test_label_embedding_collection(self, trained_policy: UnexpecTEDIntentPolicy):
label_ids = tf.constant([[[2], [-1]], [[1], [2]], [[0], [-1]]], dtype=tf.int32)
all_label_embeddings = np.random.random((10, 20))
# `-1` is used as padding label id. The embedding for it
# will be the same as `label_id=0`
expected_extracted_label_embeddings = tf.constant(
np.concatenate(
[
all_label_embeddings[2],
all_label_embeddings[0],
all_label_embeddings[1],
all_label_embeddings[2],
all_label_embeddings[0],
all_label_embeddings[0],
]
).reshape((3, 2, 20)),
dtype=tf.float32,
)
actual_extracted_label_embeddings = trained_policy.model._get_labels_embed(
label_ids, tf.constant(all_label_embeddings, dtype=tf.float32)
)
assert np.all(
expected_extracted_label_embeddings == actual_extracted_label_embeddings
)
@pytest.mark.parametrize(
"query_intent_index, ranking_length", [(0, 0), (1, 3), (2, 1), (5, 0)]
)
def test_collect_action_metadata(
self,
trained_policy: UnexpecTEDIntentPolicy,
default_domain: Domain,
tmp_path: Path,
query_intent_index: int,
ranking_length: int,
):
loaded_policy = self.persist_and_load_policy(trained_policy, tmp_path)
def test_individual_label_metadata(
label_metadata: Dict[Text, Optional[float]],
all_thresholds: Dict[int, float],
all_similarities: np.array,
label_index: int,
):
expected_score = all_similarities[0][label_index]
expected_threshold = (
all_thresholds[label_index] if label_index in all_thresholds else None
)
expected_severity = (
expected_threshold - expected_score if expected_threshold else None
)
assert label_metadata.get(SCORE_KEY) == expected_score
assert label_metadata.get(THRESHOLD_KEY) == expected_threshold
assert label_metadata.get(SEVERITY_KEY) == expected_severity
# Monkey-patch certain attributes of the policy to make the testing easier.
label_thresholds = {0: 1.2, 1: -0.3, 4: -2.3, 5: 0.2}
loaded_policy.label_thresholds = label_thresholds
loaded_policy.config[RANKING_LENGTH] = ranking_length
# Some dummy similarities
similarities = np.array([[3.2, 0.2, -1.2, -4.3, -5.1, 2.3]])
query_intent = default_domain.intents[query_intent_index]
metadata = loaded_policy._collect_action_metadata(
default_domain, similarities, query_intent=query_intent
)
# Expected outer-most keys
assert sorted(list(metadata.keys())) == sorted([QUERY_INTENT_KEY, RANKING_KEY])
# Schema validation for query intent key
assert sorted(list(metadata[QUERY_INTENT_KEY].keys())) == sorted(
[NAME, SCORE_KEY, THRESHOLD_KEY, SEVERITY_KEY]
)
# Test all elements of metadata for query intent
assert metadata[QUERY_INTENT_KEY].get(NAME) == query_intent
test_individual_label_metadata(
metadata.get(QUERY_INTENT_KEY),
label_thresholds,
similarities,
query_intent_index,
)
# Check if ranking is sorted correctly and truncated to `ranking_length`
sorted_label_similarities = sorted(
[(index, score) for index, score in enumerate(similarities[0])],
key=lambda x: -x[1],
)
sorted_label_similarities = (
sorted_label_similarities[:ranking_length]
if ranking_length
else sorted_label_similarities
)
expected_label_rankings = [
default_domain.intents[index] for index, _ in sorted_label_similarities
]
collected_label_rankings = [
label_metadata.get(NAME) for label_metadata in metadata.get(RANKING_KEY)
]
assert collected_label_rankings == expected_label_rankings
# Test all elements of metadata for all labels in ranking
for label_metadata in metadata.get(RANKING_KEY):
label_index = default_domain.intents.index(label_metadata.get(NAME))
test_individual_label_metadata(
label_metadata, label_thresholds, similarities, label_index
)
@pytest.mark.parametrize(
"tracker_events_for_training, expected_trackers_with_events",
[
# Filter because of no intent and action name
(
[
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello", intent={"name": "greet"}),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(
text="happy to make it work", intent={"name": "goodbye"}
),
ActionExecuted("utter_goodbye"),
ActionExecuted(ACTION_LISTEN_NAME),
],
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello"),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="happy to make it work"),
ActionExecuted(action_text="Great!"),
ActionExecuted(ACTION_LISTEN_NAME),
],
],
[
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello", intent={"name": "greet"}),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(
text="happy to make it work", intent={"name": "goodbye"}
),
ActionExecuted("utter_goodbye"),
ActionExecuted(ACTION_LISTEN_NAME),
],
],
),
# Filter because of no action name
(
[
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello", intent={"name": "greet"}),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(
text="happy to make it work", intent={"name": "goodbye"}
),
ActionExecuted("utter_goodbye"),
ActionExecuted(ACTION_LISTEN_NAME),
],
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello"),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(
text="happy to make it work", intent={"name": "goodbye"}
),
ActionExecuted(action_text="Great!"),
ActionExecuted(ACTION_LISTEN_NAME),
],
],
[
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello", intent={"name": "greet"}),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(
text="happy to make it work", intent={"name": "goodbye"}
),
ActionExecuted("utter_goodbye"),
ActionExecuted(ACTION_LISTEN_NAME),
],
],
),
# Filter because of no intent
(
[
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello", intent={"name": "greet"}),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(
text="happy to make it work", intent={"name": "goodbye"}
),
ActionExecuted("utter_goodbye"),
ActionExecuted(ACTION_LISTEN_NAME),
],
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello"),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="happy to make it work"),
ActionExecuted("utter_goodbye"),
ActionExecuted(ACTION_LISTEN_NAME),
],
],
[
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello", intent={"name": "greet"}),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(
text="happy to make it work", intent={"name": "goodbye"}
),
ActionExecuted("utter_goodbye"),
ActionExecuted(ACTION_LISTEN_NAME),
],
],
),
# No filter needed
(
[
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello", intent={"name": "greet"}),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(
text="happy to make it work", intent={"name": "goodbye"}
),
ActionExecuted("utter_goodbye"),
ActionExecuted(ACTION_LISTEN_NAME),
],
],
[
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello", intent={"name": "greet"}),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(
text="happy to make it work", intent={"name": "goodbye"}
),
ActionExecuted("utter_goodbye"),
ActionExecuted(ACTION_LISTEN_NAME),
],
],
),
# Filter to return empty list of trackers
(
[
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello", intent={"name": "greet"}),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(
text="happy to make it work", intent={"name": "goodbye"}
),
ActionExecuted(action_text="Great!"),
ActionExecuted(ACTION_LISTEN_NAME),
],
],
[],
),
],
)
def test_filter_training_trackers(
self,
tracker_events_for_training: List[List[Event]],
expected_trackers_with_events: List[List[Event]],
domain: Domain,
):
trackers_for_training = [
TrackerWithCachedStates.from_events(
sender_id=f"{tracker_index}", evts=events, domain=domain
)
for tracker_index, events in enumerate(tracker_events_for_training)
]
filtered_trackers = UnexpecTEDIntentPolicy._get_trackers_for_training(
trackers_for_training
)
assert len(filtered_trackers) == len(expected_trackers_with_events)
for collected_tracker, expected_tracker_events in zip(
filtered_trackers, expected_trackers_with_events
):
collected_tracker_events = list(collected_tracker.events)
assert collected_tracker_events == expected_tracker_events
@pytest.mark.parametrize(
"tracker_events, skip_training",
[
(
[
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello", intent={"name": "greet"}),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(
text="happy to make it work", intent={"name": "goodbye"}
),
ActionExecuted("utter_goodbye"),
ActionExecuted(ACTION_LISTEN_NAME),
],
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello"),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="happy to make it work"),
ActionExecuted(action_text="Great!"),
ActionExecuted(ACTION_LISTEN_NAME),
],
],
False,
),
(
[
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello"),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="happy to make it work"),
ActionExecuted(action_text="Great!"),
ActionExecuted(ACTION_LISTEN_NAME),
],
],
True,
),
(
[
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello"),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="happy to make it work"),
ActionExecuted("utter_goodbye"),
ActionExecuted(ACTION_LISTEN_NAME),
],
],
True,
),
(
[
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="hello"),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(
text="happy to make it work", intent={"name": "goodbye"}
),
ActionExecuted(action_text="Great!"),
ActionExecuted(ACTION_LISTEN_NAME),
],
],
True,
),
],
)
def test_train_with_e2e_data(
tracker_events: List[List[Event]], skip_training: bool, domain: Domain,
):
policy = UnexpecTEDIntentPolicy(
featurizer=IntentMaxHistoryTrackerFeaturizer(
IntentTokenizerSingleStateFeaturizer()
)
)
trackers_for_training = [
TrackerWithCachedStates.from_events(
sender_id=f"{tracker_index}", evts=events, domain=domain
)
for tracker_index, events in enumerate(tracker_events)
]
if skip_training:
with pytest.warns(UserWarning):
policy.train(trackers_for_training, domain, interpreter=RegexInterpreter())
else:
policy.train(trackers_for_training, domain, interpreter=RegexInterpreter())
| python | 39,919 |
import datetime
from unittest import skipUnless
from django.db import connection
from django.db.models import CASCADE, ForeignKey, Index, Q
from django.db.models.functions import Lower
from django.test import (
TestCase, TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature,
)
from django.test.utils import override_settings
from django.utils import timezone
from .models import (
Article, ArticleTranslation, IndexedArticle2, IndexTogetherSingleList,
)
class SchemaIndexesTests(TestCase):
"""
Test index handling by the db.backends.schema infrastructure.
"""
def test_index_name_hash(self):
"""
Index names should be deterministic.
"""
editor = connection.schema_editor()
index_name = editor._create_index_name(
table_name=Article._meta.db_table,
column_names=("c1",),
suffix="123",
)
self.assertEqual(index_name, "indexes_article_c1_a52bd80b123")
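    # The generated name combines the table name, the column names, a deterministic
    # hash and the given suffix; truncation behaviour is covered by test_index_name.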
def test_index_name(self):
"""
Index names on the built-in database backends::
* Are truncated as needed.
* Include all the column names.
* Include a deterministic hash.
"""
long_name = 'l%sng' % ('o' * 100)
editor = connection.schema_editor()
index_name = editor._create_index_name(
table_name=Article._meta.db_table,
column_names=('c1', 'c2', long_name),
suffix='ix',
)
expected = {
'mysql': 'indexes_article_c1_c2_looooooooooooooooooo_255179b2ix',
'oracle': 'indexes_a_c1_c2_loo_255179b2ix',
'postgresql': 'indexes_article_c1_c2_loooooooooooooooooo_255179b2ix',
'sqlite': 'indexes_article_c1_c2_l%sng_255179b2ix' % ('o' * 100),
}
if connection.vendor not in expected:
self.skipTest('This test is only supported on the built-in database backends.')
self.assertEqual(index_name, expected[connection.vendor])
def test_index_together(self):
editor = connection.schema_editor()
index_sql = [str(statement) for statement in editor._model_indexes_sql(Article)]
self.assertEqual(len(index_sql), 1)
# Ensure the index name is properly quoted
self.assertIn(
connection.ops.quote_name(
editor._create_index_name(Article._meta.db_table, ['headline', 'pub_date'], suffix='_idx')
),
index_sql[0]
)
def test_index_together_single_list(self):
# Test for using index_together with a single list (#22172)
index_sql = connection.schema_editor()._model_indexes_sql(IndexTogetherSingleList)
self.assertEqual(len(index_sql), 1)
def test_columns_list_sql(self):
index = Index(fields=['headline'], name='whitespace_idx')
editor = connection.schema_editor()
self.assertIn(
'(%s)' % editor.quote_name('headline'),
str(index.create_sql(Article, editor)),
)
def test_descending_columns_list_sql(self):
index = Index(fields=['-headline'], name='whitespace_idx')
editor = connection.schema_editor()
self.assertIn(
'(%s DESC)' % editor.quote_name('headline'),
str(index.create_sql(Article, editor)),
)
class SchemaIndexesNotPostgreSQLTests(TransactionTestCase):
available_apps = ['indexes']
def test_create_index_ignores_opclasses(self):
index = Index(
name='test_ops_class',
fields=['headline'],
opclasses=['varchar_pattern_ops'],
)
with connection.schema_editor() as editor:
# This would error if opclasses weren't ignored.
editor.add_index(IndexedArticle2, index)
# The `condition` parameter is ignored by databases that don't support partial
# indexes.
@skipIfDBFeature('supports_partial_indexes')
class PartialIndexConditionIgnoredTests(TransactionTestCase):
available_apps = ['indexes']
def test_condition_ignored(self):
index = Index(
name='test_condition_ignored',
fields=['published'],
condition=Q(published=True),
)
with connection.schema_editor() as editor:
# This would error if condition weren't ignored.
editor.add_index(Article, index)
self.assertNotIn(
'WHERE %s' % editor.quote_name('published'),
str(index.create_sql(Article, editor))
)
@skipUnless(connection.vendor == 'postgresql', 'PostgreSQL tests')
class SchemaIndexesPostgreSQLTests(TransactionTestCase):
available_apps = ['indexes']
get_opclass_query = '''
SELECT opcname, c.relname FROM pg_opclass AS oc
JOIN pg_index as i on oc.oid = ANY(i.indclass)
JOIN pg_class as c on c.oid = i.indexrelid
WHERE c.relname = '%s'
'''
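    # The query above returns the operator classes attached to a given index,
    # so the tests below can verify that the requested opclasses reached the database.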
def test_text_indexes(self):
"""Test creation of PostgreSQL-specific text indexes (#12234)"""
from .models import IndexedArticle
index_sql = [str(statement) for statement in connection.schema_editor()._model_indexes_sql(IndexedArticle)]
self.assertEqual(len(index_sql), 5)
self.assertIn('("headline" varchar_pattern_ops)', index_sql[1])
self.assertIn('("body" text_pattern_ops)', index_sql[3])
# unique=True and db_index=True should only create the varchar-specific
# index (#19441).
self.assertIn('("slug" varchar_pattern_ops)', index_sql[4])
def test_virtual_relation_indexes(self):
"""Test indexes are not created for related objects"""
index_sql = connection.schema_editor()._model_indexes_sql(Article)
self.assertEqual(len(index_sql), 1)
def test_ops_class(self):
index = Index(
name='test_ops_class',
fields=['headline'],
opclasses=['varchar_pattern_ops'],
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % 'test_ops_class')
self.assertEqual(cursor.fetchall(), [('varchar_pattern_ops', 'test_ops_class')])
def test_ops_class_multiple_columns(self):
index = Index(
name='test_ops_class_multiple',
fields=['headline', 'body'],
opclasses=['varchar_pattern_ops', 'text_pattern_ops'],
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % 'test_ops_class_multiple')
expected_ops_classes = (
('varchar_pattern_ops', 'test_ops_class_multiple'),
('text_pattern_ops', 'test_ops_class_multiple'),
)
self.assertCountEqual(cursor.fetchall(), expected_ops_classes)
def test_ops_class_partial(self):
index = Index(
name='test_ops_class_partial',
fields=['body'],
opclasses=['text_pattern_ops'],
condition=Q(headline__contains='China'),
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % 'test_ops_class_partial')
self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', 'test_ops_class_partial')])
def test_ops_class_partial_tablespace(self):
indexname = 'test_ops_class_tblspace'
index = Index(
name=indexname,
fields=['body'],
opclasses=['text_pattern_ops'],
condition=Q(headline__contains='China'),
db_tablespace='pg_default',
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
self.assertIn('TABLESPACE "pg_default" ', str(index.create_sql(IndexedArticle2, editor)))
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % indexname)
self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', indexname)])
def test_ops_class_descending(self):
indexname = 'test_ops_class_ordered'
index = Index(
name=indexname,
fields=['-body'],
opclasses=['text_pattern_ops'],
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % indexname)
self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', indexname)])
def test_ops_class_descending_partial(self):
indexname = 'test_ops_class_ordered_partial'
index = Index(
name=indexname,
fields=['-body'],
opclasses=['text_pattern_ops'],
condition=Q(headline__contains='China'),
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % indexname)
self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', indexname)])
@skipUnlessDBFeature('supports_covering_indexes')
def test_ops_class_include(self):
index_name = 'test_ops_class_include'
index = Index(
name=index_name,
fields=['body'],
opclasses=['text_pattern_ops'],
include=['headline'],
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % index_name)
self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', index_name)])
@skipUnlessDBFeature('supports_covering_indexes')
def test_ops_class_include_tablespace(self):
index_name = 'test_ops_class_include_tblspace'
index = Index(
name=index_name,
fields=['body'],
opclasses=['text_pattern_ops'],
include=['headline'],
db_tablespace='pg_default',
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
self.assertIn(
'TABLESPACE "pg_default"',
str(index.create_sql(IndexedArticle2, editor)),
)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % index_name)
self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', index_name)])
def test_ops_class_columns_lists_sql(self):
index = Index(
fields=['headline'],
name='whitespace_idx',
opclasses=['text_pattern_ops'],
)
with connection.schema_editor() as editor:
self.assertIn(
'(%s text_pattern_ops)' % editor.quote_name('headline'),
str(index.create_sql(Article, editor)),
)
def test_ops_class_descending_columns_list_sql(self):
index = Index(
fields=['-headline'],
name='whitespace_idx',
opclasses=['text_pattern_ops'],
)
with connection.schema_editor() as editor:
self.assertIn(
'(%s text_pattern_ops DESC)' % editor.quote_name('headline'),
str(index.create_sql(Article, editor)),
)
@skipUnless(connection.vendor == 'mysql', 'MySQL tests')
class SchemaIndexesMySQLTests(TransactionTestCase):
available_apps = ['indexes']
def test_no_index_for_foreignkey(self):
"""
MySQL on InnoDB already creates indexes automatically for foreign keys.
(#14180). An index should be created if db_constraint=False (#26171).
"""
with connection.cursor() as cursor:
storage = connection.introspection.get_storage_engine(
cursor, ArticleTranslation._meta.db_table,
)
if storage != "InnoDB":
            self.skipTest("This test only applies to the InnoDB storage engine")
index_sql = [str(statement) for statement in connection.schema_editor()._model_indexes_sql(ArticleTranslation)]
self.assertEqual(index_sql, [
'CREATE INDEX `indexes_articletranslation_article_no_constraint_id_d6c0806b` '
'ON `indexes_articletranslation` (`article_no_constraint_id`)'
])
# The index also shouldn't be created if the ForeignKey is added after
# the model was created.
field_created = False
try:
with connection.schema_editor() as editor:
new_field = ForeignKey(Article, CASCADE)
new_field.set_attributes_from_name('new_foreign_key')
editor.add_field(ArticleTranslation, new_field)
field_created = True
# No deferred SQL. The FK constraint is included in the
# statement to add the field.
self.assertFalse(editor.deferred_sql)
finally:
if field_created:
with connection.schema_editor() as editor:
editor.remove_field(ArticleTranslation, new_field)
@skipUnlessDBFeature('supports_partial_indexes')
# SQLite doesn't support timezone-aware datetimes when USE_TZ is False.
@override_settings(USE_TZ=True)
class PartialIndexTests(TransactionTestCase):
# Schema editor is used to create the index to test that it works.
available_apps = ['indexes']
def test_partial_index(self):
with connection.schema_editor() as editor:
index = Index(
name='recent_article_idx',
fields=['pub_date'],
condition=Q(
pub_date__gt=datetime.datetime(
year=2015, month=1, day=1,
# PostgreSQL would otherwise complain about the lookup
# being converted to a mutable function (by removing
# the timezone in the cast) which is forbidden.
tzinfo=timezone.get_current_timezone(),
),
)
)
self.assertIn(
'WHERE %s' % editor.quote_name('pub_date'),
str(index.create_sql(Article, schema_editor=editor))
)
editor.add_index(index=index, model=Article)
with connection.cursor() as cursor:
self.assertIn(index.name, connection.introspection.get_constraints(
cursor=cursor, table_name=Article._meta.db_table,
))
editor.remove_index(index=index, model=Article)
def test_integer_restriction_partial(self):
with connection.schema_editor() as editor:
index = Index(
name='recent_article_idx',
fields=['id'],
condition=Q(pk__gt=1),
)
self.assertIn(
'WHERE %s' % editor.quote_name('id'),
str(index.create_sql(Article, schema_editor=editor))
)
editor.add_index(index=index, model=Article)
with connection.cursor() as cursor:
self.assertIn(index.name, connection.introspection.get_constraints(
cursor=cursor, table_name=Article._meta.db_table,
))
editor.remove_index(index=index, model=Article)
def test_boolean_restriction_partial(self):
with connection.schema_editor() as editor:
index = Index(
name='published_index',
fields=['published'],
condition=Q(published=True),
)
self.assertIn(
'WHERE %s' % editor.quote_name('published'),
str(index.create_sql(Article, schema_editor=editor))
)
editor.add_index(index=index, model=Article)
with connection.cursor() as cursor:
self.assertIn(index.name, connection.introspection.get_constraints(
cursor=cursor, table_name=Article._meta.db_table,
))
editor.remove_index(index=index, model=Article)
@skipUnlessDBFeature('supports_functions_in_partial_indexes')
def test_multiple_conditions(self):
with connection.schema_editor() as editor:
index = Index(
name='recent_article_idx',
fields=['pub_date', 'headline'],
condition=(
Q(pub_date__gt=datetime.datetime(
year=2015,
month=1,
day=1,
tzinfo=timezone.get_current_timezone(),
)) & Q(headline__contains='China')
),
)
sql = str(index.create_sql(Article, schema_editor=editor))
where = sql.find('WHERE')
self.assertIn(
'WHERE (%s' % editor.quote_name('pub_date'),
sql
)
# Because each backend has different syntax for the operators,
# check ONLY the occurrence of headline in the SQL.
self.assertGreater(sql.rfind('headline'), where)
editor.add_index(index=index, model=Article)
with connection.cursor() as cursor:
self.assertIn(index.name, connection.introspection.get_constraints(
cursor=cursor, table_name=Article._meta.db_table,
))
editor.remove_index(index=index, model=Article)
def test_is_null_condition(self):
with connection.schema_editor() as editor:
index = Index(
name='recent_article_idx',
fields=['pub_date'],
condition=Q(pub_date__isnull=False),
)
self.assertIn(
'WHERE %s IS NOT NULL' % editor.quote_name('pub_date'),
str(index.create_sql(Article, schema_editor=editor))
)
editor.add_index(index=index, model=Article)
with connection.cursor() as cursor:
self.assertIn(index.name, connection.introspection.get_constraints(
cursor=cursor, table_name=Article._meta.db_table,
))
editor.remove_index(index=index, model=Article)
@skipUnlessDBFeature('supports_expression_indexes')
def test_partial_func_index(self):
index_name = 'partial_func_idx'
index = Index(
Lower('headline').desc(),
name=index_name,
condition=Q(pub_date__isnull=False),
)
with connection.schema_editor() as editor:
editor.add_index(index=index, model=Article)
sql = index.create_sql(Article, schema_editor=editor)
table = Article._meta.db_table
self.assertIs(sql.references_column(table, 'headline'), True)
sql = str(sql)
self.assertIn('LOWER(%s)' % editor.quote_name('headline'), sql)
self.assertIn(
'WHERE %s IS NOT NULL' % editor.quote_name('pub_date'),
sql,
)
self.assertGreater(sql.find('WHERE'), sql.find('LOWER'))
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(
cursor=cursor, table_name=table,
)
self.assertIn(index_name, constraints)
if connection.features.supports_index_column_ordering:
self.assertEqual(constraints[index_name]['orders'], ['DESC'])
with connection.schema_editor() as editor:
editor.remove_index(Article, index)
with connection.cursor() as cursor:
self.assertNotIn(index_name, connection.introspection.get_constraints(
cursor=cursor, table_name=table,
))
@skipUnlessDBFeature('supports_covering_indexes')
class CoveringIndexTests(TransactionTestCase):
available_apps = ['indexes']
def test_covering_index(self):
index = Index(
name='covering_headline_idx',
fields=['headline'],
include=['pub_date', 'published'],
)
with connection.schema_editor() as editor:
self.assertIn(
'(%s) INCLUDE (%s, %s)' % (
editor.quote_name('headline'),
editor.quote_name('pub_date'),
editor.quote_name('published'),
),
str(index.create_sql(Article, editor)),
)
editor.add_index(Article, index)
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(
cursor=cursor, table_name=Article._meta.db_table,
)
self.assertIn(index.name, constraints)
self.assertEqual(
constraints[index.name]['columns'],
['headline', 'pub_date', 'published'],
)
editor.remove_index(Article, index)
with connection.cursor() as cursor:
self.assertNotIn(index.name, connection.introspection.get_constraints(
cursor=cursor, table_name=Article._meta.db_table,
))
def test_covering_partial_index(self):
index = Index(
name='covering_partial_headline_idx',
fields=['headline'],
include=['pub_date'],
condition=Q(pub_date__isnull=False),
)
with connection.schema_editor() as editor:
self.assertIn(
'(%s) INCLUDE (%s) WHERE %s ' % (
editor.quote_name('headline'),
editor.quote_name('pub_date'),
editor.quote_name('pub_date'),
),
str(index.create_sql(Article, editor)),
)
editor.add_index(Article, index)
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(
cursor=cursor, table_name=Article._meta.db_table,
)
self.assertIn(index.name, constraints)
self.assertEqual(
constraints[index.name]['columns'],
['headline', 'pub_date'],
)
editor.remove_index(Article, index)
with connection.cursor() as cursor:
self.assertNotIn(index.name, connection.introspection.get_constraints(
cursor=cursor, table_name=Article._meta.db_table,
))
@skipUnlessDBFeature('supports_expression_indexes')
def test_covering_func_index(self):
index_name = 'covering_func_headline_idx'
index = Index(Lower('headline'), name=index_name, include=['pub_date'])
with connection.schema_editor() as editor:
editor.add_index(index=index, model=Article)
sql = index.create_sql(Article, schema_editor=editor)
table = Article._meta.db_table
self.assertIs(sql.references_column(table, 'headline'), True)
sql = str(sql)
self.assertIn('LOWER(%s)' % editor.quote_name('headline'), sql)
self.assertIn('INCLUDE (%s)' % editor.quote_name('pub_date'), sql)
self.assertGreater(sql.find('INCLUDE'), sql.find('LOWER'))
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(
cursor=cursor, table_name=table,
)
self.assertIn(index_name, constraints)
self.assertIn('pub_date', constraints[index_name]['columns'])
with connection.schema_editor() as editor:
editor.remove_index(Article, index)
with connection.cursor() as cursor:
self.assertNotIn(index_name, connection.introspection.get_constraints(
cursor=cursor, table_name=table,
))
@skipIfDBFeature('supports_covering_indexes')
class CoveringIndexIgnoredTests(TransactionTestCase):
available_apps = ['indexes']
def test_covering_ignored(self):
index = Index(
name='test_covering_ignored',
fields=['headline'],
include=['pub_date'],
)
with connection.schema_editor() as editor:
editor.add_index(Article, index)
self.assertNotIn(
'INCLUDE (%s)' % editor.quote_name('headline'),
str(index.create_sql(Article, editor)),
)
| python | 24,822 |
from urllib.parse import urlparse
from abeja.common.http_file import HTTPFile
from abeja.common.source_data import SourceData
from abeja.datalake.api.client import APIClient
from abeja.datalake.file import DatalakeFile
from abeja.exceptions import UnsupportedURI
def file_factory(
client: APIClient,
uri: str,
type: str,
**kwargs) -> SourceData:
"""generate file for the given uri
:param client:
:param uri:
:param type:
:param kwargs:
:return:
:raises: UnsupportedURI if given uri is not supported
"""
pr = urlparse(uri)
if pr.scheme == 'datalake':
return DatalakeFile(client, uri=uri, type=type, **kwargs)
elif pr.scheme == "http" or pr.scheme == "https":
return HTTPFile(client, uri=uri)
raise UnsupportedURI('{} is not supported.'.format(uri))
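
# A minimal usage sketch (the channel/file identifiers below are hypothetical):
#
#   client = APIClient()
#   f = file_factory(client,
#                    uri='datalake://1234567890123/20230101T000000-xxxxxxxx',
#                    type='image/jpeg')
#   # `f` is a DatalakeFile here; an http(s):// uri yields an HTTPFile instead.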
| python | 848 |
from os import path
from easy_thumbnails import files, utils, signals
from easy_thumbnails.tests import utils as test_utils
from easy_thumbnails.conf import settings
try:
from PIL import Image
except ImportError:
import Image
class FilesTest(test_utils.BaseTest):
def setUp(self):
super(FilesTest, self).setUp()
self.storage = test_utils.TemporaryStorage()
self.remote_storage = test_utils.FakeRemoteStorage()
# Save a test image in both storages.
filename = self.create_image(self.storage, 'test.jpg')
self.thumbnailer = files.get_thumbnailer(self.storage, filename)
self.thumbnailer.thumbnail_storage = self.storage
filename = self.create_image(self.remote_storage, 'test.jpg')
self.remote_thumbnailer = files.get_thumbnailer(self.remote_storage,
filename)
self.remote_thumbnailer.thumbnail_storage = self.remote_storage
# Create another thumbnailer for extension test.
self.ext_thumbnailer = files.get_thumbnailer(self.storage, filename)
self.ext_thumbnailer.thumbnail_storage = self.storage
# Generate test transparent images.
filename = self.create_image(self.storage, 'transparent.png',
image_mode='RGBA', image_format='PNG')
self.transparent_thumbnailer = files.get_thumbnailer(self.storage,
filename)
self.transparent_thumbnailer.thumbnail_storage = self.storage
filename = self.create_image(self.storage, 'transparent-greyscale.png',
image_mode='LA', image_format='PNG')
self.transparent_greyscale_thumbnailer = files.get_thumbnailer(
self.storage, filename)
self.transparent_greyscale_thumbnailer.thumbnail_storage = self.storage
def tearDown(self):
self.storage.delete_temporary_storage()
self.remote_storage.delete_temporary_storage()
super(FilesTest, self).tearDown()
def test_tag(self):
local = self.thumbnailer.get_thumbnail({'size': (100, 100)})
remote = self.remote_thumbnailer.get_thumbnail({'size': (100, 100)})
self.assertEqual(local.tag(), '<img alt="" height="75" '
'src="%s" width="100" />' % local.url)
self.assertEqual(local.tag(alt='A & B'), '<img alt="A & B" '
'height="75" src="%s" width="100" />' % local.url)
# Can turn off dimensions.
self.assertEqual(remote.tag(use_size=False), '<img alt="" '
'src="%s" />' % remote.url)
# Thumbnails on remote storage don't get dimensions...
self.assertEqual(remote.tag(), '<img alt="" '
'src="%s" />' % remote.url)
# ...unless explicitly requested.
self.assertEqual(remote.tag(use_size=True), '<img alt="" height="75" '
'src="%s" width="100" />' % remote.url)
# All other arguments are passed through as attributes.
self.assertEqual(local.tag(**{'rel': 'A&B', 'class': 'fish'}),
'<img alt="" class="fish" height="75" rel="A&B" '
'src="%s" width="100" />' % local.url)
def test_transparent_thumbnailing(self):
thumb_file = self.thumbnailer.get_thumbnail(
{'size': (100, 100)})
thumb_file.seek(0)
thumb = Image.open(thumb_file)
self.assertFalse(utils.is_transparent(thumb),
"%s shouldn't be transparent." % thumb_file.name)
thumb_file = self.transparent_thumbnailer.get_thumbnail(
{'size': (100, 100)})
thumb_file.seek(0)
thumb = Image.open(thumb_file)
self.assertTrue(utils.is_transparent(thumb),
"%s should be transparent." % thumb_file.name)
thumb_file = self.transparent_greyscale_thumbnailer.get_thumbnail(
{'size': (100, 100)})
thumb_file.seek(0)
thumb = Image.open(thumb_file)
self.assertTrue(utils.is_transparent(thumb),
"%s should be transparent." % thumb_file.name)
def test_extensions(self):
self.ext_thumbnailer.thumbnail_extension = 'png'
thumb = self.ext_thumbnailer.get_thumbnail({'size': (100, 100)})
self.assertEqual(path.splitext(thumb.name)[1], '.png')
self.ext_thumbnailer.thumbnail_preserve_extensions = ('foo',)
thumb = self.ext_thumbnailer.get_thumbnail({'size': (100, 100)})
self.assertEqual(path.splitext(thumb.name)[1], '.png')
self.ext_thumbnailer.thumbnail_preserve_extensions = True
thumb = self.ext_thumbnailer.get_thumbnail({'size': (100, 100)})
self.assertEqual(path.splitext(thumb.name)[1], '.jpg')
self.ext_thumbnailer.thumbnail_preserve_extensions = ('foo', 'jpg')
thumb = self.ext_thumbnailer.get_thumbnail({'size': (100, 100)})
self.assertEqual(path.splitext(thumb.name)[1], '.jpg')
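    # Regression check: thumbnail generation should work both with and without
    # USE_TZ; there are no assertions because passing simply means no exception.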
def test_USE_TZ(self):
settings.USE_TZ = True
self.thumbnailer.get_thumbnail({'size': (10, 20)})
settings.USE_TZ = False
self.thumbnailer.get_thumbnail({'size': (20, 40)})
def test_thumbnailfile_options(self):
opts = {'size': (50, 50), 'crop': True, 'upscale': True}
thumb = self.thumbnailer.get_thumbnail(opts)
self.assertEqual(thumb.thumbnail_options, opts)
def test_default_options_setting(self):
settings.THUMBNAIL_DEFAULT_OPTIONS = {'crop': True}
opts = {'size': (50, 50)}
thumb = self.thumbnailer.get_thumbnail(opts)
self.assertEqual((thumb.width, thumb.height), (50, 50))
def test_thumbnail_created_signal(self):
def signal_handler(sender, *args, **kwargs):
sender.signal_received = True
signals.thumbnail_created.connect(signal_handler)
try:
thumb = self.thumbnailer.get_thumbnail({'size': (10, 20)})
self.assertTrue(hasattr(thumb, 'signal_received'))
finally:
signals.thumbnail_created.disconnect(signal_handler)
| python | 5,945 |
"""
pysteps.nowcasts.steps
======================
Implementation of the STEPS stochastic nowcasting method as described in
:cite:`Seed2003`, :cite:`BPS2006` and :cite:`SPN2013`.
.. autosummary::
:toctree: ../generated/
forecast
"""
import sys
import time
import numpy as np
import scipy.ndimage
from pysteps import cascade
from pysteps import extrapolation
from pysteps import noise
from pysteps import utils
from pysteps.postprocessing import probmatching
from pysteps.timeseries import autoregression, correlation
from pysteps.nowcasts import utils as nowcast_utils
try:
import dask
DASK_IMPORTED = True
except ImportError:
DASK_IMPORTED = False
def forecast(R, V, n_timesteps, n_ens_members=24, n_cascade_levels=6,
R_thr=None, kmperpixel=None, timestep=None,
extrap_method="semilagrangian", decomp_method="fft",
bandpass_filter_method="gaussian", noise_method="nonparametric",
noise_stddev_adj=None, ar_order=2, vel_pert_method="bps",
conditional=False, probmatching_method="cdf",
mask_method="incremental", callback=None, return_output=True,
seed=None, num_workers=1, fft_method="numpy", domain="spatial",
extrap_kwargs=None, filter_kwargs=None, noise_kwargs=None,
vel_pert_kwargs=None, mask_kwargs=None, measure_time=False):
"""Generate a nowcast ensemble by using the Short-Term Ensemble Prediction
System (STEPS) method.
Parameters
----------
R : array-like
Array of shape (ar_order+1,m,n) containing the input precipitation fields
ordered by timestamp from oldest to newest. The time steps between the
inputs are assumed to be regular, and the inputs are required to have
finite values.
V : array-like
Array of shape (2,m,n) containing the x- and y-components of the advection
field. The velocities are assumed to represent one time step between the
inputs. All values are required to be finite.
n_timesteps : int
Number of time steps to forecast.
n_ens_members : int, optional
The number of ensemble members to generate.
n_cascade_levels : int, optional
The number of cascade levels to use.
R_thr : float, optional
Specifies the threshold value for minimum observable precipitation
intensity. Required if mask_method is not None or conditional is True.
kmperpixel : float, optional
Spatial resolution of the input data (kilometers/pixel). Required if
vel_pert_method is not None or mask_method is 'incremental'.
timestep : float, optional
Time step of the motion vectors (minutes). Required if vel_pert_method is
not None or mask_method is 'incremental'.
extrap_method : str, optional
Name of the extrapolation method to use. See the documentation of
pysteps.extrapolation.interface.
decomp_method : {'fft'}, optional
Name of the cascade decomposition method to use. See the documentation
of pysteps.cascade.interface.
bandpass_filter_method : {'gaussian', 'uniform'}, optional
Name of the bandpass filter method to use with the cascade decomposition.
See the documentation of pysteps.cascade.interface.
noise_method : {'parametric','nonparametric','ssft','nested',None}, optional
Name of the noise generator to use for perturbating the precipitation
field. See the documentation of pysteps.noise.interface. If set to None,
no noise is generated.
noise_stddev_adj : {'auto','fixed',None}, optional
Optional adjustment for the standard deviations of the noise fields added
        to each cascade level. This is done to compensate for incorrect std. dev.
        estimates of cascade levels due to the presence of no-rain areas. 'auto'=use
the method implemented in pysteps.noise.utils.compute_noise_stddev_adjs.
'fixed'= use the formula given in :cite:`BPS2006` (eq. 6), None=disable
noise std. dev adjustment.
ar_order : int, optional
The order of the autoregressive model to use. Must be >= 1.
vel_pert_method : {'bps',None}, optional
Name of the noise generator to use for perturbing the advection field. See
the documentation of pysteps.noise.interface. If set to None, the advection
field is not perturbed.
conditional : bool, optional
If set to True, compute the statistics of the precipitation field
conditionally by excluding pixels where the values are below the threshold
R_thr.
mask_method : {'obs','sprog','incremental',None}, optional
The method to use for masking no precipitation areas in the forecast field.
The masked pixels are set to the minimum value of the observations.
'obs' = apply R_thr to the most recently observed precipitation intensity
field, 'sprog' = use the smoothed forecast field from S-PROG, where the
AR(p) model has been applied, 'incremental' = iteratively buffer the mask
with a certain rate (currently it is 1 km/min), None=no masking.
probmatching_method : {'cdf','mean',None}, optional
Method for matching the statistics of the forecast field with those of
the most recently observed one. 'cdf'=map the forecast CDF to the observed
one, 'mean'=adjust only the conditional mean value of the forecast field
in precipitation areas, None=no matching applied. Using 'mean' requires
that mask_method is not None.
callback : function, optional
Optional function that is called after computation of each time step of
the nowcast. The function takes one argument: a three-dimensional array
of shape (n_ens_members,h,w), where h and w are the height and width
of the input field R, respectively. This can be used, for instance,
writing the outputs into files.
return_output : bool, optional
Set to False to disable returning the outputs as numpy arrays. This can
save memory if the intermediate results are written to output files using
the callback function.
seed : int, optional
Optional seed number for the random generators.
num_workers : int, optional
The number of workers to use for parallel computation. Applicable if dask
is enabled or pyFFTW is used for computing the FFT. When num_workers>1, it
is advisable to disable OpenMP by setting the environment variable
OMP_NUM_THREADS to 1. This avoids slowdown caused by too many simultaneous
threads.
fft_method : str, optional
A string defining the FFT method to use (see utils.fft.get_method).
Defaults to 'numpy' for compatibility reasons. If pyFFTW is installed,
the recommended method is 'pyfftw'.
domain : {"spatial", "spectral"}
If "spatial", all computations are done in the spatial domain (the
classical STEPS model). If "spectral", the AR(2) models and stochastic
perturbations are applied directly in the spectral domain to reduce
memory footprint and improve performance :cite:`PCH2019b`.
extrap_kwargs : dict, optional
Optional dictionary containing keyword arguments for the extrapolation
method. See the documentation of pysteps.extrapolation.
filter_kwargs : dict, optional
Optional dictionary containing keyword arguments for the filter method.
See the documentation of pysteps.cascade.bandpass_filters.py.
noise_kwargs : dict, optional
Optional dictionary containing keyword arguments for the initializer of
the noise generator. See the documentation of pysteps.noise.fftgenerators.
vel_pert_kwargs : dict, optional
Optional dictionary containing keyword arguments 'p_par' and 'p_perp' for
the initializer of the velocity perturbator. The choice of the optimal
parameters depends on the domain and the used optical flow method.
Default parameters from :cite:`BPS2006`:
p_par = [10.88, 0.23, -7.68]
p_perp = [5.76, 0.31, -2.72]
Parameters fitted to the data (optical flow/domain):
darts/fmi:
p_par = [13.71259667, 0.15658963, -16.24368207]
p_perp = [8.26550355, 0.17820458, -9.54107834]
darts/mch:
p_par = [24.27562298, 0.11297186, -27.30087471]
p_perp = [-7.80797846e+01, -3.38641048e-02, 7.56715304e+01]
darts/fmi+mch:
p_par = [16.55447057, 0.14160448, -19.24613059]
p_perp = [14.75343395, 0.11785398, -16.26151612]
lucaskanade/fmi:
p_par = [2.20837526, 0.33887032, -2.48995355]
p_perp = [2.21722634, 0.32359621, -2.57402761]
lucaskanade/mch:
p_par = [2.56338484, 0.3330941, -2.99714349]
p_perp = [1.31204508, 0.3578426, -1.02499891]
lucaskanade/fmi+mch:
p_par = [2.31970635, 0.33734287, -2.64972861]
p_perp = [1.90769947, 0.33446594, -2.06603662]
vet/fmi:
p_par = [0.25337388, 0.67542291, 11.04895538]
p_perp = [0.02432118, 0.99613295, 7.40146505]
vet/mch:
p_par = [0.5075159, 0.53895212, 7.90331791]
p_perp = [0.68025501, 0.41761289, 4.73793581]
vet/fmi+mch:
p_par = [0.29495222, 0.62429207, 8.6804131 ]
p_perp = [0.23127377, 0.59010281, 5.98180004]
fmi=Finland, mch=Switzerland, fmi+mch=both pooled into the same data set
        The above parameters have been fitted by using run_vel_pert_analysis.py
and fit_vel_pert_params.py located in the scripts directory.
See pysteps.noise.motion for additional documentation.
mask_kwargs : dict
Optional dictionary containing mask keyword arguments 'mask_f' and
        'mask_rim', the factor defining the mask increment and the rim size,
respectively.
The mask increment is defined as mask_f*timestep/kmperpixel.
measure_time : bool
If set to True, measure, print and return the computation time.
Returns
-------
out : ndarray
If return_output is True, a four-dimensional array of shape
(n_ens_members,n_timesteps,m,n) containing a time series of forecast
precipitation fields for each ensemble member. Otherwise, a None value
is returned. The time series starts from t0+timestep, where timestep is
taken from the input precipitation fields R. If measure_time is True, the
return value is a three-element tuple containing the nowcast array, the
initialization time of the nowcast generator and the time used in the
main loop (seconds).
See also
--------
pysteps.extrapolation.interface, pysteps.cascade.interface,
pysteps.noise.interface, pysteps.noise.utils.compute_noise_stddev_adjs
References
----------
:cite:`Seed2003`, :cite:`BPS2006`, :cite:`SPN2013`, :cite:`PCH2019b`
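
    Examples
    --------
    A minimal invocation (parameter values are illustrative only) could look
    like::

        R_f = forecast(R, V, n_timesteps=12, n_ens_members=6,
                       R_thr=0.1, kmperpixel=1.0, timestep=5)

    where R contains the ar_order+1 most recent observed fields (oldest first)
    and V the corresponding advection field, both with finite values only.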
"""
_check_inputs(R, V, ar_order)
if extrap_kwargs is None:
extrap_kwargs = dict()
if filter_kwargs is None:
filter_kwargs = dict()
if noise_kwargs is None:
noise_kwargs = dict()
if vel_pert_kwargs is None:
vel_pert_kwargs = dict()
if mask_kwargs is None:
mask_kwargs = dict()
if np.any(~np.isfinite(R)):
raise ValueError("R contains non-finite values")
if np.any(~np.isfinite(V)):
raise ValueError("V contains non-finite values")
if mask_method not in ["obs", "sprog", "incremental", None]:
raise ValueError("unknown mask method %s: must be 'obs', 'sprog' or 'incremental' or None" % mask_method)
if conditional and R_thr is None:
raise ValueError("conditional=True but R_thr is not set")
if mask_method is not None and R_thr is None:
raise ValueError("mask_method!=None but R_thr=None")
if noise_stddev_adj not in ['auto', 'fixed', None]:
raise ValueError("unknown noise_std_dev_adj method %s: must be 'auto', 'fixed', or None" % noise_stddev_adj)
if kmperpixel is None:
if vel_pert_method is not None:
raise ValueError("vel_pert_method is set but kmperpixel=None")
if mask_method == "incremental":
raise ValueError("mask_method='incremental' but kmperpixel=None")
if timestep is None:
if vel_pert_method is not None:
raise ValueError("vel_pert_method is set but timestep=None")
if mask_method == "incremental":
raise ValueError("mask_method='incremental' but timestep=None")
print("Computing STEPS nowcast:")
print("------------------------")
print("")
print("Inputs:")
print("-------")
print("input dimensions: %dx%d" % (R.shape[1], R.shape[2]))
if kmperpixel is not None:
print("km/pixel: %g" % kmperpixel)
if timestep is not None:
print("time step: %d minutes" % timestep)
print("")
print("Methods:")
print("--------")
print("extrapolation: %s" % extrap_method)
print("bandpass filter: %s" % bandpass_filter_method)
print("decomposition: %s" % decomp_method)
print("noise generator: %s" % noise_method)
print("noise adjustment: %s" % ("yes" if noise_stddev_adj else "no"))
print("velocity perturbator: %s" % vel_pert_method)
print("conditional statistics: %s" % ("yes" if conditional else "no"))
print("precip. mask method: %s" % mask_method)
print("probability matching: %s" % probmatching_method)
print("FFT method: %s" % fft_method)
print("domain: %s" % domain)
print("")
print("Parameters:")
print("-----------")
print("number of time steps: %d" % n_timesteps)
print("ensemble size: %d" % n_ens_members)
print("parallel threads: %d" % num_workers)
print("number of cascade levels: %d" % n_cascade_levels)
print("order of the AR(p) model: %d" % ar_order)
if vel_pert_method == "bps":
vp_par = vel_pert_kwargs.get("p_par", noise.motion.get_default_params_bps_par())
vp_perp = vel_pert_kwargs.get("p_perp", noise.motion.get_default_params_bps_perp())
print("velocity perturbations, parallel: %g,%g,%g" % \
(vp_par[0], vp_par[1], vp_par[2]))
print("velocity perturbations, perpendicular: %g,%g,%g" % \
(vp_perp[0], vp_perp[1], vp_perp[2]))
if conditional or mask_method is not None:
print("precip. intensity threshold: %g" % R_thr)
num_ensemble_workers = n_ens_members if num_workers > n_ens_members \
else num_workers
if measure_time:
starttime_init = time.time()
fft = utils.get_method(fft_method, shape=R.shape[1:], n_threads=num_workers)
M, N = R.shape[1:]
# initialize the band-pass filter
filter_method = cascade.get_method(bandpass_filter_method)
filter = filter_method((M, N), n_cascade_levels, **filter_kwargs)
decomp_method, recomp_method = cascade.get_method(decomp_method)
extrapolator_method = extrapolation.get_method(extrap_method)
x_values, y_values = np.meshgrid(np.arange(R.shape[2]),
np.arange(R.shape[1]))
xy_coords = np.stack([x_values, y_values])
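    # keep only the ar_order+1 most recent input fields; earlier ones are not used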
R = R[-(ar_order + 1):, :, :].copy()
if conditional:
MASK_thr = np.logical_and.reduce([R[i, :, :] >= R_thr for i in range(R.shape[0])])
else:
MASK_thr = None
# advect the previous precipitation fields to the same position with the
# most recent one (i.e. transform them into the Lagrangian coordinates)
extrap_kwargs = extrap_kwargs.copy()
extrap_kwargs['xy_coords'] = xy_coords
res = list()
def f(R, i):
return extrapolator_method(R[i, :, :], V, ar_order - i, "min",
**extrap_kwargs)[-1]
for i in range(ar_order):
if not DASK_IMPORTED:
R[i, :, :] = f(R, i)
else:
res.append(dask.delayed(f)(R, i))
if DASK_IMPORTED:
num_workers_ = len(res) if num_workers > len(res) else num_workers
R = np.stack(list(dask.compute(*res, num_workers=num_workers_)) + [R[-1, :, :]])
if noise_method is not None:
# get methods for perturbations
init_noise, generate_noise = noise.get_method(noise_method)
# initialize the perturbation generator for the precipitation field
pp = init_noise(R, fft_method=fft, **noise_kwargs)
if noise_stddev_adj == "auto":
print("Computing noise adjustment coefficients... ", end="")
sys.stdout.flush()
if measure_time:
starttime = time.time()
R_min = np.min(R)
noise_std_coeffs = noise.utils.compute_noise_stddev_adjs(R[-1, :, :],
R_thr, R_min, filter, decomp_method, pp,
generate_noise, 20,
conditional=True, num_workers=num_workers)
if measure_time:
print("%.2f seconds." % (time.time() - starttime))
else:
print("done.")
elif noise_stddev_adj == "fixed":
f = lambda k: 1.0 / (0.75 + 0.09 * k)
noise_std_coeffs = [f(k) for k in range(1, n_cascade_levels + 1)]
else:
noise_std_coeffs = np.ones(n_cascade_levels)
if noise_stddev_adj is not None:
print("noise std. dev. coeffs: %s" % str(noise_std_coeffs))
# compute the cascade decompositions of the input precipitation fields
R_d = []
for i in range(ar_order + 1):
R_ = decomp_method(R[i, :, :], filter, mask=MASK_thr, fft_method=fft,
output_domain=domain, normalize=True,
compute_stats=True, compact_output=True)
R_d.append(R_)
# normalize the cascades and rearrange them into a four-dimensional array
# of shape (n_cascade_levels,ar_order+1,m,n) for the autoregressive model
R_c = nowcast_utils.stack_cascades(R_d, n_cascade_levels)
R_d = R_d[-1]
R_d = [R_d.copy() for j in range(n_ens_members)]
# compute lag-l temporal autocorrelation coefficients for each cascade level
GAMMA = np.empty((n_cascade_levels, ar_order))
for i in range(n_cascade_levels):
GAMMA[i, :] = correlation.temporal_autocorrelation(R_c[i], mask=MASK_thr)
nowcast_utils.print_corrcoefs(GAMMA)
if ar_order == 2:
# adjust the lag-2 correlation coefficient to ensure that the AR(p)
# process is stationary
for i in range(n_cascade_levels):
GAMMA[i, 1] = autoregression.adjust_lag2_corrcoef2(GAMMA[i, 0], GAMMA[i, 1])
# estimate the parameters of the AR(p) model from the autocorrelation
# coefficients
PHI = np.empty((n_cascade_levels, ar_order + 1))
for i in range(n_cascade_levels):
PHI[i, :] = autoregression.estimate_ar_params_yw(GAMMA[i, :])
nowcast_utils.print_ar_params(PHI)
# discard all except the p-1 last cascades because they are not needed for
# the AR(p) model
R_c = [R_c[i][-ar_order:] for i in range(n_cascade_levels)]
# stack the cascades into a list containing all ensemble members
R_c = [[R_c[j].copy() for j in range(n_cascade_levels)] for i in range(n_ens_members)]
# initialize the random generators
if noise_method is not None:
randgen_prec = []
randgen_motion = []
np.random.seed(seed)
for j in range(n_ens_members):
rs = np.random.RandomState(seed)
randgen_prec.append(rs)
seed = rs.randint(0, high=1e9)
rs = np.random.RandomState(seed)
randgen_motion.append(rs)
seed = rs.randint(0, high=1e9)
if vel_pert_method is not None:
init_vel_noise, generate_vel_noise = noise.get_method(vel_pert_method)
# initialize the perturbation generators for the motion field
vps = []
for j in range(n_ens_members):
kwargs = {"randstate": randgen_motion[j],
"p_par": vp_par,
"p_perp": vp_perp}
vp_ = init_vel_noise(V, 1. / kmperpixel, timestep, **kwargs)
vps.append(vp_)
D = [None for j in range(n_ens_members)]
R_f = [[] for j in range(n_ens_members)]
if probmatching_method == "mean":
mu_0 = np.mean(R[-1, :, :][R[-1, :, :] >= R_thr])
R_m = None
if mask_method is not None:
MASK_prec = R[-1, :, :] >= R_thr
if mask_method == "obs":
pass
elif mask_method == "sprog":
# compute the wet area ratio and the precipitation mask
war = 1.0 * np.sum(MASK_prec) / (R.shape[1] * R.shape[2])
R_m = [R_c[0][i].copy() for i in range(n_cascade_levels)]
R_m_d = R_d[0].copy()
elif mask_method == "incremental":
# get mask parameters
mask_rim = mask_kwargs.get("mask_rim", 10)
mask_f = mask_kwargs.get("mask_f", 1.)
# initialize the structuring element
struct = scipy.ndimage.generate_binary_structure(2, 1)
# iterate it to expand it nxn
n = mask_f * timestep / kmperpixel
struct = scipy.ndimage.iterate_structure(struct, int((n - 1) / 2.))
# initialize precip mask for each member
MASK_prec = _compute_incremental_mask(MASK_prec, struct, mask_rim)
MASK_prec = [MASK_prec.copy() for j in range(n_ens_members)]
if noise_method is None and R_m is None:
R_m = [R_c[0][i].copy() for i in range(n_cascade_levels)]
fft_objs = []
for i in range(n_ens_members):
fft_objs.append(utils.get_method(fft_method, shape=R.shape[1:]))
if measure_time:
init_time = time.time() - starttime_init
R = R[-1, :, :]
print("Starting nowcast computation.")
if measure_time:
starttime_mainloop = time.time()
# iterate each time step
for t in range(n_timesteps):
print("Computing nowcast for time step %d... " % (t + 1), end="")
sys.stdout.flush()
if measure_time:
starttime = time.time()
if noise_method is None or mask_method == "sprog":
for i in range(n_cascade_levels):
# use a separate AR(p) model for the non-perturbed forecast,
# from which the mask is obtained
R_m[i] = autoregression.iterate_ar_model(R_m[i], PHI[i, :])
#R_m_ = nowcast_utils.recompose_cascade(R_m[:, -1, :, :], mu, sigma)
R_m_d["cascade_levels"] = [R_m[i][-1] for i in range(n_cascade_levels)]
if domain == "spatial":
R_m_d["cascade_levels"] = np.stack(R_m_d["cascade_levels"])
R_m_ = recomp_method(R_m_d)
if domain == "spectral":
R_m_ = fft.irfft2(R_m_)
if mask_method == "sprog":
MASK_prec = _compute_sprog_mask(R_m_, war)
# the nowcast iteration for each ensemble member
def worker(j):
if noise_method is not None:
# generate noise field
EPS = generate_noise(pp, randstate=randgen_prec[j],
fft_method=fft_objs[j], domain=domain)
# decompose the noise field into a cascade
EPS = decomp_method(EPS, filter, fft_method=fft_objs[j],
input_domain=domain, output_domain=domain,
compute_stats=True, normalize=True,
compact_output=True)
else:
EPS = None
# iterate the AR(p) model for each cascade level
for i in range(n_cascade_levels):
# normalize the noise cascade
if EPS is not None:
EPS_ = EPS["cascade_levels"][i]
EPS_ *= noise_std_coeffs[i]
else:
EPS_ = None
# apply AR(p) process to cascade level
if EPS is not None or vel_pert_method is not None:
R_c[j][i] = \
autoregression.iterate_ar_model(R_c[j][i], PHI[i, :],
eps=EPS_)
else:
# use the deterministic AR(p) model computed above if
# perturbations are disabled
R_c[j][i] = R_m[i]
EPS = None
EPS_ = None
# compute the recomposed precipitation field(s) from the cascades
# obtained from the AR(p) model(s)
R_d[j]["cascade_levels"] = [R_c[j][i][-1, :] for i in range(n_cascade_levels)]
if domain == "spatial":
R_d[j]["cascade_levels"] = np.stack(R_d[j]["cascade_levels"])
R_c_ = recomp_method(R_d[j])
if domain == "spectral":
R_c_ = fft_objs[j].irfft2(R_c_)
if mask_method is not None:
# apply the precipitation mask to prevent generation of new
# precipitation into areas where it was not originally
# observed
R_cmin = R_c_.min()
if mask_method == "incremental":
R_c_ = R_cmin + (R_c_ - R_cmin) * MASK_prec[j]
MASK_prec_ = R_c_ > R_cmin
else:
MASK_prec_ = MASK_prec
# Set to min value outside of mask
R_c_[~MASK_prec_] = R_cmin
if probmatching_method == "cdf":
# adjust the CDF of the forecast to match the most recently
# observed precipitation field
R_c_ = probmatching.nonparam_match_empirical_cdf(R_c_, R)
elif probmatching_method == "mean":
MASK = R_c_ >= R_thr
mu_fct = np.mean(R_c_[MASK])
R_c_[MASK] = R_c_[MASK] - mu_fct + mu_0
if mask_method == "incremental":
MASK_prec[j] = _compute_incremental_mask(R_c_ >= R_thr, struct, mask_rim)
# compute the perturbed motion field
if vel_pert_method is not None:
V_ = V + generate_vel_noise(vps[j], (t + 1) * timestep)
else:
V_ = V
# advect the recomposed precipitation field to obtain the forecast
# for time step t
extrap_kwargs.update({"D_prev": D[j], "return_displacement": True})
R_f_, D_ = extrapolator_method(R_c_, V_, 1, **extrap_kwargs)
D[j] = D_
R_f_ = R_f_[0]
return R_f_
res = []
for j in range(n_ens_members):
if not DASK_IMPORTED or n_ens_members == 1:
res.append(worker(j))
else:
res.append(dask.delayed(worker)(j))
R_f_ = dask.compute(*res, num_workers=num_ensemble_workers) \
if DASK_IMPORTED and n_ens_members > 1 else res
res = None
if measure_time:
print("%.2f seconds." % (time.time() - starttime))
else:
print("done.")
        if callback is not None:
            callback(np.stack(R_f_))
        if return_output:
            for j in range(n_ens_members):
                R_f[j].append(R_f_[j])
        # release the forecast fields for this time step only after both the
        # callback and the output list have consumed them
        R_f_ = None
if measure_time:
mainloop_time = time.time() - starttime_mainloop
if return_output:
outarr = np.stack([np.stack(R_f[j]) for j in range(n_ens_members)])
if measure_time:
return outarr, init_time, mainloop_time
else:
return outarr
else:
return None
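# Editorial note: when return_output is True, the stacked forecast returned
# above has shape (n_ens_members, n_timesteps, m, n), where (m, n) is the
# spatial shape of the input precipitation fields.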
def _check_inputs(R, V, ar_order):
if len(R.shape) != 3:
raise ValueError("R must be a three-dimensional array")
if R.shape[0] < ar_order + 1:
raise ValueError("R.shape[0] < ar_order+1")
if len(V.shape) != 3:
raise ValueError("V must be a three-dimensional array")
if R.shape[1:3] != V.shape[1:3]:
raise ValueError("dimension mismatch between R and V: shape(R)=%s, shape(V)=%s" % \
(str(R.shape), str(V.shape)))
def _compute_incremental_mask(Rbin, kr, r):
# buffer the observation mask Rbin using the kernel kr
# add a grayscale rim r (for smooth rain/no-rain transition)
# buffer observation mask
Rbin = np.ndarray.astype(Rbin.copy(), "uint8")
Rd = scipy.ndimage.morphology.binary_dilation(Rbin, kr)
# add grayscale rim
kr1 = scipy.ndimage.generate_binary_structure(2, 1)
mask = Rd.astype(float)
for n in range(r):
Rd = scipy.ndimage.morphology.binary_dilation(Rd, kr1)
mask += Rd
# normalize between 0 and 1
return mask / mask.max()
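# Editorial example (not part of the original module): for a single observed
# pixel and r = 2 the returned mask is 1.0 over the kr-dilated area and drops
# by 1/3 for each successive one-pixel rim:
#
#     >>> Rbin = np.zeros((7, 7), dtype=bool); Rbin[3, 3] = True
#     >>> kr = scipy.ndimage.generate_binary_structure(2, 1)
#     >>> np.unique(_compute_incremental_mask(Rbin, kr, 2))
#     array([0.        , 0.33333333, 0.66666667, 1.        ])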
def _compute_sprog_mask(R, war):
# obtain the CDF from the non-perturbed forecast that is
# scale-filtered by the AR(p) model
R_s = R.flatten()
# compute the threshold value R_pct_thr corresponding to the
# same fraction of precipitation pixels (forecast values above
# R_thr) as in the most recently observed precipitation field
R_s.sort(kind="quicksort")
x = 1.0 * np.arange(1, len(R_s) + 1)[::-1] / len(R_s)
i = np.argmin(abs(x - war))
# handle ties
if R_s[i] == R_s[i + 1]:
i = np.where(R_s == R_s[i])[0][-1] + 1
R_pct_thr = R_s[i]
# determine a mask using the above threshold value to preserve the
# wet-area ratio
return R >= R_pct_thr
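# Editorial example (not part of the original module): if the most recent
# observation had a wet-area ratio war = 0.25, the threshold R_pct_thr lands
# near the 75th percentile of the S-PROG forecast values, so the returned
# boolean mask again covers roughly 25% of the domain.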
| python | 29,447 |
# Copyright 2020 Antonio Macaluso
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
### Experiments ###
from Utils_Spline import *
lower = -1.
upper = 1.
step = .1
## Definition of the interval of B-Spline
c = .5
label = 'relu'
def relu(x, c = 0):
"""
Compute the value of the relu function with parameter c, for a given point x.
:param x: (float) input coordinate
:param c: (float) shifting parameter
:return: (float) the value of the relu function
"""
return c + max(0.0, x)
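# Quick check of the shifted ReLU (editorial example):
#   relu(0.7, c=0.5) == 1.2 and relu(-0.3, c=0.5) == 0.5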
x = np.arange(lower, upper + .03, step).tolist()
y = [ relu(value, c) for value in x]
data_coef = coeff_splines_estimation(x, y, label) # data_coef = pd.read_csv('results/relu_full.csv')
data_est = estimate_function(data_coef, relu, label, c=0, step=step)
data_est.hybrid_quantum = data_est.hybrid_quantum - c
data_est.classical_spline = data_est.classical_spline - c
data_est.to_csv('results/' + label + '_estimates.csv', index=False)
plot_activation(label, data_est, data_coef, full = True)
plot_activation(label, data_est, data_coef, full = False)
| python | 1,552 |
import django.core.validators
from django.db import migrations, models
import django.utils.timezone
import model_utils.fields
import sorl.thumbnail.fields
class Migration(migrations.Migration):
replaces = [('photo', '0001_initial'),
('photo', '0002_imagefile_crop_diameter'),
('photo', '0003_auto_20150228_2151')]
dependencies = [
('contributors', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ImageFile',
fields=[
(
'id',
models.AutoField(
serialize=False,
primary_key=True,
verbose_name='ID',
auto_created=True
)
),
(
'created',
model_utils.fields.AutoCreatedField(
default=django.utils.timezone.now,
verbose_name='created',
editable=False
)
),
(
'modified',
model_utils.fields.AutoLastModifiedField(
default=django.utils.timezone.now,
verbose_name='modified',
editable=False
)
),
(
'source_file',
sorl.thumbnail.fields.ImageField(
max_length=1024,
upload_to='',
height_field='full_height',
width_field='full_width'
)
),
(
'full_height',
models.PositiveIntegerField(
verbose_name='full height',
editable=False,
help_text='full height in pixels'
)
),
(
'full_width',
models.PositiveIntegerField(
                    verbose_name='full width',
                    editable=False,
                    help_text='full width in pixels'
)
),
(
'from_top',
models.PositiveSmallIntegerField(
validators=[
django.core.validators.MaxValueValidator(100),
django.core.validators.MinValueValidator(0)
],
default=50,
help_text='image crop vertical. Between 0% and 100%.'
)
),
(
'from_left',
models.PositiveSmallIntegerField(
validators=[
django.core.validators.MaxValueValidator(100),
django.core.validators.MinValueValidator(0)
],
default=50,
help_text='image crop horizontal. Between 0% and 100%.'
)
),
(
'cropping_method',
models.PositiveSmallIntegerField(
default=0,
help_text='How this image has been cropped.',
choices=[(0, 'center'), (5, 'corner detection'),
(10, 'multiple faces'), (15, 'single face'),
(100, 'manual crop')]
)
),
(
'old_file_path',
models.CharField(
help_text='previous path if the image has been moved.',
max_length=1000,
blank=True,
null=True
)
),
(
'copyright_information',
models.CharField(
help_text=
'extra information about license and attribution if needed.',
max_length=1000,
blank=True,
null=True
)
),
(
'contributor',
models.ForeignKey(
on_delete=models.CASCADE,
to='contributors.Contributor',
blank=True,
help_text='who made this',
null=True
)
),
(
'crop_diameter',
models.PositiveSmallIntegerField(
validators=[
django.core.validators.MaxValueValidator(100),
django.core.validators.MinValueValidator(0)
],
default=100,
help_text=
'area containing most relevant content. Area is considered a circle with center x,y and diameter d where x and y are the values "from_left" and "from_right" and d is a percentage of the shortest axis. This is used for close cropping of some images, for instance byline photos.'
)
),
],
options={
'verbose_name': 'ImageFile',
'verbose_name_plural': 'ImageFiles',
},
bases=(models.Model, ),
),
]
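# Editorial example of the crop_diameter semantics described in the help_text
# above: for a 600x400 px image with from_left=50, from_top=50 and
# crop_diameter=100, the "relevant content" circle is centred at (300, 200)
# and its diameter is 100% of the shorter axis, i.e. 400 px.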
| python | 5,725 |
# Generated by Django 3.2 on 2021-05-04 13:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('reman', '0009_auto_20210329_1352'),
]
operations = [
migrations.RemoveField(
model_name='ecurefbase',
name='cal_ktag',
),
migrations.RemoveField(
model_name='ecurefbase',
name='open_diag',
),
migrations.RemoveField(
model_name='ecurefbase',
name='ref_cal_out',
),
migrations.RemoveField(
model_name='ecurefbase',
name='ref_comp',
),
migrations.RemoveField(
model_name='ecurefbase',
name='ref_mat',
),
migrations.RemoveField(
model_name='ecurefbase',
name='ref_psa_out',
),
migrations.RemoveField(
model_name='ecurefbase',
name='status',
),
migrations.RemoveField(
model_name='sparepart',
name='repairs',
),
migrations.AddField(
model_name='ecutype',
name='cal_ktag',
field=models.CharField(blank=True, max_length=10, verbose_name='CAL_KTAG'),
),
migrations.AddField(
model_name='ecutype',
name='open_diag',
field=models.CharField(blank=True, max_length=16, verbose_name='OPENDIAG'),
),
migrations.AddField(
model_name='ecutype',
name='ref_cal_out',
field=models.CharField(blank=True, max_length=10, verbose_name='REF_CAL_OUT'),
),
migrations.AddField(
model_name='ecutype',
name='ref_comp',
field=models.CharField(blank=True, max_length=10, verbose_name='REF_COMP'),
),
migrations.AddField(
model_name='ecutype',
name='ref_mat',
field=models.CharField(blank=True, max_length=10, verbose_name='REF_MAT'),
),
migrations.AddField(
model_name='ecutype',
name='ref_psa_out',
field=models.CharField(blank=True, max_length=10, verbose_name='REF_PSA_OUT'),
),
migrations.AddField(
model_name='ecutype',
name='status',
field=models.CharField(blank=True, max_length=16, verbose_name='STATUT'),
),
migrations.AlterField(
model_name='ecutype',
name='hw_reference',
field=models.CharField(max_length=20, verbose_name='hardware'),
),
migrations.AlterField(
model_name='ecutype',
name='technical_data',
field=models.CharField(max_length=50, verbose_name='modèle produit'),
),
migrations.AlterUniqueTogether(
name='ecutype',
unique_together={('hw_reference', 'technical_data')},
),
]
| python | 2,970 |
import typing
import numpy as np
from gym import spaces
from stable_baselines3.common.preprocessing import is_image_space
from stable_baselines3.common.vec_env.base_vec_env import VecEnv, VecEnvWrapper
if typing.TYPE_CHECKING:
from stable_baselines3.common.type_aliases import GymStepReturn # noqa: F401
class VecTransposeImage(VecEnvWrapper):
"""
Re-order channels, from HxWxC to CxHxW.
It is required for PyTorch convolution layers.
:param venv: (VecEnv)
"""
def __init__(self, venv: VecEnv):
assert is_image_space(venv.observation_space), "The observation space must be an image"
observation_space = self.transpose_space(venv.observation_space)
super(VecTransposeImage, self).__init__(venv, observation_space=observation_space)
@staticmethod
def transpose_space(observation_space: spaces.Box) -> spaces.Box:
"""
Transpose an observation space (re-order channels).
:param observation_space: (spaces.Box)
:return: (spaces.Box)
"""
assert is_image_space(observation_space), "The observation space must be an image"
        height, width, channels = observation_space.shape
        new_shape = (channels, height, width)
return spaces.Box(low=0, high=255, shape=new_shape, dtype=observation_space.dtype)
@staticmethod
def transpose_image(image: np.ndarray) -> np.ndarray:
"""
Transpose an image or batch of images (re-order channels).
:param image: (np.ndarray)
:return: (np.ndarray)
"""
if len(image.shape) == 3:
return np.transpose(image, (2, 0, 1))
return np.transpose(image, (0, 3, 1, 2))
def step_wait(self) -> "GymStepReturn":
observations, rewards, dones, infos = self.venv.step_wait()
return self.transpose_image(observations), rewards, dones, infos
def reset(self) -> np.ndarray:
"""
Reset all environments
"""
return self.transpose_image(self.venv.reset())
def close(self) -> None:
self.venv.close()
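# Editorial usage sketch (not part of the stable-baselines3 source): the static
# helper simply re-orders axes, e.g.
#
#     >>> batch = np.zeros((4, 84, 84, 3), dtype=np.uint8)
#     >>> VecTransposeImage.transpose_image(batch).shape
#     (4, 3, 84, 84)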
| python | 2,084 |
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class Webbench(MakefilePackage):
"""Webbench is a simple website pressure test tool used in Linux."""
homepage = "http://home.tiscali.cz/~cz210552/webbench.html"
git = "https://github.com/EZLippi/WebBench.git"
version('1.5', commit='b1acf3c01cc914729fe188dfc8ed761858028d4f')
depends_on('ntirpc')
def setup_build_environment(self, env):
env.prepend_path('CPATH', self.spec['ntirpc'].prefix.include.ntirpc)
def edit(self, spec, prefix):
makefile = FileFilter('Makefile')
makefile.filter('$(DESTDIR)/usr/local/man/man1', self.prefix.man.man1,
string=True)
def install(self, spec, prefix):
make('install', 'PREFIX={0}'.format(prefix))
| python | 959 |
class Leapx_org():
def __init__(self,first,last,pay):
self.f_name = first
self.l_name = last
self.pay_amt = pay
self.full_name = first+" "+last
def make_email(self):
return self.f_name+ "."+self.l_name+"@xyz.com"
L_obj1 = Leapx_org('mohit', 'RAJ', 60000)
print(L_obj1)
print("\n")
# len() needs the class to define __len__, which Leapx_org does not, so the
# call below raises a TypeError and illustrates the missing magic method.
print("Length is ", len(L_obj1)) | python | 335
from datetime import datetime
from Counter import Counter
from DateTools import DateTools
from Ssh import Ssh
from XmlReader import XmlReader
class Ctrl:
compt = 0
comptIte = 0
comptVide = 0
comptIgn = 0
comptIgnList = 0
comptNotInRange = 0
comptError = 0
comptOk = 0
addAutomaticallyIp = False
configReader = None
ignoredIpReader = None
addressSubnet = None
controllerAntenna = None
ignoredIp = []
newIgnoredIp = []
counter = None
sshConnector = None
dateStart = None
dateEnd = None
def __init__(self):
self.configReader = XmlReader("config")
self.addressSubnet = self.configReader.readConfigAddresses()
self.controllerAntenna = self.configReader.readConfigController()
self.ignoredIpReader = XmlReader("ignoredIp")
self.ignoredIp = self.ignoredIpReader.readIgnoredIp()
def load(self):
print(
'Addresses: ' + str(self.addressSubnet.ip1) + '.' + str(self.addressSubnet.ip2) + '.' + str(
self.addressSubnet.ip3) + '.' + str(
self.addressSubnet.ip4) + ' /' + str(self.addressSubnet.mask))
print(
'Range: ' + str(self.addressSubnet.rangeStart1) + '.' + str(self.addressSubnet.rangeStart2) + '.' + str(
self.addressSubnet.rangeStart3) + '.' + str(self.addressSubnet.rangeStart4) + '-' + str(
self.addressSubnet.rangeEnd1) + '.' + str(self.addressSubnet.rangeEnd2) + '.' + str(
self.addressSubnet.rangeEnd3) + '.' + str(self.addressSubnet.rangeEnd4))
print('User: ' + self.controllerAntenna.user)
print('Password: ' + self.controllerAntenna.mdp)
print('Url: http://unifi.' + self.controllerAntenna.url + ':' + str(self.controllerAntenna.port) + '/inform')
print('Timeout: ' + str(
self.controllerAntenna.timeout) + ' (Number of seconds before an address is set as empty)')
print('Ignored ip: ' + str(len(self.ignoredIp)))
def prestart(self):
modify = input('Would you like to modify the informations ? (y/n) [n]: ')
        if modify == 'y':
subnet = input(
'subnet [' + str(self.addressSubnet.ip1) + '.' + str(self.addressSubnet.ip2) + '.' + str(
self.addressSubnet.ip3) + '.' + str(self.addressSubnet.ip4) + ']: ')
if subnet:
ip1, ip2, ip3, ip4 = subnet.split('.', 4)
self.addressSubnet.ip1 = int(ip1)
self.addressSubnet.ip2 = int(ip2)
self.addressSubnet.ip3 = int(ip3)
self.addressSubnet.ip4 = int(ip4)
mask = input('Mask [' + str(self.addressSubnet.mask) + ']: ')
if mask:
self.addressSubnet.mask = int(mask)
rangeStart = input(
'Range start [' + str(self.addressSubnet.rangeStart1) + '.' + str(
self.addressSubnet.rangeStart2) + '.' + str(
self.addressSubnet.rangeStart3) + '.' + str(self.addressSubnet.rangeStart4) + ']: ')
if rangeStart:
rsip1, rsip2, rsip3, rsip4 = rangeStart.split('.', 4)
self.addressSubnet.rangeStart1 = int(rsip1)
self.addressSubnet.rangeStart2 = int(rsip2)
self.addressSubnet.rangeStart3 = int(rsip3)
self.addressSubnet.rangeStart4 = int(rsip4)
rangeEnd = input(
'Range end [' + str(self.addressSubnet.rangeEnd1) + '.' + str(
self.addressSubnet.rangeEnd2) + '.' + str(
self.addressSubnet.rangeEnd3) + '.' + str(self.addressSubnet.rangeEnd4) + ']: ')
if rangeEnd:
reip1, reip2, reip3, reip4 = rangeEnd.split('.', 4)
self.addressSubnet.rangeEnd1 = int(reip1)
self.addressSubnet.rangeEnd2 = int(reip2)
self.addressSubnet.rangeEnd3 = int(reip3)
self.addressSubnet.rangeEnd4 = int(reip4)
user = input('User [' + self.controllerAntenna.user + ']: ')
if user:
self.controllerAntenna.user = user
password = input('Password [' + self.controllerAntenna.mdp + ']: ')
if password:
self.controllerAntenna.mdp = password
url = input('Url http://unifi.[' + self.controllerAntenna.url + ']/inform: ')
if url:
self.controllerAntenna.url = url
port = input('Port [' + str(self.controllerAntenna.port) + ']: ')
if port:
self.controllerAntenna.port = int(port)
timeout = input('Timeout: [' + str(self.controllerAntenna.timeout) + ']: ')
if timeout:
self.controllerAntenna.timeout = int(timeout)
overwriteConfigFile = input('Would you like to overwrite config files ? (y/n) [n]: ')
        if overwriteConfigFile == 'y':
if not self.configReader.writeConfig(self.addressSubnet, self.controllerAntenna):
print('Error when writing the configuration files')
        elif modify == 'kebab83':
print('Hummm, très bon kebab')
seeAddressesIgnored = input('Would you like to see the ignored ip ? (y/n) [n]: ')
        if seeAddressesIgnored == 'y':
for ip in self.ignoredIp:
print(ip)
modifAddressesIgnored = input('Would you like to modify the ignored ip list? (y/n) [n]: ')
        if modifAddressesIgnored == 'y':
print('Writes the replacement address (None = no change / - = delete / -all = delete all): ')
delete = False
self.newIgnoredIp = []
for ip in self.ignoredIp:
if not delete:
newAddress = input(ip + ' :')
if newAddress == '-all':
delete = True
elif newAddress == '':
self.newIgnoredIp.append(ip)
elif not newAddress == '-':
self.newIgnoredIp.append(newAddress)
if delete:
self.newIgnoredIp = []
print('Writes new range (None = no): ')
addRange = input('')
while addRange:
for rangeIp in self.ignoredIpReader.readRange(addRange):
self.newIgnoredIp.append(rangeIp)
addRange = input('')
print('Writes new address (None = no): ')
addIp = input('')
while addIp:
self.newIgnoredIp.append(addIp)
addIp = input('')
self.ignoredIp = self.newIgnoredIp
overwriteIpfile = input('Would you like to overwrite the ignored ip files ? (y/n) [n]: ')
        if overwriteIpfile == 'y':
if not self.ignoredIpReader.writeIgnoredIp(self.ignoredIp):
print('Error when writing the IP files')
runScan = input('Would you like to run the scan ? (y/n) [y]: ')
        if runScan == 'n':
return False
addressesIgnored = input('Would you like to add automatically the non-valid ip to the files ? (y/n) [n]: ')
        if addressesIgnored == 'y':
self.addAutomaticallyIp = True
return True
def start(self):
self.compt = 2 ** (32 - self.addressSubnet.mask) - 2
self.counter = Counter(self.addressSubnet)
self.sshConnector = Ssh(self.controllerAntenna)
print(
'#########################################################################################################')
self.dateStart = datetime.now()
dtS_string = self.dateStart.strftime("%d/%m/%Y %H:%M:%S")
print('Start at ' + dtS_string)
    def isInRange(self, ip):
        # Note: each octet is checked against its own bounds independently,
        # so this is a per-octet box test rather than a lexicographic range.
        ip1, ip2, ip3, ip4 = (int(part) for part in ip.split('.', 4))
        subnet = self.addressSubnet
        return (subnet.rangeStart1 <= ip1 <= subnet.rangeEnd1
                and subnet.rangeStart2 <= ip2 <= subnet.rangeEnd2
                and subnet.rangeStart3 <= ip3 <= subnet.rangeEnd3
                and subnet.rangeStart4 <= ip4 <= subnet.rangeEnd4)
def scan(self):
while self.compt > 0:
ip = self.counter.counter()
print(ip, end=': ', flush=True)
if self.isInRange(ip):
try:
self.ignoredIp.index(ip)
self.comptIgn = self.comptIgn + 1
self.comptIgnList = self.comptIgnList + 1
print("Ignored from the list")
except ValueError:
result = self.sshConnector.addtocontroller(ip)
                    if result == 'Ok':
self.comptOk = self.comptOk + 1
                    elif result == 'Empty':
if self.addAutomaticallyIp:
self.ignoredIp.append(ip)
self.comptVide = self.comptVide + 1
                    elif result == 'Ignored':
if self.addAutomaticallyIp:
self.ignoredIp.append(ip)
self.comptIgn = self.comptIgn + 1
else:
if self.addAutomaticallyIp:
self.ignoredIp.append(ip)
self.comptError = self.comptError + 1
print(result)
else:
self.comptIgn = self.comptIgn + 1
self.comptNotInRange = self.comptNotInRange + 1
print("Not in range")
self.compt = self.compt - 1
self.comptIte = self.comptIte + 1
def finish(self):
print(
'#########################################################################################################')
if self.addAutomaticallyIp:
self.ignoredIpReader.writeIgnoredIp(self.ignoredIp)
self.dateEnd = datetime.now()
dtE_string = self.dateEnd.strftime("%d/%m/%Y %H:%M:%S")
print("Finish at " + dtE_string + " with " + str(self.comptIte) + " addresses tested: " + str(
self.comptVide) + " empty, " + str(
self.comptIgn) + " ignored (" + str(
self.comptIgnList) + " from the list, " + str(self.comptNotInRange) + " not in range), " + str(
self.comptError) + " with error and " + str(self.comptOk) + " ok")
hours, minutes, seconds = DateTools.convert_timedelta(abs(self.dateStart - self.dateEnd))
print("Time: " + str(hours) + " hours " + str(minutes) + " minutes " + str(seconds) + " seconds")
| python | 11,077 |
from fabric.api import env
env.client = 'zsoobhan'
env.project_code = 'prometheus'
env.web_dir = 'www'
# Environment-agnostic folders
env.project_dir = '/var/www/%(client)s/%(project_code)s' % env
env.static_dir = '/mnt/static/%(client)s/%(project_code)s' % env
env.builds_dir = '%(project_dir)s/builds' % env
def _configure(build_name):
env.build = build_name
env.virtualenv = '%(project_dir)s/virtualenvs/%(build)s/' % env
env.data_dir = '%(project_dir)s/data/%(build)s/' % env
env.nginx_conf = 'www/deploy/nginx/%(build)s.conf' % env
env.supervisord_conf = 'www/deploy/supervisord/%(build)s.conf' % env
env.wsgi = 'deploy/wsgi/%(build)s.wsgi' % env
env.webserver_user = 'www-data'
def prod():
_configure('prod')
env.hosts = ['46.101.74.134']
env.remote_user = 'root'
def test():
_configure('test')
env.remote_user = 'ubuntu'
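# Editorial note: prod() and test() only select the target environment; they
# are meant to be combined on the command line with a deploy task defined
# elsewhere in the fabfile, e.g. (assuming such a task exists) `fab prod deploy`.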
| python | 885 |
# Copyright 2018-2022 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Version information.
Version number (major.minor.patch[-label])
"""
__version__ = "0.24.0"
| python | 697 |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
# -----------------------------------------------------------------------------
try:
    basestring  # Python 2: basestring already exists
except NameError:
    basestring = str  # Python 3: fall back to str
import hashlib
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import NotSupported
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import OnMaintenance
from ccxt.base.errors import InvalidNonce
from ccxt.base.precise import Precise
class exmo(Exchange):
def describe(self):
return self.deep_extend(super(exmo, self).describe(), {
'id': 'exmo',
'name': 'EXMO',
'countries': ['ES', 'RU'], # Spain, Russia
'rateLimit': 350, # once every 350 ms ≈ 180 requests per minute ≈ 3 requests per second
'version': 'v1.1',
'has': {
'cancelOrder': True,
'CORS': False,
'createOrder': True,
'fetchBalance': True,
'fetchCurrencies': True,
'fetchDepositAddress': True,
'fetchFundingFees': True,
'fetchMarkets': True,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrder': 'emulated',
'fetchOrderBook': True,
'fetchOrderBooks': True,
'fetchOrderTrades': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchTrades': True,
'fetchTradingFee': True,
'fetchTradingFees': True,
'fetchTransactions': True,
'withdraw': True,
},
'timeframes': {
'1m': '1',
'5m': '5',
'15m': '15',
'30m': '30',
'45m': '45',
'1h': '60',
'2h': '120',
'3h': '180',
'4h': '240',
'1d': 'D',
'1w': 'W',
'1M': 'M',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27766491-1b0ea956-5eda-11e7-9225-40d67b481b8d.jpg',
'api': {
'public': 'https://api.exmo.com',
'private': 'https://api.exmo.com',
'web': 'https://exmo.me',
},
'www': 'https://exmo.me',
'referral': 'https://exmo.me/?ref=131685',
'doc': [
'https://exmo.me/en/api_doc?ref=131685',
'https://github.com/exmo-dev/exmo_api_lib/tree/master/nodejs',
],
'fees': 'https://exmo.com/en/docs/fees',
},
'api': {
'web': {
'get': [
'ctrl/feesAndLimits',
'en/docs/fees',
],
},
'public': {
'get': [
'currency',
'currency/list/extended',
'order_book',
'pair_settings',
'ticker',
'trades',
'candles_history',
'required_amount',
'payments/providers/crypto/list',
],
},
'private': {
'post': [
'user_info',
'order_create',
'order_cancel',
'stop_market_order_create',
'stop_market_order_cancel',
'user_open_orders',
'user_trades',
'user_cancelled_orders',
'order_trades',
'deposit_address',
'withdraw_crypt',
'withdraw_get_txid',
'excode_create',
'excode_load',
'code_check',
'wallet_history',
'wallet_operations',
],
},
},
'fees': {
'trading': {
'feeSide': 'get',
'tierBased': False,
'percentage': True,
'maker': 0.2 / 100,
'taker': 0.2 / 100,
},
'funding': {
'tierBased': False,
'percentage': False, # fixed funding fees for crypto, see fetchFundingFees below
},
},
'options': {
                'useWebapiForFetchingFees': False,  # TODO: figure out why Exmo bans us when we try to fetch() their web urls
'feesAndLimits': {
'success': 1,
'ctlr': 'feesAndLimits',
'error': '',
'data': {
'limits': [
{'pair': 'BTC/USD', 'min_q': '0.0001', 'max_q': '1000', 'min_p': '1', 'max_p': '30000', 'min_a': '1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BTC/RUB', 'min_q': '0.0001', 'max_q': '1000', 'min_p': '1', 'max_p': '2000000', 'min_a': '10', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BTC/EUR', 'min_q': '0.0001', 'max_q': '1000', 'min_p': '1', 'max_p': '30000', 'min_a': '1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BTC/GBP', 'min_q': '0.0001', 'max_q': '1000', 'min_p': '1', 'max_p': '30000', 'min_a': '1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BTC/UAH', 'min_q': '0.0001', 'max_q': '1000', 'min_p': '1', 'max_p': '15000000', 'min_a': '10', 'max_a': '15000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BTC/PLN', 'min_q': '0.0001', 'max_q': '1000', 'min_p': '1', 'max_p': '20000000', 'min_a': '50', 'max_a': '2000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BTC/TRY', 'min_q': '0.0001', 'max_q': '1000', 'min_p': '1', 'max_p': '800000', 'min_a': '40', 'max_a': '6000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BTC/KZT', 'min_q': '0.0001', 'max_q': '1000', 'min_p': '1000', 'max_p': '12000000', 'min_a': '1000', 'max_a': '100000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BTC/USDT', 'min_q': '0.0001', 'max_q': '1000', 'min_p': '0.01', 'max_p': '30000', 'min_a': '3', 'max_a': '500000', 'taker': '0', 'maker': '0'},
{'pair': 'ETH/BTC', 'min_q': '0.001', 'max_q': '5000', 'min_p': '0.00000001', 'max_p': '10', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ETH/USD', 'min_q': '0.001', 'max_q': '5000', 'min_p': '0.01', 'max_p': '100000', 'min_a': '3', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ETH/RUB', 'min_q': '0.001', 'max_q': '5000', 'min_p': '0.01', 'max_p': '100000', 'min_a': '150', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XRP/BTC', 'min_q': '1', 'max_q': '5000000', 'min_p': '0.0000001', 'max_p': '1', 'min_a': '0.00001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XRP/USD', 'min_q': '1', 'max_q': '5000000', 'min_p': '0.001', 'max_p': '1000', 'min_a': '0.001', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XRP/RUB', 'min_q': '1', 'max_q': '5000000', 'min_p': '0.000001', 'max_p': '1000', 'min_a': '0.01', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ADA/BTC', 'min_q': '1', 'max_q': '10000000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ADA/ETH', 'min_q': '0.01', 'max_q': '10000000', 'min_p': '0.00000001', 'max_p': '10', 'min_a': '0.001', 'max_a': '5000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ADA/USD', 'min_q': '0.01', 'max_q': '10000000', 'min_p': '0.0001', 'max_p': '1000', 'min_a': '0.01', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ALGO/EXM', 'min_q': '1', 'max_q': '1000000', 'min_p': '0.001', 'max_p': '10000', 'min_a': '1', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ALGO/BTC', 'min_q': '1', 'max_q': '1000000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.000001', 'max_a': '50', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ALGO/USDT', 'min_q': '1', 'max_q': '1000000', 'min_p': '0.001', 'max_p': '1000', 'min_a': '1', 'max_a': '500000', 'taker': '0', 'maker': '0'},
{'pair': 'ALGO/RUB', 'min_q': '1', 'max_q': '1000000', 'min_p': '0.000001', 'max_p': '10000', 'min_a': '1', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ALGO/EUR', 'min_q': '1', 'max_q': '1000000', 'min_p': '0.001', 'max_p': '1000', 'min_a': '1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ATOM/EXM', 'min_q': '1', 'max_q': '500000', 'min_p': '0.01', 'max_p': '100000', 'min_a': '200', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ATOM/BTC', 'min_q': '1', 'max_q': '500000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ATOM/USD', 'min_q': '1', 'max_q': '500000', 'min_p': '0.001', 'max_p': '1000', 'min_a': '0.5', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ATOM/EUR', 'min_q': '1', 'max_q': '500000', 'min_p': '0.001', 'max_p': '1000', 'min_a': '0.5', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BCH/USD', 'min_q': '0.003', 'max_q': '5000', 'min_p': '0.00000001', 'max_p': '30000', 'min_a': '0.0001', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BCH/RUB', 'min_q': '0.003', 'max_q': '5000', 'min_p': '0.00000001', 'max_p': '2000000', 'min_a': '0.0001', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BCH/EUR', 'min_q': '0.003', 'max_q': '5000', 'min_p': '0.01', 'max_p': '300000', 'min_a': '3', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BCH/UAH', 'min_q': '0.003', 'max_q': '5000', 'min_p': '0.1', 'max_p': '30000', 'min_a': '10', 'max_a': '15000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BCH/BTC', 'min_q': '0.003', 'max_q': '5000', 'min_p': '0.00000001', 'max_p': '5', 'min_a': '0.0001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BCH/ETH', 'min_q': '0.003', 'max_q': '5000', 'min_p': '0.0000001', 'max_p': '200', 'min_a': '0.0001', 'max_a': '5000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BCH/USDT', 'min_q': '0.003', 'max_q': '5000', 'min_p': '0.01', 'max_p': '5000', 'min_a': '3', 'max_a': '500000', 'taker': '0', 'maker': '0'},
{'pair': 'BTG/USD', 'min_q': '0.01', 'max_q': '100000', 'min_p': '0.001', 'max_p': '1000', 'min_a': '3', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BTG/BTC', 'min_q': '0.01', 'max_q': '100000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BTG/ETH', 'min_q': '0.01', 'max_q': '100000', 'min_p': '0.0001', 'max_p': '100', 'min_a': '0.01', 'max_a': '5000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BTT/RUB', 'min_q': '1', 'max_q': '500000000', 'min_p': '0.000001', 'max_p': '1000', 'min_a': '0.000001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BTT/UAH', 'min_q': '1', 'max_q': '500000000', 'min_p': '0.000001', 'max_p': '1000', 'min_a': '0.000001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'BTT/BTC', 'min_q': '1', 'max_q': '500000000', 'min_p': '0.00000001', 'max_p': '0.1', 'min_a': '0.00001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'CRON/BTC', 'min_q': '1', 'max_q': '100000', 'min_p': '0.0000001', 'max_p': '1', 'min_a': '0.00001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'CRON/ETH', 'min_q': '1', 'max_q': '100000', 'min_p': '0.0000001', 'max_p': '10', 'min_a': '0.00001', 'max_a': '5000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'CRON/USDT', 'min_q': '1', 'max_q': '100000', 'min_p': '0.001', 'max_p': '1000', 'min_a': '0.001', 'max_a': '500000', 'taker': '0', 'maker': '0'},
{'pair': 'CRON/EXM', 'min_q': '1', 'max_q': '100000000', 'min_p': '0.00000001', 'max_p': '1000', 'min_a': '0.01', 'max_a': '100000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'DAI/USD', 'min_q': '1', 'max_q': '500000', 'min_p': '0.001', 'max_p': '1000', 'min_a': '0.1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'DAI/RUB', 'min_q': '1', 'max_q': '500000', 'min_p': '0.01', 'max_p': '100000', 'min_a': '0.5', 'max_a': '30000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'DAI/BTC', 'min_q': '1', 'max_q': '500000', 'min_p': '0.0000001', 'max_p': '0.1', 'min_a': '0.00001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'DAI/ETH', 'min_q': '1', 'max_q': '500000', 'min_p': '0.000001', 'max_p': '10', 'min_a': '0.0001', 'max_a': '5000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'DASH/USD', 'min_q': '0.01', 'max_q': '10000', 'min_p': '0.01', 'max_p': '10000', 'min_a': '3', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'DASH/RUB', 'min_q': '0.01', 'max_q': '10000', 'min_p': '0.01', 'max_p': '100000', 'min_a': '150', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'DASH/UAH', 'min_q': '0.01', 'max_q': '10000', 'min_p': '0.01', 'max_p': '200000', 'min_a': '10', 'max_a': '15000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'DASH/BTC', 'min_q': '0.01', 'max_q': '10000', 'min_p': '0.0001', 'max_p': '1', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'DASH/USDT', 'min_q': '0.01', 'max_q': '10000', 'min_p': '0.01', 'max_p': '5000', 'min_a': '3', 'max_a': '500000', 'taker': '0', 'maker': '0'},
{'pair': 'DCR/RUB', 'min_q': '0.01', 'max_q': '50000', 'min_p': '0.00001', 'max_p': '100000', 'min_a': '0.5', 'max_a': '3000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'DCR/UAH', 'min_q': '0.01', 'max_q': '50000', 'min_p': '0.00001', 'max_p': '100000', 'min_a': '0.25', 'max_a': '1000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'DCR/BTC', 'min_q': '0.01', 'max_q': '50000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'DOGE/USD', 'min_q': '100', 'max_q': '500000000', 'min_p': '0.0000001', 'max_p': '1000', 'min_a': '0.01', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'DOGE/BTC', 'min_q': '100', 'max_q': '500000000', 'min_p': '0.0000001', 'max_p': '1', 'min_a': '0.0001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'EOS/USD', 'min_q': '0.01', 'max_q': '500000', 'min_p': '0.01', 'max_p': '1000', 'min_a': '0.5', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'EOS/EUR', 'min_q': '0.01', 'max_q': '500000', 'min_p': '0.001', 'max_p': '1000', 'min_a': '0.5', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'EOS/BTC', 'min_q': '0.01', 'max_q': '500000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ETC/USD', 'min_q': '0.2', 'max_q': '100000', 'min_p': '0.01', 'max_p': '10000', 'min_a': '0.01', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ETC/RUB', 'min_q': '0.2', 'max_q': '100000', 'min_p': '0.01', 'max_p': '10000', 'min_a': '0.01', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ETC/BTC', 'min_q': '0.2', 'max_q': '100000', 'min_p': '0.0001', 'max_p': '0.5', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ETH/EUR', 'min_q': '0.001', 'max_q': '5000', 'min_p': '0.01', 'max_p': '100000', 'min_a': '3', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ETH/GBP', 'min_q': '0.001', 'max_q': '5000', 'min_p': '0.01', 'max_p': '100000', 'min_a': '3', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ETH/UAH', 'min_q': '0.001', 'max_q': '5000', 'min_p': '0.01', 'max_p': '1000000', 'min_a': '90', 'max_a': '15000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ETH/PLN', 'min_q': '0.001', 'max_q': '5000', 'min_p': '0.01', 'max_p': '100000', 'min_a': '50', 'max_a': '2000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ETH/TRY', 'min_q': '0.001', 'max_q': '5000', 'min_p': '0.1', 'max_p': '80000', 'min_a': '10', 'max_a': '6000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ETH/KZT', 'min_q': '0.001', 'max_q': '5000', 'min_p': '4', 'max_p': '40000000', 'min_a': '3', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ETH/USDT', 'min_q': '0.001', 'max_q': '5000', 'min_p': '0.01', 'max_p': '100000', 'min_a': '3', 'max_a': '500000', 'taker': '0', 'maker': '0'},
{'pair': 'ETH/LTC', 'min_q': '0.001', 'max_q': '5000', 'min_p': '0.00000001', 'max_p': '100000', 'min_a': '0.05', 'max_a': '100000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ETZ/BTC', 'min_q': '1', 'max_q': '50000000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.0001', 'max_a': '10', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ETZ/ETH', 'min_q': '1', 'max_q': '50000000', 'min_p': '0.00000001', 'max_p': '100', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ETZ/USDT', 'min_q': '1', 'max_q': '50000000', 'min_p': '0.000001', 'max_p': '1000', 'min_a': '0.01', 'max_a': '1000', 'taker': '0', 'maker': '0'},
{'pair': 'EXM/USDT', 'min_q': '1', 'max_q': '100000000', 'min_p': '0.00000001', 'max_p': '1000', 'min_a': '0.01', 'max_a': '100000', 'taker': '0', 'maker': '0'},
{'pair': 'EXM/ETH', 'min_q': '1', 'max_q': '100000000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.0001', 'max_a': '5000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'GAS/USD', 'min_q': '0.01', 'max_q': '500000', 'min_p': '0.01', 'max_p': '50000', 'min_a': '0.1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'GAS/BTC', 'min_q': '0.01', 'max_q': '500000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'GNT/BTC', 'min_q': '1', 'max_q': '10000000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'GNT/ETH', 'min_q': '0.01', 'max_q': '10000000', 'min_p': '0.00000001', 'max_p': '10', 'min_a': '0.01', 'max_a': '5000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'GUSD/USD', 'min_q': '1', 'max_q': '500000', 'min_p': '0.1', 'max_p': '10', 'min_a': '0.1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'GUSD/RUB', 'min_q': '1', 'max_q': '500000', 'min_p': '0.01', 'max_p': '1000', 'min_a': '10', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'GUSD/BTC', 'min_q': '1', 'max_q': '500000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.0015', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'HP/BTC', 'min_q': '1', 'max_q': '100000000', 'min_p': '0.00000001', 'max_p': '0.1', 'min_a': '0.00001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'HB/BTC', 'min_q': '10', 'max_q': '100000000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.000001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'LSK/USD', 'min_q': '0.1', 'max_q': '500000', 'min_p': '0.1', 'max_p': '1000', 'min_a': '1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'LSK/RUB', 'min_q': '0.1', 'max_q': '500000', 'min_p': '0.001', 'max_p': '100000', 'min_a': '0.5', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'LSK/BTC', 'min_q': '1', 'max_q': '500000', 'min_p': '0.0000001', 'max_p': '1', 'min_a': '0.0015', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'LTC/USD', 'min_q': '0.05', 'max_q': '10000', 'min_p': '0.01', 'max_p': '10000', 'min_a': '3', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'LTC/RUB', 'min_q': '0.05', 'max_q': '10000', 'min_p': '0.01', 'max_p': '100000', 'min_a': '150', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'LTC/EUR', 'min_q': '0.05', 'max_q': '10000', 'min_p': '0.01', 'max_p': '10000', 'min_a': '3', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'LTC/UAH', 'min_q': '0.05', 'max_q': '10000', 'min_p': '0.01', 'max_p': '300000', 'min_a': '5', 'max_a': '18000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'LTC/BTC', 'min_q': '0.05', 'max_q': '10000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'MKR/BTC', 'min_q': '0.0001', 'max_q': '1000', 'min_p': '0.0001', 'max_p': '100', 'min_a': '0.000001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'MKR/DAI', 'min_q': '0.0001', 'max_q': '1000', 'min_p': '0.5', 'max_p': '500000', 'min_a': '0.005', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'MNC/USD', 'min_q': '10', 'max_q': '500000000', 'min_p': '0.000001', 'max_p': '10000', 'min_a': '0.01', 'max_a': '100000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'MNC/BTC', 'min_q': '10', 'max_q': '500000000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.000001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'MNC/ETH', 'min_q': '10', 'max_q': '500000000', 'min_p': '0.0000001', 'max_p': '10', 'min_a': '0.00001', 'max_a': '1000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'NEO/USD', 'min_q': '0.01', 'max_q': '100000', 'min_p': '0.01', 'max_p': '50000', 'min_a': '0.1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'NEO/RUB', 'min_q': '0.01', 'max_q': '100000', 'min_p': '0.001', 'max_p': '1500000', 'min_a': '50', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'NEO/BTC', 'min_q': '0.1', 'max_q': '100000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'OMG/USD', 'min_q': '0.01', 'max_q': '500000', 'min_p': '0.01', 'max_p': '1000', 'min_a': '0.5', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'OMG/BTC', 'min_q': '1', 'max_q': '500000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'OMG/ETH', 'min_q': '0.01', 'max_q': '500000', 'min_p': '0.00000001', 'max_p': '10', 'min_a': '0.01', 'max_a': '5000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ONG/EXM', 'min_q': '1', 'max_q': '1000000', 'min_p': '0.01', 'max_p': '100000', 'min_a': '100', 'max_a': '15000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ONG/BTC', 'min_q': '1', 'max_q': '1000000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.00001', 'max_a': '10', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ONG/RUB', 'min_q': '1', 'max_q': '1000000', 'min_p': '0.01', 'max_p': '100000', 'min_a': '100', 'max_a': '250000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ONG/UAH', 'min_q': '1', 'max_q': '1000000', 'min_p': '0.01', 'max_p': '100000', 'min_a': '50', 'max_a': '6000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ONT/EXM', 'min_q': '1', 'max_q': '500000', 'min_p': '0.01', 'max_p': '100000', 'min_a': '200', 'max_a': '15000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ONT/BTC', 'min_q': '1', 'max_q': '500000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.00001', 'max_a': '10', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ONT/RUB', 'min_q': '1', 'max_q': '500000', 'min_p': '0.01', 'max_p': '100000', 'min_a': '100', 'max_a': '6000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ONT/UAH', 'min_q': '1', 'max_q': '500000', 'min_p': '0.01', 'max_p': '100000', 'min_a': '200', 'max_a': '250000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'PTI/RUB', 'min_q': '1', 'max_q': '50000000', 'min_p': '0.00000001', 'max_p': '600000', 'min_a': '10', 'max_a': '600000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'PTI/BTC', 'min_q': '1', 'max_q': '50000000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.000001', 'max_a': '10', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'PTI/EOS', 'min_q': '1', 'max_q': '50000000', 'min_p': '0.0000001', 'max_p': '5000', 'min_a': '0.01', 'max_a': '20000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'PTI/USDT', 'min_q': '1', 'max_q': '50000000', 'min_p': '0.000001', 'max_p': '10000', 'min_a': '0.01', 'max_a': '100000', 'taker': '0', 'maker': '0'},
{'pair': 'QTUM/USD', 'min_q': '0.1', 'max_q': '500000', 'min_p': '0.00000001', 'max_p': '10000', 'min_a': '0.1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'QTUM/BTC', 'min_q': '0.1', 'max_q': '500000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.0001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'QTUM/ETH', 'min_q': '0.1', 'max_q': '500000', 'min_p': '0.00000001', 'max_p': '100', 'min_a': '0.001', 'max_a': '5000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ROOBEE/BTC', 'min_q': '1', 'max_q': '10000000', 'min_p': '0.00000001', 'max_p': '0.1', 'min_a': '0.00001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'SMART/USD', 'min_q': '10', 'max_q': '100000000', 'min_p': '0.000001', 'max_p': '1000', 'min_a': '1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'SMART/RUB', 'min_q': '10', 'max_q': '100000000', 'min_p': '0.0001', 'max_p': '100000', 'min_a': '10', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'SMART/EUR', 'min_q': '10', 'max_q': '100000000', 'min_p': '0.000001', 'max_p': '1000', 'min_a': '1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'SMART/BTC', 'min_q': '10', 'max_q': '100000000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.00001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'TRX/USD', 'min_q': '1', 'max_q': '50000000', 'min_p': '0.0001', 'max_p': '1000', 'min_a': '0.01', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'TRX/EUR', 'min_q': '0.01', 'max_q': '50000000', 'min_p': '0.0001', 'max_p': '1000', 'min_a': '0.01', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'TRX/RUB', 'min_q': '1', 'max_q': '50000000', 'min_p': '0.000001', 'max_p': '100000', 'min_a': '0.1', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'TRX/UAH', 'min_q': '1', 'max_q': '50000000', 'min_p': '0.000001', 'max_p': '100000', 'min_a': '0.1', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'TRX/BTC', 'min_q': '1', 'max_q': '50000000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'USDC/USD', 'min_q': '1', 'max_q': '500000', 'min_p': '0.0001', 'max_p': '1000', 'min_a': '3', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'USDC/BTC', 'min_q': '1', 'max_q': '500000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.0001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'USDC/ETH', 'min_q': '1', 'max_q': '500000', 'min_p': '0.0000001', 'max_p': '100', 'min_a': '0.001', 'max_a': '1000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'USDC/USDT', 'min_q': '1', 'max_q': '500000', 'min_p': '0.0001', 'max_p': '1000', 'min_a': '3', 'max_a': '500000', 'taker': '0', 'maker': '0'},
{'pair': 'USDT/USD', 'min_q': '1', 'max_q': '500000', 'min_p': '0.5', 'max_p': '10', 'min_a': '0.1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'USDT/RUB', 'min_q': '1', 'max_q': '500000', 'min_p': '0.01', 'max_p': '1000', 'min_a': '10', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'USDT/EUR', 'min_q': '0.01', 'max_q': '500000', 'min_p': '0.1', 'max_p': '10', 'min_a': '0.1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'USDT/GBP', 'min_q': '1', 'max_q': '500000', 'min_p': '0.5', 'max_p': '10', 'min_a': '0.1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'USDT/UAH', 'min_q': '0.01', 'max_q': '500000', 'min_p': '1', 'max_p': '3000', 'min_a': '2', 'max_a': '15000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'USDT/KZT', 'min_q': '1', 'max_q': '500000', 'min_p': '200', 'max_p': '4000', 'min_a': '0.1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'VLX/BTC', 'min_q': '1', 'max_q': '10000000', 'min_p': '0.00000001', 'max_p': '0.1', 'min_a': '0.00001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'WAVES/USD', 'min_q': '0.5', 'max_q': '500000', 'min_p': '0.001', 'max_p': '3500', 'min_a': '0.5', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'WAVES/RUB', 'min_q': '0.5', 'max_q': '500000', 'min_p': '0.01', 'max_p': '10000', 'min_a': '1', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'WAVES/BTC', 'min_q': '0.5', 'max_q': '500000', 'min_p': '0.000001', 'max_p': '1', 'min_a': '0.0001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'WAVES/ETH', 'min_q': '0.5', 'max_q': '500000', 'min_p': '0.00001', 'max_p': '30', 'min_a': '0.0035', 'max_a': '3500', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XEM/USD', 'min_q': '10', 'max_q': '10000000', 'min_p': '0.00001', 'max_p': '1000', 'min_a': '0.1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XEM/EUR', 'min_q': '10', 'max_q': '10000000', 'min_p': '0.00001', 'max_p': '1000', 'min_a': '0.1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XEM/UAH', 'min_q': '1', 'max_q': '10000000', 'min_p': '0.0001', 'max_p': '30000', 'min_a': '10', 'max_a': '15000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XEM/BTC', 'min_q': '10', 'max_q': '10000000', 'min_p': '0.0000001', 'max_p': '1', 'min_a': '0.00015', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XLM/USD', 'min_q': '0.01', 'max_q': '5000000', 'min_p': '0.0001', 'max_p': '1000', 'min_a': '0.01', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XLM/RUB', 'min_q': '0.01', 'max_q': '5000000', 'min_p': '0.00001', 'max_p': '100000', 'min_a': '0.1', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XLM/TRY', 'min_q': '0.01', 'max_q': '5000000', 'min_p': '0.00001', 'max_p': '100000', 'min_a': '0.1', 'max_a': '6000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XLM/BTC', 'min_q': '1', 'max_q': '5000000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XMR/USD', 'min_q': '0.01', 'max_q': '10000', 'min_p': '0.001', 'max_p': '1000', 'min_a': '0.1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XMR/RUB', 'min_q': '0.01', 'max_q': '10000', 'min_p': '0.001', 'max_p': '600000', 'min_a': '10', 'max_a': '16000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XMR/EUR', 'min_q': '0.01', 'max_q': '10000', 'min_p': '0.001', 'max_p': '1000', 'min_a': '0.1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XMR/UAH', 'min_q': '0.01', 'max_q': '10000', 'min_p': '0.001', 'max_p': '300000', 'min_a': '5', 'max_a': '16000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XMR/BTC', 'min_q': '0.01', 'max_q': '10000', 'min_p': '0.0001', 'max_p': '1', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XMR/ETH', 'min_q': '0.01', 'max_q': '10000', 'min_p': '0.00000001', 'max_p': '100', 'min_a': '0.001', 'max_a': '5000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XRP/EUR', 'min_q': '1', 'max_q': '5000000', 'min_p': '0.001', 'max_p': '1000', 'min_a': '0.001', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XRP/GBP', 'min_q': '1', 'max_q': '5000000', 'min_p': '0.001', 'max_p': '1000', 'min_a': '0.001', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XRP/TRY', 'min_q': '1', 'max_q': '5000000', 'min_p': '0.0001', 'max_p': '1000', 'min_a': '0.01', 'max_a': '6000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XRP/UAH', 'min_q': '1', 'max_q': '5000000', 'min_p': '0.0001', 'max_p': '1000', 'min_a': '0.01', 'max_a': '15000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XRP/USDT', 'min_q': '1', 'max_q': '5000000', 'min_p': '0.001', 'max_p': '1000', 'min_a': '0.001', 'max_a': '500000', 'taker': '0', 'maker': '0'},
{'pair': 'XRP/ETH', 'min_q': '1', 'max_q': '5000000', 'min_p': '0.00000001', 'max_p': '10', 'min_a': '0.00001', 'max_a': '5000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XTZ/USD', 'min_q': '0.1', 'max_q': '100000', 'min_p': '0.0001', 'max_p': '1000', 'min_a': '0.1', 'max_a': '100000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XTZ/RUB', 'min_q': '0.1', 'max_q': '100000', 'min_p': '0.00001', 'max_p': '100000', 'min_a': '0.5', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XTZ/BTC', 'min_q': '0.1', 'max_q': '100000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.00001', 'max_a': '10', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'XTZ/ETH', 'min_q': '0.1', 'max_q': '100000', 'min_p': '0.0000001', 'max_p': '10', 'min_a': '0.0001', 'max_a': '1000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ZEC/USD', 'min_q': '0.01', 'max_q': '10000', 'min_p': '0.001', 'max_p': '5000', 'min_a': '0.1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ZEC/RUB', 'min_q': '0.01', 'max_q': '10000', 'min_p': '0.001', 'max_p': '100000', 'min_a': '0.1', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ZEC/EUR', 'min_q': '0.01', 'max_q': '10000', 'min_p': '0.001', 'max_p': '5000', 'min_a': '0.1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ZEC/BTC', 'min_q': '0.01', 'max_q': '10000', 'min_p': '0.00001', 'max_p': '10', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ZRX/USD', 'min_q': '0.01', 'max_q': '10000000', 'min_p': '0.00001', 'max_p': '1000', 'min_a': '0.1', 'max_a': '500000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ZRX/BTC', 'min_q': '1', 'max_q': '10000000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ZRX/ETH', 'min_q': '0.01', 'max_q': '10000000', 'min_p': '0.00000001', 'max_p': '10', 'min_a': '0.01', 'max_a': '5000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'ZAG/BTC', 'min_q': '1', 'max_q': '10000000', 'min_p': '0.00000001', 'max_p': '0.1', 'min_a': '0.00001', 'max_a': '100', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'USD/RUB', 'min_q': '1', 'max_q': '500000', 'min_p': '0.01', 'max_p': '1000', 'min_a': '10', 'max_a': '50000000', 'taker': '0.4', 'maker': '0.4'},
{'pair': 'EXM/BTC', 'min_q': '1', 'max_q': '100000000', 'min_p': '0.00000001', 'max_p': '1', 'min_a': '0.0000001', 'max_a': '1', 'taker': '0.4', 'maker': '0.4'},
],
'fees': [
{
'group': 'crypto',
'title': 'Cryptocurrency',
'items': [
{'prov': 'EXM', 'dep': '0%', 'wd': '1 EXM'},
{'prov': 'BTC', 'dep': '0%', 'wd': '0.0004 BTC'},
{'prov': 'LTC', 'dep': '0%', 'wd': '0.01 LTC'},
{'prov': 'DOGE', 'dep': '0%', 'wd': '1 Doge'},
{'prov': 'DASH', 'dep': '0%', 'wd': '0.002 DASH'},
{'prov': 'ETH', 'dep': '0%', 'wd': '0.003 ETH'},
{'prov': 'WAVES', 'dep': '0%', 'wd': '0.001 WAVES'},
{'prov': 'ZEC', 'dep': '0%', 'wd': '0.001 ZEC'},
{'prov': 'USDT', 'dep': '0%', 'wd': ''},
{'prov': 'XMR', 'dep': '0%', 'wd': '0.001 XMR'},
{'prov': 'XRP', 'dep': '0%', 'wd': '0.02 XRP'},
{'prov': 'ETC', 'dep': '0%', 'wd': '0.01 ETC'},
{'prov': 'BCH', 'dep': '0%', 'wd': '0.001 BCH'},
{'prov': 'BTG', 'dep': '0%', 'wd': '0.001 BTG'},
{'prov': 'EOS', 'dep': '0%', 'wd': '0.05 EOS'},
{'prov': 'XLM', 'dep': '0%', 'wd': '0.01 XLM'},
{'prov': 'OMG', 'dep': '0.1 OMG', 'wd': '0.5 OMG'},
{'prov': 'TRX', 'dep': '0%', 'wd': '1 TRX'},
{'prov': 'ADA', 'dep': '0%', 'wd': '1 ADA'},
{'prov': 'NEO', 'dep': '0%', 'wd': '0%'},
{'prov': 'GAS', 'dep': '0%', 'wd': '0%'},
{'prov': 'ZRX', 'dep': '0%', 'wd': '1 ZRX'},
{'prov': 'GNT', 'dep': '0%', 'wd': '1 GNT'},
{'prov': 'GUSD', 'dep': '0%', 'wd': '0.5 GUSD'},
{'prov': 'LSK', 'dep': '0%', 'wd': '0.1 LSK'},
{'prov': 'XEM', 'dep': '0%', 'wd': '5 XEM'},
{'prov': 'SMART', 'dep': '0%', 'wd': '0.5 SMART'},
{'prov': 'QTUM', 'dep': '0%', 'wd': '0.01 QTUM'},
{'prov': 'HB', 'dep': '0%', 'wd': '10 HB'},
{'prov': 'DAI', 'dep': '0%', 'wd': '1 DAI'},
{'prov': 'MKR', 'dep': '0%', 'wd': '0.005 MKR'},
{'prov': 'MNC', 'dep': '0%', 'wd': '15 MNC'},
{'prov': 'PTI', 'dep': '-', 'wd': '10 PTI'},
{'prov': 'ETZ', 'dep': '0%', 'wd': '1 ETZ'},
{'prov': 'USDC', 'dep': '0%', 'wd': '0.5 USDC'},
{'prov': 'ROOBEE', 'dep': '0%', 'wd': '200 ROOBEE'},
{'prov': 'DCR', 'dep': '0%', 'wd': '0.01 DCR'},
{'prov': 'ZAG', 'dep': '0%', 'wd': '0%'},
{'prov': 'BTT', 'dep': '0 BTT', 'wd': '100 BTT'},
{'prov': 'VLX', 'dep': '0%', 'wd': '1 VLX'},
{'prov': 'CRON', 'dep': '0%', 'wd': '5 CRON'},
{'prov': 'ONT', 'dep': '0%', 'wd': '1 ONT'},
{'prov': 'ONG', 'dep': '0%', 'wd': '5 ONG'},
{'prov': 'ALGO', 'dep': '0%', 'wd': '0.01 ALGO'},
{'prov': 'ATOM', 'dep': '0%', 'wd': '0.05 ATOM'},
],
},
{
'group': 'usd',
'title': 'USD',
'items': [
{'prov': 'Payeer', 'dep': '3.95%', 'wd': '-'},
{'prov': 'EX-CODE', 'dep': '', 'wd': '0.2%'},
{'prov': 'AdvCash', 'dep': '0%', 'wd': '2.49%'},
{'prov': 'Visa/MasterCard(Simplex)', 'dep': '4.5% + 0.5 USD', 'wd': '-'},
{'prov': 'Visa', 'dep': '3.45%', 'wd': '-'},
{'prov': 'Frick Bank', 'dep': '0 USD', 'wd': '-'},
],
},
{
'group': 'eur',
'title': 'EUR',
'items': [
{'prov': 'Visa/MasterCard', 'dep': '4.5% + 0.5 EUR', 'wd': '-'},
{'prov': 'EX-CODE', 'dep': '', 'wd': '0.2%'},
{'prov': 'Visa', 'dep': '2.95%', 'wd': '-'},
{'prov': 'Frick Internal Transfer', 'dep': '0 EUR', 'wd': '-'},
{'prov': 'SEPA Frick Bank', 'dep': '0 EUR', 'wd': '1 EUR'},
{'prov': 'WIRE Frick Bank', 'dep': '0%', 'wd': '20 EUR'},
{'prov': 'SEPA Weg Ag', 'dep': '-', 'wd': '1 EUR'},
],
},
{
'group': 'gbp',
'title': 'GBP',
'items': [
{'prov': 'EX-CODE', 'dep': '', 'wd': '0.2%'},
{'prov': 'WIRE Frick Bank', 'dep': '10 GBP', 'wd': '-'},
],
},
{
'group': 'rub',
'title': 'RUB',
'items': [
{'prov': 'Payeer', 'dep': '2.49%', 'wd': '3.49%'},
{'prov': 'EX-CODE', 'dep': '', 'wd': '0.2%'},
{'prov': 'Qiwi', 'dep': '1.49%', 'wd': '2.49%'},
{'prov': 'Yandex Money', 'dep': '1.49%', 'wd': '1.95 %'},
{'prov': 'AdvCash', 'dep': '0.99%', 'wd': '0.99%'},
{'prov': 'Visa/MasterCard', 'dep': '2.99%', 'wd': '3.99% + 60 RUB'},
],
},
{
'group': 'pln',
'title': 'PLN',
'items': [
{'prov': 'EX-CODE', 'dep': '', 'wd': '0.2%'},
],
},
{
'group': 'try',
'title': 'TRY',
'items': [
{'prov': 'EX-CODE', 'dep': '', 'wd': '0.2%'},
{'prov': 'Visa', 'dep': '3.05%', 'wd': '-'},
{'prov': 'Visa/MasterCard(Simplex)', 'dep': '4.5% + 2 TRY', 'wd': '-'},
{'prov': 'AdvCash', 'dep': '0%', 'wd': '-'},
],
},
{
'group': 'uah',
'title': 'UAH',
'items': [
{'prov': 'EX-CODE', 'dep': '', 'wd': '0.2%'},
{'prov': 'Terminal', 'dep': '2.6%', 'wd': '-'},
{'prov': 'Visa/MasterCard EasyTransfer', 'dep': '-', 'wd': '2.99%'},
{'prov': 'Visa/MasterCard', 'dep': '1% + 5 UAH', 'wd': '-'},
],
},
{
'group': 'kzt',
'title': 'KZT',
'items': [
{'prov': 'Visa/MasterCard', 'dep': '3.5%', 'wd': '2.99% + 450 KZT'},
{'prov': 'EX-CODE', 'dep': '', 'wd': '0.2%'},
{'prov': 'AdvCash', 'dep': '0%', 'wd': '-'},
],
},
],
},
},
},
'exceptions': {
'exact': {
'40005': AuthenticationError, # Authorization error, incorrect signature
'40009': InvalidNonce, #
'40015': ExchangeError, # API function do not exist
'40016': OnMaintenance, # {"result":false,"error":"Error 40016: Maintenance work in progress"}
'40017': AuthenticationError, # Wrong API Key
'40032': PermissionDenied, # {"result":false,"error":"Error 40032: Access is denied for self API key"}
'40034': RateLimitExceeded, # {"result":false,"error":"Error 40034: Access is denied, rate limit is exceeded"}
'50052': InsufficientFunds,
'50054': InsufficientFunds,
'50304': OrderNotFound, # "Order was not found '123456789'"(fetching order trades for an order that does not have trades yet)
'50173': OrderNotFound, # "Order with id X was not found."(cancelling non-existent, closed and cancelled order)
'50277': InvalidOrder,
'50319': InvalidOrder, # Price by order is less than permissible minimum for self pair
'50321': InvalidOrder, # Price by order is more than permissible maximum for self pair
},
'broad': {
'range period is too long': BadRequest,
'invalid syntax': BadRequest,
'API rate limit exceeded': RateLimitExceeded, # {"result":false,"error":"API rate limit exceeded for x.x.x.x. Retry after 60 sec.","history":[],"begin":1579392000,"end":1579478400}
},
},
'orders': {}, # orders cache / emulation
})
async def fetch_trading_fees(self, params={}):
if self.options['useWebapiForFetchingFees']:
response = await self.webGetEnDocsFees(params)
parts = response.split('<td class="th_fees_2" colspan="2">')
numParts = len(parts)
if numParts != 2:
raise NotSupported(self.id + ' fetchTradingFees format has changed')
rest = parts[1]
parts = rest.split('</td>')
numParts = len(parts)
if numParts < 2:
raise NotSupported(self.id + ' fetchTradingFees format has changed')
fee = float(parts[0].replace('%', '')) * 0.01
taker = fee
maker = fee
return {
# 'info': response,
'maker': maker,
'taker': taker,
}
else:
return {
'maker': self.fees['trading']['maker'],
'taker': self.fees['trading']['taker'],
}
def parse_fixed_float_value(self, input):
if (input is None) or (input == '-'):
return None
if input == '':
return 0
isPercentage = (input.find('%') >= 0)
parts = input.split(' ')
value = parts[0].replace('%', '')
result = float(value)
if (result > 0) and isPercentage:
raise ExchangeError(self.id + ' parseFixedFloatValue detected an unsupported non-zero percentage-based fee ' + input)
return result
async def fetch_funding_fees(self, params={}):
response = None
if self.options['useWebapiForFetchingFees']:
response = await self.webGetCtrlFeesAndLimits(params)
else:
response = self.options['feesAndLimits']
# the code below assumes all non-zero crypto fees are fixed(for now)
withdraw = {}
deposit = {}
groups = self.safe_value(response['data'], 'fees')
groupsByGroup = self.index_by(groups, 'group')
items = groupsByGroup['crypto']['items']
for i in range(0, len(items)):
item = items[i]
code = self.safe_currency_code(self.safe_string(item, 'prov'))
withdrawalFee = self.safe_string(item, 'wd')
depositFee = self.safe_string(item, 'dep')
if withdrawalFee is not None:
withdraw[code] = self.parse_fixed_float_value(withdrawalFee)
if depositFee is not None:
deposit[code] = self.parse_fixed_float_value(depositFee)
# sets fiat fees to None
fiatGroups = self.to_array(self.omit(groupsByGroup, 'crypto'))
for i in range(0, len(fiatGroups)):
code = self.safe_currency_code(self.safe_string(fiatGroups[i], 'title'))
withdraw[code] = None
deposit[code] = None
result = {
'info': response,
'withdraw': withdraw,
'deposit': deposit,
}
# cache them for later use
self.options['fundingFees'] = result
return result
async def fetch_currencies(self, params={}):
fees = await self.fetch_funding_fees(params)
# todo redesign the 'fee' property in currencies
ids = list(fees['withdraw'].keys())
limitsByMarketId = self.index_by(fees['info']['data']['limits'], 'pair')
marketIds = list(limitsByMarketId.keys())
minAmounts = {}
minPrices = {}
minCosts = {}
maxAmounts = {}
maxPrices = {}
maxCosts = {}
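        # Derive per-currency limits from the per-pair limits above: for each base
        # currency keep the smallest min and the largest max amount seen across its
        # pairs, and for each quote currency do the same for price and cost.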
for i in range(0, len(marketIds)):
marketId = marketIds[i]
limit = limitsByMarketId[marketId]
baseId, quoteId = marketId.split('/')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
maxAmount = self.safe_number(limit, 'max_q')
maxPrice = self.safe_number(limit, 'max_p')
maxCost = self.safe_number(limit, 'max_a')
minAmount = self.safe_number(limit, 'min_q')
minPrice = self.safe_number(limit, 'min_p')
minCost = self.safe_number(limit, 'min_a')
minAmounts[base] = min(self.safe_number(minAmounts, base, minAmount), minAmount)
maxAmounts[base] = max(self.safe_number(maxAmounts, base, maxAmount), maxAmount)
minPrices[quote] = min(self.safe_number(minPrices, quote, minPrice), minPrice)
minCosts[quote] = min(self.safe_number(minCosts, quote, minCost), minCost)
maxPrices[quote] = max(self.safe_number(maxPrices, quote, maxPrice), maxPrice)
maxCosts[quote] = max(self.safe_number(maxCosts, quote, maxCost), maxCost)
result = {}
for i in range(0, len(ids)):
id = ids[i]
code = self.safe_currency_code(id)
fee = self.safe_value(fees['withdraw'], code)
active = True
result[code] = {
'id': id,
'code': code,
'name': code,
'active': active,
'fee': fee,
'precision': 8,
'limits': {
'amount': {
'min': self.safe_number(minAmounts, code),
'max': self.safe_number(maxAmounts, code),
},
'price': {
'min': self.safe_number(minPrices, code),
'max': self.safe_number(maxPrices, code),
},
'cost': {
'min': self.safe_number(minCosts, code),
'max': self.safe_number(maxCosts, code),
},
},
'info': id,
}
return result
async def fetch_markets(self, params={}):
response = await self.publicGetPairSettings(params)
#
# {
# "BTC_USD":{
# "min_quantity":"0.0001",
# "max_quantity":"1000",
# "min_price":"1",
# "max_price":"30000",
# "max_amount":"500000",
# "min_amount":"1",
# "price_precision":8,
# "commission_taker_percent":"0.4",
# "commission_maker_percent":"0.4"
# },
# }
#
keys = list(response.keys())
result = []
for i in range(0, len(keys)):
id = keys[i]
market = response[id]
symbol = id.replace('_', '/')
baseId, quoteId = symbol.split('/')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
takerString = self.safe_string(market, 'commission_taker_percent')
makerString = self.safe_string(market, 'commission_maker_percent')
taker = self.parse_number(Precise.string_div(takerString, '100'))
maker = self.parse_number(Precise.string_div(makerString, '100'))
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': True,
'taker': taker,
'maker': maker,
'limits': {
'amount': {
'min': self.safe_number(market, 'min_quantity'),
'max': self.safe_number(market, 'max_quantity'),
},
'price': {
'min': self.safe_number(market, 'min_price'),
'max': self.safe_number(market, 'max_price'),
},
'cost': {
'min': self.safe_number(market, 'min_amount'),
'max': self.safe_number(market, 'max_amount'),
},
},
'precision': {
'amount': 8,
'price': self.safe_integer(market, 'price_precision'),
},
'info': market,
})
return result
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'resolution': self.timeframes[timeframe],
}
options = self.safe_value(self.options, 'fetchOHLCV')
maxLimit = self.safe_integer(options, 'maxLimit', 3000)
duration = self.parse_timeframe(timeframe)
now = self.milliseconds()
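        # The 'from'/'to' request values below are built as unix timestamps in seconds
        # from 'since', 'limit' and the timeframe duration; asking for more than
        # 'maxLimit' candles in one request is rejected with BadRequest.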
if since is None:
if limit is None:
raise ArgumentsRequired(self.id + ' fetchOHLCV() requires a since argument or a limit argument')
else:
if limit > maxLimit:
raise BadRequest(self.id + ' fetchOHLCV will serve ' + str(maxLimit) + ' candles at most')
request['from'] = int(now / 1000) - limit * duration - 1
request['to'] = int(now / 1000)
else:
request['from'] = int(since / 1000) - 1
if limit is None:
request['to'] = int(now / 1000)
else:
if limit > maxLimit:
raise BadRequest(self.id + ' fetchOHLCV will serve ' + str(maxLimit) + ' candles at most')
to = self.sum(since, limit * duration * 1000)
request['to'] = int(to / 1000)
response = await self.publicGetCandlesHistory(self.extend(request, params))
#
# {
# "candles":[
# {"t":1584057600000,"o":0.02235144,"c":0.02400233,"h":0.025171,"l":0.02221,"v":5988.34031761},
# {"t":1584144000000,"o":0.0240373,"c":0.02367413,"h":0.024399,"l":0.0235,"v":2027.82522329},
# {"t":1584230400000,"o":0.02363458,"c":0.02319242,"h":0.0237948,"l":0.02223196,"v":1707.96944997},
# ]
# }
#
candles = self.safe_value(response, 'candles', [])
return self.parse_ohlcvs(candles, market, timeframe, since, limit)
def parse_ohlcv(self, ohlcv, market=None):
#
# {
# "t":1584057600000,
# "o":0.02235144,
# "c":0.02400233,
# "h":0.025171,
# "l":0.02221,
# "v":5988.34031761
# }
#
return [
self.safe_integer(ohlcv, 't'),
self.safe_number(ohlcv, 'o'),
self.safe_number(ohlcv, 'h'),
self.safe_number(ohlcv, 'l'),
self.safe_number(ohlcv, 'c'),
self.safe_number(ohlcv, 'v'),
]
async def fetch_balance(self, params={}):
await self.load_markets()
response = await self.privatePostUserInfo(params)
result = {'info': response}
free = self.safe_value(response, 'balances', {})
used = self.safe_value(response, 'reserved', {})
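        # Map the exchange's 'balances' and 'reserved' fields onto the unified
        # 'free' and 'used' balance fields, one account entry per currency code.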
codes = list(free.keys())
for i in range(0, len(codes)):
code = codes[i]
currencyId = self.currency_id(code)
account = self.account()
if currencyId in free:
account['free'] = self.safe_string(free, currencyId)
if currencyId in used:
account['used'] = self.safe_string(used, currencyId)
result[code] = account
return self.parse_balance(result, False)
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'pair': market['id'],
}
if limit is not None:
request['limit'] = limit
response = await self.publicGetOrderBook(self.extend(request, params))
result = self.safe_value(response, market['id'])
return self.parse_order_book(result, symbol, None, 'bid', 'ask')
async def fetch_order_books(self, symbols=None, limit=None, params={}):
await self.load_markets()
ids = None
if symbols is None:
ids = ','.join(self.ids)
# max URL length is 2083 symbols, including http schema, hostname, tld, etc...
if len(ids) > 2048:
numIds = len(self.ids)
raise ExchangeError(self.id + ' has ' + str(numIds) + ' symbols exceeding max URL length, you are required to specify a list of symbols in the first argument to fetchOrderBooks')
else:
ids = self.market_ids(symbols)
ids = ','.join(ids)
request = {
'pair': ids,
}
if limit is not None:
request['limit'] = limit
response = await self.publicGetOrderBook(self.extend(request, params))
result = {}
marketIds = list(response.keys())
for i in range(0, len(marketIds)):
marketId = marketIds[i]
symbol = marketId
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
            result[symbol] = self.parse_order_book(response[marketId], symbol, None, 'bid', 'ask')
return result
def parse_ticker(self, ticker, market=None):
timestamp = self.safe_timestamp(ticker, 'updated')
symbol = None
if market is not None:
symbol = market['symbol']
last = self.safe_number(ticker, 'last_trade')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_number(ticker, 'high'),
'low': self.safe_number(ticker, 'low'),
'bid': self.safe_number(ticker, 'buy_price'),
'bidVolume': None,
'ask': self.safe_number(ticker, 'sell_price'),
'askVolume': None,
'vwap': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': self.safe_number(ticker, 'avg'),
'baseVolume': self.safe_number(ticker, 'vol'),
'quoteVolume': self.safe_number(ticker, 'vol_curr'),
'info': ticker,
}
async def fetch_tickers(self, symbols=None, params={}):
await self.load_markets()
response = await self.publicGetTicker(params)
result = {}
ids = list(response.keys())
for i in range(0, len(ids)):
id = ids[i]
market = self.markets_by_id[id]
symbol = market['symbol']
ticker = response[id]
result[symbol] = self.parse_ticker(ticker, market)
return self.filter_by_array(result, 'symbol', symbols)
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
response = await self.publicGetTicker(params)
market = self.market(symbol)
return self.parse_ticker(response[market['id']], market)
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# {
# "trade_id":165087520,
# "date":1587470005,
# "type":"buy",
# "quantity":"1.004",
# "price":"0.02491461",
# "amount":"0.02501426"
# },
#
# fetchMyTrades, fetchOrderTrades
#
# {
# "trade_id": 3,
# "date": 1435488248,
# "type": "buy",
# "pair": "BTC_USD",
# "order_id": 12345,
# "quantity": 1,
# "price": 100,
# "amount": 100,
# "exec_type": "taker",
# "commission_amount": "0.02",
# "commission_currency": "BTC",
# "commission_percent": "0.2"
# }
#
timestamp = self.safe_timestamp(trade, 'date')
symbol = None
id = self.safe_string(trade, 'trade_id')
orderId = self.safe_string(trade, 'order_id')
price = self.safe_number(trade, 'price')
amount = self.safe_number(trade, 'quantity')
cost = self.safe_number(trade, 'amount')
side = self.safe_string(trade, 'type')
type = None
marketId = self.safe_string(trade, 'pair')
if marketId is not None:
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
else:
baseId, quoteId = marketId.split('_')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
if (symbol is None) and (market is not None):
symbol = market['symbol']
takerOrMaker = self.safe_string(trade, 'exec_type')
fee = None
feeCost = self.safe_number(trade, 'commission_amount')
if feeCost is not None:
feeCurrencyId = self.safe_string(trade, 'commission_currency')
feeCurrencyCode = self.safe_currency_code(feeCurrencyId)
feeRate = self.safe_number(trade, 'commission_percent')
if feeRate is not None:
feeRate /= 1000
fee = {
'cost': feeCost,
'currency': feeCurrencyCode,
'rate': feeRate,
}
return {
'id': id,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': orderId,
'type': type,
'side': side,
'takerOrMaker': takerOrMaker,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
}
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'pair': market['id'],
}
response = await self.publicGetTrades(self.extend(request, params))
#
# {
# "ETH_BTC":[
# {
# "trade_id":165087520,
# "date":1587470005,
# "type":"buy",
# "quantity":"1.004",
# "price":"0.02491461",
# "amount":"0.02501426"
# },
# {
# "trade_id":165087369,
# "date":1587469938,
# "type":"buy",
# "quantity":"0.94",
# "price":"0.02492348",
# "amount":"0.02342807"
# }
# ]
# }
#
data = self.safe_value(response, market['id'], [])
return self.parse_trades(data, market, since, limit)
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
# a symbol is required but it can be a single string, or a non-empty array
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchMyTrades() requires a symbol argument(a single symbol or an array)')
await self.load_markets()
pair = None
market = None
if isinstance(symbol, list):
numSymbols = len(symbol)
if numSymbols < 1:
raise ArgumentsRequired(self.id + ' fetchMyTrades() requires a non-empty symbol array')
marketIds = self.market_ids(symbol)
pair = ','.join(marketIds)
else:
market = self.market(symbol)
pair = market['id']
request = {
'pair': pair,
}
if limit is not None:
request['limit'] = limit
response = await self.privatePostUserTrades(self.extend(request, params))
result = []
marketIds = list(response.keys())
for i in range(0, len(marketIds)):
marketId = marketIds[i]
symbol = None
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
else:
baseId, quoteId = marketId.split('_')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
items = response[marketId]
trades = self.parse_trades(items, market, since, limit, {
'symbol': symbol,
})
result = self.array_concat(result, trades)
return self.filter_by_since_limit(result, since, limit)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
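        # Market orders are sent with a 'market_' prefix on the side, i.e. the request
        # 'type' becomes 'buy'/'sell' for limit orders and 'market_buy'/'market_sell'
        # for market orders, with the price forced to 0 below.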
prefix = (type + '_') if (type == 'market') else ''
market = self.market(symbol)
if (type == 'market') and (price is None):
price = 0
request = {
'pair': market['id'],
'quantity': self.amount_to_precision(symbol, amount),
'type': prefix + side,
'price': self.price_to_precision(symbol, price),
}
response = await self.privatePostOrderCreate(self.extend(request, params))
id = self.safe_string(response, 'order_id')
timestamp = self.milliseconds()
amount = float(amount)
price = float(price)
status = 'open'
return {
'id': id,
'info': response,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'status': status,
'symbol': symbol,
'type': type,
'side': side,
'price': price,
'cost': price * amount,
'amount': amount,
'remaining': amount,
'filled': 0.0,
'fee': None,
'trades': None,
'clientOrderId': None,
'average': None,
}
async def cancel_order(self, id, symbol=None, params={}):
await self.load_markets()
request = {'order_id': id}
return await self.privatePostOrderCancel(self.extend(request, params))
async def fetch_order(self, id, symbol=None, params={}):
await self.load_markets()
request = {
'order_id': str(id),
}
response = await self.privatePostOrderTrades(self.extend(request, params))
#
# {
# "type": "buy",
# "in_currency": "BTC",
# "in_amount": "1",
# "out_currency": "USD",
# "out_amount": "100",
# "trades": [
# {
# "trade_id": 3,
# "date": 1435488248,
# "type": "buy",
# "pair": "BTC_USD",
# "order_id": 12345,
# "quantity": 1,
# "price": 100,
# "amount": 100
# }
# ]
# }
#
order = self.parse_order(response)
return self.extend(order, {
'id': str(id),
})
async def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
market = None
if symbol is not None:
market = self.market(symbol)
request = {
'order_id': str(id),
}
response = await self.privatePostOrderTrades(self.extend(request, params))
#
# {
# "type": "buy",
# "in_currency": "BTC",
# "in_amount": "1",
# "out_currency": "USD",
# "out_amount": "100",
# "trades": [
# {
# "trade_id": 3,
# "date": 1435488248,
# "type": "buy",
# "pair": "BTC_USD",
# "order_id": 12345,
# "quantity": 1,
# "price": 100,
# "amount": 100,
# "exec_type": "taker",
# "commission_amount": "0.02",
# "commission_currency": "BTC",
# "commission_percent": "0.2"
# }
# ]
# }
#
trades = self.safe_value(response, 'trades')
return self.parse_trades(trades, market, since, limit)
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
response = await self.privatePostUserOpenOrders(params)
marketIds = list(response.keys())
orders = []
for i in range(0, len(marketIds)):
marketId = marketIds[i]
market = None
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
parsedOrders = self.parse_orders(response[marketId], market)
orders = self.array_concat(orders, parsedOrders)
return self.filter_by_symbol_since_limit(orders, symbol, since, limit)
def parse_order(self, order, market=None):
#
# fetchOrders, fetchOpenOrders, fetchClosedOrders
#
# {
# "order_id": "14",
# "created": "1435517311",
# "type": "buy",
# "pair": "BTC_USD",
# "price": "100",
# "quantity": "1",
# "amount": "100"
# }
#
# fetchOrder
#
# {
# "type": "buy",
# "in_currency": "BTC",
# "in_amount": "1",
# "out_currency": "USD",
# "out_amount": "100",
# "trades": [
# {
# "trade_id": 3,
# "date": 1435488248,
# "type": "buy",
# "pair": "BTC_USD",
# "order_id": 12345,
# "quantity": 1,
# "price": 100,
# "amount": 100
# }
# ]
# }
#
id = self.safe_string(order, 'order_id')
timestamp = self.safe_timestamp(order, 'created')
symbol = None
side = self.safe_string(order, 'type')
if market is None:
marketId = None
if 'pair' in order:
marketId = order['pair']
elif ('in_currency' in order) and ('out_currency' in order):
if side == 'buy':
marketId = order['in_currency'] + '_' + order['out_currency']
else:
marketId = order['out_currency'] + '_' + order['in_currency']
if (marketId is not None) and (marketId in self.markets_by_id):
market = self.markets_by_id[marketId]
amount = self.safe_number(order, 'quantity')
if amount is None:
amountField = 'in_amount' if (side == 'buy') else 'out_amount'
amount = self.safe_number(order, amountField)
price = self.safe_number(order, 'price')
cost = self.safe_number(order, 'amount')
filled = 0.0
trades = []
transactions = self.safe_value(order, 'trades', [])
feeCost = None
lastTradeTimestamp = None
average = None
numTransactions = len(transactions)
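        # When the order payload includes trades, derive the filled amount and the
        # total fee cost from them; the order timestamp falls back to the earliest
        # trade and lastTradeTimestamp is taken from the last trade in the list.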
if numTransactions > 0:
feeCost = 0
for i in range(0, numTransactions):
trade = self.parse_trade(transactions[i], market)
if id is None:
id = trade['order']
if timestamp is None:
timestamp = trade['timestamp']
if timestamp > trade['timestamp']:
timestamp = trade['timestamp']
filled = self.sum(filled, trade['amount'])
feeCost = self.sum(feeCost, trade['fee']['cost'])
trades.append(trade)
lastTradeTimestamp = trades[numTransactions - 1]['timestamp']
status = self.safe_string(order, 'status') # in case we need to redefine it for canceled orders
remaining = None
if amount is not None:
remaining = amount - filled
if filled >= amount:
status = 'closed'
else:
status = 'open'
if market is None:
market = self.get_market_from_trades(trades)
feeCurrency = None
if market is not None:
symbol = market['symbol']
feeCurrency = market['quote']
if cost is None:
if price is not None:
cost = price * filled
else:
if filled > 0:
if average is None:
average = cost / filled
if price is None:
price = cost / filled
fee = {
'cost': feeCost,
'currency': feeCurrency,
}
return {
'id': id,
'clientOrderId': None,
'datetime': self.iso8601(timestamp),
'timestamp': timestamp,
'lastTradeTimestamp': lastTradeTimestamp,
'status': status,
'symbol': symbol,
'type': 'limit',
'timeInForce': None,
'postOnly': None,
'side': side,
'price': price,
'stopPrice': None,
'cost': cost,
'amount': amount,
'filled': filled,
'remaining': remaining,
'average': average,
'trades': trades,
'fee': fee,
'info': order,
}
async def fetch_deposit_address(self, code, params={}):
await self.load_markets()
response = await self.privatePostDepositAddress(params)
depositAddress = self.safe_string(response, code)
address = None
tag = None
if depositAddress:
addressAndTag = depositAddress.split(',')
address = addressAndTag[0]
numParts = len(addressAndTag)
if numParts > 1:
tag = addressAndTag[1]
self.check_address(address)
return {
'currency': code,
'address': address,
'tag': tag,
'info': response,
}
def get_market_from_trades(self, trades):
tradesBySymbol = self.index_by(trades, 'pair')
symbols = list(tradesBySymbol.keys())
numSymbols = len(symbols)
if numSymbols == 1:
return self.markets[symbols[0]]
return None
async def withdraw(self, code, amount, address, tag=None, params={}):
await self.load_markets()
currency = self.currency(code)
request = {
'amount': amount,
'currency': currency['id'],
'address': address,
}
if tag is not None:
request['invoice'] = tag
response = await self.privatePostWithdrawCrypt(self.extend(request, params))
return {
'info': response,
'id': response['task_id'],
}
def parse_transaction_status(self, status):
statuses = {
'transferred': 'ok',
'paid': 'ok',
'pending': 'pending',
'processing': 'pending',
}
return self.safe_string(statuses, status, status)
def parse_transaction(self, transaction, currency=None):
#
# fetchTransactions
#
# {
# "dt": 1461841192,
# "type": "deposit",
# "curr": "RUB",
# "status": "processing",
# "provider": "Qiwi(LA) [12345]",
# "amount": "1",
# "account": "",
# "txid": "ec46f784ad976fd7f7539089d1a129fe46...",
# }
#
timestamp = self.safe_timestamp(transaction, 'dt')
amount = self.safe_number(transaction, 'amount')
if amount is not None:
amount = abs(amount)
status = self.parse_transaction_status(self.safe_string(transaction, 'status'))
txid = self.safe_string(transaction, 'txid')
type = self.safe_string(transaction, 'type')
currencyId = self.safe_string(transaction, 'curr')
code = self.safe_currency_code(currencyId, currency)
address = None
tag = None
comment = None
account = self.safe_string(transaction, 'account')
if type == 'deposit':
comment = account
elif type == 'withdrawal':
address = account
if address is not None:
parts = address.split(':')
numParts = len(parts)
if numParts == 2:
address = self.safe_string(parts, 1)
address = address.replace(' ', '')
fee = None
# fixed funding fees only(for now)
if not self.fees['funding']['percentage']:
key = 'withdraw' if (type == 'withdrawal') else 'deposit'
feeCost = self.safe_number(self.options['fundingFees'][key], code)
# users don't pay for cashbacks, no fees for that
provider = self.safe_string(transaction, 'provider')
if provider == 'cashback':
feeCost = 0
if feeCost is not None:
# withdrawal amount includes the fee
if type == 'withdrawal':
amount = amount - feeCost
fee = {
'cost': feeCost,
'currency': code,
'rate': None,
}
return {
'info': transaction,
'id': None,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'currency': code,
'amount': amount,
'address': address,
'addressTo': address,
'addressFrom': None,
'tag': tag,
'tagTo': tag,
'tagFrom': None,
'status': status,
'type': type,
'updated': None,
'comment': comment,
'txid': txid,
'fee': fee,
}
async def fetch_transactions(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
request = {}
if since is not None:
request['date'] = int(since / 1000)
currency = None
if code is not None:
currency = self.currency(code)
response = await self.privatePostWalletHistory(self.extend(request, params))
#
# {
# "result": True,
# "error": "",
# "begin": "1493942400",
# "end": "1494028800",
# "history": [
# {
# "dt": 1461841192,
# "type": "deposit",
# "curr": "RUB",
# "status": "processing",
# "provider": "Qiwi(LA) [12345]",
# "amount": "1",
# "account": "",
# "txid": "ec46f784ad976fd7f7539089d1a129fe46...",
# },
# {
# "dt": 1463414785,
# "type": "withdrawal",
# "curr": "USD",
# "status": "paid",
# "provider": "EXCODE",
# "amount": "-1",
# "account": "EX-CODE_19371_USDda...",
# "txid": "",
# },
# ],
# }
#
return self.parse_transactions(response['history'], currency, since, limit)
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'][api] + '/'
if api != 'web':
url += self.version + '/'
url += path
if (api == 'public') or (api == 'web'):
if params:
url += '?' + self.urlencode(params)
elif api == 'private':
self.check_required_credentials()
nonce = self.nonce()
body = self.urlencode(self.extend({'nonce': nonce}, params))
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'Key': self.apiKey,
'Sign': self.hmac(self.encode(body), self.encode(self.secret), hashlib.sha512),
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def nonce(self):
return self.milliseconds()
def handle_errors(self, httpCode, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is None:
return # fallback to default error handler
if ('result' in response) or ('errmsg' in response):
#
# {"result":false,"error":"Error 50052: Insufficient funds"}
# {"s":"error","errmsg":"strconv.ParseInt: parsing \"\": invalid syntax"}
#
success = self.safe_value(response, 'result', False)
if isinstance(success, basestring):
if (success == 'true') or (success == '1'):
success = True
else:
success = False
if not success:
code = None
message = self.safe_string_2(response, 'error', 'errmsg')
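                # e.g. "Error 50052: Insufficient funds" splits into "Error 50052" plus the
                # description; the numeric code "50052" is extracted below and matched against
                # the 'exact' exception map, with a 'broad' substring match as a fallback.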
errorParts = message.split(':')
numParts = len(errorParts)
if numParts > 1:
errorSubParts = errorParts[0].split(' ')
numSubParts = len(errorSubParts)
code = errorSubParts[1] if (numSubParts > 1) else errorSubParts[0]
feedback = self.id + ' ' + body
self.throw_exactly_matched_exception(self.exceptions['exact'], code, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback)
raise ExchangeError(feedback)
| python | 88,866 |
from snappy.lambda_handler import set_handler # noqa: F401
| python | 60 |
#!/usr/bin/python
# Copyright (c) 2017 Alibaba Group Holding Limited. He Guimin <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see http://www.gnu.org/licenses/.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: alicloud_ess_configuration
version_added: "1.0.9"
short_description: Create or Terminate a scaling configuration in ESS.
description:
- Scaling configuration defines the configuration of ECS instances used for Auto Scaling.
When adding ECS instances to a scaling group, Auto Scaling creates the ECS instances according to the scaling configuration.
deprecated:
removed_in: "1.5.0"
why: Alibaba Cloud module name prefix "ali" will be more concise.
alternative: Use M(ali_ess_configuration) instead.
options:
state:
description:
- The state of the scaling configuration after operating.
default: 'present'
choices: [ 'present', 'active', 'inactive', 'absent' ]
name:
description:
- The name of scaling configuration. The name must contain 2-40 English or Chinese characters,
and start with a number, a letter in upper or lower case or a Chinese character.
The name can contain numbers, "_", "-" or ".". Default to configuration Id.
aliases: ['configuration_name' ]
image_id:
description:
- Image ID used to scale ECS instances. Required when C(state=present).
aliases: [ 'image' ]
instance_type:
description:
- Instance type used to scale ECS instances. Required when C(state=present).
aliases: [ 'type' ]
security_group_id:
description:
- Security Group id used to scale ECS instances.
group_id:
description:
- ID of the scaling group of a scaling configuration.
aliases: [ 'scaling_group_id' ]
internet_charge_type:
description:
- Internet charge type of scaling ECS instance.
default: "PayByBandwidth"
choices: ["PayByBandwidth", "PayByTraffic"]
max_bandwidth_in:
description:
- For scaling ECS instance, maximum incoming bandwidth from the public network, measured in Mbps (Mega bit per second).
Valid values range [1,200].
default: 200
max_bandwidth_out:
description:
- For scaling ECS instance, maximum outgoing bandwidth to the public network, measured in Mbps (Mega bit per second).
Valid values range [0,100].
default: 0
system_disk_category:
description:
- Category of the system disk.
default: "cloud_efficiency"
choices: ["cloud_efficiency", "cloud_ssd"]
system_disk_size:
description:
- Size of the system disk, in GB. The valid value range [40, 500]. Default to maximum of specified value and image size.
id:
description:
- The ID of existing scaling configuration.
aliases: [ 'configuration_id' ]
data_disks:
description:
- List of hash/dictionaries data disks for scaling ECS instances. A maximum of four items can be entered.
suboptions:
size:
description:
- Size of data disk, in GB. The valid value range [20, 32768]. Ignored when I(snapshot_id).
category:
description:
- Category of data disk.
default: "cloud_efficiency"
choices: ["cloud_efficiency", "cloud_ssd"]
snapshot_id:
description:
- Snapshot used for creating the data disk.
delete_with_instance:
description:
- Whether the data disk will be released along with the instance.
type: bool
default: True
tags:
description:
- A hash/dictionaries of instance tags, to add to the new instance or for starting/stopping instance by tag. C({"key":"value"})
key_name:
description:
- The name of key pair which is used to access ECS instance in SSH.
aliases: ['keypair']
user_data:
description:
- User-defined data to customize the startup behaviors of an ECS instance and to pass data into an ECS instance.
It only will take effect when launching the new ECS instances.
ram_role_name:
description:
- The name of the instance RAM role.
aliases: ['ram_role']
author:
- "He Guimin (@xiaozhu36)"
requirements:
- "python >= 2.6"
- "footmark >= 1.3.0"
extends_documentation_fragment:
- alicloud
'''
EXAMPLES = '''
# basic provisioning example scaling configuration
- name: basic provisioning example
hosts: localhost
vars:
alicloud_access_key: <your-alicloud-access-key-id>
alicloud_secret_key: <your-alicloud-access-secret-key>
alicloud_region: cn-beijing
image: ubuntu1404_64_40G_cloudinit_20160727.raw
instance_type: ecs.n4.small
max_bandwidth_out: 10
system_disk_category: cloud_efficiency
system_disk_size: 100
internet_charge_type: PayByBandwidth
security_group_id: sg-f2rwnfh23r
group_id: asg-2zebnrbt206pex
key_name: key-pair-for-ess
name: configuration-from-ansible
data_disks:
- size: 50
category: cloud_efficiency
- snapshot_id: s-w3cif22r2rd
category: cloud_efficiency
tags:
CreatedBy: 'Ansible'
Version: '1'
tasks:
- name: launch scaling configuration
alicloud_ess_configuration:
alicloud_access_key: '{{ alicloud_access_key }}'
alicloud_secret_key: '{{ alicloud_secret_key }}'
alicloud_region: '{{ alicloud_region }}'
image: '{{ image }}'
system_disk_category: '{{ system_disk_category }}'
system_disk_size: '{{ system_disk_size }}'
instance_type: '{{ instance_type }}'
internet_charge_type: '{{ internet_charge_type }}'
max_bandwidth_out: '{{ max_bandwidth_out }}'
key_name: '{{ key_name }}'
- name: launch scaling configuration with data disks and tags
alicloud_ess_configuration:
alicloud_access_key: '{{ alicloud_access_key }}'
alicloud_secret_key: '{{ alicloud_secret_key }}'
alicloud_region: '{{ alicloud_region }}'
image: '{{ image }}'
system_disk_category: '{{ system_disk_category }}'
system_disk_size: '{{ system_disk_size }}'
instance_type: '{{ instance_type }}'
internet_charge_type: '{{ internet_charge_type }}'
max_bandwidth_out: '{{ max_bandwidth_out }}'
key_name: '{{ key_name }}'
data_disks: '{{ data_disks }}'
tags: '{{ tags }}'
- name: delete specified scaling configuration
alicloud_ess_configuration:
alicloud_access_key: '{{ alicloud_access_key }}'
alicloud_secret_key: '{{ alicloud_secret_key }}'
alicloud_region: '{{ alicloud_region }}'
name: '{{ image }}'
state: absent
'''
RETURN = '''
id:
description: Scaling Configuration ID.
    returned: except absent
type: str
sample: "asc-2zeimuvzeil1ybfd2lt3"
name:
description: Scaling Configuration name.
    returned: except absent
type: str
sample: ess-configuration-foo
group_id:
description: ID of the scaling group of a scaling configuration.
    returned: except absent
type: str
sample: "asg-2zeimuvzeil1xfuor9ej"
configuration:
description: The details of a scaling configuration.
    returned: except absent
type: dict
sample: {
"creation_time": "2018-01-05T14:03Z",
"group_id": "asg-2zeimuvzeil1xfuor9ej",
"id": "asc-2zeimuvzeil1ybfd2lt3",
"image_id": "centos_6_09_64_20G_alibase_20170825.vhd",
"instance_type": "ecs.n4.small",
"name": "test-for-ansible",
"security_group_id": "sg-2zeb86qfocdo7pvk41tt",
"status": "inactive"
}
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.alicloud_ecs import ecs_argument_spec, ess_connect
HAS_FOOTMARK = False
try:
from footmark.exception import ECSResponseError
HAS_FOOTMARK = True
except ImportError:
HAS_FOOTMARK = False
def get_details(configuration):
return dict(id=configuration.id,
name=configuration.name,
group_id=configuration.scaling_group_id,
status=configuration.status,
image_id=configuration.image_id,
instance_type=configuration.instance_type,
security_group_id=configuration.security_group_id,
data_disks=getattr(configuration,'data_disks', None),
creation_time=configuration.creation_time)
def main():
argument_spec = ecs_argument_spec()
argument_spec.update(dict(
group_id=dict(type='str', aliases=['scaling_group_id']),
instance_type=dict(type='str', aliases=['type']),
image_id=dict(type='str', aliases=['image']),
name=dict(type='str', aliases=['configuration_name']),
internet_charge_type=dict(type='str', default="PayByBandwidth", choices=["PayByBandwidth", "PayByTraffic"]),
max_bandwidth_in=dict(type='int', default=200),
max_bandwidth_out=dict(type='int', default=0),
system_disk_category=dict(type='str', default='cloud_efficiency'),
        system_disk_size=dict(type='int', default=40),
tags=dict(type='dict'),
state=dict(default='present', choices=['present', 'absent']),
id=dict(type='str', aliases=['configuration_id']),
key_name=dict(type='str', aliases=['keypair']),
user_data=dict(type='str'),
data_disks=dict(type='list'),
security_group_id=dict(type='str'),
ram_role_name=dict(type='str', aliases=['ram_role'])
))
module = AnsibleModule(argument_spec=argument_spec)
if HAS_FOOTMARK is False:
module.fail_json(msg="Package 'footmark' required for the module alicloud_ess_configuration.")
ess = ess_connect(module)
state = module.params['state']
cfg_id = module.params['id']
cfg_name = module.params['name']
scaling_group = module.params['group_id']
changed = False
current = None
all_cfgs = []
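    # Look up an existing scaling configuration by id and/or name first; the create,
    # delete and returned-facts branches below all key off whether one was found.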
if cfg_id or cfg_name:
cfgs = ess.describe_configurations(scaling_group_id=scaling_group, scaling_configuration_ids=[cfg_id],
scaling_configuration_names=[cfg_name])
if cfgs:
if len(cfgs) > 1:
for cfg in cfgs:
all_cfgs.append(cfg.id)
module.fail_json(msg="There are several scaling configurations in our record based on name {0}: {1}. "
"Please specified one using 'id' and try again.".format(cfg_name, all_cfgs))
current = cfgs[0]
if state == 'present':
if current is None:
try:
data_disks = module.params['data_disks']
if not isinstance(data_disks, list):
module.fail_json(msg="Filed 'data_disks' should be a list, aborting.")
if not isinstance(module.params['tags'], dict):
module.fail_json(msg="Filed 'tags' should be a dict, aborting.")
current = ess.create_configuration(scaling_group_id=scaling_group,
image_id=module.params['image_id'],
instance_type=module.params['instance_type'],
security_group_id=module.params['security_group_id'],
name=cfg_name,
internet_charge_type=module.params['internet_charge_type'],
max_bandwidth_in=module.params['max_bandwidth_in'],
max_bandwidth_out=module.params['max_bandwidth_out'],
system_disk_category=module.params['system_disk_category'],
system_disk_size=module.params['system_disk_size'],
data_disks=data_disks,
tags=module.params['tags'],
key_pair_name=module.params['key_name'],
ram_role_name=module.params['ram_role_name'],
user_data=module.params['user_data'])
changed = True
except Exception as e:
module.fail_json(msg="Create scaling configuration got an error: {0}".format(e))
module.exit_json(changed=changed, id=current.id, name=current.name, group_id=current.group_id, configuration=get_details(current))
if current is None:
if cfg_id or cfg_name:
module.fail_json(msg="There are no scaling configuration in our record based on id {0} or name {1}. "
"Please check it and try again.".format(cfg_id, cfg_name))
module.fail_json(msg='Please specify a scaling configuration that you want to terminate by parameters id or name, aborting')
try:
module.exit_json(changed=current.terminate())
except Exception as e:
module.fail_json(msg='Delete scaling configuration {0} got an error: {1}'.format(current.id, e))
if __name__ == '__main__':
main()
| python | 14,315 |
from django.db import models
from django.contrib.auth.base_user import AbstractBaseUser
from django.contrib.auth.models import Group, PermissionsMixin, UserManager
from django.contrib.auth.validators import UnicodeUsernameValidator
from django.core.mail import send_mail
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from ..managers import TenantGroupManager
from .mixins import TenantSpecificModel
from .tenant import Tenant
class TenantGroup(TenantSpecificModel):
"""
This class has a OneToOne relation on django.contrib.auth.models.Group. It
gives us a ForeignKey on our Tenant object, allowing us to have users that
are in the Django Group only for specific tenants.
"""
objects = TenantGroupManager()
group = models.OneToOneField(Group, on_delete=models.CASCADE, related_name="tenant_group")
def __str__(self):
return f'{self.name}:{self.tenant.public_domain}'
def natural_key(self):
return (self.name, self.tenant.site.domain,)
class Meta:
verbose_name = _('tenant group')
verbose_name_plural = _('tenant groups')
class UserTenantProfile(models.Model):
tenant_groups = models.ManyToManyField(
TenantGroup,
related_name='user_tenant_profiles'
)
tenants = models.ManyToManyField(
Tenant,
through='multitenancy.TenantMembership'
)
class Meta:
verbose_name = _('user tenant profile')
verbose_name_plural = _('user tenant profiles')
class TenantMembership(models.Model):
tenant = models.ForeignKey(
Tenant,
on_delete=models.CASCADE,
related_name='tenant_membership'
)
user = models.ForeignKey(
UserTenantProfile,
on_delete=models.CASCADE,
related_name="tenant_membership"
)
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
class Meta:
verbose_name = _('tenant membership')
verbose_name_plural = _('tenant memberships')
unique_together = ('tenant', 'user',)
class AbstractTenantUser(AbstractBaseUser, PermissionsMixin):
"""
Our own AbstractUser model. We need this primarily to properly handle the `is_active` and `is_staff` fields, which
are used often by auth backends and by the Django admin.
"""
username_validator = UnicodeUsernameValidator()
tenant_profile = models.OneToOneField(UserTenantProfile, on_delete=models.CASCADE, related_name="user")
username = models.CharField(
_('username'),
max_length=150,
unique=True,
help_text=_('Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.'),
validators=[username_validator],
error_messages={
'unique': _("A user with that username already exists."),
},
)
first_name = models.CharField(_('first name'), max_length=150, blank=True)
last_name = models.CharField(_('last name'), max_length=150, blank=True)
email = models.EmailField(_('email address'), blank=True)
_is_staff = models.BooleanField(
_('staff status'),
default=False,
help_text=_('Designates whether the user can log into this admin site.'),
)
_is_active = models.BooleanField(
_('active'),
default=True,
help_text=_(
'Designates whether this user should be treated as active. '
'Unselect this instead of deleting accounts.'
),
)
date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
objects = UserManager()
@property
def is_active(self):
is_active = self._is_active or self._is_active is None
return is_active and self.is_active_for_tenant()
@is_active.setter
def is_active(self, value):
"""
We're just setting `self._is_active` here. To manage a user for a particular `Tenant`, use `User.activate()`
and `User.deactivate()`.
"""
self._is_active = value
# is_staff
@property
def is_staff(self):
return self._is_staff and self.is_staff_for_tenant()
@is_staff.setter
def is_staff(self, value):
"""
We're just setting `self._is_staff` here. To manage a user for a particular `Tenant`, use `User.make_staff()`
and `User.remove_staff()`.
"""
self._is_staff = value
def add_to_tenant(self, tenant=None):
if not tenant:
tenant = Tenant.objects.get_current()
try:
self.tenants.get(tenant=tenant)
except TenantMembership.DoesNotExist:
membership = TenantMembership(
tenant=tenant,
user=self.tenant_profile
)
membership.save()
@property
def is_super_admin(self):
"""
A user is a super admin if they are in one of the Django Groups
associated with our TenantGroups.
"""
group_ids = TenantGroup.objects.django_groups().values_list('id', flat=True)
return self.groups.filter(id__in=group_ids).exists()
def is_member(self, tenant=None):
# First, see if this user is a super admin. Super admins are people
        # who are in any of the root Django Groups for our TenantGroups
if self.is_super_admin:
return True
if not tenant:
tenant = Tenant.objects.get_current()
try:
TenantMembership.objects.get(
tenant=tenant,
user=self.tenant_profile
)
except TenantMembership.DoesNotExist:
return False
return True
def is_staff_for_tenant(self, tenant=None):
if self.is_super_admin and self._is_staff:
return True
if not tenant:
tenant = Tenant.objects.get_current()
membership = TenantMembership.objects.get(
tenant=tenant,
user=self.tenant_profile
)
return membership.is_staff
def make_staff(self, tenant=None):
"""
Set is_staff to True for this user on Tenant.
"""
if not tenant:
tenant = Tenant.objects.get_current()
membership = TenantMembership.objects.get(
tenant=tenant,
user=self.tenant_profile
)
membership.is_staff = True
membership.save()
def remove_staff(self, tenant=None):
"""
        Set is_staff to False for this user on Tenant.
"""
if not tenant:
tenant = Tenant.objects.get_current()
membership = TenantMembership.objects.get(
tenant=tenant,
user=self.tenant_profile
)
membership.is_staff = False
membership.save()
# is_active
def is_active_for_tenant(self, tenant=None):
if self.is_super_admin and self._is_active:
return True
if not tenant:
tenant = Tenant.objects.get_current()
membership = TenantMembership.objects.get(
tenant=tenant,
user=self.tenant_profile
)
return membership.is_active
def activate(self, tenant=None):
"""
Set is_active to True for this user on Tenant.
"""
if not tenant:
tenant = Tenant.objects.get_current()
membership = TenantMembership.objects.get(
tenant=tenant,
user=self.tenant_profile
)
membership.is_active = True
membership.save()
def deactivate(self, tenant=None):
"""
Set is_active to False for this user on Tenant.
"""
if not tenant:
tenant = Tenant.objects.get_current()
        membership = TenantMembership.objects.get(tenant=tenant, user=self.tenant_profile)
membership.is_active = False
membership.save()
EMAIL_FIELD = 'email'
USERNAME_FIELD = 'username'
REQUIRED_FIELDS = ['email']
class Meta:
verbose_name = _('user')
verbose_name_plural = _('users')
abstract = True
def clean(self):
super().clean()
self.email = self.__class__.objects.normalize_email(self.email)
def get_full_name(self):
"""
Return the first_name plus the last_name, with a space in between.
"""
full_name = '%s %s' % (self.first_name, self.last_name)
return full_name.strip()
def get_short_name(self):
"""Return the short name for the user."""
return self.first_name
def email_user(self, subject, message, from_email=None, **kwargs):
"""Send an email to this user."""
send_mail(subject, message, from_email, [self.email], **kwargs)
| python | 8,731 |
import pdf_to_json as p2j
import json
url = "file:data/multilingual/Latn.TWI/Serif_16/udhr_Latn.TWI_Serif_16.pdf"
lConverter = p2j.pdf_to_json.pdf_to_json_converter()
lConverter.mImageHashOnly = True
lDict = lConverter.convert(url)
print(json.dumps(lDict, indent=4, ensure_ascii=False, sort_keys=True))
| python | 305 |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: api.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='api.proto',
package='api',
syntax='proto3',
serialized_pb=_b('\n\tapi.proto\x12\x03\x61pi\"7\n\rFeasibleSpace\x12\x0b\n\x03max\x18\x01 \x01(\t\x12\x0b\n\x03min\x18\x02 \x01(\t\x12\x0c\n\x04list\x18\x03 \x03(\t\"q\n\x0fParameterConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12*\n\x0eparameter_type\x18\x02 \x01(\x0e\x32\x12.api.ParameterType\x12$\n\x08\x66\x65\x61sible\x18\x03 \x01(\x0b\x32\x12.api.FeasibleSpace\"T\n\tParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12*\n\x0eparameter_type\x18\x02 \x01(\x0e\x32\x12.api.ParameterType\x12\r\n\x05value\x18\x03 \x01(\t\"l\n\rMetricsLogSet\x12\x11\n\tworker_id\x18\x01 \x01(\t\x12%\n\x0cmetrics_logs\x18\x02 \x03(\x0b\x32\x0f.api.MetricsLog\x12!\n\rworker_status\x18\x03 \x01(\x0e\x32\n.api.State\"&\n\x07Metrics\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"*\n\nMetricsLog\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06values\x18\x02 \x03(\t\"2\n\x13SuggestionParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"5\n\x16\x45\x61rlyStoppingParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"\"\n\x03Tag\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"&\n\tMountConf\x12\x0b\n\x03pvc\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"M\n\rStudyOverview\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05owner\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\"\x83\x01\n\x05Trial\x12\x10\n\x08trial_id\x18\x01 \x01(\t\x12\x10\n\x08study_id\x18\x02 \x01(\t\x12%\n\rparameter_set\x18\x03 \x03(\x0b\x32\x0e.api.Parameter\x12\x17\n\x0fobjective_value\x18\x04 \x01(\t\x12\x16\n\x04tags\x18\x05 \x03(\x0b\x32\x08.api.Tag\"\x82\x01\n\x0cWorkerConfig\x12\r\n\x05image\x18\x01 \x01(\t\x12\x0f\n\x07\x63ommand\x18\x02 \x03(\t\x12\x0b\n\x03gpu\x18\x03 \x01(\x05\x12\x11\n\tscheduler\x18\x04 \x01(\t\x12\x1d\n\x05mount\x18\x05 \x01(\x0b\x32\x0e.api.MountConf\x12\x13\n\x0bpull_secret\x18\x06 \x01(\t\"\xa7\x01\n\x06Worker\x12\x11\n\tworker_id\x18\x01 \x01(\t\x12\x10\n\x08study_id\x18\x02 \x01(\t\x12\x10\n\x08trial_id\x18\x03 \x01(\t\x12\x0f\n\x07runtime\x18\x04 \x01(\t\x12\x1a\n\x06status\x18\x05 \x01(\x0e\x32\n.api.State\x12!\n\x06\x63onfig\x18\x06 \x01(\x0b\x32\x11.api.WorkerConfig\x12\x16\n\x04tags\x18\x07 \x03(\x0b\x32\x08.api.Tag\"\xd3\x02\n\x0bStudyConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05owner\x18\x02 \x01(\t\x12\x30\n\x11optimization_type\x18\x03 \x01(\x0e\x32\x15.api.OptimizationType\x12\x19\n\x11optimization_goal\x18\x04 \x01(\x01\x12<\n\x11parameter_configs\x18\x05 \x01(\x0b\x32!.api.StudyConfig.ParameterConfigs\x12\x1a\n\x12\x61\x63\x63\x65ss_permissions\x18\x06 \x03(\t\x12\x16\n\x04tags\x18\x07 \x03(\x0b\x32\x08.api.Tag\x12\x1c\n\x14objective_value_name\x18\x08 \x01(\t\x12\x0f\n\x07metrics\x18\t \x03(\t\x1a\x39\n\x10ParameterConfigs\x12%\n\x07\x63onfigs\x18\x01 \x03(\x0b\x32\x14.api.ParameterConfig\"<\n\x12\x43reateStudyRequest\x12&\n\x0cstudy_config\x18\x01 \x01(\x0b\x32\x10.api.StudyConfig\"$\n\x10\x43reateStudyReply\x12\x10\n\x08study_id\x18\x01 \x01(\t\"$\n\x10StopStudyRequest\x12\x10\n\x08study_id\x18\x01 \x01(\t\"\x10\n\x0eStopStudyReply\"#\n\x0fGetStudyRequest\x12\x10\n\x08study_id\x18\x01 \x01(\t\"7\n\rGetStudyReply\x12&\n\x0cstudy_config\x18\x01 \x01(\x0b\x32\x10.api.StudyConfig\"\x15\n\x13GetStudyListRequest\"@\n\x11GetStudyListReply\x12+\n\x0fstudy_overviews\x18\x01 \x03(\x0b\x32\x12.api.StudyOverview\"/\n\x12\x43reateTrialRequest\x12\x19\n\x05trial\x18\x01 \x01(\x0b\x32\n.api.Trial\"$\n\x10\x43reateTrialReply\x12\x10\n\x08trial_id\x18\x01 
\x01(\t\"$\n\x10GetTrialsRequest\x12\x10\n\x08study_id\x18\x01 \x01(\t\",\n\x0eGetTrialsReply\x12\x1a\n\x06trials\x18\x01 \x03(\x0b\x32\n.api.Trial\"p\n\x0fRunTrialRequest\x12\x10\n\x08study_id\x18\x01 \x01(\t\x12\x10\n\x08trial_id\x18\x02 \x01(\t\x12\x0f\n\x07runtime\x18\x03 \x01(\t\x12(\n\rworker_config\x18\x04 \x01(\x0b\x32\x11.api.WorkerConfig\"\"\n\rRunTrialReply\x12\x11\n\tworker_id\x18\x01 \x01(\t\"O\n\x12StopWorkersRequest\x12\x10\n\x08study_id\x18\x01 \x01(\t\x12\x12\n\nworker_ids\x18\x02 \x03(\t\x12\x13\n\x0bis_complete\x18\x03 \x01(\x08\"\x12\n\x10StopWorkersReply\"J\n\x11GetWorkersRequest\x12\x10\n\x08study_id\x18\x01 \x01(\t\x12\x10\n\x08trial_id\x18\x02 \x01(\t\x12\x11\n\tworker_id\x18\x03 \x01(\t\"/\n\x0fGetWorkersReply\x12\x1c\n\x07workers\x18\x01 \x03(\x0b\x32\x0b.api.Worker\"\x89\x01\n\x15GetSuggestionsRequest\x12\x10\n\x08study_id\x18\x01 \x01(\t\x12\x1c\n\x14suggestion_algorithm\x18\x02 \x01(\t\x12\x16\n\x0erequest_number\x18\x03 \x01(\x05\x12\x16\n\x0elog_worker_ids\x18\x04 \x03(\t\x12\x10\n\x08param_id\x18\x05 \x01(\t\"1\n\x13GetSuggestionsReply\x12\x1a\n\x06trials\x18\x01 \x03(\x0b\x32\n.api.Trial\"c\n\x1bGetShouldStopWorkersRequest\x12\x10\n\x08study_id\x18\x01 \x01(\t\x12 \n\x18\x65\x61rly_stopping_algorithm\x18\x02 \x01(\t\x12\x10\n\x08param_id\x18\x05 \x01(\t\";\n\x19GetShouldStopWorkersReply\x12\x1e\n\x16should_stop_worker_ids\x18\x01 \x03(\t\"P\n\x11GetMetricsRequest\x12\x10\n\x08study_id\x18\x01 \x01(\t\x12\x12\n\nworker_ids\x18\x02 \x03(\t\x12\x15\n\rmetrics_names\x18\x03 \x03(\t\"?\n\x0fGetMetricsReply\x12,\n\x10metrics_log_sets\x18\x01 \x03(\x0b\x32\x12.api.MetricsLogSet\"\x89\x01\n\tModelInfo\x12\x12\n\nstudy_name\x18\x01 \x01(\t\x12\x11\n\tworker_id\x18\x02 \x01(\t\x12\"\n\nparameters\x18\x03 \x03(\x0b\x32\x0e.api.Parameter\x12\x1d\n\x07metrics\x18\x04 \x03(\x0b\x32\x0c.api.Metrics\x12\x12\n\nmodel_path\x18\x05 \x01(\t\")\n\x0b\x44\x61taSetInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"J\n\x10SaveStudyRequest\x12\x12\n\nstudy_name\x18\x01 \x01(\t\x12\r\n\x05owner\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\"\x10\n\x0eSaveStudyReply\"k\n\x10SaveModelRequest\x12\x1d\n\x05model\x18\x01 \x01(\x0b\x32\x0e.api.ModelInfo\x12\"\n\x08\x64\x61ta_set\x18\x02 \x01(\x0b\x32\x10.api.DataSetInfo\x12\x14\n\x0ctensor_board\x18\x03 \x01(\x08\"\x10\n\x0eSaveModelReply\"\x18\n\x16GetSavedStudiesRequest\";\n\x14GetSavedStudiesReply\x12#\n\x07studies\x18\x01 \x03(\x0b\x32\x12.api.StudyOverview\"+\n\x15GetSavedModelsRequest\x12\x12\n\nstudy_name\x18\x01 \x01(\t\"5\n\x13GetSavedModelsReply\x12\x1e\n\x06models\x18\x01 \x03(\x0b\x32\x0e.api.ModelInfo\"=\n\x14GetSavedModelRequest\x12\x12\n\nstudy_name\x18\x01 \x01(\t\x12\x11\n\tworker_id\x18\x02 \x01(\t\"3\n\x12GetSavedModelReply\x12\x1d\n\x05model\x18\x01 \x01(\x0b\x32\x0e.api.ModelInfo\"\x9b\x01\n\x1eSetSuggestionParametersRequest\x12\x10\n\x08study_id\x18\x01 \x01(\t\x12\x1c\n\x14suggestion_algorithm\x18\x02 \x01(\t\x12\x10\n\x08param_id\x18\x03 \x01(\t\x12\x37\n\x15suggestion_parameters\x18\x04 \x03(\x0b\x32\x18.api.SuggestionParameter\"0\n\x1cSetSuggestionParametersReply\x12\x10\n\x08param_id\x18\x01 \x01(\t\"2\n\x1eGetSuggestionParametersRequest\x12\x10\n\x08param_id\x18\x01 \x01(\t\"W\n\x1cGetSuggestionParametersReply\x12\x37\n\x15suggestion_parameters\x18\x01 \x03(\x0b\x32\x18.api.SuggestionParameter\"5\n!GetSuggestionParameterListRequest\x12\x10\n\x08study_id\x18\x01 \x01(\t\"\x81\x01\n\x16SuggestionParameterSet\x12\x10\n\x08param_id\x18\x01 
\x01(\t\x12\x1c\n\x14suggestion_algorithm\x18\x02 \x01(\t\x12\x37\n\x15suggestion_parameters\x18\x03 \x03(\x0b\x32\x18.api.SuggestionParameter\"a\n\x1fGetSuggestionParameterListReply\x12>\n\x19suggestion_parameter_sets\x18\x01 \x03(\x0b\x32\x1b.api.SuggestionParameterSet\")\n\x15StopSuggestionRequest\x12\x10\n\x08study_id\x18\x01 \x01(\t\"\x15\n\x13StopSuggestionReply\"\xa9\x01\n!SetEarlyStoppingParametersRequest\x12\x10\n\x08study_id\x18\x01 \x01(\t\x12 \n\x18\x65\x61rly_stopping_algorithm\x18\x02 \x01(\t\x12\x10\n\x08param_id\x18\x03 \x01(\t\x12>\n\x19\x65\x61rly_stopping_parameters\x18\x04 \x03(\x0b\x32\x1b.api.EarlyStoppingParameter\"3\n\x1fSetEarlyStoppingParametersReply\x12\x10\n\x08param_id\x18\x01 \x01(\t\"5\n!GetEarlyStoppingParametersRequest\x12\x10\n\x08param_id\x18\x01 \x01(\t\"a\n\x1fGetEarlyStoppingParametersReply\x12>\n\x19\x65\x61rly_stopping_parameters\x18\x01 \x03(\x0b\x32\x1b.api.EarlyStoppingParameter\"8\n$GetEarlyStoppingParameterListRequest\x12\x10\n\x08study_id\x18\x01 \x01(\t\"\x8f\x01\n\x19\x45\x61rlyStoppingParameterSet\x12\x10\n\x08param_id\x18\x01 \x01(\t\x12 \n\x18\x65\x61rly_stopping_algorithm\x18\x02 \x01(\t\x12>\n\x19\x65\x61rly_stopping_parameters\x18\x03 \x03(\x0b\x32\x1b.api.EarlyStoppingParameter\"k\n\"GetEarlyStoppingParameterListReply\x12\x45\n\x1d\x65\x61rly_stopping_parameter_sets\x18\x01 \x03(\x0b\x32\x1e.api.EarlyStoppingParameterSet*U\n\rParameterType\x12\x10\n\x0cUNKNOWN_TYPE\x10\x00\x12\n\n\x06\x44OUBLE\x10\x01\x12\x07\n\x03INT\x10\x02\x12\x0c\n\x08\x44ISCRETE\x10\x03\x12\x0f\n\x0b\x43\x41TEGORICAL\x10\x04*H\n\x10OptimizationType\x12\x18\n\x14UNKNOWN_OPTIMIZATION\x10\x00\x12\x0c\n\x08MINIMIZE\x10\x01\x12\x0c\n\x08MAXIMIZE\x10\x02*G\n\x05State\x12\x0b\n\x07PENDING\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tCOMPLETED\x10\x02\x12\n\n\x06KILLED\x10\x03\x12\t\n\x05\x45RROR\x10x2\x84\r\n\x07Manager\x12=\n\x0b\x43reateStudy\x12\x17.api.CreateStudyRequest\x1a\x15.api.CreateStudyReply\x12\x37\n\tStopStudy\x12\x15.api.StopStudyRequest\x1a\x13.api.StopStudyReply\x12\x34\n\x08GetStudy\x12\x14.api.GetStudyRequest\x1a\x12.api.GetStudyReply\x12@\n\x0cGetStudyList\x12\x18.api.GetStudyListRequest\x1a\x16.api.GetStudyListReply\x12=\n\x0b\x43reateTrial\x12\x17.api.CreateTrialRequest\x1a\x15.api.CreateTrialReply\x12\x37\n\tGetTrials\x12\x15.api.GetTrialsRequest\x1a\x13.api.GetTrialsReply\x12\x34\n\x08RunTrial\x12\x14.api.RunTrialRequest\x1a\x12.api.RunTrialReply\x12=\n\x0bStopWorkers\x12\x17.api.StopWorkersRequest\x1a\x15.api.StopWorkersReply\x12:\n\nGetWorkers\x12\x16.api.GetWorkersRequest\x1a\x14.api.GetWorkersReply\x12\x46\n\x0eGetSuggestions\x12\x1a.api.GetSuggestionsRequest\x1a\x18.api.GetSuggestionsReply\x12X\n\x14GetShouldStopWorkers\x12 
.api.GetShouldStopWorkersRequest\x1a\x1e.api.GetShouldStopWorkersReply\x12:\n\nGetMetrics\x12\x16.api.GetMetricsRequest\x1a\x14.api.GetMetricsReply\x12\x61\n\x17SetSuggestionParameters\x12#.api.SetSuggestionParametersRequest\x1a!.api.SetSuggestionParametersReply\x12\x61\n\x17GetSuggestionParameters\x12#.api.GetSuggestionParametersRequest\x1a!.api.GetSuggestionParametersReply\x12j\n\x1aGetSuggestionParameterList\x12&.api.GetSuggestionParameterListRequest\x1a$.api.GetSuggestionParameterListReply\x12j\n\x1aSetEarlyStoppingParameters\x12&.api.SetEarlyStoppingParametersRequest\x1a$.api.SetEarlyStoppingParametersReply\x12j\n\x1aGetEarlyStoppingParameters\x12&.api.GetEarlyStoppingParametersRequest\x1a$.api.GetEarlyStoppingParametersReply\x12s\n\x1dGetEarlyStoppingParameterList\x12).api.GetEarlyStoppingParameterListRequest\x1a\'.api.GetEarlyStoppingParameterListReply\x12\x37\n\tSaveStudy\x12\x15.api.SaveStudyRequest\x1a\x13.api.SaveStudyReply\x12\x37\n\tSaveModel\x12\x15.api.SaveModelRequest\x1a\x13.api.SaveModelReply\x12I\n\x0fGetSavedStudies\x12\x1b.api.GetSavedStudiesRequest\x1a\x19.api.GetSavedStudiesReply\x12\x46\n\x0eGetSavedModels\x12\x1a.api.GetSavedModelsRequest\x1a\x18.api.GetSavedModelsReply2T\n\nSuggestion\x12\x46\n\x0eGetSuggestions\x12\x1a.api.GetSuggestionsRequest\x1a\x18.api.GetSuggestionsReply2i\n\rEarlyStopping\x12X\n\x14GetShouldStopWorkers\x12 .api.GetShouldStopWorkersRequest\x1a\x1e.api.GetShouldStopWorkersReplyb\x06proto3')
)
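# The descriptors below are unpacked from the serialized file descriptor
# above: first the three top-level enums (ParameterType, OptimizationType,
# State) together with their module-level value constants, then one
# Descriptor per message declared in api.proto.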
_PARAMETERTYPE = _descriptor.EnumDescriptor(
name='ParameterType',
full_name='api.ParameterType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_TYPE', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOUBLE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INT', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DISCRETE', index=3, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CATEGORICAL', index=4, number=4,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4997,
serialized_end=5082,
)
_sym_db.RegisterEnumDescriptor(_PARAMETERTYPE)
ParameterType = enum_type_wrapper.EnumTypeWrapper(_PARAMETERTYPE)
_OPTIMIZATIONTYPE = _descriptor.EnumDescriptor(
name='OptimizationType',
full_name='api.OptimizationType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_OPTIMIZATION', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MINIMIZE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MAXIMIZE', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=5084,
serialized_end=5156,
)
_sym_db.RegisterEnumDescriptor(_OPTIMIZATIONTYPE)
OptimizationType = enum_type_wrapper.EnumTypeWrapper(_OPTIMIZATIONTYPE)
_STATE = _descriptor.EnumDescriptor(
name='State',
full_name='api.State',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='PENDING', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RUNNING', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='COMPLETED', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='KILLED', index=3, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ERROR', index=4, number=120,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=5158,
serialized_end=5229,
)
_sym_db.RegisterEnumDescriptor(_STATE)
State = enum_type_wrapper.EnumTypeWrapper(_STATE)
UNKNOWN_TYPE = 0
DOUBLE = 1
INT = 2
DISCRETE = 3
CATEGORICAL = 4
UNKNOWN_OPTIMIZATION = 0
MINIMIZE = 1
MAXIMIZE = 2
PENDING = 0
RUNNING = 1
COMPLETED = 2
KILLED = 3
ERROR = 120
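# Illustrative sketch (assumes this generated module is importable as
# api_pb2): the EnumTypeWrapper objects registered above and the
# module-level constants expose the same numeric values, e.g.
#
#   import api_pb2
#   api_pb2.OptimizationType.Name(api_pb2.MAXIMIZE)  # -> 'MAXIMIZE'
#   api_pb2.ParameterType.Value('CATEGORICAL')       # -> 4 (== api_pb2.CATEGORICAL)
#   api_pb2.State.Name(120)                          # -> 'ERROR'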
_FEASIBLESPACE = _descriptor.Descriptor(
name='FeasibleSpace',
full_name='api.FeasibleSpace',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='max', full_name='api.FeasibleSpace.max', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='min', full_name='api.FeasibleSpace.min', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='list', full_name='api.FeasibleSpace.list', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18,
serialized_end=73,
)
_PARAMETERCONFIG = _descriptor.Descriptor(
name='ParameterConfig',
full_name='api.ParameterConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='api.ParameterConfig.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='parameter_type', full_name='api.ParameterConfig.parameter_type', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='feasible', full_name='api.ParameterConfig.feasible', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=75,
serialized_end=188,
)
_PARAMETER = _descriptor.Descriptor(
name='Parameter',
full_name='api.Parameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='api.Parameter.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='parameter_type', full_name='api.Parameter.parameter_type', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='api.Parameter.value', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=190,
serialized_end=274,
)
_METRICSLOGSET = _descriptor.Descriptor(
name='MetricsLogSet',
full_name='api.MetricsLogSet',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='worker_id', full_name='api.MetricsLogSet.worker_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='metrics_logs', full_name='api.MetricsLogSet.metrics_logs', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='worker_status', full_name='api.MetricsLogSet.worker_status', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=276,
serialized_end=384,
)
_METRICS = _descriptor.Descriptor(
name='Metrics',
full_name='api.Metrics',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='api.Metrics.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='api.Metrics.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=386,
serialized_end=424,
)
_METRICSLOG = _descriptor.Descriptor(
name='MetricsLog',
full_name='api.MetricsLog',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='api.MetricsLog.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='values', full_name='api.MetricsLog.values', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=426,
serialized_end=468,
)
_SUGGESTIONPARAMETER = _descriptor.Descriptor(
name='SuggestionParameter',
full_name='api.SuggestionParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='api.SuggestionParameter.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='api.SuggestionParameter.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=470,
serialized_end=520,
)
_EARLYSTOPPINGPARAMETER = _descriptor.Descriptor(
name='EarlyStoppingParameter',
full_name='api.EarlyStoppingParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='api.EarlyStoppingParameter.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='api.EarlyStoppingParameter.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=522,
serialized_end=575,
)
_TAG = _descriptor.Descriptor(
name='Tag',
full_name='api.Tag',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='api.Tag.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='api.Tag.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=577,
serialized_end=611,
)
_MOUNTCONF = _descriptor.Descriptor(
name='MountConf',
full_name='api.MountConf',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pvc', full_name='api.MountConf.pvc', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='path', full_name='api.MountConf.path', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=613,
serialized_end=651,
)
_STUDYOVERVIEW = _descriptor.Descriptor(
name='StudyOverview',
full_name='api.StudyOverview',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='api.StudyOverview.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='owner', full_name='api.StudyOverview.owner', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='id', full_name='api.StudyOverview.id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='description', full_name='api.StudyOverview.description', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=653,
serialized_end=730,
)
_TRIAL = _descriptor.Descriptor(
name='Trial',
full_name='api.Trial',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='trial_id', full_name='api.Trial.trial_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='study_id', full_name='api.Trial.study_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='parameter_set', full_name='api.Trial.parameter_set', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='objective_value', full_name='api.Trial.objective_value', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tags', full_name='api.Trial.tags', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=733,
serialized_end=864,
)
_WORKERCONFIG = _descriptor.Descriptor(
name='WorkerConfig',
full_name='api.WorkerConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='image', full_name='api.WorkerConfig.image', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='command', full_name='api.WorkerConfig.command', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='gpu', full_name='api.WorkerConfig.gpu', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='scheduler', full_name='api.WorkerConfig.scheduler', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mount', full_name='api.WorkerConfig.mount', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pull_secret', full_name='api.WorkerConfig.pull_secret', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=867,
serialized_end=997,
)
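# Illustrative sketch (image, command and volume names are made up): a
# WorkerConfig message describes the container that runs one trial, so with
# the message classes generated from these descriptors it could be filled
# in as
#
#   worker_config = api_pb2.WorkerConfig(
#       image='docker.io/example/trainer:latest',
#       command=['python', 'train.py', '--epochs', '10'],
#       gpu=1,
#       mount=api_pb2.MountConf(pvc='training-data', path='/data'),
#   )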
_WORKER = _descriptor.Descriptor(
name='Worker',
full_name='api.Worker',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='worker_id', full_name='api.Worker.worker_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='study_id', full_name='api.Worker.study_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='trial_id', full_name='api.Worker.trial_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='runtime', full_name='api.Worker.runtime', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='status', full_name='api.Worker.status', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='config', full_name='api.Worker.config', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tags', full_name='api.Worker.tags', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1000,
serialized_end=1167,
)
_STUDYCONFIG_PARAMETERCONFIGS = _descriptor.Descriptor(
name='ParameterConfigs',
full_name='api.StudyConfig.ParameterConfigs',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='configs', full_name='api.StudyConfig.ParameterConfigs.configs', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1452,
serialized_end=1509,
)
_STUDYCONFIG = _descriptor.Descriptor(
name='StudyConfig',
full_name='api.StudyConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='api.StudyConfig.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='owner', full_name='api.StudyConfig.owner', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='optimization_type', full_name='api.StudyConfig.optimization_type', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='optimization_goal', full_name='api.StudyConfig.optimization_goal', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='parameter_configs', full_name='api.StudyConfig.parameter_configs', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='access_permissions', full_name='api.StudyConfig.access_permissions', index=5,
number=6, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tags', full_name='api.StudyConfig.tags', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='objective_value_name', full_name='api.StudyConfig.objective_value_name', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='metrics', full_name='api.StudyConfig.metrics', index=8,
number=9, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_STUDYCONFIG_PARAMETERCONFIGS, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1170,
serialized_end=1509,
)
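# Illustrative sketch (study name, owner, metric and parameter values are
# placeholders): a StudyConfig for a maximization study, including one
# tunable parameter in its nested ParameterConfigs message, could be built
# as
#
#   study = api_pb2.StudyConfig(
#       name='mnist-random-search',
#       owner='alice',
#       optimization_type=api_pb2.MAXIMIZE,
#       objective_value_name='accuracy',
#   )
#   study.parameter_configs.configs.add(
#       name='--lr',
#       parameter_type=api_pb2.DOUBLE,
#       feasible=api_pb2.FeasibleSpace(min='0.001', max='0.1'),
#   )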
_CREATESTUDYREQUEST = _descriptor.Descriptor(
name='CreateStudyRequest',
full_name='api.CreateStudyRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_config', full_name='api.CreateStudyRequest.study_config', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1511,
serialized_end=1571,
)
_CREATESTUDYREPLY = _descriptor.Descriptor(
name='CreateStudyReply',
full_name='api.CreateStudyReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_id', full_name='api.CreateStudyReply.study_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1573,
serialized_end=1609,
)
_STOPSTUDYREQUEST = _descriptor.Descriptor(
name='StopStudyRequest',
full_name='api.StopStudyRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_id', full_name='api.StopStudyRequest.study_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1611,
serialized_end=1647,
)
_STOPSTUDYREPLY = _descriptor.Descriptor(
name='StopStudyReply',
full_name='api.StopStudyReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1649,
serialized_end=1665,
)
_GETSTUDYREQUEST = _descriptor.Descriptor(
name='GetStudyRequest',
full_name='api.GetStudyRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_id', full_name='api.GetStudyRequest.study_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1667,
serialized_end=1702,
)
_GETSTUDYREPLY = _descriptor.Descriptor(
name='GetStudyReply',
full_name='api.GetStudyReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_config', full_name='api.GetStudyReply.study_config', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1704,
serialized_end=1759,
)
_GETSTUDYLISTREQUEST = _descriptor.Descriptor(
name='GetStudyListRequest',
full_name='api.GetStudyListRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1761,
serialized_end=1782,
)
_GETSTUDYLISTREPLY = _descriptor.Descriptor(
name='GetStudyListReply',
full_name='api.GetStudyListReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_overviews', full_name='api.GetStudyListReply.study_overviews', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1784,
serialized_end=1848,
)
_CREATETRIALREQUEST = _descriptor.Descriptor(
name='CreateTrialRequest',
full_name='api.CreateTrialRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='trial', full_name='api.CreateTrialRequest.trial', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1850,
serialized_end=1897,
)
_CREATETRIALREPLY = _descriptor.Descriptor(
name='CreateTrialReply',
full_name='api.CreateTrialReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='trial_id', full_name='api.CreateTrialReply.trial_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1899,
serialized_end=1935,
)
_GETTRIALSREQUEST = _descriptor.Descriptor(
name='GetTrialsRequest',
full_name='api.GetTrialsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_id', full_name='api.GetTrialsRequest.study_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1937,
serialized_end=1973,
)
_GETTRIALSREPLY = _descriptor.Descriptor(
name='GetTrialsReply',
full_name='api.GetTrialsReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='trials', full_name='api.GetTrialsReply.trials', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1975,
serialized_end=2019,
)
_RUNTRIALREQUEST = _descriptor.Descriptor(
name='RunTrialRequest',
full_name='api.RunTrialRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_id', full_name='api.RunTrialRequest.study_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='trial_id', full_name='api.RunTrialRequest.trial_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='runtime', full_name='api.RunTrialRequest.runtime', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='worker_config', full_name='api.RunTrialRequest.worker_config', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2021,
serialized_end=2133,
)
_RUNTRIALREPLY = _descriptor.Descriptor(
name='RunTrialReply',
full_name='api.RunTrialReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='worker_id', full_name='api.RunTrialReply.worker_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2135,
serialized_end=2169,
)
_STOPWORKERSREQUEST = _descriptor.Descriptor(
name='StopWorkersRequest',
full_name='api.StopWorkersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_id', full_name='api.StopWorkersRequest.study_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='worker_ids', full_name='api.StopWorkersRequest.worker_ids', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_complete', full_name='api.StopWorkersRequest.is_complete', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2171,
serialized_end=2250,
)
_STOPWORKERSREPLY = _descriptor.Descriptor(
name='StopWorkersReply',
full_name='api.StopWorkersReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2252,
serialized_end=2270,
)
_GETWORKERSREQUEST = _descriptor.Descriptor(
name='GetWorkersRequest',
full_name='api.GetWorkersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_id', full_name='api.GetWorkersRequest.study_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='trial_id', full_name='api.GetWorkersRequest.trial_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='worker_id', full_name='api.GetWorkersRequest.worker_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2272,
serialized_end=2346,
)
_GETWORKERSREPLY = _descriptor.Descriptor(
name='GetWorkersReply',
full_name='api.GetWorkersReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='workers', full_name='api.GetWorkersReply.workers', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2348,
serialized_end=2395,
)
_GETSUGGESTIONSREQUEST = _descriptor.Descriptor(
name='GetSuggestionsRequest',
full_name='api.GetSuggestionsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_id', full_name='api.GetSuggestionsRequest.study_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='suggestion_algorithm', full_name='api.GetSuggestionsRequest.suggestion_algorithm', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='request_number', full_name='api.GetSuggestionsRequest.request_number', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='log_worker_ids', full_name='api.GetSuggestionsRequest.log_worker_ids', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='param_id', full_name='api.GetSuggestionsRequest.param_id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2398,
serialized_end=2535,
)
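# Illustrative sketch (ids and algorithm name are placeholders): a
# GetSuggestionsRequest names the study, the suggestion algorithm and how
# many trials to propose, e.g.
#
#   request = api_pb2.GetSuggestionsRequest(
#       study_id='study-0001',
#       suggestion_algorithm='random',
#       request_number=3,
#   )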
_GETSUGGESTIONSREPLY = _descriptor.Descriptor(
name='GetSuggestionsReply',
full_name='api.GetSuggestionsReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='trials', full_name='api.GetSuggestionsReply.trials', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2537,
serialized_end=2586,
)
_GETSHOULDSTOPWORKERSREQUEST = _descriptor.Descriptor(
name='GetShouldStopWorkersRequest',
full_name='api.GetShouldStopWorkersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_id', full_name='api.GetShouldStopWorkersRequest.study_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='early_stopping_algorithm', full_name='api.GetShouldStopWorkersRequest.early_stopping_algorithm', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='param_id', full_name='api.GetShouldStopWorkersRequest.param_id', index=2,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2588,
serialized_end=2687,
)
_GETSHOULDSTOPWORKERSREPLY = _descriptor.Descriptor(
name='GetShouldStopWorkersReply',
full_name='api.GetShouldStopWorkersReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='should_stop_worker_ids', full_name='api.GetShouldStopWorkersReply.should_stop_worker_ids', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2689,
serialized_end=2748,
)
_GETMETRICSREQUEST = _descriptor.Descriptor(
name='GetMetricsRequest',
full_name='api.GetMetricsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_id', full_name='api.GetMetricsRequest.study_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='worker_ids', full_name='api.GetMetricsRequest.worker_ids', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='metrics_names', full_name='api.GetMetricsRequest.metrics_names', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2750,
serialized_end=2830,
)
_GETMETRICSREPLY = _descriptor.Descriptor(
name='GetMetricsReply',
full_name='api.GetMetricsReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='metrics_log_sets', full_name='api.GetMetricsReply.metrics_log_sets', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2832,
serialized_end=2895,
)
_MODELINFO = _descriptor.Descriptor(
name='ModelInfo',
full_name='api.ModelInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_name', full_name='api.ModelInfo.study_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='worker_id', full_name='api.ModelInfo.worker_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='parameters', full_name='api.ModelInfo.parameters', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='metrics', full_name='api.ModelInfo.metrics', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='model_path', full_name='api.ModelInfo.model_path', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2898,
serialized_end=3035,
)
_DATASETINFO = _descriptor.Descriptor(
name='DataSetInfo',
full_name='api.DataSetInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='api.DataSetInfo.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='path', full_name='api.DataSetInfo.path', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3037,
serialized_end=3078,
)
_SAVESTUDYREQUEST = _descriptor.Descriptor(
name='SaveStudyRequest',
full_name='api.SaveStudyRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_name', full_name='api.SaveStudyRequest.study_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='owner', full_name='api.SaveStudyRequest.owner', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='description', full_name='api.SaveStudyRequest.description', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3080,
serialized_end=3154,
)
_SAVESTUDYREPLY = _descriptor.Descriptor(
name='SaveStudyReply',
full_name='api.SaveStudyReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3156,
serialized_end=3172,
)
_SAVEMODELREQUEST = _descriptor.Descriptor(
name='SaveModelRequest',
full_name='api.SaveModelRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='model', full_name='api.SaveModelRequest.model', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='data_set', full_name='api.SaveModelRequest.data_set', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tensor_board', full_name='api.SaveModelRequest.tensor_board', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3174,
serialized_end=3281,
)
_SAVEMODELREPLY = _descriptor.Descriptor(
name='SaveModelReply',
full_name='api.SaveModelReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3283,
serialized_end=3299,
)
_GETSAVEDSTUDIESREQUEST = _descriptor.Descriptor(
name='GetSavedStudiesRequest',
full_name='api.GetSavedStudiesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3301,
serialized_end=3325,
)
_GETSAVEDSTUDIESREPLY = _descriptor.Descriptor(
name='GetSavedStudiesReply',
full_name='api.GetSavedStudiesReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='studies', full_name='api.GetSavedStudiesReply.studies', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3327,
serialized_end=3386,
)
_GETSAVEDMODELSREQUEST = _descriptor.Descriptor(
name='GetSavedModelsRequest',
full_name='api.GetSavedModelsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_name', full_name='api.GetSavedModelsRequest.study_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3388,
serialized_end=3431,
)
_GETSAVEDMODELSREPLY = _descriptor.Descriptor(
name='GetSavedModelsReply',
full_name='api.GetSavedModelsReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='models', full_name='api.GetSavedModelsReply.models', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3433,
serialized_end=3486,
)
_GETSAVEDMODELREQUEST = _descriptor.Descriptor(
name='GetSavedModelRequest',
full_name='api.GetSavedModelRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_name', full_name='api.GetSavedModelRequest.study_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='worker_id', full_name='api.GetSavedModelRequest.worker_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3488,
serialized_end=3549,
)
_GETSAVEDMODELREPLY = _descriptor.Descriptor(
name='GetSavedModelReply',
full_name='api.GetSavedModelReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='model', full_name='api.GetSavedModelReply.model', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3551,
serialized_end=3602,
)
_SETSUGGESTIONPARAMETERSREQUEST = _descriptor.Descriptor(
name='SetSuggestionParametersRequest',
full_name='api.SetSuggestionParametersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_id', full_name='api.SetSuggestionParametersRequest.study_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='suggestion_algorithm', full_name='api.SetSuggestionParametersRequest.suggestion_algorithm', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='param_id', full_name='api.SetSuggestionParametersRequest.param_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='suggestion_parameters', full_name='api.SetSuggestionParametersRequest.suggestion_parameters', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3605,
serialized_end=3760,
)
_SETSUGGESTIONPARAMETERSREPLY = _descriptor.Descriptor(
name='SetSuggestionParametersReply',
full_name='api.SetSuggestionParametersReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='param_id', full_name='api.SetSuggestionParametersReply.param_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3762,
serialized_end=3810,
)
_GETSUGGESTIONPARAMETERSREQUEST = _descriptor.Descriptor(
name='GetSuggestionParametersRequest',
full_name='api.GetSuggestionParametersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='param_id', full_name='api.GetSuggestionParametersRequest.param_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3812,
serialized_end=3862,
)
_GETSUGGESTIONPARAMETERSREPLY = _descriptor.Descriptor(
name='GetSuggestionParametersReply',
full_name='api.GetSuggestionParametersReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='suggestion_parameters', full_name='api.GetSuggestionParametersReply.suggestion_parameters', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3864,
serialized_end=3951,
)
_GETSUGGESTIONPARAMETERLISTREQUEST = _descriptor.Descriptor(
name='GetSuggestionParameterListRequest',
full_name='api.GetSuggestionParameterListRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_id', full_name='api.GetSuggestionParameterListRequest.study_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3953,
serialized_end=4006,
)
_SUGGESTIONPARAMETERSET = _descriptor.Descriptor(
name='SuggestionParameterSet',
full_name='api.SuggestionParameterSet',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='param_id', full_name='api.SuggestionParameterSet.param_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='suggestion_algorithm', full_name='api.SuggestionParameterSet.suggestion_algorithm', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='suggestion_parameters', full_name='api.SuggestionParameterSet.suggestion_parameters', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4009,
serialized_end=4138,
)
_GETSUGGESTIONPARAMETERLISTREPLY = _descriptor.Descriptor(
name='GetSuggestionParameterListReply',
full_name='api.GetSuggestionParameterListReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='suggestion_parameter_sets', full_name='api.GetSuggestionParameterListReply.suggestion_parameter_sets', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4140,
serialized_end=4237,
)
_STOPSUGGESTIONREQUEST = _descriptor.Descriptor(
name='StopSuggestionRequest',
full_name='api.StopSuggestionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_id', full_name='api.StopSuggestionRequest.study_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4239,
serialized_end=4280,
)
_STOPSUGGESTIONREPLY = _descriptor.Descriptor(
name='StopSuggestionReply',
full_name='api.StopSuggestionReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4282,
serialized_end=4303,
)
_SETEARLYSTOPPINGPARAMETERSREQUEST = _descriptor.Descriptor(
name='SetEarlyStoppingParametersRequest',
full_name='api.SetEarlyStoppingParametersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_id', full_name='api.SetEarlyStoppingParametersRequest.study_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='early_stopping_algorithm', full_name='api.SetEarlyStoppingParametersRequest.early_stopping_algorithm', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='param_id', full_name='api.SetEarlyStoppingParametersRequest.param_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='early_stopping_parameters', full_name='api.SetEarlyStoppingParametersRequest.early_stopping_parameters', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4306,
serialized_end=4475,
)
_SETEARLYSTOPPINGPARAMETERSREPLY = _descriptor.Descriptor(
name='SetEarlyStoppingParametersReply',
full_name='api.SetEarlyStoppingParametersReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='param_id', full_name='api.SetEarlyStoppingParametersReply.param_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4477,
serialized_end=4528,
)
_GETEARLYSTOPPINGPARAMETERSREQUEST = _descriptor.Descriptor(
name='GetEarlyStoppingParametersRequest',
full_name='api.GetEarlyStoppingParametersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='param_id', full_name='api.GetEarlyStoppingParametersRequest.param_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4530,
serialized_end=4583,
)
_GETEARLYSTOPPINGPARAMETERSREPLY = _descriptor.Descriptor(
name='GetEarlyStoppingParametersReply',
full_name='api.GetEarlyStoppingParametersReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='early_stopping_parameters', full_name='api.GetEarlyStoppingParametersReply.early_stopping_parameters', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4585,
serialized_end=4682,
)
_GETEARLYSTOPPINGPARAMETERLISTREQUEST = _descriptor.Descriptor(
name='GetEarlyStoppingParameterListRequest',
full_name='api.GetEarlyStoppingParameterListRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='study_id', full_name='api.GetEarlyStoppingParameterListRequest.study_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4684,
serialized_end=4740,
)
_EARLYSTOPPINGPARAMETERSET = _descriptor.Descriptor(
name='EarlyStoppingParameterSet',
full_name='api.EarlyStoppingParameterSet',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='param_id', full_name='api.EarlyStoppingParameterSet.param_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='early_stopping_algorithm', full_name='api.EarlyStoppingParameterSet.early_stopping_algorithm', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='early_stopping_parameters', full_name='api.EarlyStoppingParameterSet.early_stopping_parameters', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4743,
serialized_end=4886,
)
_GETEARLYSTOPPINGPARAMETERLISTREPLY = _descriptor.Descriptor(
name='GetEarlyStoppingParameterListReply',
full_name='api.GetEarlyStoppingParameterListReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='early_stopping_parameter_sets', full_name='api.GetEarlyStoppingParameterListReply.early_stopping_parameter_sets', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4888,
serialized_end=4995,
)
_PARAMETERCONFIG.fields_by_name['parameter_type'].enum_type = _PARAMETERTYPE
_PARAMETERCONFIG.fields_by_name['feasible'].message_type = _FEASIBLESPACE
_PARAMETER.fields_by_name['parameter_type'].enum_type = _PARAMETERTYPE
_METRICSLOGSET.fields_by_name['metrics_logs'].message_type = _METRICSLOG
_METRICSLOGSET.fields_by_name['worker_status'].enum_type = _STATE
_TRIAL.fields_by_name['parameter_set'].message_type = _PARAMETER
_TRIAL.fields_by_name['tags'].message_type = _TAG
_WORKERCONFIG.fields_by_name['mount'].message_type = _MOUNTCONF
_WORKER.fields_by_name['status'].enum_type = _STATE
_WORKER.fields_by_name['config'].message_type = _WORKERCONFIG
_WORKER.fields_by_name['tags'].message_type = _TAG
_STUDYCONFIG_PARAMETERCONFIGS.fields_by_name['configs'].message_type = _PARAMETERCONFIG
_STUDYCONFIG_PARAMETERCONFIGS.containing_type = _STUDYCONFIG
_STUDYCONFIG.fields_by_name['optimization_type'].enum_type = _OPTIMIZATIONTYPE
_STUDYCONFIG.fields_by_name['parameter_configs'].message_type = _STUDYCONFIG_PARAMETERCONFIGS
_STUDYCONFIG.fields_by_name['tags'].message_type = _TAG
_CREATESTUDYREQUEST.fields_by_name['study_config'].message_type = _STUDYCONFIG
_GETSTUDYREPLY.fields_by_name['study_config'].message_type = _STUDYCONFIG
_GETSTUDYLISTREPLY.fields_by_name['study_overviews'].message_type = _STUDYOVERVIEW
_CREATETRIALREQUEST.fields_by_name['trial'].message_type = _TRIAL
_GETTRIALSREPLY.fields_by_name['trials'].message_type = _TRIAL
_RUNTRIALREQUEST.fields_by_name['worker_config'].message_type = _WORKERCONFIG
_GETWORKERSREPLY.fields_by_name['workers'].message_type = _WORKER
_GETSUGGESTIONSREPLY.fields_by_name['trials'].message_type = _TRIAL
_GETMETRICSREPLY.fields_by_name['metrics_log_sets'].message_type = _METRICSLOGSET
_MODELINFO.fields_by_name['parameters'].message_type = _PARAMETER
_MODELINFO.fields_by_name['metrics'].message_type = _METRICS
_SAVEMODELREQUEST.fields_by_name['model'].message_type = _MODELINFO
_SAVEMODELREQUEST.fields_by_name['data_set'].message_type = _DATASETINFO
_GETSAVEDSTUDIESREPLY.fields_by_name['studies'].message_type = _STUDYOVERVIEW
_GETSAVEDMODELSREPLY.fields_by_name['models'].message_type = _MODELINFO
_GETSAVEDMODELREPLY.fields_by_name['model'].message_type = _MODELINFO
_SETSUGGESTIONPARAMETERSREQUEST.fields_by_name['suggestion_parameters'].message_type = _SUGGESTIONPARAMETER
_GETSUGGESTIONPARAMETERSREPLY.fields_by_name['suggestion_parameters'].message_type = _SUGGESTIONPARAMETER
_SUGGESTIONPARAMETERSET.fields_by_name['suggestion_parameters'].message_type = _SUGGESTIONPARAMETER
_GETSUGGESTIONPARAMETERLISTREPLY.fields_by_name['suggestion_parameter_sets'].message_type = _SUGGESTIONPARAMETERSET
_SETEARLYSTOPPINGPARAMETERSREQUEST.fields_by_name['early_stopping_parameters'].message_type = _EARLYSTOPPINGPARAMETER
_GETEARLYSTOPPINGPARAMETERSREPLY.fields_by_name['early_stopping_parameters'].message_type = _EARLYSTOPPINGPARAMETER
_EARLYSTOPPINGPARAMETERSET.fields_by_name['early_stopping_parameters'].message_type = _EARLYSTOPPINGPARAMETER
_GETEARLYSTOPPINGPARAMETERLISTREPLY.fields_by_name['early_stopping_parameter_sets'].message_type = _EARLYSTOPPINGPARAMETERSET
DESCRIPTOR.message_types_by_name['FeasibleSpace'] = _FEASIBLESPACE
DESCRIPTOR.message_types_by_name['ParameterConfig'] = _PARAMETERCONFIG
DESCRIPTOR.message_types_by_name['Parameter'] = _PARAMETER
DESCRIPTOR.message_types_by_name['MetricsLogSet'] = _METRICSLOGSET
DESCRIPTOR.message_types_by_name['Metrics'] = _METRICS
DESCRIPTOR.message_types_by_name['MetricsLog'] = _METRICSLOG
DESCRIPTOR.message_types_by_name['SuggestionParameter'] = _SUGGESTIONPARAMETER
DESCRIPTOR.message_types_by_name['EarlyStoppingParameter'] = _EARLYSTOPPINGPARAMETER
DESCRIPTOR.message_types_by_name['Tag'] = _TAG
DESCRIPTOR.message_types_by_name['MountConf'] = _MOUNTCONF
DESCRIPTOR.message_types_by_name['StudyOverview'] = _STUDYOVERVIEW
DESCRIPTOR.message_types_by_name['Trial'] = _TRIAL
DESCRIPTOR.message_types_by_name['WorkerConfig'] = _WORKERCONFIG
DESCRIPTOR.message_types_by_name['Worker'] = _WORKER
DESCRIPTOR.message_types_by_name['StudyConfig'] = _STUDYCONFIG
DESCRIPTOR.message_types_by_name['CreateStudyRequest'] = _CREATESTUDYREQUEST
DESCRIPTOR.message_types_by_name['CreateStudyReply'] = _CREATESTUDYREPLY
DESCRIPTOR.message_types_by_name['StopStudyRequest'] = _STOPSTUDYREQUEST
DESCRIPTOR.message_types_by_name['StopStudyReply'] = _STOPSTUDYREPLY
DESCRIPTOR.message_types_by_name['GetStudyRequest'] = _GETSTUDYREQUEST
DESCRIPTOR.message_types_by_name['GetStudyReply'] = _GETSTUDYREPLY
DESCRIPTOR.message_types_by_name['GetStudyListRequest'] = _GETSTUDYLISTREQUEST
DESCRIPTOR.message_types_by_name['GetStudyListReply'] = _GETSTUDYLISTREPLY
DESCRIPTOR.message_types_by_name['CreateTrialRequest'] = _CREATETRIALREQUEST
DESCRIPTOR.message_types_by_name['CreateTrialReply'] = _CREATETRIALREPLY
DESCRIPTOR.message_types_by_name['GetTrialsRequest'] = _GETTRIALSREQUEST
DESCRIPTOR.message_types_by_name['GetTrialsReply'] = _GETTRIALSREPLY
DESCRIPTOR.message_types_by_name['RunTrialRequest'] = _RUNTRIALREQUEST
DESCRIPTOR.message_types_by_name['RunTrialReply'] = _RUNTRIALREPLY
DESCRIPTOR.message_types_by_name['StopWorkersRequest'] = _STOPWORKERSREQUEST
DESCRIPTOR.message_types_by_name['StopWorkersReply'] = _STOPWORKERSREPLY
DESCRIPTOR.message_types_by_name['GetWorkersRequest'] = _GETWORKERSREQUEST
DESCRIPTOR.message_types_by_name['GetWorkersReply'] = _GETWORKERSREPLY
DESCRIPTOR.message_types_by_name['GetSuggestionsRequest'] = _GETSUGGESTIONSREQUEST
DESCRIPTOR.message_types_by_name['GetSuggestionsReply'] = _GETSUGGESTIONSREPLY
DESCRIPTOR.message_types_by_name['GetShouldStopWorkersRequest'] = _GETSHOULDSTOPWORKERSREQUEST
DESCRIPTOR.message_types_by_name['GetShouldStopWorkersReply'] = _GETSHOULDSTOPWORKERSREPLY
DESCRIPTOR.message_types_by_name['GetMetricsRequest'] = _GETMETRICSREQUEST
DESCRIPTOR.message_types_by_name['GetMetricsReply'] = _GETMETRICSREPLY
DESCRIPTOR.message_types_by_name['ModelInfo'] = _MODELINFO
DESCRIPTOR.message_types_by_name['DataSetInfo'] = _DATASETINFO
DESCRIPTOR.message_types_by_name['SaveStudyRequest'] = _SAVESTUDYREQUEST
DESCRIPTOR.message_types_by_name['SaveStudyReply'] = _SAVESTUDYREPLY
DESCRIPTOR.message_types_by_name['SaveModelRequest'] = _SAVEMODELREQUEST
DESCRIPTOR.message_types_by_name['SaveModelReply'] = _SAVEMODELREPLY
DESCRIPTOR.message_types_by_name['GetSavedStudiesRequest'] = _GETSAVEDSTUDIESREQUEST
DESCRIPTOR.message_types_by_name['GetSavedStudiesReply'] = _GETSAVEDSTUDIESREPLY
DESCRIPTOR.message_types_by_name['GetSavedModelsRequest'] = _GETSAVEDMODELSREQUEST
DESCRIPTOR.message_types_by_name['GetSavedModelsReply'] = _GETSAVEDMODELSREPLY
DESCRIPTOR.message_types_by_name['GetSavedModelRequest'] = _GETSAVEDMODELREQUEST
DESCRIPTOR.message_types_by_name['GetSavedModelReply'] = _GETSAVEDMODELREPLY
DESCRIPTOR.message_types_by_name['SetSuggestionParametersRequest'] = _SETSUGGESTIONPARAMETERSREQUEST
DESCRIPTOR.message_types_by_name['SetSuggestionParametersReply'] = _SETSUGGESTIONPARAMETERSREPLY
DESCRIPTOR.message_types_by_name['GetSuggestionParametersRequest'] = _GETSUGGESTIONPARAMETERSREQUEST
DESCRIPTOR.message_types_by_name['GetSuggestionParametersReply'] = _GETSUGGESTIONPARAMETERSREPLY
DESCRIPTOR.message_types_by_name['GetSuggestionParameterListRequest'] = _GETSUGGESTIONPARAMETERLISTREQUEST
DESCRIPTOR.message_types_by_name['SuggestionParameterSet'] = _SUGGESTIONPARAMETERSET
DESCRIPTOR.message_types_by_name['GetSuggestionParameterListReply'] = _GETSUGGESTIONPARAMETERLISTREPLY
DESCRIPTOR.message_types_by_name['StopSuggestionRequest'] = _STOPSUGGESTIONREQUEST
DESCRIPTOR.message_types_by_name['StopSuggestionReply'] = _STOPSUGGESTIONREPLY
DESCRIPTOR.message_types_by_name['SetEarlyStoppingParametersRequest'] = _SETEARLYSTOPPINGPARAMETERSREQUEST
DESCRIPTOR.message_types_by_name['SetEarlyStoppingParametersReply'] = _SETEARLYSTOPPINGPARAMETERSREPLY
DESCRIPTOR.message_types_by_name['GetEarlyStoppingParametersRequest'] = _GETEARLYSTOPPINGPARAMETERSREQUEST
DESCRIPTOR.message_types_by_name['GetEarlyStoppingParametersReply'] = _GETEARLYSTOPPINGPARAMETERSREPLY
DESCRIPTOR.message_types_by_name['GetEarlyStoppingParameterListRequest'] = _GETEARLYSTOPPINGPARAMETERLISTREQUEST
DESCRIPTOR.message_types_by_name['EarlyStoppingParameterSet'] = _EARLYSTOPPINGPARAMETERSET
DESCRIPTOR.message_types_by_name['GetEarlyStoppingParameterListReply'] = _GETEARLYSTOPPINGPARAMETERLISTREPLY
DESCRIPTOR.enum_types_by_name['ParameterType'] = _PARAMETERTYPE
DESCRIPTOR.enum_types_by_name['OptimizationType'] = _OPTIMIZATIONTYPE
DESCRIPTOR.enum_types_by_name['State'] = _STATE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
FeasibleSpace = _reflection.GeneratedProtocolMessageType('FeasibleSpace', (_message.Message,), dict(
DESCRIPTOR = _FEASIBLESPACE,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.FeasibleSpace)
))
_sym_db.RegisterMessage(FeasibleSpace)
ParameterConfig = _reflection.GeneratedProtocolMessageType('ParameterConfig', (_message.Message,), dict(
DESCRIPTOR = _PARAMETERCONFIG,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.ParameterConfig)
))
_sym_db.RegisterMessage(ParameterConfig)
Parameter = _reflection.GeneratedProtocolMessageType('Parameter', (_message.Message,), dict(
DESCRIPTOR = _PARAMETER,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.Parameter)
))
_sym_db.RegisterMessage(Parameter)
MetricsLogSet = _reflection.GeneratedProtocolMessageType('MetricsLogSet', (_message.Message,), dict(
DESCRIPTOR = _METRICSLOGSET,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.MetricsLogSet)
))
_sym_db.RegisterMessage(MetricsLogSet)
Metrics = _reflection.GeneratedProtocolMessageType('Metrics', (_message.Message,), dict(
DESCRIPTOR = _METRICS,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.Metrics)
))
_sym_db.RegisterMessage(Metrics)
MetricsLog = _reflection.GeneratedProtocolMessageType('MetricsLog', (_message.Message,), dict(
DESCRIPTOR = _METRICSLOG,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.MetricsLog)
))
_sym_db.RegisterMessage(MetricsLog)
SuggestionParameter = _reflection.GeneratedProtocolMessageType('SuggestionParameter', (_message.Message,), dict(
DESCRIPTOR = _SUGGESTIONPARAMETER,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.SuggestionParameter)
))
_sym_db.RegisterMessage(SuggestionParameter)
EarlyStoppingParameter = _reflection.GeneratedProtocolMessageType('EarlyStoppingParameter', (_message.Message,), dict(
DESCRIPTOR = _EARLYSTOPPINGPARAMETER,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.EarlyStoppingParameter)
))
_sym_db.RegisterMessage(EarlyStoppingParameter)
Tag = _reflection.GeneratedProtocolMessageType('Tag', (_message.Message,), dict(
DESCRIPTOR = _TAG,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.Tag)
))
_sym_db.RegisterMessage(Tag)
MountConf = _reflection.GeneratedProtocolMessageType('MountConf', (_message.Message,), dict(
DESCRIPTOR = _MOUNTCONF,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.MountConf)
))
_sym_db.RegisterMessage(MountConf)
StudyOverview = _reflection.GeneratedProtocolMessageType('StudyOverview', (_message.Message,), dict(
DESCRIPTOR = _STUDYOVERVIEW,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.StudyOverview)
))
_sym_db.RegisterMessage(StudyOverview)
Trial = _reflection.GeneratedProtocolMessageType('Trial', (_message.Message,), dict(
DESCRIPTOR = _TRIAL,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.Trial)
))
_sym_db.RegisterMessage(Trial)
WorkerConfig = _reflection.GeneratedProtocolMessageType('WorkerConfig', (_message.Message,), dict(
DESCRIPTOR = _WORKERCONFIG,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.WorkerConfig)
))
_sym_db.RegisterMessage(WorkerConfig)
Worker = _reflection.GeneratedProtocolMessageType('Worker', (_message.Message,), dict(
DESCRIPTOR = _WORKER,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.Worker)
))
_sym_db.RegisterMessage(Worker)
StudyConfig = _reflection.GeneratedProtocolMessageType('StudyConfig', (_message.Message,), dict(
ParameterConfigs = _reflection.GeneratedProtocolMessageType('ParameterConfigs', (_message.Message,), dict(
DESCRIPTOR = _STUDYCONFIG_PARAMETERCONFIGS,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.StudyConfig.ParameterConfigs)
))
,
DESCRIPTOR = _STUDYCONFIG,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.StudyConfig)
))
_sym_db.RegisterMessage(StudyConfig)
_sym_db.RegisterMessage(StudyConfig.ParameterConfigs)
CreateStudyRequest = _reflection.GeneratedProtocolMessageType('CreateStudyRequest', (_message.Message,), dict(
DESCRIPTOR = _CREATESTUDYREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.CreateStudyRequest)
))
_sym_db.RegisterMessage(CreateStudyRequest)
CreateStudyReply = _reflection.GeneratedProtocolMessageType('CreateStudyReply', (_message.Message,), dict(
DESCRIPTOR = _CREATESTUDYREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.CreateStudyReply)
))
_sym_db.RegisterMessage(CreateStudyReply)
StopStudyRequest = _reflection.GeneratedProtocolMessageType('StopStudyRequest', (_message.Message,), dict(
DESCRIPTOR = _STOPSTUDYREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.StopStudyRequest)
))
_sym_db.RegisterMessage(StopStudyRequest)
StopStudyReply = _reflection.GeneratedProtocolMessageType('StopStudyReply', (_message.Message,), dict(
DESCRIPTOR = _STOPSTUDYREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.StopStudyReply)
))
_sym_db.RegisterMessage(StopStudyReply)
GetStudyRequest = _reflection.GeneratedProtocolMessageType('GetStudyRequest', (_message.Message,), dict(
DESCRIPTOR = _GETSTUDYREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetStudyRequest)
))
_sym_db.RegisterMessage(GetStudyRequest)
GetStudyReply = _reflection.GeneratedProtocolMessageType('GetStudyReply', (_message.Message,), dict(
DESCRIPTOR = _GETSTUDYREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetStudyReply)
))
_sym_db.RegisterMessage(GetStudyReply)
GetStudyListRequest = _reflection.GeneratedProtocolMessageType('GetStudyListRequest', (_message.Message,), dict(
DESCRIPTOR = _GETSTUDYLISTREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetStudyListRequest)
))
_sym_db.RegisterMessage(GetStudyListRequest)
GetStudyListReply = _reflection.GeneratedProtocolMessageType('GetStudyListReply', (_message.Message,), dict(
DESCRIPTOR = _GETSTUDYLISTREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetStudyListReply)
))
_sym_db.RegisterMessage(GetStudyListReply)
CreateTrialRequest = _reflection.GeneratedProtocolMessageType('CreateTrialRequest', (_message.Message,), dict(
DESCRIPTOR = _CREATETRIALREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.CreateTrialRequest)
))
_sym_db.RegisterMessage(CreateTrialRequest)
CreateTrialReply = _reflection.GeneratedProtocolMessageType('CreateTrialReply', (_message.Message,), dict(
DESCRIPTOR = _CREATETRIALREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.CreateTrialReply)
))
_sym_db.RegisterMessage(CreateTrialReply)
GetTrialsRequest = _reflection.GeneratedProtocolMessageType('GetTrialsRequest', (_message.Message,), dict(
DESCRIPTOR = _GETTRIALSREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetTrialsRequest)
))
_sym_db.RegisterMessage(GetTrialsRequest)
GetTrialsReply = _reflection.GeneratedProtocolMessageType('GetTrialsReply', (_message.Message,), dict(
DESCRIPTOR = _GETTRIALSREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetTrialsReply)
))
_sym_db.RegisterMessage(GetTrialsReply)
RunTrialRequest = _reflection.GeneratedProtocolMessageType('RunTrialRequest', (_message.Message,), dict(
DESCRIPTOR = _RUNTRIALREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.RunTrialRequest)
))
_sym_db.RegisterMessage(RunTrialRequest)
RunTrialReply = _reflection.GeneratedProtocolMessageType('RunTrialReply', (_message.Message,), dict(
DESCRIPTOR = _RUNTRIALREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.RunTrialReply)
))
_sym_db.RegisterMessage(RunTrialReply)
StopWorkersRequest = _reflection.GeneratedProtocolMessageType('StopWorkersRequest', (_message.Message,), dict(
DESCRIPTOR = _STOPWORKERSREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.StopWorkersRequest)
))
_sym_db.RegisterMessage(StopWorkersRequest)
StopWorkersReply = _reflection.GeneratedProtocolMessageType('StopWorkersReply', (_message.Message,), dict(
DESCRIPTOR = _STOPWORKERSREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.StopWorkersReply)
))
_sym_db.RegisterMessage(StopWorkersReply)
GetWorkersRequest = _reflection.GeneratedProtocolMessageType('GetWorkersRequest', (_message.Message,), dict(
DESCRIPTOR = _GETWORKERSREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetWorkersRequest)
))
_sym_db.RegisterMessage(GetWorkersRequest)
GetWorkersReply = _reflection.GeneratedProtocolMessageType('GetWorkersReply', (_message.Message,), dict(
DESCRIPTOR = _GETWORKERSREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetWorkersReply)
))
_sym_db.RegisterMessage(GetWorkersReply)
GetSuggestionsRequest = _reflection.GeneratedProtocolMessageType('GetSuggestionsRequest', (_message.Message,), dict(
DESCRIPTOR = _GETSUGGESTIONSREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetSuggestionsRequest)
))
_sym_db.RegisterMessage(GetSuggestionsRequest)
GetSuggestionsReply = _reflection.GeneratedProtocolMessageType('GetSuggestionsReply', (_message.Message,), dict(
DESCRIPTOR = _GETSUGGESTIONSREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetSuggestionsReply)
))
_sym_db.RegisterMessage(GetSuggestionsReply)
GetShouldStopWorkersRequest = _reflection.GeneratedProtocolMessageType('GetShouldStopWorkersRequest', (_message.Message,), dict(
DESCRIPTOR = _GETSHOULDSTOPWORKERSREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetShouldStopWorkersRequest)
))
_sym_db.RegisterMessage(GetShouldStopWorkersRequest)
GetShouldStopWorkersReply = _reflection.GeneratedProtocolMessageType('GetShouldStopWorkersReply', (_message.Message,), dict(
DESCRIPTOR = _GETSHOULDSTOPWORKERSREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetShouldStopWorkersReply)
))
_sym_db.RegisterMessage(GetShouldStopWorkersReply)
GetMetricsRequest = _reflection.GeneratedProtocolMessageType('GetMetricsRequest', (_message.Message,), dict(
DESCRIPTOR = _GETMETRICSREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetMetricsRequest)
))
_sym_db.RegisterMessage(GetMetricsRequest)
GetMetricsReply = _reflection.GeneratedProtocolMessageType('GetMetricsReply', (_message.Message,), dict(
DESCRIPTOR = _GETMETRICSREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetMetricsReply)
))
_sym_db.RegisterMessage(GetMetricsReply)
ModelInfo = _reflection.GeneratedProtocolMessageType('ModelInfo', (_message.Message,), dict(
DESCRIPTOR = _MODELINFO,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.ModelInfo)
))
_sym_db.RegisterMessage(ModelInfo)
DataSetInfo = _reflection.GeneratedProtocolMessageType('DataSetInfo', (_message.Message,), dict(
DESCRIPTOR = _DATASETINFO,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.DataSetInfo)
))
_sym_db.RegisterMessage(DataSetInfo)
SaveStudyRequest = _reflection.GeneratedProtocolMessageType('SaveStudyRequest', (_message.Message,), dict(
DESCRIPTOR = _SAVESTUDYREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.SaveStudyRequest)
))
_sym_db.RegisterMessage(SaveStudyRequest)
SaveStudyReply = _reflection.GeneratedProtocolMessageType('SaveStudyReply', (_message.Message,), dict(
DESCRIPTOR = _SAVESTUDYREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.SaveStudyReply)
))
_sym_db.RegisterMessage(SaveStudyReply)
SaveModelRequest = _reflection.GeneratedProtocolMessageType('SaveModelRequest', (_message.Message,), dict(
DESCRIPTOR = _SAVEMODELREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.SaveModelRequest)
))
_sym_db.RegisterMessage(SaveModelRequest)
SaveModelReply = _reflection.GeneratedProtocolMessageType('SaveModelReply', (_message.Message,), dict(
DESCRIPTOR = _SAVEMODELREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.SaveModelReply)
))
_sym_db.RegisterMessage(SaveModelReply)
GetSavedStudiesRequest = _reflection.GeneratedProtocolMessageType('GetSavedStudiesRequest', (_message.Message,), dict(
DESCRIPTOR = _GETSAVEDSTUDIESREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetSavedStudiesRequest)
))
_sym_db.RegisterMessage(GetSavedStudiesRequest)
GetSavedStudiesReply = _reflection.GeneratedProtocolMessageType('GetSavedStudiesReply', (_message.Message,), dict(
DESCRIPTOR = _GETSAVEDSTUDIESREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetSavedStudiesReply)
))
_sym_db.RegisterMessage(GetSavedStudiesReply)
GetSavedModelsRequest = _reflection.GeneratedProtocolMessageType('GetSavedModelsRequest', (_message.Message,), dict(
DESCRIPTOR = _GETSAVEDMODELSREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetSavedModelsRequest)
))
_sym_db.RegisterMessage(GetSavedModelsRequest)
GetSavedModelsReply = _reflection.GeneratedProtocolMessageType('GetSavedModelsReply', (_message.Message,), dict(
DESCRIPTOR = _GETSAVEDMODELSREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetSavedModelsReply)
))
_sym_db.RegisterMessage(GetSavedModelsReply)
GetSavedModelRequest = _reflection.GeneratedProtocolMessageType('GetSavedModelRequest', (_message.Message,), dict(
DESCRIPTOR = _GETSAVEDMODELREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetSavedModelRequest)
))
_sym_db.RegisterMessage(GetSavedModelRequest)
GetSavedModelReply = _reflection.GeneratedProtocolMessageType('GetSavedModelReply', (_message.Message,), dict(
DESCRIPTOR = _GETSAVEDMODELREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetSavedModelReply)
))
_sym_db.RegisterMessage(GetSavedModelReply)
SetSuggestionParametersRequest = _reflection.GeneratedProtocolMessageType('SetSuggestionParametersRequest', (_message.Message,), dict(
DESCRIPTOR = _SETSUGGESTIONPARAMETERSREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.SetSuggestionParametersRequest)
))
_sym_db.RegisterMessage(SetSuggestionParametersRequest)
SetSuggestionParametersReply = _reflection.GeneratedProtocolMessageType('SetSuggestionParametersReply', (_message.Message,), dict(
DESCRIPTOR = _SETSUGGESTIONPARAMETERSREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.SetSuggestionParametersReply)
))
_sym_db.RegisterMessage(SetSuggestionParametersReply)
GetSuggestionParametersRequest = _reflection.GeneratedProtocolMessageType('GetSuggestionParametersRequest', (_message.Message,), dict(
DESCRIPTOR = _GETSUGGESTIONPARAMETERSREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetSuggestionParametersRequest)
))
_sym_db.RegisterMessage(GetSuggestionParametersRequest)
GetSuggestionParametersReply = _reflection.GeneratedProtocolMessageType('GetSuggestionParametersReply', (_message.Message,), dict(
DESCRIPTOR = _GETSUGGESTIONPARAMETERSREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetSuggestionParametersReply)
))
_sym_db.RegisterMessage(GetSuggestionParametersReply)
GetSuggestionParameterListRequest = _reflection.GeneratedProtocolMessageType('GetSuggestionParameterListRequest', (_message.Message,), dict(
DESCRIPTOR = _GETSUGGESTIONPARAMETERLISTREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetSuggestionParameterListRequest)
))
_sym_db.RegisterMessage(GetSuggestionParameterListRequest)
SuggestionParameterSet = _reflection.GeneratedProtocolMessageType('SuggestionParameterSet', (_message.Message,), dict(
DESCRIPTOR = _SUGGESTIONPARAMETERSET,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.SuggestionParameterSet)
))
_sym_db.RegisterMessage(SuggestionParameterSet)
GetSuggestionParameterListReply = _reflection.GeneratedProtocolMessageType('GetSuggestionParameterListReply', (_message.Message,), dict(
DESCRIPTOR = _GETSUGGESTIONPARAMETERLISTREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetSuggestionParameterListReply)
))
_sym_db.RegisterMessage(GetSuggestionParameterListReply)
StopSuggestionRequest = _reflection.GeneratedProtocolMessageType('StopSuggestionRequest', (_message.Message,), dict(
DESCRIPTOR = _STOPSUGGESTIONREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.StopSuggestionRequest)
))
_sym_db.RegisterMessage(StopSuggestionRequest)
StopSuggestionReply = _reflection.GeneratedProtocolMessageType('StopSuggestionReply', (_message.Message,), dict(
DESCRIPTOR = _STOPSUGGESTIONREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.StopSuggestionReply)
))
_sym_db.RegisterMessage(StopSuggestionReply)
SetEarlyStoppingParametersRequest = _reflection.GeneratedProtocolMessageType('SetEarlyStoppingParametersRequest', (_message.Message,), dict(
DESCRIPTOR = _SETEARLYSTOPPINGPARAMETERSREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.SetEarlyStoppingParametersRequest)
))
_sym_db.RegisterMessage(SetEarlyStoppingParametersRequest)
SetEarlyStoppingParametersReply = _reflection.GeneratedProtocolMessageType('SetEarlyStoppingParametersReply', (_message.Message,), dict(
DESCRIPTOR = _SETEARLYSTOPPINGPARAMETERSREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.SetEarlyStoppingParametersReply)
))
_sym_db.RegisterMessage(SetEarlyStoppingParametersReply)
GetEarlyStoppingParametersRequest = _reflection.GeneratedProtocolMessageType('GetEarlyStoppingParametersRequest', (_message.Message,), dict(
DESCRIPTOR = _GETEARLYSTOPPINGPARAMETERSREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetEarlyStoppingParametersRequest)
))
_sym_db.RegisterMessage(GetEarlyStoppingParametersRequest)
GetEarlyStoppingParametersReply = _reflection.GeneratedProtocolMessageType('GetEarlyStoppingParametersReply', (_message.Message,), dict(
DESCRIPTOR = _GETEARLYSTOPPINGPARAMETERSREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetEarlyStoppingParametersReply)
))
_sym_db.RegisterMessage(GetEarlyStoppingParametersReply)
GetEarlyStoppingParameterListRequest = _reflection.GeneratedProtocolMessageType('GetEarlyStoppingParameterListRequest', (_message.Message,), dict(
DESCRIPTOR = _GETEARLYSTOPPINGPARAMETERLISTREQUEST,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetEarlyStoppingParameterListRequest)
))
_sym_db.RegisterMessage(GetEarlyStoppingParameterListRequest)
EarlyStoppingParameterSet = _reflection.GeneratedProtocolMessageType('EarlyStoppingParameterSet', (_message.Message,), dict(
DESCRIPTOR = _EARLYSTOPPINGPARAMETERSET,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.EarlyStoppingParameterSet)
))
_sym_db.RegisterMessage(EarlyStoppingParameterSet)
GetEarlyStoppingParameterListReply = _reflection.GeneratedProtocolMessageType('GetEarlyStoppingParameterListReply', (_message.Message,), dict(
DESCRIPTOR = _GETEARLYSTOPPINGPARAMETERLISTREPLY,
__module__ = 'api_pb2'
# @@protoc_insertion_point(class_scope:api.GetEarlyStoppingParameterListReply)
))
_sym_db.RegisterMessage(GetEarlyStoppingParameterListReply)
_MANAGER = _descriptor.ServiceDescriptor(
name='Manager',
full_name='api.Manager',
file=DESCRIPTOR,
index=0,
options=None,
serialized_start=5232,
serialized_end=6900,
methods=[
_descriptor.MethodDescriptor(
name='CreateStudy',
full_name='api.Manager.CreateStudy',
index=0,
containing_service=None,
input_type=_CREATESTUDYREQUEST,
output_type=_CREATESTUDYREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='StopStudy',
full_name='api.Manager.StopStudy',
index=1,
containing_service=None,
input_type=_STOPSTUDYREQUEST,
output_type=_STOPSTUDYREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='GetStudy',
full_name='api.Manager.GetStudy',
index=2,
containing_service=None,
input_type=_GETSTUDYREQUEST,
output_type=_GETSTUDYREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='GetStudyList',
full_name='api.Manager.GetStudyList',
index=3,
containing_service=None,
input_type=_GETSTUDYLISTREQUEST,
output_type=_GETSTUDYLISTREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='CreateTrial',
full_name='api.Manager.CreateTrial',
index=4,
containing_service=None,
input_type=_CREATETRIALREQUEST,
output_type=_CREATETRIALREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='GetTrials',
full_name='api.Manager.GetTrials',
index=5,
containing_service=None,
input_type=_GETTRIALSREQUEST,
output_type=_GETTRIALSREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='RunTrial',
full_name='api.Manager.RunTrial',
index=6,
containing_service=None,
input_type=_RUNTRIALREQUEST,
output_type=_RUNTRIALREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='StopWorkers',
full_name='api.Manager.StopWorkers',
index=7,
containing_service=None,
input_type=_STOPWORKERSREQUEST,
output_type=_STOPWORKERSREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='GetWorkers',
full_name='api.Manager.GetWorkers',
index=8,
containing_service=None,
input_type=_GETWORKERSREQUEST,
output_type=_GETWORKERSREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='GetSuggestions',
full_name='api.Manager.GetSuggestions',
index=9,
containing_service=None,
input_type=_GETSUGGESTIONSREQUEST,
output_type=_GETSUGGESTIONSREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='GetShouldStopWorkers',
full_name='api.Manager.GetShouldStopWorkers',
index=10,
containing_service=None,
input_type=_GETSHOULDSTOPWORKERSREQUEST,
output_type=_GETSHOULDSTOPWORKERSREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='GetMetrics',
full_name='api.Manager.GetMetrics',
index=11,
containing_service=None,
input_type=_GETMETRICSREQUEST,
output_type=_GETMETRICSREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='SetSuggestionParameters',
full_name='api.Manager.SetSuggestionParameters',
index=12,
containing_service=None,
input_type=_SETSUGGESTIONPARAMETERSREQUEST,
output_type=_SETSUGGESTIONPARAMETERSREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='GetSuggestionParameters',
full_name='api.Manager.GetSuggestionParameters',
index=13,
containing_service=None,
input_type=_GETSUGGESTIONPARAMETERSREQUEST,
output_type=_GETSUGGESTIONPARAMETERSREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='GetSuggestionParameterList',
full_name='api.Manager.GetSuggestionParameterList',
index=14,
containing_service=None,
input_type=_GETSUGGESTIONPARAMETERLISTREQUEST,
output_type=_GETSUGGESTIONPARAMETERLISTREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='SetEarlyStoppingParameters',
full_name='api.Manager.SetEarlyStoppingParameters',
index=15,
containing_service=None,
input_type=_SETEARLYSTOPPINGPARAMETERSREQUEST,
output_type=_SETEARLYSTOPPINGPARAMETERSREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='GetEarlyStoppingParameters',
full_name='api.Manager.GetEarlyStoppingParameters',
index=16,
containing_service=None,
input_type=_GETEARLYSTOPPINGPARAMETERSREQUEST,
output_type=_GETEARLYSTOPPINGPARAMETERSREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='GetEarlyStoppingParameterList',
full_name='api.Manager.GetEarlyStoppingParameterList',
index=17,
containing_service=None,
input_type=_GETEARLYSTOPPINGPARAMETERLISTREQUEST,
output_type=_GETEARLYSTOPPINGPARAMETERLISTREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='SaveStudy',
full_name='api.Manager.SaveStudy',
index=18,
containing_service=None,
input_type=_SAVESTUDYREQUEST,
output_type=_SAVESTUDYREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='SaveModel',
full_name='api.Manager.SaveModel',
index=19,
containing_service=None,
input_type=_SAVEMODELREQUEST,
output_type=_SAVEMODELREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='GetSavedStudies',
full_name='api.Manager.GetSavedStudies',
index=20,
containing_service=None,
input_type=_GETSAVEDSTUDIESREQUEST,
output_type=_GETSAVEDSTUDIESREPLY,
options=None,
),
_descriptor.MethodDescriptor(
name='GetSavedModels',
full_name='api.Manager.GetSavedModels',
index=21,
containing_service=None,
input_type=_GETSAVEDMODELSREQUEST,
output_type=_GETSAVEDMODELSREPLY,
options=None,
),
])
_sym_db.RegisterServiceDescriptor(_MANAGER)
DESCRIPTOR.services_by_name['Manager'] = _MANAGER
_SUGGESTION = _descriptor.ServiceDescriptor(
name='Suggestion',
full_name='api.Suggestion',
file=DESCRIPTOR,
index=1,
options=None,
serialized_start=6902,
serialized_end=6986,
methods=[
_descriptor.MethodDescriptor(
name='GetSuggestions',
full_name='api.Suggestion.GetSuggestions',
index=0,
containing_service=None,
input_type=_GETSUGGESTIONSREQUEST,
output_type=_GETSUGGESTIONSREPLY,
options=None,
),
])
_sym_db.RegisterServiceDescriptor(_SUGGESTION)
DESCRIPTOR.services_by_name['Suggestion'] = _SUGGESTION
_EARLYSTOPPING = _descriptor.ServiceDescriptor(
name='EarlyStopping',
full_name='api.EarlyStopping',
file=DESCRIPTOR,
index=2,
options=None,
serialized_start=6988,
serialized_end=7093,
methods=[
_descriptor.MethodDescriptor(
name='GetShouldStopWorkers',
full_name='api.EarlyStopping.GetShouldStopWorkers',
index=0,
containing_service=None,
input_type=_GETSHOULDSTOPWORKERSREQUEST,
output_type=_GETSHOULDSTOPWORKERSREPLY,
options=None,
),
])
_sym_db.RegisterServiceDescriptor(_EARLYSTOPPING)
DESCRIPTOR.services_by_name['EarlyStopping'] = _EARLYSTOPPING
# @@protoc_insertion_point(module_scope)
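# Illustrative sketch: a minimal example of constructing and serializing the
# generated messages above. Only field names visible in this module's
# descriptors are used; the values are placeholders, and the block runs only
# when the module is executed directly.
if __name__ == '__main__':
    # Build a request to save a study's metadata.
    save_study = SaveStudyRequest(
        study_name='example-study',
        owner='alice',
        description='constructed from the generated message classes',
    )
    # Build a request to save a trained model along with its data set.
    save_model = SaveModelRequest(
        model=ModelInfo(model_path='/models/example'),
        data_set=DataSetInfo(name='example-data', path='/data/example'),
        tensor_board=True,
    )
    # Round-trip through the protobuf wire format.
    wire_bytes = save_model.SerializeToString()
    decoded = SaveModelRequest.FromString(wire_bytes)
    print(save_study.study_name, decoded.model.model_path)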
| python | 124,827 |
"""
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import subprocess
import unittest
import warnings
from concurrent.futures import Future
from magma.pipelined.bridge_util import BridgeTools
from magma.pipelined.tests.app.start_pipelined import (
PipelinedController,
TestSetup,
)
from magma.pipelined.tests.pipelined_test_util import (
assert_bridge_snapshot_match,
create_service_manager,
get_iface_gw_ipv4,
get_iface_ipv4,
get_ovsdb_port_tag,
start_ryu_app_thread,
stop_ryu_app_thread,
)
from ryu.lib import hub
class UplinkBridgeTest(unittest.TestCase):
BRIDGE = 'testing_br'
MAC_DEST = "5e:cc:cc:b1:49:4b"
BRIDGE_IP = '192.168.128.1'
UPLINK_BRIDGE = 'upt_br0'
@classmethod
def setUpClass(cls):
"""
Starts the thread which launches ryu apps
Create a testing bridge, add a port, setup the port interfaces. Then
launch the ryu apps for testing pipelined. Gets the references
to apps launched by using futures.
"""
super(UplinkBridgeTest, cls).setUpClass()
warnings.simplefilter('ignore')
cls.service_manager = create_service_manager([])
uplink_bridge_controller_reference = Future()
testing_controller_reference = Future()
test_setup = TestSetup(
apps=[
PipelinedController.UplinkBridge,
PipelinedController.Testing,
PipelinedController.StartupFlows,
],
references={
PipelinedController.UplinkBridge:
uplink_bridge_controller_reference,
PipelinedController.Testing:
testing_controller_reference,
PipelinedController.StartupFlows:
Future(),
},
config={
'bridge_name': cls.BRIDGE,
'bridge_ip_address': cls.BRIDGE_IP,
'ovs_gtp_port_number': 32768,
'clean_restart': True,
'enable_nat': True,
},
mconfig=None,
loop=None,
service_manager=cls.service_manager,
integ_test=False,
)
BridgeTools.create_bridge(cls.BRIDGE, cls.BRIDGE)
BridgeTools.create_bridge(cls.UPLINK_BRIDGE, cls.UPLINK_BRIDGE)
cls.thread = start_ryu_app_thread(test_setup)
cls.uplink_br_controller = uplink_bridge_controller_reference.result()
cls.testing_controller = testing_controller_reference.result()
@classmethod
def tearDownClass(cls):
stop_ryu_app_thread(cls.thread)
BridgeTools.destroy_bridge(cls.BRIDGE)
BridgeTools.destroy_bridge(cls.UPLINK_BRIDGE)
def testFlowSnapshotMatch(self):
assert_bridge_snapshot_match(self, self.UPLINK_BRIDGE, self.service_manager)
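# Usage note: these snapshot tests create and tear down real OVS bridges, so
# they are typically run as root on a host with Open vSwitch installed. The
# module path below is a placeholder, not taken from this file:
#
#   sudo python3 -m unittest <path.to.this_module>.UplinkBridgeTest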
class UplinkBridgeWithNonNATTest(unittest.TestCase):
BRIDGE = 'testing_br'
MAC_DEST = "5e:cc:cc:b1:49:4b"
BRIDGE_IP = '192.168.128.1'
UPLINK_BRIDGE = 'upt_br0'
UPLINK_DHCP = 'test_dhcp0'
UPLINK_PATCH = 'test_patch_p2'
UPLINK_ETH_PORT = 'test_eth3'
VLAN_DEV_IN = "test_v_in"
VLAN_DEV_OUT = "test_v_out"
@classmethod
def setUpClass(cls):
"""
Starts the thread which launches ryu apps
Create a testing bridge, add a port, setup the port interfaces. Then
launch the ryu apps for testing pipelined. Gets the references
to apps launched by using futures.
"""
super(UplinkBridgeWithNonNATTest, cls).setUpClass()
warnings.simplefilter('ignore')
cls.service_manager = create_service_manager([])
uplink_bridge_controller_reference = Future()
testing_controller_reference = Future()
test_setup = TestSetup(
apps=[
PipelinedController.UplinkBridge,
PipelinedController.Testing,
PipelinedController.StartupFlows,
],
references={
PipelinedController.UplinkBridge:
uplink_bridge_controller_reference,
PipelinedController.Testing:
testing_controller_reference,
PipelinedController.StartupFlows:
Future(),
},
config={
'bridge_name': cls.BRIDGE,
'bridge_ip_address': cls.BRIDGE_IP,
'ovs_gtp_port_number': 32768,
'clean_restart': True,
'enable_nat': False,
'uplink_bridge': cls.UPLINK_BRIDGE,
'uplink_eth_port_name': cls.UPLINK_ETH_PORT,
'virtual_mac': '02:bb:5e:36:06:4b',
'uplink_patch': cls.UPLINK_PATCH,
'uplink_dhcp_port': cls.UPLINK_DHCP,
'sgi_management_iface_vlan': "",
'dev_vlan_in': cls.VLAN_DEV_IN,
'dev_vlan_out': cls.VLAN_DEV_OUT,
'ovs_vlan_workaround': False,
'sgi_management_iface_ip_addr': '1.1.11.1',
},
mconfig=None,
loop=None,
service_manager=cls.service_manager,
integ_test=False,
)
BridgeTools.create_bridge(cls.BRIDGE, cls.BRIDGE)
BridgeTools.create_bridge(cls.UPLINK_BRIDGE, cls.UPLINK_BRIDGE)
BridgeTools.create_veth_pair(
cls.VLAN_DEV_IN,
cls.VLAN_DEV_OUT,
)
# Add to OVS,
BridgeTools.add_ovs_port(
cls.UPLINK_BRIDGE,
cls.VLAN_DEV_IN, "70",
)
BridgeTools.add_ovs_port(
cls.UPLINK_BRIDGE,
cls.VLAN_DEV_OUT, "71",
)
# dummy uplink interface
vlan = "10"
BridgeTools.create_internal_iface(
cls.UPLINK_BRIDGE,
cls.UPLINK_DHCP, None,
)
BridgeTools.create_internal_iface(
cls.UPLINK_BRIDGE,
cls.UPLINK_PATCH, None,
)
BridgeTools.create_internal_iface(
cls.UPLINK_BRIDGE,
cls.UPLINK_ETH_PORT, None,
)
cls.thread = start_ryu_app_thread(test_setup)
cls.uplink_br_controller = uplink_bridge_controller_reference.result()
cls.testing_controller = testing_controller_reference.result()
@classmethod
def tearDownClass(cls):
stop_ryu_app_thread(cls.thread)
BridgeTools.destroy_bridge(cls.BRIDGE)
BridgeTools.destroy_bridge(cls.UPLINK_BRIDGE)
def testFlowSnapshotMatch(self):
cls = self.__class__
assert_bridge_snapshot_match(
self, self.UPLINK_BRIDGE, self.service_manager,
include_stats=False,
)
class UplinkBridgeWithNonNATTestVlan(unittest.TestCase):
BRIDGE = 'testing_br'
MAC_DEST = "5e:cc:cc:b1:49:4b"
BRIDGE_IP = '192.168.128.1'
UPLINK_BRIDGE = 'upt_br0'
UPLINK_DHCP = 'test_dhcp0'
UPLINK_PATCH = 'test_patch_p2'
UPLINK_ETH_PORT = 'test_eth3'
VLAN_TAG = '100'
VLAN_DEV_IN = "test_v_in"
VLAN_DEV_OUT = "test_v_out"
@classmethod
def setUpClass(cls):
"""
Starts the thread which launches ryu apps
Create a testing bridge, add a port, setup the port interfaces. Then
launch the ryu apps for testing pipelined. Gets the references
to apps launched by using futures.
"""
super(UplinkBridgeWithNonNATTestVlan, cls).setUpClass()
warnings.simplefilter('ignore')
cls.service_manager = create_service_manager([])
uplink_bridge_controller_reference = Future()
testing_controller_reference = Future()
test_setup = TestSetup(
apps=[
PipelinedController.UplinkBridge,
PipelinedController.Testing,
PipelinedController.StartupFlows,
],
references={
PipelinedController.UplinkBridge:
uplink_bridge_controller_reference,
PipelinedController.Testing:
testing_controller_reference,
PipelinedController.StartupFlows:
Future(),
},
config={
'bridge_name': cls.BRIDGE,
'bridge_ip_address': cls.BRIDGE_IP,
'ovs_gtp_port_number': 32768,
'clean_restart': True,
'enable_nat': False,
'uplink_bridge': cls.UPLINK_BRIDGE,
'uplink_eth_port_name': cls.UPLINK_ETH_PORT,
'virtual_mac': '02:bb:5e:36:06:4b',
'uplink_patch': cls.UPLINK_PATCH,
'uplink_dhcp_port': cls.UPLINK_DHCP,
'sgi_management_iface_vlan': cls.VLAN_TAG,
'dev_vlan_in': cls.VLAN_DEV_IN,
'dev_vlan_out': cls.VLAN_DEV_OUT,
'sgi_management_iface_ip_addr': '1.1.11.1',
},
mconfig=None,
loop=None,
service_manager=cls.service_manager,
integ_test=False,
)
BridgeTools.create_bridge(cls.BRIDGE, cls.BRIDGE)
BridgeTools.create_bridge(cls.UPLINK_BRIDGE, cls.UPLINK_BRIDGE)
BridgeTools.create_veth_pair(
cls.VLAN_DEV_IN,
cls.VLAN_DEV_OUT,
)
# Add to OVS,
BridgeTools.add_ovs_port(
cls.UPLINK_BRIDGE,
cls.VLAN_DEV_IN, "70",
)
BridgeTools.add_ovs_port(
cls.UPLINK_BRIDGE,
cls.VLAN_DEV_OUT, "71",
)
# validate vlan id set
vlan = "10"
BridgeTools.create_bridge(cls.UPLINK_BRIDGE, cls.UPLINK_BRIDGE)
BridgeTools.create_internal_iface(
cls.UPLINK_BRIDGE,
cls.UPLINK_DHCP, None,
)
BridgeTools.create_internal_iface(
cls.UPLINK_BRIDGE,
cls.UPLINK_PATCH, None,
)
BridgeTools.create_internal_iface(
cls.UPLINK_BRIDGE,
cls.UPLINK_ETH_PORT, None,
)
cls.thread = start_ryu_app_thread(test_setup)
cls.uplink_br_controller = uplink_bridge_controller_reference.result()
cls.testing_controller = testing_controller_reference.result()
@classmethod
def tearDownClass(cls):
stop_ryu_app_thread(cls.thread)
BridgeTools.destroy_bridge(cls.BRIDGE)
BridgeTools.destroy_bridge(cls.UPLINK_BRIDGE)
def testFlowSnapshotMatch(self):
cls = self.__class__
assert_bridge_snapshot_match(
self, self.UPLINK_BRIDGE, self.service_manager,
include_stats=False,
)
@unittest.skip("this test resets the default GW")
class UplinkBridgeWithNonNATTest_IP_VLAN(unittest.TestCase):
BRIDGE = 'testing_br'
MAC_DEST = "5e:cc:cc:b1:49:4b"
BRIDGE_IP = '192.168.128.1'
UPLINK_BRIDGE = 'upt_br0'
UPLINK_DHCP = 'test_dhcp0'
UPLINK_PATCH = 'test_patch_p2'
UPLINK_ETH_PORT = 'test_eth3'
VLAN_TAG = '500'
SGi_IP = "1.6.5.7"
@classmethod
def setUpClass(cls):
"""
Starts the thread which launches ryu apps
Create a testing bridge, add a port, setup the port interfaces. Then
launch the ryu apps for testing pipelined. Gets the references
to apps launched by using futures.
"""
super(UplinkBridgeWithNonNATTest_IP_VLAN, cls).setUpClass()
warnings.simplefilter('ignore')
cls.service_manager = create_service_manager([])
uplink_bridge_controller_reference = Future()
testing_controller_reference = Future()
test_setup = TestSetup(
apps=[
PipelinedController.UplinkBridge,
PipelinedController.Testing,
PipelinedController.StartupFlows,
],
references={
PipelinedController.UplinkBridge:
uplink_bridge_controller_reference,
PipelinedController.Testing:
testing_controller_reference,
PipelinedController.StartupFlows:
Future(),
},
config={
'bridge_name': cls.BRIDGE,
'bridge_ip_address': cls.BRIDGE_IP,
'ovs_gtp_port_number': 32768,
'clean_restart': True,
'enable_nat': False,
'uplink_bridge': cls.UPLINK_BRIDGE,
'uplink_eth_port_name': cls.UPLINK_ETH_PORT,
'virtual_mac': '02:bb:5e:36:06:4b',
'uplink_patch': cls.UPLINK_PATCH,
'uplink_dhcp_port': cls.UPLINK_DHCP,
'sgi_management_iface_vlan': cls.VLAN_TAG,
'sgi_management_iface_ip_addr': cls.SGi_IP,
'dev_vlan_in': "test_v_in",
'dev_vlan_out': "test_v_out",
},
mconfig=None,
loop=None,
service_manager=cls.service_manager,
integ_test=False,
)
BridgeTools.create_bridge(cls.BRIDGE, cls.BRIDGE)
# validate vlan id set
vlan = "10"
BridgeTools.create_bridge(cls.UPLINK_BRIDGE, cls.UPLINK_BRIDGE)
set_ip_cmd = [
"ip",
"addr", "replace",
"2.33.44.6",
"dev",
cls.UPLINK_BRIDGE,
]
subprocess.check_call(set_ip_cmd)
BridgeTools.create_internal_iface(
cls.UPLINK_BRIDGE,
cls.UPLINK_DHCP, None,
)
BridgeTools.create_internal_iface(
cls.UPLINK_BRIDGE,
cls.UPLINK_PATCH, None,
)
BridgeTools.create_internal_iface(
cls.UPLINK_BRIDGE,
cls.UPLINK_ETH_PORT, None,
)
cls.thread = start_ryu_app_thread(test_setup)
cls.uplink_br_controller = uplink_bridge_controller_reference.result()
cls.testing_controller = testing_controller_reference.result()
@classmethod
def tearDownClass(cls):
stop_ryu_app_thread(cls.thread)
BridgeTools.destroy_bridge(cls.BRIDGE)
BridgeTools.destroy_bridge(cls.UPLINK_BRIDGE)
def testFlowSnapshotMatch(self):
cls = self.__class__
assert_bridge_snapshot_match(
self, self.UPLINK_BRIDGE, self.service_manager,
include_stats=False,
)
self.assertIn(cls.SGi_IP, get_iface_ipv4(cls.UPLINK_BRIDGE), "ip not found")
@unittest.skip("this test resets the default GW")
class UplinkBridgeWithNonNATTest_IP_VLAN_GW(unittest.TestCase):
BRIDGE = 'testing_br'
MAC_DEST = "5e:cc:cc:b1:49:4b"
BRIDGE_IP = '192.168.128.1'
UPLINK_BRIDGE = 'upt_br0'
UPLINK_DHCP = 'test_dhcp0'
UPLINK_PATCH = 'test_patch_p2'
UPLINK_ETH_PORT = 'test_eth3'
VLAN_TAG = '100'
SGi_IP = "1.6.5.7/24"
SGi_GW = "1.6.5.1"
@classmethod
def setUpClass(cls):
"""
Starts the thread which launches ryu apps
Create a testing bridge, add a port, setup the port interfaces. Then
launch the ryu apps for testing pipelined. Gets the references
to apps launched by using futures.
"""
super(UplinkBridgeWithNonNATTest_IP_VLAN_GW, cls).setUpClass()
warnings.simplefilter('ignore')
cls.service_manager = create_service_manager([])
uplink_bridge_controller_reference = Future()
testing_controller_reference = Future()
test_setup = TestSetup(
apps=[
PipelinedController.UplinkBridge,
PipelinedController.Testing,
PipelinedController.StartupFlows,
],
references={
PipelinedController.UplinkBridge:
uplink_bridge_controller_reference,
PipelinedController.Testing:
testing_controller_reference,
PipelinedController.StartupFlows:
Future(),
},
config={
'bridge_name': cls.BRIDGE,
'bridge_ip_address': cls.BRIDGE_IP,
'ovs_gtp_port_number': 32768,
'clean_restart': True,
'enable_nat': False,
'uplink_bridge': cls.UPLINK_BRIDGE,
'uplink_eth_port_name': cls.UPLINK_ETH_PORT,
'virtual_mac': '02:bb:5e:36:06:4b',
'uplink_patch': cls.UPLINK_PATCH,
'uplink_dhcp_port': cls.UPLINK_DHCP,
'sgi_management_iface_vlan': cls.VLAN_TAG,
'sgi_management_iface_ip_addr': cls.SGi_IP,
'sgi_management_iface_gw': cls.SGi_GW,
'dev_vlan_in': "test_v_in",
'dev_vlan_out': "test_v_out",
},
mconfig=None,
loop=None,
service_manager=cls.service_manager,
integ_test=False,
)
BridgeTools.create_bridge(cls.BRIDGE, cls.BRIDGE)
# validate vlan id set
vlan = "10"
BridgeTools.create_bridge(cls.UPLINK_BRIDGE, cls.UPLINK_BRIDGE)
subprocess.Popen([
"ovs-vsctl", "set", "port", cls.UPLINK_BRIDGE,
"tag=" + vlan,
]).wait()
assert get_ovsdb_port_tag(cls.UPLINK_BRIDGE) == vlan
set_ip_cmd = [
"ip",
"addr", "replace",
"2.33.44.6",
"dev",
cls.UPLINK_BRIDGE,
]
subprocess.check_call(set_ip_cmd)
BridgeTools.create_internal_iface(
cls.UPLINK_BRIDGE,
cls.UPLINK_DHCP, None,
)
BridgeTools.create_internal_iface(
cls.UPLINK_BRIDGE,
cls.UPLINK_PATCH, None,
)
BridgeTools.create_internal_iface(
cls.UPLINK_BRIDGE,
cls.UPLINK_ETH_PORT, None,
)
cls.thread = start_ryu_app_thread(test_setup)
cls.uplink_br_controller = uplink_bridge_controller_reference.result()
cls.testing_controller = testing_controller_reference.result()
@classmethod
def tearDownClass(cls):
stop_ryu_app_thread(cls.thread)
BridgeTools.destroy_bridge(cls.BRIDGE)
BridgeTools.destroy_bridge(cls.UPLINK_BRIDGE)
def testFlowSnapshotMatch(self):
cls = self.__class__
assert_bridge_snapshot_match(
self, self.UPLINK_BRIDGE,
self.service_manager,
include_stats=False,
)
self.assertIn(
cls.SGi_GW, get_iface_gw_ipv4(cls.UPLINK_BRIDGE),
"gw not found",
)
@unittest.skip
class UplinkBridgeWithNonNatUplinkConnect_Test(unittest.TestCase):
BRIDGE = 'testing_br'
IFACE = 'testing_br'
MAC_DEST = "5e:cc:cc:b1:49:4b"
BRIDGE_IP = '192.168.128.1'
SCRIPT_PATH = "/home/vagrant/magma/lte/gateway/python/magma/mobilityd/"
NET_SW_BR = "net_sw_up1"
UPLINK_DHCP = "tino_dhcp"
UPLINK_ETH_PORT = "upb_ul_0"
UPLINK_BRIDGE = 'upt_br0'
UPLINK_PATCH = 'test_patch_p2'
ROUTER_IP = "10.55.0.211"
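    # The helper scripts below (shipped with magma/mobilityd) build a software
    # uplink network; a router is expected to be reachable at ROUTER_IP.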
@classmethod
def _setup_vlan_network(cls, vlan: str):
setup_vlan_switch = cls.SCRIPT_PATH + "scripts/setup-uplink-vlan-sw.sh"
subprocess.check_call([setup_vlan_switch, cls.NET_SW_BR, "upb"])
cls._setup_vlan(vlan)
@classmethod
def _setup_vlan(cls, vlan):
setup_vlan_switch = cls.SCRIPT_PATH + "scripts/setup-uplink-vlan-srv.sh"
subprocess.check_call([setup_vlan_switch, cls.NET_SW_BR, vlan, "55"])
@classmethod
def setUpClass(cls):
"""
Starts the thread which launches ryu apps
Create a testing bridge, add a port, setup the port interfaces. Then
launch the ryu apps for testing pipelined. Gets the references
to apps launched by using futures.
"""
super(UplinkBridgeWithNonNatUplinkConnect_Test, cls).setUpClass()
warnings.simplefilter('ignore')
cls.service_manager = create_service_manager([])
cls._setup_vlan_network("0")
BridgeTools.create_bridge(cls.UPLINK_BRIDGE, cls.UPLINK_BRIDGE)
BridgeTools.create_internal_iface(
cls.UPLINK_BRIDGE,
cls.UPLINK_DHCP, None,
)
BridgeTools.create_internal_iface(
cls.UPLINK_BRIDGE,
cls.UPLINK_PATCH, None,
)
check_connectivity(cls.ROUTER_IP, cls.UPLINK_ETH_PORT)
BridgeTools.add_ovs_port(cls.UPLINK_BRIDGE, cls.UPLINK_ETH_PORT, "200")
        # this is set up after the AGW boots up in NATed mode.
uplink_bridge_controller_reference = Future()
testing_controller_reference = Future()
test_setup = TestSetup(
apps=[
PipelinedController.UplinkBridge,
PipelinedController.Testing,
PipelinedController.StartupFlows,
],
references={
PipelinedController.UplinkBridge:
uplink_bridge_controller_reference,
PipelinedController.Testing:
testing_controller_reference,
PipelinedController.StartupFlows:
Future(),
},
config={
'bridge_name': cls.BRIDGE,
'bridge_ip_address': cls.BRIDGE_IP,
'ovs_gtp_port_number': 32768,
'clean_restart': True,
'enable_nat': False,
'uplink_bridge': cls.UPLINK_BRIDGE,
'uplink_eth_port_name': cls.UPLINK_ETH_PORT,
'virtual_mac': '02:bb:5e:36:06:4b',
'uplink_patch': cls.UPLINK_PATCH,
'uplink_dhcp_port': cls.UPLINK_DHCP,
'sgi_management_iface_vlan': "",
'ovs_vlan_workaround': True,
'dev_vlan_in': "testv1_in",
'dev_vlan_out': "testv1_out",
},
mconfig=None,
loop=None,
service_manager=cls.service_manager,
integ_test=False,
)
BridgeTools.create_bridge(cls.BRIDGE, cls.BRIDGE)
cls.thread = start_ryu_app_thread(test_setup)
cls.uplink_br_controller = uplink_bridge_controller_reference.result()
cls.testing_controller = testing_controller_reference.result()
@classmethod
def tearDownClass(cls):
stop_ryu_app_thread(cls.thread)
BridgeTools.destroy_bridge(cls.BRIDGE)
BridgeTools.destroy_bridge(cls.UPLINK_BRIDGE)
BridgeTools.destroy_bridge(cls.NET_SW_BR)
def testFlowSnapshotMatch(self):
cls = self.__class__
assert_bridge_snapshot_match(
self, self.UPLINK_BRIDGE, self.service_manager,
include_stats=False,
)
self.assertEqual(get_ovsdb_port_tag(cls.UPLINK_BRIDGE), '[]')
        # after non-NAT init, the router should be accessible.
# manually start DHCP client on up-br
check_connectivity(cls.ROUTER_IP, cls.UPLINK_BRIDGE)
class UplinkBridgeTestNatIPAddr(unittest.TestCase):
BRIDGE = 'testing_br'
MAC_DEST = "5e:cc:cc:b1:49:4b"
BRIDGE_IP = '192.168.128.1'
BRIDGE_ETH_PORT = "eth_t1"
UPLINK_BRIDGE = 'upt_br0'
SGi_IP = "1.6.5.77"
@classmethod
def setUpClass(cls):
"""
Starts the thread which launches ryu apps
Create a testing bridge, add a port, setup the port interfaces. Then
launch the ryu apps for testing pipelined. Gets the references
to apps launched by using futures.
"""
super(UplinkBridgeTestNatIPAddr, cls).setUpClass()
warnings.simplefilter('ignore')
cls.service_manager = create_service_manager([])
uplink_bridge_controller_reference = Future()
testing_controller_reference = Future()
test_setup = TestSetup(
apps=[
PipelinedController.UplinkBridge,
PipelinedController.Testing,
PipelinedController.StartupFlows,
],
references={
PipelinedController.UplinkBridge:
uplink_bridge_controller_reference,
PipelinedController.Testing:
testing_controller_reference,
PipelinedController.StartupFlows:
Future(),
},
config={
'bridge_name': cls.BRIDGE,
'bridge_ip_address': cls.BRIDGE_IP,
'ovs_gtp_port_number': 32768,
'clean_restart': True,
'enable_nat': True,
'uplink_bridge': cls.UPLINK_BRIDGE,
'sgi_management_iface_ip_addr': cls.SGi_IP,
'uplink_eth_port_name': cls.BRIDGE_ETH_PORT,
},
mconfig=None,
loop=None,
service_manager=cls.service_manager,
integ_test=False,
)
BridgeTools.create_bridge(cls.BRIDGE, cls.BRIDGE)
BridgeTools.create_bridge(cls.UPLINK_BRIDGE, cls.UPLINK_BRIDGE)
BridgeTools.create_internal_iface(
cls.BRIDGE,
cls.BRIDGE_ETH_PORT, '2.2.2.2',
)
cls.thread = start_ryu_app_thread(test_setup)
cls.uplink_br_controller = uplink_bridge_controller_reference.result()
cls.testing_controller = testing_controller_reference.result()
@classmethod
def tearDownClass(cls):
stop_ryu_app_thread(cls.thread)
BridgeTools.destroy_bridge(cls.BRIDGE)
BridgeTools.destroy_bridge(cls.UPLINK_BRIDGE)
def testFlowSnapshotMatch(self):
cls = self.__class__
assert_bridge_snapshot_match(self, self.UPLINK_BRIDGE, self.service_manager)
self.assertIn(cls.SGi_IP, get_iface_ipv4(cls.BRIDGE_ETH_PORT), "ip not found")
if __name__ == "__main__":
unittest.main()
def check_connectivity(dst: str, dev_name: str):
    """Request a DHCP lease on dev_name, then verify that dst is reachable."""
    try:
        ifdown_if = ["dhclient", dev_name]
        subprocess.check_call(ifdown_if)
    except subprocess.SubprocessError as e:
        logging.warning(
            "Error while requesting DHCP lease on %s: %s",
            dev_name, e,
        )
        return
    hub.sleep(1)
    try:
        ping_cmd = ["ping", "-c", "3", dst]
        subprocess.check_call(ping_cmd)
    except subprocess.SubprocessError as e:
        logging.warning("Error while pinging %s: %s", dst, e)
        # for now don't assert here.
    validate_routing_table(dst, dev_name)
def validate_routing_table(dst: str, dev_name: str) -> None:
    """Assert that the kernel routes traffic for dst via dev_name."""
    dump1 = subprocess.Popen(
        ["ip", "r", "get", dst],
        stdout=subprocess.PIPE,
    )
    for line in dump1.stdout.readlines():
        if "dev" not in str(line):
            continue
        if dev_name in str(line):
            return
    logging.error("could not find route to %s via %s", dst, dev_name)
    # Best-effort debug dump of OVS flows for the device under test.
    # (The original code referenced an undefined `cls` here.)
    dump1 = subprocess.Popen(
        ["ovs-ofctl", "dump-flows", dev_name],
        stdout=subprocess.PIPE,
    )
    for line in dump1.stdout.readlines():
        print("pbs: %s" % line)
    assert 0, "no route to %s via %s" % (dst, dev_name)
| python | 27,481 |
#!/usr/bin/env python
"""
Copyright 2017-2018 Fizyr (https://fizyr.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import argparse
import os
import sys
import warnings
import keras
import keras.preprocessing.image
import tensorflow as tf
# Allow relative imports when being executed as script.
if __name__ == "__main__" and __package__ is None:
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
import keras_retinanet.bin # noqa: F401
__package__ = "keras_retinanet.bin"
# Change these to absolute imports if you copy this script outside the keras_retinanet package.
from .. import layers # noqa: F401
from .. import losses
from .. import models
from ..callbacks import RedirectModel
from ..callbacks.eval import Evaluate
from ..models.retinanet import retinanet_bbox
from ..preprocessing.csv_generator import CSVGenerator
from ..preprocessing.kitti import KittiGenerator
from ..preprocessing.open_images import OpenImagesGenerator
from ..preprocessing.pascal_voc import PascalVocGenerator
from ..utils.anchors import make_shapes_callback
from ..utils.config import read_config_file, parse_anchor_parameters
from ..utils.keras_version import check_keras_version
from ..utils.model import freeze as freeze_model
from ..utils.transform import random_transform_generator
from ..utils.image import random_visual_effect_generator
from ..utils.gpu import setup_gpu
def makedirs(path):
# Intended behavior: try to create the directory,
# pass if the directory exists already, fails otherwise.
# Meant for Python 2.7/3.n compatibility.
try:
os.makedirs(path)
except OSError:
if not os.path.isdir(path):
raise
def model_with_weights(model, weights, skip_mismatch):
""" Load weights for model.
Args
model : The model to load weights for.
weights : The weights to load.
skip_mismatch : If True, skips layers whose shape of weights doesn't match with the model.
"""
if weights is not None:
model.load_weights(weights, by_name=True, skip_mismatch=skip_mismatch)
return model
def create_models(backbone_retinanet, num_classes, weights, multi_gpu=0,
freeze_backbone=False, lr=1e-5, config=None):
""" Creates three models (model, training_model, prediction_model).
Args
backbone_retinanet : A function to call to create a retinanet model with a given backbone.
num_classes : The number of classes to train.
weights : The weights to load into the model.
multi_gpu : The number of GPUs to use for training.
freeze_backbone : If True, disables learning for the backbone.
config : Config parameters, None indicates the default configuration.
Returns
model : The base model. This is also the model that is saved in snapshots.
training_model : The training model. If multi_gpu=0, this is identical to model.
prediction_model : The model wrapped with utility functions to perform object detection (applies regression values and performs NMS).
"""
modifier = freeze_model if freeze_backbone else None
# load anchor parameters, or pass None (so that defaults will be used)
anchor_params = None
num_anchors = None
if config and 'anchor_parameters' in config:
anchor_params = parse_anchor_parameters(config)
num_anchors = anchor_params.num_anchors()
# Keras recommends initialising a multi-gpu model on the CPU to ease weight sharing, and to prevent OOM errors.
# optionally wrap in a parallel model
if multi_gpu > 1:
from keras.utils import multi_gpu_model
with tf.device('/cpu:0'):
model = model_with_weights(backbone_retinanet(num_classes, num_anchors=num_anchors, modifier=modifier), weights=weights, skip_mismatch=True)
training_model = multi_gpu_model(model, gpus=multi_gpu)
else:
model = model_with_weights(backbone_retinanet(num_classes, num_anchors=num_anchors, modifier=modifier), weights=weights, skip_mismatch=True)
training_model = model
# make prediction model
prediction_model = retinanet_bbox(model=model, anchor_params=anchor_params)
# compile model
training_model.compile(
loss={
'regression' : losses.smooth_l1(),
'classification': losses.focal()
},
        optimizer=keras.optimizers.Adam(lr=lr, clipnorm=0.001)
)
return model, training_model, prediction_model
def create_callbacks(model, training_model, prediction_model, validation_generator, args):
""" Creates the callbacks to use during training.
Args
model: The base model.
training_model: The model that is used for training.
prediction_model: The model that should be used for validation.
validation_generator: The generator for creating validation data.
args: parseargs args object.
Returns:
A list of callbacks used for training.
"""
callbacks = []
tensorboard_callback = None
if args.tensorboard_dir:
tensorboard_callback = keras.callbacks.TensorBoard(
log_dir = args.tensorboard_dir,
histogram_freq = 0,
batch_size = args.batch_size,
write_graph = True,
write_grads = False,
write_images = False,
embeddings_freq = 0,
embeddings_layer_names = None,
embeddings_metadata = None
)
if args.evaluation and validation_generator:
if args.dataset_type == 'coco':
from ..callbacks.coco import CocoEval
# use prediction model for evaluation
evaluation = CocoEval(validation_generator, tensorboard=tensorboard_callback)
else:
evaluation = Evaluate(validation_generator, tensorboard=tensorboard_callback, weighted_average=args.weighted_average)
evaluation = RedirectModel(evaluation, prediction_model)
callbacks.append(evaluation)
# save the model
if args.snapshots:
# ensure directory created first; otherwise h5py will error after epoch.
makedirs(args.snapshot_path)
checkpoint = keras.callbacks.ModelCheckpoint(
os.path.join(
args.snapshot_path,
'{backbone}_{dataset_type}_{{epoch:02d}}.h5'.format(backbone=args.backbone, dataset_type=args.dataset_type)
),
verbose=1,
# save_best_only=True,
# monitor="mAP",
# mode='max'
)
checkpoint = RedirectModel(checkpoint, model)
callbacks.append(checkpoint)
callbacks.append(keras.callbacks.ReduceLROnPlateau(
monitor = 'loss',
factor = 0.1,
patience = 2,
verbose = 1,
mode = 'auto',
min_delta = 0.0001,
cooldown = 0,
min_lr = 0
))
if args.tensorboard_dir:
callbacks.append(tensorboard_callback)
return callbacks
def create_generators(args, preprocess_image):
""" Create generators for training and validation.
Args
args : parseargs object containing configuration for generators.
preprocess_image : Function that preprocesses an image for the network.
"""
common_args = {
'batch_size' : args.batch_size,
'config' : args.config,
'image_min_side' : args.image_min_side,
'image_max_side' : args.image_max_side,
'no_resize' : args.no_resize,
'preprocess_image' : preprocess_image,
}
# create random transform generator for augmenting training data
if args.random_transform:
transform_generator = random_transform_generator(
min_rotation=-0.1,
max_rotation=0.1,
min_translation=(-0.1, -0.1),
max_translation=(0.1, 0.1),
min_shear=-0.1,
max_shear=0.1,
min_scaling=(0.9, 0.9),
max_scaling=(1.1, 1.1),
flip_x_chance=0.5,
flip_y_chance=0.5,
)
visual_effect_generator = random_visual_effect_generator(
contrast_range=(0.9, 1.1),
brightness_range=(-.1, .1),
hue_range=(-0.05, 0.05),
saturation_range=(0.95, 1.05)
)
else:
transform_generator = random_transform_generator(flip_x_chance=0.5)
visual_effect_generator = None
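    # build the train/validation generators for the requested dataset type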
if args.dataset_type == 'coco':
# import here to prevent unnecessary dependency on cocoapi
from ..preprocessing.coco import CocoGenerator
train_generator = CocoGenerator(
args.coco_path,
'train2017',
transform_generator=transform_generator,
visual_effect_generator=visual_effect_generator,
**common_args
)
validation_generator = CocoGenerator(
args.coco_path,
'val2017',
shuffle_groups=False,
**common_args
)
elif args.dataset_type == 'pascal':
train_generator = PascalVocGenerator(
args.pascal_path,
'trainval',
transform_generator=transform_generator,
visual_effect_generator=visual_effect_generator,
**common_args
)
validation_generator = PascalVocGenerator(
args.pascal_path,
'test',
shuffle_groups=False,
**common_args
)
elif args.dataset_type == 'csv':
train_generator = CSVGenerator(
args.annotations,
args.classes,
transform_generator=transform_generator,
visual_effect_generator=visual_effect_generator,
**common_args
)
if args.val_annotations:
validation_generator = CSVGenerator(
args.val_annotations,
args.classes,
shuffle_groups=False,
**common_args
)
else:
validation_generator = None
elif args.dataset_type == 'oid':
train_generator = OpenImagesGenerator(
args.main_dir,
subset='train',
version=args.version,
labels_filter=args.labels_filter,
annotation_cache_dir=args.annotation_cache_dir,
parent_label=args.parent_label,
transform_generator=transform_generator,
visual_effect_generator=visual_effect_generator,
**common_args
)
validation_generator = OpenImagesGenerator(
args.main_dir,
subset='validation',
version=args.version,
labels_filter=args.labels_filter,
annotation_cache_dir=args.annotation_cache_dir,
parent_label=args.parent_label,
shuffle_groups=False,
**common_args
)
elif args.dataset_type == 'kitti':
train_generator = KittiGenerator(
args.kitti_path,
subset='train',
transform_generator=transform_generator,
visual_effect_generator=visual_effect_generator,
**common_args
)
validation_generator = KittiGenerator(
args.kitti_path,
subset='val',
shuffle_groups=False,
**common_args
)
else:
raise ValueError('Invalid data type received: {}'.format(args.dataset_type))
return train_generator, validation_generator
def check_args(parsed_args):
""" Function to check for inherent contradictions within parsed arguments.
For example, batch_size < num_gpus
Intended to raise errors prior to backend initialisation.
Args
parsed_args: parser.parse_args()
Returns
parsed_args
"""
if parsed_args.multi_gpu > 1 and parsed_args.batch_size < parsed_args.multi_gpu:
raise ValueError(
"Batch size ({}) must be equal to or higher than the number of GPUs ({})".format(parsed_args.batch_size,
parsed_args.multi_gpu))
if parsed_args.multi_gpu > 1 and parsed_args.snapshot:
raise ValueError(
"Multi GPU training ({}) and resuming from snapshots ({}) is not supported.".format(parsed_args.multi_gpu,
parsed_args.snapshot))
if parsed_args.multi_gpu > 1 and not parsed_args.multi_gpu_force:
raise ValueError("Multi-GPU support is experimental, use at own risk! Run with --multi-gpu-force if you wish to continue.")
if 'resnet' not in parsed_args.backbone:
warnings.warn('Using experimental backbone {}. Only resnet50 has been properly tested.'.format(parsed_args.backbone))
return parsed_args
def parse_args(args):
""" Parse the arguments.
"""
parser = argparse.ArgumentParser(description='Simple training script for training a RetinaNet network.')
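    # each dataset type gets its own sub-command with dataset-specific positional arguments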
subparsers = parser.add_subparsers(help='Arguments for specific dataset types.', dest='dataset_type')
subparsers.required = True
coco_parser = subparsers.add_parser('coco')
coco_parser.add_argument('coco_path', help='Path to dataset directory (ie. /tmp/COCO).')
pascal_parser = subparsers.add_parser('pascal')
pascal_parser.add_argument('pascal_path', help='Path to dataset directory (ie. /tmp/VOCdevkit).')
kitti_parser = subparsers.add_parser('kitti')
kitti_parser.add_argument('kitti_path', help='Path to dataset directory (ie. /tmp/kitti).')
def csv_list(string):
return string.split(',')
oid_parser = subparsers.add_parser('oid')
oid_parser.add_argument('main_dir', help='Path to dataset directory.')
oid_parser.add_argument('--version', help='The current dataset version is v4.', default='v4')
oid_parser.add_argument('--labels-filter', help='A list of labels to filter.', type=csv_list, default=None)
oid_parser.add_argument('--annotation-cache-dir', help='Path to store annotation cache.', default='.')
oid_parser.add_argument('--parent-label', help='Use the hierarchy children of this label.', default=None)
csv_parser = subparsers.add_parser('csv')
csv_parser.add_argument('annotations', help='Path to CSV file containing annotations for training.')
csv_parser.add_argument('classes', help='Path to a CSV file containing class label mapping.')
csv_parser.add_argument('--val-annotations', help='Path to CSV file containing annotations for validation (optional).')
group = parser.add_mutually_exclusive_group()
group.add_argument('--snapshot', help='Resume training from a snapshot.')
group.add_argument('--imagenet-weights', help='Initialize the model with pretrained imagenet weights. This is the default behaviour.', action='store_const', const=True, default=True)
group.add_argument('--weights', help='Initialize the model with weights from a file.')
group.add_argument('--no-weights', help='Don\'t initialize the model with any weights.', dest='imagenet_weights', action='store_const', const=False)
parser.add_argument('--backbone', help='Backbone model used by retinanet.', default='resnet50', type=str)
parser.add_argument('--batch-size', help='Size of the batches.', default=1, type=int)
parser.add_argument('--gpu', help='Id of the GPU to use (as reported by nvidia-smi).')
parser.add_argument('--multi-gpu', help='Number of GPUs to use for parallel processing.', type=int, default=0)
parser.add_argument('--multi-gpu-force', help='Extra flag needed to enable (experimental) multi-gpu support.', action='store_true')
parser.add_argument('--epochs', help='Number of epochs to train.', type=int, default=50)
parser.add_argument('--steps', help='Number of steps per epoch.', type=int, default=10000)
parser.add_argument('--lr', help='Learning rate.', type=float, default=1e-5)
parser.add_argument('--snapshot-path', help='Path to store snapshots of models during training (defaults to \'./snapshots\')', default='./snapshots')
parser.add_argument('--tensorboard-dir', help='Log directory for Tensorboard output', default='./logs')
parser.add_argument('--no-snapshots', help='Disable saving snapshots.', dest='snapshots', action='store_false')
parser.add_argument('--no-evaluation', help='Disable per epoch evaluation.', dest='evaluation', action='store_false')
parser.add_argument('--freeze-backbone', help='Freeze training of backbone layers.', action='store_true')
parser.add_argument('--random-transform', help='Randomly transform image and annotations.', action='store_true')
parser.add_argument('--image-min-side', help='Rescale the image so the smallest side is min_side.', type=int, default=400)
parser.add_argument('--image-max-side', help='Rescale the image if the largest side is larger than max_side.', type=int, default=1333)
    parser.add_argument('--no-resize', help='Don\'t rescale the image.', action='store_true')
parser.add_argument('--config', help='Path to a configuration parameters .ini file.')
parser.add_argument('--weighted-average', help='Compute the mAP using the weighted average of precisions among classes.', action='store_true')
parser.add_argument('--compute-val-loss', help='Compute validation loss during training', dest='compute_val_loss', action='store_true')
# Fit generator arguments
parser.add_argument('--multiprocessing', help='Use multiprocessing in fit_generator.', action='store_true')
parser.add_argument('--workers', help='Number of generator workers.', type=int, default=1)
parser.add_argument('--max-queue-size', help='Queue length for multiprocessing workers in fit_generator.', type=int, default=10)
return check_args(parser.parse_args(args))
def main(args=None):
# parse arguments
if args is None:
args = sys.argv[1:]
args = parse_args(args)
# create object that stores backbone information
backbone = models.backbone(args.backbone)
# make sure keras is the minimum required version
check_keras_version()
# optionally choose specific GPU
if args.gpu:
setup_gpu(args.gpu)
# optionally load config parameters
if args.config:
args.config = read_config_file(args.config)
# create the generators
train_generator, validation_generator = create_generators(args, backbone.preprocess_image)
# create the model
if args.snapshot is not None:
print('Loading model, this may take a second...')
model = models.load_model(args.snapshot, backbone_name=args.backbone)
training_model = model
anchor_params = None
if args.config and 'anchor_parameters' in args.config:
anchor_params = parse_anchor_parameters(args.config)
prediction_model = retinanet_bbox(model=model, anchor_params=anchor_params)
else:
weights = args.weights
# default to imagenet if nothing else is specified
if weights is None and args.imagenet_weights:
weights = backbone.download_imagenet()
print('Creating model, this may take a second...')
model, training_model, prediction_model = create_models(
backbone_retinanet=backbone.retinanet,
num_classes=train_generator.num_classes(),
weights=weights,
multi_gpu=args.multi_gpu,
freeze_backbone=args.freeze_backbone,
lr=args.lr,
config=args.config
)
# print model summary
print(model.summary())
# this lets the generator compute backbone layer shapes using the actual backbone model
if 'vgg' in args.backbone or 'densenet' in args.backbone:
train_generator.compute_shapes = make_shapes_callback(model)
if validation_generator:
validation_generator.compute_shapes = train_generator.compute_shapes
# create the callbacks
callbacks = create_callbacks(
model,
training_model,
prediction_model,
validation_generator,
args,
)
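    # only pass validation data to fit_generator when validation loss was requested;
    # the evaluation callback created above keeps its own reference to the generator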
if not args.compute_val_loss:
validation_generator = None
# start training
training_model.fit_generator(
generator=train_generator,
steps_per_epoch=args.steps,
epochs=args.epochs,
verbose=1,
callbacks=callbacks,
workers=args.workers,
use_multiprocessing=args.multiprocessing,
max_queue_size=args.max_queue_size,
validation_data=validation_generator
)
#return model
return training_model
if __name__ == '__main__':
main()
| python | 21,487 |
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Bravo Logistics and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
class TestVehicleInspection(unittest.TestCase):
pass
| python | 223 |
from distutils.core import setup
setup(
name='sportplot',
version='0.1',
packages=['sportplot'],
author="Elliott Barcikowski",
)
| python | 146 |