Dataset Viewer (First 5GB)
| prompt (string) | target (string) |
|---|---|
Implement continuation:
<|code_start|>import errno
import logging
import os
from ceph_deploy import conf
from ceph_deploy import exc
from ceph_deploy import hosts
from ceph_deploy.util import system
from ceph_deploy.lib import remoto
from ceph_deploy.cliutil import priority
LOG = logging.getLogger(__name__)
def get_bootstrap_rgw_key(cluster):
"""
Read the bootstrap-rgw key for `cluster`.
"""
path = '{cluster}.bootstrap-rgw.keyring'.format(cluster=cluster)
try:
with open(path, 'rb') as f:
return f.read()
except IOError:
raise RuntimeError('bootstrap-rgw keyring not found; run \'gatherkeys\'')
def create_rgw(distro, name, cluster, init):
conn = distro.conn
path = '/var/lib/ceph/radosgw/{cluster}-{name}'.format(
cluster=cluster,
name=name
)
conn.remote_module.safe_makedirs(path)
bootstrap_keyring = '/var/lib/ceph/bootstrap-rgw/{cluster}.keyring'.format(
cluster=cluster
)
keypath = os.path.join(path, 'keyring')
stdout, stderr, returncode = remoto.process.check(
conn,
[
'ceph',
'--cluster', cluster,
'--name', 'client.bootstrap-rgw',
'--keyring', bootstrap_keyring,
'auth', 'get-or-create', 'client.{name}'.format(name=name),
'osd', 'allow rwx',
'mon', 'allow rw',
'-o',
keypath,
]
)
if returncode > 0 and returncode != errno.EACCES:
for line in stderr:
conn.logger.error(line)
for line in stdout:
# yes stdout as err because this is an error
conn.logger.error(line)
conn.logger.error('exit code from command was: %s' % returncode)
raise RuntimeError('could not create rgw')
remoto.process.check(
conn,
[
'ceph',
'--cluster', cluster,
'--name', 'client.bootstrap-rgw',
'--keyring', bootstrap_keyring,
'auth', 'get-or-create', 'client.{name}'.format(name=name),
'osd', 'allow *',
'mon', 'allow *',
'-o',
keypath,
]
)
conn.remote_module.touch_file(os.path.join(path, 'done'))
conn.remote_module.touch_file(os.path.join(path, init))
if init == 'upstart':
remoto.process.run(
conn,
[
'initctl',
'emit',
'radosgw',
'cluster={cluster}'.format(cluster=cluster),
'id={name}'.format(name=name),
],
timeout=7
)
elif init == 'sysvinit':
remoto.process.run(
conn,
[
'service',
'ceph-radosgw',
'start',
],
timeout=7
)
if distro.is_el:
system.enable_service(distro.conn, service='ceph-radosgw')
elif init == 'systemd':
remoto.process.run(
conn,
[
'systemctl',
'enable',
'ceph-radosgw@{name}'.format(name=name),
],
<|code_end|>
|
To complete this:
timeout=7
)
remoto.process.run(
conn,
[
'systemctl',
'start',
'ceph-radosgw@{name}'.format(name=name),
],
timeout=7
)
remoto.process.run(
conn,
[
'systemctl',
'enable',
'ceph.target',
],
timeout=7
)
def rgw_create(args):
conf_data = conf.ceph.load_raw(args)
LOG.debug(
'Deploying rgw, cluster %s hosts %s',
args.cluster,
' '.join(':'.join(x or '' for x in t) for t in args.rgw),
)
key = get_bootstrap_rgw_key(cluster=args.cluster)
bootstrapped = set()
errors = 0
for hostname, name in args.rgw:
try:
distro = hosts.get(hostname, username=args.username)
rlogger = distro.conn.logger
LOG.info(
'Distro info: %s %s %s',
distro.name,
distro.release,
distro.codename
)
LOG.debug('remote host will use %s', distro.init)
if hostname not in bootstrapped:
bootstrapped.add(hostname)
LOG.debug('deploying rgw bootstrap to %s', hostname)
distro.conn.remote_module.write_conf(
args.cluster,
conf_data,
args.overwrite_conf,
)
path = '/var/lib/ceph/bootstrap-rgw/{cluster}.keyring'.format(
cluster=args.cluster,
)
if not distro.conn.remote_module.path_exists(path):
rlogger.warning('rgw keyring does not exist yet, creating one')
distro.conn.remote_module.write_keyring(path, key)
create_rgw(distro, name, args.cluster, distro.init)
distro.conn.exit()
LOG.info(
('The Ceph Object Gateway (RGW) is now running on host %s and '
'default port %s'),
hostname,
'7480'
)
except RuntimeError as e:
LOG.error(e)
errors += 1
if errors:
raise exc.GenericError('Failed to create %d RGWs' % errors)
def rgw(args):
if args.subcommand == 'create':
rgw_create(args)
else:
LOG.error('subcommand %s not implemented', args.subcommand)
def colon_separated(s):
host = s
name = s
if s.count(':') == 1:
(host, name) = s.split(':')
name = 'rgw.' + name
return (host, name)
@priority(30)
def make(parser):
"""
Ceph RGW daemon management
"""
rgw_parser = parser.add_subparsers(dest='subcommand')
rgw_parser.required = True
rgw_create = rgw_parser.add_parser(
'create',
help='Create an RGW instance'
)
rgw_create.add_argument(
'rgw',
metavar='HOST[:NAME]',
nargs='+',
type=colon_separated,
help='host (and optionally the daemon name) to deploy on. \
NAME is automatically prefixed with \'rgw.\'',
)
parser.set_defaults(
func=rgw,
)
|
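A quick self-contained check of the HOST[:NAME] parsing rule implemented by colon_separated in the row above (the logic is copied verbatim from the row; the sample hostnames are invented for illustration):
def colon_separated(s):
    host = s
    name = s
    if s.count(':') == 1:
        (host, name) = s.split(':')
        name = 'rgw.' + name
    return (host, name)

# A bare host leaves the daemon name equal to the hostname (no prefix).
assert colon_separated('node1') == ('node1', 'node1')
# Only an explicit HOST:NAME form receives the 'rgw.' prefix.
assert colon_separated('node1:gw1') == ('node1', 'rgw.gw1')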
Continue from this point:
<|code_start|># Copyright 2009 Qiqi Wang
#
# This file is part of wanginterp.
#
# wanginterp is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Interpolation and regression in multi-dimensional space
reference:
Q Wang et al. A Multivariate Rational Interpolation
Scheme with High Rates of Convergence.
Submitted to Journal of Computational Physics.
"""
import copy
import sys
import pickle
import time
import numpy
import pylab
from numpy import zeros, ones, eye, kron, linalg, dot, exp, sqrt, diag, pi, \
asarray, sign
from _interp1d import _factorial, _solve_L, _solve_R, \
_lsqr_wang, _lsqr_golub
from _interp2d import _binomial, _binomial_inv, _order_set_2d
# ---------------------------------------------------------------------------- #
def _order_set(N, d):
"Return the total order set in 2D"
if d == 0:
return []
else:
set = []
for i in range(N+1):
if i > 0:
set.append([i] + [0]*(d-1))
lesser_set = _order_set(N-i, d-1)
set.extend([i] + order for order in lesser_set)
return sorted(set, key=sum)
# ---------------------------------------------------------------------------- #
class Interp(object):
"""
Interpolation in multi-dimensional space.
xv is the ``value data points'', i.e. the points
where the function value is available and given
by fxv; dfxv estimates the standard deviation of
the error in the function values; the default of
dfxv is 0 (fxv is exact).
xg is the ``gradient data points'', i.e. points
where the function gradient is available and given
by fpxg; dfpxg estimates the standard deviation of
the error in the gradient values; the default of
dfpxg is 0 (fpxg is exact).
beta is the `magnitude' of the target function,
can be automatically calculated.
gamma is the `wave number' of the target function,
can be automatically calculated.
Combined with beta, it provides an estimate of
the derivative growth: f^(k) = O(beta * gamma**k)
Larger gamma yields more conservative, more robust
and lower order interpolation.
N is the order of the Taylor expansion, can be
automatically calculated. Smaller N yields lower
order interpolation. Numerical instability may
occur when N is too large.
p is the polynomial order. The interpolant is
forced to interpolate order p-1 polynomials
exactly. p=1 is the most robust; higher p makes
a difference only when gamma is large, or when
data is sparse and oscillatory and gamma is
automatically calculated.
verbose is the verbosity level. 0 is silent.
Reference:
* Q.Wang et al. A Rational Interpolation Scheme with
Super-polynomial Rate of Convergence.
"""
def __init__(self, xv, fxv, dfxv=None, xg=None, fpxg=None, dfpxg=None, \
beta=None, gamma=None, N=None, p=1, verbose=1, \
safety_factor=1.0):
"""
__init__(self, xv, fxv, dfxv=None, xg=None,
fpxg=None, dfpxg=None, beta=None,
gamma=None, N=None, p=1)
Instantiation function, see class documentation
for arguments.
fxv must have the same size as xv.
dfxv must have the same size as xv, or None for
default (all 0).
fpxg must be None if xg is None, or have the same
size as xg if xg is not None.
dfpxg must be None if xg is None; if xg is not
None it must have the same size as xg, or None for
default (all 0).
beta and gamma must be both None or both given.
When they are None, their values are automatically
calculated. The calculation of gamma may take a
long time if the number of datapoints is large.
"""
assert verbose == 0 or verbose == 1 or verbose == 2
self.verbose = verbose
# verify and save value data points
assert xv.ndim == 2 and fxv.ndim == 1
d = xv.shape[1]
assert xv.shape[1] == d and xv.shape[0] == fxv.shape[0]
self.xv = copy.copy(xv)
self.fxv = copy.copy(fxv)
if dfxv is None:
self.dfxv = zeros(fxv.shape)
else:
assert dfxv.shape == fxv.shape
self.dfxv = copy.copy(dfxv)
# verify and save gradient data points
if xg is None:
assert fpxg is None and dfpxg is None
self.xg = zeros([0,d])
self.fpxg = zeros([0,d])
self.dfpxg = zeros(0)
else:
assert xg.ndim == 2 and xg.shape[1] == d
assert fpxg is not None and fpxg.shape == xg.shape
self.xg = copy.copy(xg)
self.fpxg = copy.copy(fpxg)
if dfpxg is None:
self.dfpxg = zeros(xg.shape[0])
else:
assert dfpxg.ndim == 1 and dfpxg.shape[0] == xg.shape[0]
self.dfpxg = copy.copy(dfpxg)
# check and save safety factor
assert safety_factor > 0.0
self.safety_factor = float(safety_factor)
# check and automatically calculate N
self.nv = self.xv.shape[0]
self.ng = self.xg.shape[0]
self.n = self.nv + self.ng * d
if N is None:
self.N = _binomial_inv(min(self.n, 100), d) - d
else:
self.N = N
assert self.N > 0
assert int(p) == p
self.p = int(p)
self.d = d
# automatically calculate beta and gamma
if beta is None:
assert gamma is None
self.beta = self.calc_beta()
self.gamma = self.calc_gamma()
else:
self.beta = float(beta)
self.gamma = float(gamma)
def interp_matrices(self, x, beta=None, gamma=None):
"""
Calculate the matrices X, E and C for the
interpolation scheme. x is the point where the
interpolation is evaluated.
"""
assert x.dtype == float
N, n, p, nv, ng, d = self.N, self.n, self.p, self.nv, self.ng, self.d
if beta is None:
beta = self.beta
if gamma is None:
gamma = self.gamma
# construct the order set
order_set = _order_set(N+1, self.d)
M1 = _binomial(N+d,d) - 1
M2 = _binomial(N+d+1,d) - 1
assert len(order_set) == M2
# construct X = [Xv,Xg]
X = zeros([M1, n], dtype=float)
for i, kappa in enumerate(order_set[:M1]):
X[i,:nv] = gamma**sum(kappa) / _factorial(kappa) * \
((self.xv - x)**kappa).prod(1)
assert ng == 0
X *= beta
# construct diagonal G matrix for the Lagrange residual
Er2 = zeros(n)
for kappa in order_set[M1:]:
Eri = gamma**sum(kappa) / _factoria<|code_end|>
Complete the logic.
|
Final:
l(kappa) * \
((self.xv - x)**kappa).prod(1)
Er2[:nv] += Eri**2
assert ng == 0
Er2 *= beta**2
# construct diagonal H matrix for measurement errors
Ee = zeros(n)
Ee[:nv] = self.dfxv
assert ng == 0
# construct E
E = sqrt(Er2 + Ee**2)
# construct C
M = _binomial(p+1,d) - 1
assert M <= len(order_set)
C = zeros([M+1, n])
C[0,:nv] = 1.0
C[0,nv:] = 0.0
for i, kappa in enumerate(order_set[:M]):
C[i+1,:nv] = ((self.xv - x)**kappa).prod(1)
assert ng == 0
return X, E, C
def interp_coef(self, x, beta=None, gamma=None):
"""
Calculate interpolation coefficients in
multi-dimensional space.
x is the point where the interpolation is evaluated.
beta is the `magnitude' of the target function.
gamma is the `wave number' of the target function.
combined with beta, it provides an estimate of the derivative growth:
f^(k) = O(beta * gamma**k)
larger gamma = more conservative and lower order interpolation.
return values:
av, ag, er2 = interp.interp_coef(x)
av and ag are the interpolation coefficients.
er2 is the expected squared residual.
"""
x = numpy.asarray(x, dtype=float)
# calculate interpolation coefficients at x
if abs(self.xv - x).sum(1).min() < 1.0E-12: # exactly matches a data point
imatch = abs(self.xv - x).sum(1).argmin()
if abs(self.dfxv[imatch]).sum() == 0.0:
# and function value on that point is exact
av = numpy.array([0]*imatch+[1]+[0]*(self.nv-imatch-1))
ag = numpy.zeros(self.xg.shape)
return av, ag, 0.0
# construct matrices
X, E, C = self.interp_matrices(x, beta, gamma)
# assemble the diagonal of matrix A, and sort by its diagonal
diagA = (X**2).sum(0) + E**2
isort = sorted(range(self.n), key=diagA.__getitem__)
irevt = sorted(range(self.n), key=isort.__getitem__)
# permute columns of X and diagonal of G and H
X = X[:,isort]
E = E[isort]
C = C[:,isort]
# solve least square
if C.shape[0] == 1:
a = _lsqr_wang(X, E, C[0,:])
else:
b = zeros(X.shape[0] + X.shape[1])
e = zeros(C.shape[0])
e[0] = 1.0
a = _lsqr_golub(X, E, b, C, e)
# reverse sorting permutation to get a and b
arevt = a[irevt]
av = arevt[:self.nv]
ag = arevt[self.nv:].reshape([self.ng,self.d])
# compute the expected squared residual
finite = (a != 0)
Xa = dot(X[:,finite], a[finite])
Ea = (E*a)[finite]
er2 = (Xa**2).sum() + (Ea**2).sum()
return av, ag, er2
def calc_beta(self):
"""
Estimate the `magnitude' parameter beta from data points.
"""
assert self.fxv.ndim == 1 and self.fxv.shape == self.dfxv.shape
f_bar = self.fxv.mean()
ratio = (self.dfxv**2).sum() / ((self.fxv - f_bar)**2).sum() * \
float(self.nv-1) / float(self.nv)
beta = sqrt(((self.fxv - f_bar)**2).sum() / (self.nv-1) * exp(-ratio))
return beta
def _calc_gamma_bounds(self):
"""
Calculate lower and upper bounds for gamma based
on the distribution of grid points.
Returns (gamma_min, gamma_max) pair.
"""
delta_min, delta_max = numpy.inf, 0.0
for xi in list(self.xv) + list(self.xg):
d2xv = ((xi - self.xv)**2).sum(1)
if self.ng > 0:
d2xg = ((xi - self.xg)**2).sum(1)
delta_max = max(delta_max, max(d2xv.max(), d2xg.max()))
else:
delta_max = max(delta_max, d2xv.max())
for i, xi in enumerate(self.xv):
if i > 0:
d2x = ((xi - self.xv[:i,:])**2).sum(1)
delta_min = min(delta_min, d2x.min())
for i, xi in enumerate(self.xg):
if i > 0:
d2x = ((xi - self.xg[:i,:])**2).sum(1)
delta_min = min(delta_min, d2x.min())
delta_min, delta_max = sqrt(delta_min), sqrt(delta_max)
assert delta_max > delta_min
gamma_min = 1. / delta_max
gamma_max = pi / delta_min
return gamma_min, gamma_max
def _calc_res_ratio(self, iv, beta, gamma, safety_factor=None):
"""
A utility function used by calc_gamma, calculates
the ratio of real residual to the estimated
residual at the iv'th value data point, which
is used to make decision in the bisection process
for gamma at the iv'th data point.
"""
if safety_factor is None:
safety_factor = self.safety_factor
base = range(iv) + range(iv+1,self.nv)
subinterp = Interp(self.xv[base], self.fxv[base], self.dfxv[base], \
self.xg, self.fpxg, self.dfpxg, beta, gamma, \
self.N, self.p, self.verbose)
av, ag, er2 = subinterp.interp_coef(self.xv[iv])
resid = dot(av,self.fxv[base]) + dot(ag.flat,self.fpxg.flat) - self.fxv[iv]
return resid**2 / (er2 + self.dfxv[iv]**2) * safety_factor
def calc_gamma(self):
"""
Estimate the `wave number' parameter gamma from
data points. This function prints progress
information when self.verbose > 1.
"""
assert isinstance(self.beta, float)
# logarithmic bisection for gamma
gamma_min, gamma_max = self._calc_gamma_bounds()
while gamma_max / gamma_min > 1.1:
if self.verbose > 1:
print ' bisecting [', gamma_min, ',', gamma_max, '] for gamma...'
gamma_mid = sqrt(gamma_max * gamma_min)
res_ratio = 0.0
for i in range(self.nv):
res_ratio += self._calc_res_ratio(i, self.beta, gamma_mid)
res_ratio /= self.nv
if res_ratio < 1.0:
gamma_max = gamma_mid
else:
gamma_min = gamma_mid
# final selected gamma
gamma_mid = sqrt(gamma_max * gamma_min)
if self.verbose > 1:
print ' using gamma = ', gamma_mid
return gamma_mid
def interp(self, x, compute_df=False):
"""
Interpolation in multi-dimensional space.
x is the point (a size-d array) or points
(a list of size-d arrays or a shape (n,d) array)
where the interpolation is evaluated.
compute_df indicates whether an estimated standard
deviation of the error in the interpolation
approximation is also returned.
Usage:
fx = interp(x, compute_df=False)";
fx, df = interp(x, compute_df=True)"
"""
# evaluate interpolant value at a single point
x = numpy.array(x)
if x.shape == (self.d,):
av, ag, er2 = self.interp_coef(x)
fx = dot(av, self.fxv) + dot(ag.flat, self.fpxg.flat)
dfx = sqrt(er2)
if compute_df:
return fx, dfx
else:
return fx
# at multiple points
else:
fx, dfx = [], []
for xi in x:
assert xi.shape == (self.d,)
av, ag, er2 = self.interp_coef(xi)
fx.append(dot(av, self.fxv) + dot(ag.flat, self.fpxg.flat))
dfx.append(sqrt(er2))
if compute_df:
return numpy.asarray(fx), numpy.asarray(dfx)
else:
return numpy.asarray(fx)
|
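The multi-index bookkeeping in the row above is easy to sanity-check in isolation: _order_set(N, d) enumerates every non-zero multi-index of total order at most N in d dimensions, so its length should be C(N+d, d) - 1, the count the class obtains from _binomial(N+d, d) - 1. A minimal sketch in Python 3 syntax (the module itself is Python 2), assuming _binomial(n, k) computes the ordinary binomial coefficient, for which math.comb stands in here:
from math import comb

def order_set(N, d):
    # Same recursion as _order_set above, restated standalone.
    if d == 0:
        return []
    result = []
    for i in range(N + 1):
        if i > 0:
            result.append([i] + [0] * (d - 1))  # index along the first axis only
        for order in order_set(N - i, d - 1):   # recurse over the remaining axes
            result.append([i] + order)
    return sorted(result, key=sum)              # ascending total order

assert order_set(2, 2) == [[0, 1], [1, 0], [0, 2], [1, 1], [2, 0]]
for N, d in [(1, 2), (2, 2), (3, 3)]:
    assert len(order_set(N, d)) == comb(N + d, d) - 1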
Finish:
<|code_start|># -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""External user authentication for simple robots
This implements an external authentication system suitable for robot usage.
User attributes are retrieved directly from the form dictionary of the request
object.
"""
import os
import sys
import hmac
import time
import base64
if sys.hexversion < 0x2060000:
try:
import simplejson as json
except ImportError:
# Okay, no Ajax app will be possible, but continue anyway,
# since this package is only recommended, not mandatory.
pass
else:
import json
if sys.hexversion < 0x2050000:
import sha as sha1
else:
from hashlib import sha1
from cPickle import loads, dumps
from zlib import decompress, compress
from invenio.shellutils import mymkdir
from invenio.external_authentication import ExternalAuth, InvenioWebAccessExternalAuthError
from invenio.config import CFG_ETCDIR, CFG_SITE_URL, CFG_SITE_SECURE_URL
CFG_ROBOT_EMAIL_ATTRIBUTE_NAME = 'email'
CFG_ROBOT_NICKNAME_ATTRIBUTE_NAME = 'nickname'
CFG_ROBOT_GROUPS_ATTRIBUTE_NAME = 'groups'
CFG_ROBOT_TIMEOUT_ATTRIBUTE_NAME = '__timeout__'
CFG_ROBOT_USERIP_ATTRIBUTE_NAME = '__userip__'
CFG_ROBOT_GROUPS_SEPARATOR = ';'
CFG_ROBOT_URL_TIMEOUT = 3600
CFG_ROBOT_KEYS_PATH = os.path.join(CFG_ETCDIR, 'webaccess', 'robot_keys.dat')
def normalize_ip(ip):
"""
@return: a normalized IP, e.g. 123.02.12.12 -> 123.2.12.12
"""
try:
return '.'.join(str(int(number)) for number in ip.split('.'))
except ValueError:
## e.g. if it's IPV6 ::1
return ip
def load_robot_keys():
"""
@return: the robot key dictionary.
"""
from cPickle import loads
from zlib import decompress
try:
robot_keys = loads(decompress(open(CFG_ROBOT_KEYS_PATH).read()))
if not isinstance(robot_keys, dict):
return {}
else:
return robot_keys
except:
return {}
class ExternalAuthRobot(ExternalAuth):
"""
This class implements an external authentication method suitable to be
used by an external service that, after having authenticated a user,
will provide a URL to the user that, once followed, will successfully
log the user into Invenio, with any details the external service
decided to provide to the Invenio installation.
Such URL should be built as follows:
BASE?QUERY
where BASE is CFG_SITE_SECURE_URL/youraccount/robotlogin
and QUERY is a urlencoded mapping of the following key->values:
- assertion: an assertion, i.e. a piece of information describing the
user, see below for more details.
- robot: the identifier of the external service providing the assertion
- login_method: the name of the login method as defined in CFG_EXTERNAL_AUTHENTICATION.
- digest: the digest of the signature as detailed below.
- referer: the URL where the user should be redirected after successful
login (it is called referer because, for historical reasons, this is
the original URL of the page on which a human user clicked "login").
the "assertion" should be a JSON serialized mapping with the following
keys:
- email: the email of the user (i.e. its identifier).
- nickname: optional nickname of the user.
- groups: an optional ';'-separated list of groups to which the user
belongs.
- __timeout__: the number of seconds (floating point) from the Epoch,
after which the URL will no longer be valid. (expressed in UTC)
- __userip__: the IP address of the user for whom this URL has been
created. (if the user follows this URL from a different IP address
the request will not be valid)
- any other key can be added and will be merged in the external user
settings.
If L{use_zlib} is True the assertion is a base64-url-flavour encoding
of the zlib compression of the original assertion (useful for shortening
the URL while keeping it easy to type).
The "digest" is the hexadecimal representation of the digest using the
HMAC-SHA1 method to sign the assertion with the secret key associated
with the robot for the given login_method.
@param enforce_external_nicknames: whether to trust nicknames provided by
the external service and use them (if possible) as unique identifier
in the system.
@type enforce_external_nicknames: boolean
@param email_attribute_name: the actual key in the assertion that will
contain the email.
@type email_attribute_name: string
@param nickname_attribute_name: the actual key in the assertion that will
contain the nickname.
@type nickname_attribute_name: string
@param groups_attribute_name: the actual key in the assertion that will
contain the groups.
@type groups_attribute_name: string
@param groups_separator: the string used to separate groups.
@type groups_separator: string
@param timeout_attribute_name: the actual key in the assertion that will
contain the timeout.
@type timeout_attribute_name: string
@param userip_attribute_name: the actual key in the assertion that will
contain the user IP.
@type userip_attribute_name: string
@param check_user_ip: whether to check for the IP address of the user
using the given URL, against the IP address stored in the assertion
to be identical.
@type check_user_ip: boolean
@param use_zlib: whether to use base64-url-flavour encoding of the zlib
compression of the json serialization of the assertion or simply
the json serialization of the assertion.
@type use_zlib: boolean
"""
def __init__(self, enforce_external_nicknames=False,
email_attribute_name=CFG_ROBOT_EMAIL_ATTRIBUTE_NAME,
nickname_attribute_name=CFG_ROBOT_NICKNAME_ATTRIBUTE_NAME,
groups_attribute_name=CFG_ROBOT_GROUPS_ATTRIBUTE_NAME,
groups_separator=CFG_ROBOT_GROUPS_SEPARATOR,
timeout_attribute_name=CFG_ROBOT_TIMEOUT_ATTRIBUTE_NAME,
userip_attribute_name=CFG_ROBOT_USERIP_ATTRIBUTE_NAME,
check_user_ip=True,
use_zlib=True,
):
ExternalAuth.__init__(self, enforce_external_nicknames=enforce_external_nicknames)
self.email_attribute_name = email_attribute_name
self.nickname_attribute_name = nickname_attribute_name
self.groups_attribute_name = groups_attribute_name
self.groups_separator = groups_separator
self.timeout_attribute_name = timeout_attribute_name
self.userip_attribute_name = userip_attribute_name
self.check_user_ip = check_user_ip
self.use_zlib = use_zlib
def __extract_attribute(self, req):
"""
Load the given assertion from the request, extract all the attributes
needed to properly log in the user, and verify that the data is
both well formed and correctly signed.
"""
from invenio.bibedit_utils import json_unicode_to_utf8
from invenio.webinterface_handler import wash_urlargd
args = wash_urlargd(req.form, {
'assertion': (str, ''),
'robot': (str, ''),
'digest': (str, ''),
'login_method': (str, '')})
assertion = args['assertion']
digest = args['digest']
robot = args['robot']
login_method = args['login_method']
shared_key = load_robot_keys().get(login_method, {}).get(robot)
if shared_key is None:
raise InvenioWebAccessExternalAuthError("A key does not exist for robot: %s, login_method: %s" % (robot, login_method))
if not self.verify(shared_key, assertion, digest):
raise InvenioWebAccessExternalAuthError("The provided assertion does not validate against the digest %s for robot %s" % (repr(digest), repr(robot)))
if self.use_zlib:
try:
## Workaround to Perl implementation that does not add
## any padding to the base64 e<|code_end|>
|
Logical continuation:
ncoding.
needed_pad = (4 - len(assertion) % 4) % 4
assertion += needed_pad * '='
assertion = decompress(base64.urlsafe_b64decode(assertion))
except:
raise InvenioWebAccessExternalAuthError("The provided assertion is corrupted")
data = json_unicode_to_utf8(json.loads(assertion))
if not isinstance(data, dict):
raise InvenioWebAccessExternalAuthError("The provided assertion is invalid")
timeout = data[self.timeout_attribute_name]
if timeout < time.time():
raise InvenioWebAccessExternalAuthError("The provided assertion is expired")
userip = data.get(self.userip_attribute_name)
if not self.check_user_ip or (normalize_ip(userip) == normalize_ip(req.remote_ip)):
return data
else:
raise InvenioWebAccessExternalAuthError("The provided assertion has been issued for a different IP address (%s instead of %s)" % (userip, req.remote_ip))
def auth_user(self, username, password, req=None):
"""Authenticate user-supplied USERNAME and PASSWORD. Return
None if authentication failed, or the email address of the
person if the authentication was successful. In order to do
this you may perhaps have to keep a translation table between
usernames and email addresses.
Raise InvenioWebAccessExternalAuthError in case of external troubles.
"""
data = self.__extract_attribute(req)
email = data.get(self.email_attribute_name)
if email:
if isinstance(email, str):
return email.strip().lower()
else:
raise InvenioWebAccessExternalAuthError("The email provided in the assertion is invalid: %s" % (repr(email)))
else:
return None
def fetch_user_groups_membership(self, username, password=None, req=None):
"""Given a username and a password, returns a dictionary of groups
and their description to which the user is subscribed.
Raise InvenioWebAccessExternalAuthError in case of troubles.
"""
if self.groups_attribute_name:
data = self.__extract_attribute(req)
groups = data.get(self.groups_attribute_name)
if groups:
if isinstance(groups, str):
groups = [group.strip() for group in groups.split(self.groups_separator)]
return dict(zip(groups, groups))
else:
raise InvenioWebAccessExternalAuthError("The groups provided in the assertion are invalid: %s" % (repr(groups)))
return {}
def fetch_user_nickname(self, username, password=None, req=None):
"""Given a username and a password, returns the right nickname belonging
to that user (username could be an email).
"""
if self.nickname_attribute_name:
data = self.__extract_attribute(req)
nickname = data.get(self.nickname_attribute_name)
if nickname:
if isinstance(nickname, str):
return nickname.strip().lower()
else:
raise InvenioWebAccessExternalAuthError("The nickname provided in the assertion is invalid: %s" % (repr(nickname)))
return None
def fetch_user_preferences(self, username, password=None, req=None):
"""Given a username and a password, returns a dictionary of keys and
values, corresponding to external infos and settings.
userprefs = {"telephone": "2392489",
"address": "10th Downing Street"}
(WEBUSER WILL erase all prefs that start with EXTERNAL_ and will
store: "EXTERNAL_telephone"; all internal preferences can use whatever
name as long as it starts with EXTERNAL). If a pref begins with HIDDEN_
it will be ignored.
"""
data = self.__extract_attribute(req)
for key in (self.email_attribute_name, self.groups_attribute_name, self.nickname_attribute_name, self.timeout_attribute_name, self.userip_attribute_name):
if key and key in data:
del data[key]
return data
def robot_login_method_p():
"""Return True if this method is dedicated to robots and should
not therefore be available as a choice to regular users upon login.
"""
return True
robot_login_method_p = staticmethod(robot_login_method_p)
def sign(secret, assertion):
"""
@return: a signature of the given assertion.
@rtype: string
@note: override this method if you want to change the signature
algorithm (e.g. to use GPG).
@see: L{verify}
"""
return hmac.new(secret, assertion, sha1).hexdigest()
sign = staticmethod(sign)
def verify(secret, assertion, signature):
"""
@return: True if the signature is valid
@rtype: boolean
@note: override this method if you want to change the signature
algorithm (e.g. to use GPG)
@see: L{sign}
"""
return hmac.new(secret, assertion, sha1).hexdigest() == signature
verify = staticmethod(verify)
def test_create_example_url(self, email, login_method, robot, ip, assertion=None, timeout=None, referer=None, groups=None, nickname=None):
"""
Create a test URL to test the robot login.
@param email: email of the user we want to login as.
@type email: string
@param login_method: the login_method name as specified in CFG_EXTERNAL_AUTHENTICATION.
@type login_method: string
@param robot: the identifier of this robot.
@type robot: string
@param assertion: any further data we want to send.
@type assertion: json serializable mapping
@param ip: the IP of the user.
@type ip: string
@param timeout: timeout when the URL will expire (in seconds from the Epoch)
@type timeout: float
@param referer: the URL where to land after successful login.
@type referer: string
@param groups: the list of optional group of the user.
@type groups: list of string
@param nickname: the optional nickname of the user.
@type nickname: string
@return: the URL to login as the user.
@rtype: string
"""
from invenio.access_control_config import CFG_EXTERNAL_AUTHENTICATION
from invenio.urlutils import create_url
if assertion is None:
assertion = {}
assertion[self.email_attribute_name] = email
if nickname:
assertion[self.nickname_attribute_name] = nickname
if groups:
assertion[self.groups_attribute_name] = self.groups_separator.join(groups)
if timeout is None:
timeout = time.time() + CFG_ROBOT_URL_TIMEOUT
assertion[self.timeout_attribute_name] = timeout
if referer is None:
referer = CFG_SITE_URL
if login_method is None:
for a_login_method, details in CFG_EXTERNAL_AUTHENTICATION.iteritems():
if details[2]:
login_method = a_login_method
break
robot_keys = load_robot_keys()
assertion[self.userip_attribute_name] = ip
assertion = json.dumps(assertion)
if self.use_zlib:
assertion = base64.urlsafe_b64encode(compress(assertion))
shared_key = robot_keys[login_method][robot]
digest = self.sign(shared_key, assertion)
return create_url("%s%s" % (CFG_SITE_SECURE_URL, "/youraccount/robotlogin"), {
'assertion': assertion,
'robot': robot,
'login_method': login_method,
'digest': digest,
'referer': referer})
def update_robot_key(login_method, robot, key=None):
"""
Utility to update the robot key store.
@param login_method: the login_method name as per L{CFG_EXTERNAL_AUTHENTICATION}.
It should correspond to a robot-enable login method.
@type: string
@param robot: the robot identifier
@type robot: string
@param key: the secret
@type key: string
@note: if the secret is empty the corresponding key will be removed.
"""
robot_keys = load_robot_keys()
if key is None and login_method in robot_keys and robot in robot_keys[login_method]:
del robot_keys[login_method][robot]
if not robot_keys[login_method]:
del robot_keys[login_method]
else:
if login_method not in robot_keys:
robot_keys[login_method] = {}
robot_keys[login_method][robot] = key
mymkdir(os.path.join(CFG_ETCDIR, 'webaccess'))
open(CFG_ROBOT_KEYS_PATH, 'w').write(compress(dumps(robot_keys, -1)))
|
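To make the assertion flow above concrete, here is a minimal round trip in Python 3 syntax (the Invenio source itself is Python 2): JSON-serialize, zlib-compress, base64-url-encode, HMAC-SHA1 sign, then verify and decode with the same padding workaround __extract_attribute applies. The secret key and payload values are invented for illustration:
import base64
import hashlib
import hmac
import json
import time
import zlib

secret = b'shared-robot-key'  # hypothetical key for one (login_method, robot) pair
payload = json.dumps({
    'email': 'jdoe@example.org',
    '__timeout__': time.time() + 3600,  # URL valid for one hour
    '__userip__': '127.0.0.1',
}).encode('utf-8')

# use_zlib=True branch: compress, then base64-url encode (as in test_create_example_url).
assertion = base64.urlsafe_b64encode(zlib.compress(payload))
# Hex HMAC-SHA1 digest of the encoded assertion, as in ExternalAuthRobot.sign().
digest = hmac.new(secret, assertion, hashlib.sha1).hexdigest()

# Verification side, including the padding workaround for producers that
# strip the '=' padding from the base64 encoding.
assert hmac.new(secret, assertion, hashlib.sha1).hexdigest() == digest
padded = assertion + b'=' * ((4 - len(assertion) % 4) % 4)
data = json.loads(zlib.decompress(base64.urlsafe_b64decode(padded)))
assert data['email'] == 'jdoe@example.org' and data['__timeout__'] > time.time()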
Exact match needed:
<|code_start|>##############################################################################################
# HEADER
#
# cappylib/mathfinstat.py - defines useful math, financial, and statistical classes and
# functions for python scripts
#
# Copyright (C) 2008-2015 Chris Pappalardo <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
# to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
# FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
##############################################################################################
##############################################################################################
# IMPORTS
##############################################################################################
from __future__ import division # this fixes 1 / 2 = 0 problem
from cappylib.general import *
##############################################################################################
# GLOBAL VARS
##############################################################################################
##############################################################################################
# MAIN CODE
##############################################################################################
# MATH
def acct(st1, yt1, t):
"""returns smoothed accumulation for a t-period total and incremental value"""
return st1 - st1 / t + yt1
def avg(y):
"""returns simple average of list"""
return sum(y) / len(y)
def ema(l, a=None):
"""returns exponential moving average for a list of values, with optional coefficient"""
r = 0.0
for i in range(0, len(l)):
r = emat(r, l[i], len(l), a)
return r
def emat(st1, yt, t, a=None):
"""
returns exponential moving average for a t-period EMA and incremental value
where st1 is the previous average, yt is the incr value, and t is the size of the avg
a can optionally be overridden with a specific coefficient, else 2/(t+1) is used
"""
# St = a * Yt + (1 - a) * St-1
# where:
# St-1 = last St (i.e. St from t-1)
# Yt = data point for t
# a = alpha factor from 0.0 - 1.0, but 2 / (N + 1) gives 86% weighting with large N's
# see http://en.wikipedia.org/wiki/Moving_average
a = 2.0 / (t + 1.0) if a is None else a
return a * yt + (1.0 - a) * st1
def mmat(st1, yt, t):
"""returns modified moving average for a t-period MMA and incremental value"""
return (st1 * (t - 1) + yt ) / t
##############################################################################################
# TESTING
##############################################################################################
def main():
# acct test
a = acct(14, 2.75, 14)
print aColor('BLUE') + "acct... ", aColor('OFF'), True if a == 15.75 else a
# ema tests
a = ema([1 - 2 / (11 + 1), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
print aColor('BLUE') + "ema... ", aColor('OFF'), True if round(a, 6) == 0.022431 else a
a = emat(0.026917597, 0, 11)
print aColor('BLUE') + "emat... ", aColor('OFF'), True if round(a, 6) == 0.022431 else a
# mmat test
a = mmat(16, 32, 14)
print aColor('BLUE') + "emat... ", aColor('OFF'), True if round(a, 2) == 17.14 else a
if __name__ == '__main__':
try:
main()
except error as e: print e.error
<|code_end|>
|
Unchanged copy:
##############################################################################################
# HEADER
#
# cappylib/mathfinstat.py - defines useful math, financial, and statistical classes and
# functions for python scripts
#
# Copyright (C) 2008-2015 Chris Pappalardo <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
# to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
# FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
##############################################################################################
##############################################################################################
# IMPORTS
##############################################################################################
from __future__ import division # this fixes 1 / 2 = 0 problem
from cappylib.general import *
##############################################################################################
# GLOBAL VARS
##############################################################################################
##############################################################################################
# MAIN CODE
##############################################################################################
# MATH
def acct(st1, yt1, t):
"""returns smoothed accumulation for a t-period total and incremental value"""
return st1 - st1 / t + yt1
def avg(y):
"""returns simple average of list"""
return sum(y) / len(y)
def ema(l, a=None):
"""returns exponential moving average for a list of values, with optional coefficient"""
r = 0.0
for i in range(0, len(l)):
r = emat(r, l[i], len(l), a)
return r
def emat(st1, yt, t, a=None):
"""
returns exponential moving average for a t-period EMA and incremental value
where st1 is the previous average, yt is the incr value, and t is the size of the avg
a can optionally be overridden with a specific coefficient, else 2/(t+1) is used
"""
# St = a * Yt + (1 - a) * St-1
# where:
# St-1 = last St (i.e. St from t-1)
# Yt = data point for t
# a = alpha factor from 0.0 - 1.0, but 2 / (N + 1) gives 86% weighting with large N's
# see http://en.wikipedia.org/wiki/Moving_average
a = 2.0 / (t + 1.0) if a is None else a
return a * yt + (1.0 - a) * st1
def mmat(st1, yt, t):
"""returns modified moving average for a t-period MMA and incremental value"""
return (st1 * (t - 1) + yt ) / t
##############################################################################################
# TESTING
##############################################################################################
def main():
# acct test
a = acct(14, 2.75, 14)
print aColor('BLUE') + "acct... ", aColor('OFF'), True if a == 15.75 else a
# ema tests
a = ema([1 - 2 / (11 + 1), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
print aColor('BLUE') + "ema... ", aColor('OFF'), True if round(a, 6) == 0.022431 else a
a = emat(0.026917597, 0, 11)
print aColor('BLUE') + "emat... ", aColor('OFF'), True if round(a, 6) == 0.022431 else a
# mmat test
a = mmat(16, 32, 14)
print aColor('BLUE') + "emat... ", aColor('OFF'), True if round(a, 2) == 17.14 else a
if __name__ == '__main__':
try:
main()
except error as e: print e.error
|
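The EMA recurrence documented in emat above (St = a*Yt + (1-a)*St-1, with default a = 2/(t+1)) is easy to verify against the test values in main(). A standalone check in Python 3 syntax (the module itself is Python 2):
def emat(st1, yt, t, a=None):
    a = 2.0 / (t + 1.0) if a is None else a  # default alpha = 2/(t+1)
    return a * yt + (1.0 - a) * st1

def ema(values, a=None):
    r = 0.0
    for y in values:
        r = emat(r, y, len(values), a)
    return r

# Reproduces the expected values from the tests above.
assert round(ema([1 - 2 / 12] + [0] * 10), 6) == 0.022431
assert round(emat(0.026917597, 0, 11), 6) == 0.022431
# mmat(16, 32, 14): modified moving average (st1*(t-1) + yt) / t.
assert round((16 * 13 + 32) / 14, 2) == 17.14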
<|code_start|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
for review in orm.Review.objects.all():
if review.approved is True:
review.approval_status = 'approved'
else:
review.approval_status = 'pending'
review.save()
def backwards(self, orm):
for review in orm.Review.objects.all():
if review.approval_status == 'approved':
review.approved = True
else:
review.approved = False
review.save()
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'vegancity.cuisinetag': {
'Meta': {'ordering': "('name',)", 'object_name': 'CuisineTag'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'search_index': ('djorm_pgfulltext.fields.VectorField', [], {'default': "''", 'null': 'True', 'db_index': 'True'})
},
u'vegancity.featuretag': {
'Meta': {'ordering': "('name',)", 'object_name': 'FeatureTag'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'search_index': ('djorm_pgfulltext.fields.VectorField', [], {'default': "''", 'null': 'True', 'db_index': 'True'})
},
u'vegancity.neighborhood': {
'Meta': {'ordering': "('name',)", 'object_name': 'Neighborhood'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
u'vegancity.review': {
'Meta': {'ordering': "('created',)", 'object_name': 'Review'},
'approval_status': ('vegancity.fields.StatusField', [], {'default': "'pending'", 'max_length': '100', 'db_index': 'True'}),
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'atmosphere_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'best_vegan_dish': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['vegancity.VeganDish']", 'null': 'True', 'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'food_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'search_index': ('djorm_pgfulltext.fields.VectorField', [], {'default': "''", 'null': 'True', 'db_index': 'True'}),
'suggested_cuisine_tags': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'suggested_feature_tags': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'unlisted_vegan_dish': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'vendor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['vegancity.Vendor']"})
},
u'vegancity.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'bio': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'karma_points': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'mailing_list': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'unique': 'True'})
},
u'vegancity.vegandish': {
'Meta': {'ordering': "('name',)", 'object_name': 'VeganDish'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'search_index': ('djorm_pgfulltext.fields.VectorField', [], {'default': "''", 'null': 'True', 'db_index': 'True'})
},
u'vegancity.veglevel': {
'Meta': {'object_name': 'VegLevel'},
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'super_category': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
u'vegancity.vendor': {
'Meta': {'ordering': "('name',)", 'object_name': 'Vendor'},
'address': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'approval_status': ('vegancity.fields.StatusField', [], {'default': "'pending'", 'max_length': '100', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'cuisine_tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['vegancity.CuisineTag']", 'null': 'True', 'blank': 'True'}),
'feature_tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['vegancity.FeatureTag']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'neighborhood': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['vegancity.Neighborhood']", 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'search_index': ('djorm_pgfulltext.fields.VectorField', [], {'default': "''", 'null': 'True', 'db_index': 'True'}),
'submitted_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'veg_level': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['vegancity.VegLevel']", 'null': 'True', 'blank': 'True'}),
'vegan_dishes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['vegancity.VeganDish']", 'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['vegancity']
symmetrical = True
<|code_end|>
Replicate exactly.
|
Done, here's the copy:
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
for review in orm.Review.objects.all():
if review.approved is True:
review.approval_status = 'approved'
else:
review.approval_status = 'pending'
review.save()
def backwards(self, orm):
for review in orm.Review.objects.all():
if review.approval_status == 'approved':
review.approved = True
else:
review.approved = False
review.save()
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'vegancity.cuisinetag': {
'Meta': {'ordering': "('name',)", 'object_name': 'CuisineTag'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'search_index': ('djorm_pgfulltext.fields.VectorField', [], {'default': "''", 'null': 'True', 'db_index': 'True'})
},
u'vegancity.featuretag': {
'Meta': {'ordering': "('name',)", 'object_name': 'FeatureTag'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'search_index': ('djorm_pgfulltext.fields.VectorField', [], {'default': "''", 'null': 'True', 'db_index': 'True'})
},
u'vegancity.neighborhood': {
'Meta': {'ordering': "('name',)", 'object_name': 'Neighborhood'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
u'vegancity.review': {
'Meta': {'ordering': "('created',)", 'object_name': 'Review'},
'approval_status': ('vegancity.fields.StatusField', [], {'default': "'pending'", 'max_length': '100', 'db_index': 'True'}),
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'atmosphere_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'best_vegan_dish': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['vegancity.VeganDish']", 'null': 'True', 'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'food_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'search_index': ('djorm_pgfulltext.fields.VectorField', [], {'default': "''", 'null': 'True', 'db_index': 'True'}),
'suggested_cuisine_tags': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'suggested_feature_tags': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'unlisted_vegan_dish': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'vendor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['vegancity.Vendor']"})
},
u'vegancity.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'bio': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'karma_points': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'mailing_list': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'unique': 'True'})
},
u'vegancity.vegandish': {
'Meta': {'ordering': "('name',)", 'object_name': 'VeganDish'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'search_index': ('djorm_pgfulltext.fields.VectorField', [], {'default': "''", 'null': 'True', 'db_index': 'True'})
},
u'vegancity.veglevel': {
'Meta': {'object_name': 'VegLevel'},
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'super_category': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
u'vegancity.vendor': {
'Meta': {'ordering': "('name',)", 'object_name': 'Vendor'},
'address': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'approval_status': ('vegancity.fields.StatusField', [], {'default': "'pending'", 'max_length': '100', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'cuisine_tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['vegancity.CuisineTag']", 'null': 'True', 'blank': 'True'}),
'feature_tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['vegancity.FeatureTag']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'neighborhood': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['vegancity.Neighborhood']", 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'search_index': ('djorm_pgfulltext.fields.VectorField', [], {'default': "''", 'null': 'True', 'db_index': 'True'}),
'submitted_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'veg_level': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['vegancity.VegLevel']", 'null': 'True', 'blank': 'True'}),
'vegan_dishes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['vegancity.VeganDish']", 'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['vegancity']
symmetrical = True
|
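Each field entry in the frozen-ORM dump above is a South triple of ('dotted.path.to.FieldClass', positional_args, keyword_args), with every value stored as a string repr. A minimal sketch of rehydrating one such triple (hypothetical helper, not part of South's API; assumes Django is importable and skips South's repr decoding):
from importlib import import_module
def rebuild_field(triple):
    """Hypothetical: turn a frozen South triple back into a field instance."""
    path, args, kwargs = triple
    module_path, cls_name = path.rsplit('.', 1)
    field_cls = getattr(import_module(module_path), cls_name)
    # South freezes every value as a string repr ('True', '255', ...);
    # a real loader would decode those first. Skipped here for brevity.
    return field_cls(*args, **kwargs)
# e.g. rebuild_field(('django.db.models.fields.CharField', [], {'max_length': 255}))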
Here's a code snippet to echo:
<|code_start|>from framework.enums.enums import APIMappings
from androguard.core.analysis import analysis
from datetime import datetime
from blessings import Terminal
t = Terminal()
class APIPermissionMappings(object):
def __init__(self, apk, apks):
super(APIPermissionMappings, self).__init__()
self.apk = apk
self.apks = apks
@staticmethod
def run_search_method(apks, x, clz, method):
"""
Search for API calls and implementation location
"""
vm = apks.get_vm()
paths = x.get_tainted_packages().search_methods(clz, method, ".")
if paths:
for p in paths:
for method in apks.get_methods():
if method.get_name() == p.get_src(vm.get_class_manager())[1]:
if method.get_class_name() == p.get_src(vm.get_class_manager())[0]:
print(t.green("[{0}] ".format(datetime.now()) +
t.yellow("Found: ") +
"{0}".format(method)))
print(t.green("[{0}] ".format(datetime.now()) +
t.yellow("Class: ") +
"{0}".format(method.get_class_name())))
print(t.green("[{0}] ".format(datetime.now()) +
t.yellow("Method: ") +
"{0}".format(method.get_name())))
print(method.show())
def run_find_mapping(self):
"""
Map permissions to API calls with the analyzed
bytecode
"""
# APIMappings enum
# structure
#
enums = APIMappings()
# VM analysis
# object
#
x = analysis.uVMAnalysis(self.apks.get_vm())
for permission in self.apk.get_permissions():
for a, b in enums.mappings.items():
for c, d in b.items():
if "permission" in c:
if permission == d:
print(t.green("[{0}] ".format(datetime.now()) +
t.yellow("Found permission mapping : ") +
permission))
if b.get("class"):
for e, f in b.get("class").items():
print(t.green("[{0}] ".format(datetime.now()) +
t.yellow("Searching for : ") +
e))
if f.get("method"):
self.run_search_method(self.apks, x, e, f.get("method"))
elif f.get("methods"):
for method in f.get("methods"):
self.run_search_method(self.apks, x, e, method)
elif b.get("classes"):
for g, h in b.get("classes").items():
print(t.green("[{0}] ".format(datetime.now()) +
t.yellow("Searching for : ") +
g))
if h.get("method"):
self.run_search_method(self.apks, x, g, h.get("method"))
elif h.get("methods"):
for method in h.get("methods"):
self.run_search_method(self.apks, x, g, method)
<|code_end|>
|
from framework.enums.enums import APIMappings
from androguard.core.analysis import analysis
from datetime import datetime
from blessings import Terminal
t = Terminal()
class APIPermissionMappings(object):
def __init__(self, apk, apks):
super(APIPermissionMappings, self).__init__()
self.apk = apk
self.apks = apks
@staticmethod
def run_search_method(apks, x, clz, method):
"""
Search for API calls and implementation location
"""
vm = apks.get_vm()
paths = x.get_tainted_packages().search_methods(clz, method, ".")
if paths:
for p in paths:
for method in apks.get_methods():
if method.get_name() == p.get_src(vm.get_class_manager())[1]:
if method.get_class_name() == p.get_src(vm.get_class_manager())[0]:
print(t.green("[{0}] ".format(datetime.now()) +
t.yellow("Found: ") +
"{0}".format(method)))
print(t.green("[{0}] ".format(datetime.now()) +
t.yellow("Class: ") +
"{0}".format(method.get_class_name())))
print(t.green("[{0}] ".format(datetime.now()) +
t.yellow("Method: ") +
"{0}".format(method.get_name())))
print(method.show())
def run_find_mapping(self):
"""
Map permissions to API calls with the analyzed
bytecode
"""
# APIMappings enum
# structure
#
enums = APIMappings()
# VM analysis
# object
#
x = analysis.uVMAnalysis(self.apks.get_vm())
for permission in self.apk.get_permissions():
for a, b in enums.mappings.items():
for c, d in b.items():
if "permission" in c:
if permission == d:
print(t.green("[{0}] ".format(datetime.now()) +
t.yellow("Found permission mapping : ") +
permission))
if b.get("class"):
for e, f in b.get("class").items():
print(t.green("[{0}] ".format(datetime.now()) +
t.yellow("Searching for : ") +
e))
if f.get("method"):
self.run_search_method(self.apks, x, e, f.get("method"))
elif f.get("methods"):
for method in f.get("methods"):
self.run_search_method(self.apks, x, e, method)
elif b.get("classes"):
for g, h in b.get("classes").items():
print(t.green("[{0}] ".format(datetime.now()) +
t.yellow("Searching for : ") +
g))
if h.get("method"):
self.run_search_method(self.apks, x, g, h.get("method"))
elif h.get("methods"):
for method in h.get("methods"):
self.run_search_method(self.apks, x, g, method)
|
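run_find_mapping above only works against a particular shape of APIMappings.mappings: each entry carries a key containing "permission" plus either a "class" or "classes" dict mapping a class name to {"method": ...} or {"methods": [...]}. A hypothetical entry illustrating that structure (names invented for illustration; the real data lives in framework.enums.enums):
mappings = {
    "internet": {
        "permission": "android.permission.INTERNET",
        "class": {
            "Ljava/net/URL;": {"method": "openConnection"},
        },
    },
    "sms": {
        "permission": "android.permission.SEND_SMS",
        "classes": {
            "Landroid/telephony/SmsManager;": {"methods": ["sendTextMessage", "sendDataMessage"]},
        },
    },
}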
Incomplete code follows:
<|code_start|>#!X:\Python27\python.exe
# $Id: rst2odt_prepstyles.py 5839 2009-01-07 19:09:28Z dkuhlman $
# Author: Dave Kuhlman <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
Fix a word-processor-generated styles.odt for odtwriter use: Drop page size
specifications from styles.xml in STYLE_FILE.odt.
"""
#
# Author: Michael Schutte <[email protected]>
from lxml import etree
import sys
import zipfile
from tempfile import mkstemp
import shutil
import os
NAMESPACES = {
"style": "urn:oasis:names:tc:opendocument:xmlns:style:1.0",
"fo": "urn:oasis:names:tc:opendocument:xmlns:xsl-fo-compatible:1.0"
}
def prepstyle(filename):
zin = zipfile.ZipFile(filename)
styles = zin.read("styles.xml")
root = etree.fromstring(styles)
for el in root.xpath("//style:page-layout-properties",
names<|code_end|>
Finish the remainder.
|
paces=NAMESPACES):
for attr in el.attrib:
if attr.startswith("{%s}" % NAMESPACES["fo"]):
del el.attrib[attr]
tempname = mkstemp()
zout = zipfile.ZipFile(os.fdopen(tempname[0], "w"), "w",
zipfile.ZIP_DEFLATED)
for item in zin.infolist():
if item.filename == "styles.xml":
zout.writestr(item, etree.tostring(root))
else:
zout.writestr(item, zin.read(item.filename))
zout.close()
zin.close()
shutil.move(tempname[1], filename)
def main():
args = sys.argv[1:]
if len(args) != 1:
print >> sys.stderr, __doc__
print >> sys.stderr, "Usage: %s STYLE_FILE.odt\n" % sys.argv[0]
sys.exit(1)
filename = args[0]
prepstyle(filename)
if __name__ == '__main__':
main()
# vim:tw=78:sw=4:sts=4:et:
|
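The completion above leans on the fact that zipfile cannot edit a member in place: the only safe route is to stream every entry into a fresh archive and move the temp file over the original. The same pattern in isolation (a sketch, Python 3, independent of the lxml styling logic):
import os
import shutil
import zipfile
from tempfile import mkstemp
def replace_member(archive_path, member, new_bytes):
    # zipfile cannot modify a member in place, so copy every entry
    # into a temporary archive, swapping in the new payload.
    fd, tmp_path = mkstemp()
    with zipfile.ZipFile(archive_path) as zin, \
         zipfile.ZipFile(os.fdopen(fd, "wb"), "w", zipfile.ZIP_DEFLATED) as zout:
        for item in zin.infolist():
            payload = new_bytes if item.filename == member else zin.read(item.filename)
            zout.writestr(item, payload)
    shutil.move(tmp_path, archive_path)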
Make this the default so we can create User objects.
from django.db import models
from django_enumfield import EnumField
from librement.utils.user_data import PerUserData
from .enums import AccountEnum, CountryEnum
class Profile(PerUserData('profile')):
account_type = EnumField(AccountEnum)
organisation = models.CharField(max_length=100, blank=True)
address_1 = models.CharField(max_length=150, blank=True)
address_2 = models.CharField(max_length=150, blank=True)
city = models.CharField(max_length=100, blank=True)
region = models.CharField(max_length=100, blank=True)
zipcode = models.CharField(max_length=100, blank=True)
country = EnumField(CountryEnum)
|
from django.db import models
from django_enumfield import EnumField
from librement.utils.user_data import PerUserData
from .enums import AccountEnum, CountryEnum
class Profile(PerUserData('profile')):
account_type = EnumField(AccountEnum, default=AccountEnum.INDIVIDUAL)
organisation = models.CharField(max_length=100, blank=True)
address_1 = models.CharField(max_length=150, blank=True)
address_2 = models.CharField(max_length=150, blank=True)
city = models.CharField(max_length=100, blank=True)
region = models.CharField(max_length=100, blank=True)
zipcode = models.CharField(max_length=100, blank=True)
country = EnumField(CountryEnum)
|
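The one-line diff above matters because PerUserData presumably attaches a Profile to every freshly created User, so a required field without a default blocks User creation. A hedged sketch of the effect (assuming AccountEnum defines INDIVIDUAL, as the target does):
# With the default in place, a bare Profile no longer needs an explicit
# account_type (some_user: an existing auth.User instance):
profile = Profile(user=some_user)
assert profile.account_type == AccountEnum.INDIVIDUAL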
<|code_start|># pyTweener
#
# Tweening functions for python
#
# Heavily based on caurina Tweener: http://code.google.com/p/tweener/
#
# Released under M.I.T License - see above url
# Python version by Ben Harling 2009
# All kinds of slashing and dashing by Toms Baugis 2010, 2014
import math
import collections
import datetime as dt
import time
import re
class Tweener(object):
def __init__(self, default_duration = None, tween = None):
"""Tweener
This class manages all active tweens, and provides a factory for
creating and spawning tween motions."""
self.current_tweens = collections.defaultdict(set)
self.default_easing = tween or Easing.Cubic.ease_in_out
self.default_duration = default_duration or 1.0
def has_tweens(self):
return len(self.current_tweens) > 0
def add_tween(self, obj, duration = None, easing = None, on_complete = None,
on_update = None, round = False, delay = None, **kwargs):
"""
Add tween for the object to go from current values to set ones.
Example: add_tween(sprite, x = 500, y = 200, duration = 0.4)
This will move the sprite to coordinates (500, 200) in 0.4 seconds.
For parameter "easing" you can use one of the pytweener.Easing
functions, or specify your own.
The tweener can handle numbers, dates and color strings in hex ("#ffffff").
This function performs overwrite-style conflict solving: if a previous
tween operates on the same attributes, the attributes in question are
removed from that tween.
"""
if duration is None:
duration = self.default_duration
easing = easing or self.default_easing
tw = Tween(obj, duration, delay, easing, on_complete, on_update, round, **kwargs )
if obj in self.current_tweens:
for current_tween in tuple(self.current_tweens[obj]):
prev_keys = set((key for (key, tweenable) in current_tween.tweenables))
dif = prev_keys & set(kwargs.keys())
for key, tweenable in tuple(current_tween.tweenables):
if key in dif:
current_tween.tweenables.remove((key, tweenable))
if not current_tween.tweenables:
current_tween.finish()
self.current_tweens[obj].remove(current_tween)
self.current_tweens[obj].add(tw)
return tw
def get_tweens(self, obj):
"""Get a list of all tweens acting on the specified object
Useful for manipulating tweens on the fly"""
return self.current_tweens.get(obj, None)
def kill_tweens(self, obj = None):
"""Stop tweening an object, without completing the motion or firing the
on_complete"""
if obj is not None:
try:
del self.current_tweens[obj]
except:
pass
else:
self.current_tweens = collections.defaultdict(set)
def remove_tween(self, tween):
""""remove given tween without completing the motion or firing the on_complete"""
if tween.target in self.current_tweens and tween in self.current_tweens[tween.target]:
self.current_tweens[tween.target].remove(tween)
if not self.current_tweens[tween.target]:
del self.current_tweens[tween.target]
def finish(self):
"""jump the the last frame of all tweens"""
for obj in self.current_tweens:
for tween in self.current_tweens[obj]:
tween.finish()
self.current_tweens = {}
def update(self, delta_seconds):
"""update tweeners. delta_seconds is time in seconds since last frame"""
for obj in tuple(self.current_tweens):
for tween in tuple(self.current_tweens[obj]):
done = tween.update(delta_seconds)
if done:
self.current_tweens[obj].remove(tween)
if tween.on_complete: tween.on_complete(tween.target)
if not self.current_tweens[obj]:
del self.current_tweens[obj]
return self.current_tweens
class Tween(object):
__slots__ = ('tweenables', 'target', 'delta', 'duration', 'delay',
'ease', 'complete', 'round',
'on_complete', 'on_update')
def __init__(self, obj, duration, delay, easing, on_complete, on_update, round,
**kwargs):
"""Tween object use Tweener.add_tween( ... ) to create"""
#: should the tween values be truncated to integers or not. Default is False.
self.round = round
#: duration of the tween
self.duration = duration
#: delay before the animation should be started
self.delay = delay or 0
self.target = obj
#: easing function
self.ease = easing
# set of (property, Tweenable) pairs
self.tweenables = set()
for key, value in kwargs.items():
self.tweenables.add((key, Tweenable(getattr(self.target, key), value)))
self.delta = 0
#: callback to execute on complete
self.on_complete = on_complete
#: callback to execute on update
self.on_update = on_update
self.complete = False
def finish(self):
self.update(self.duration)
def update(self, ptime):
"""Update tween with the time since the last frame"""
delta = self.delta + ptime
total_duration = self.delay + self.duration
if delta > total_duration:
delta = total_duration
if delta < self.delay:
pass
elif delta == total_duration:
for key, tweenable in self.tweenables:
setattr(self.target, key, tweenable.target_value)
else:
fraction = self.ease((delta - self.delay) / (total_duration - self.delay))
for key, tweenable in self.tweenables:
res = tweenable.update(fraction)
if isinstance(res, float) and self.round:
res = int(res)
setattr(self.target, key, res)
if delta == total_duration or len(self.tweenables) == 0:
self.complete = True
self.delta = delta
if self.on_update:
self.on_update(self.target)
return self.complete
class Tweenable(object):
"""a single attribute that has to be tweened from start to target"""
__slots__ = ('start_value', 'change', 'decode_func', 'target_value', 'update')
hex_color_normal = re.compile("#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})")
hex_color_short = re.compile("#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])")
def __init__(self, start_value, target_value):
self.decode_func = lambda x: x
self.target_value = target_value
def float_update(fraction):
return self.start_value + self.change * fraction
def date_update(fraction):
return dt.date.fromtimestamp(self.start_value + self.change * fraction)
def datetime_update(fraction):
return dt.datetime.fromtimestamp(self.start_value + self.change * fraction)
def color_update(fraction):
val = [max(min(self.start_value[i] + self.change[i] * fraction, 255), 0) for i in range(3)]
return "#%02x%02x%02x" % (val[0], val[1], val[2])
if isinstance(start_value, int) or isinstance(start_value, float):
self.start_value = start_value
self.change = target_value - start_value
self.update = float_update
else:
if isinstance(start_value, dt.datetime) or isinstance(start_value, dt.date):
if isinstance(start_value, dt.datetime):
self.update = datetime_update
else:
self.update = date_update
self.decode_func = lambda x: time.mktime(x.timetuple())
self.start_value = self.decode_func(start_value)
self.change = self.decode_func(target_value) - self.start_value
elif isinstance(start_value, basestring) \
and (self.hex_color_normal.match(start_value) or self.hex_color_short.match(start_value)):
self.update = color_update
if self.hex_color_normal.match(start_value):
self.decode_func = lambda val: [int(match, 16)
for match in self.hex_color_normal.match(val).groups()]
elif self.hex_color_short.match(start_value):
self.decode_func = lambda val: [int(match + match, 16)
for match in self.hex_color_short.match(val).groups()]
if self.hex_color_normal.match(target_value):
target_value = [int(match, 16)
for match in self.hex_color_normal.match(target_value).groups()]
else:
target_value = [int(match + match, 16)
for match in self.hex_color_short.match(target_value).groups()]
self.start_value = self.decode_func(start_value)
self.change = [target - start for start, target in zip(self.start_value, target_value)]
"""Robert Penner's classes stripped from the repetetive c,b,d mish-mash
(discovery of Patryk Zawadzki). This way we do the math once and apply to
all the tweenables instead of repeating it for each attribute
"""
def inverse(method):
def real_inverse(t, *args, **kwargs):
t = 1 - t
return 1 - method(t, *args, **kwargs)
return real_inverse
def symmetric(ease_in, ease_out):
def real_symmetric(t, *args, **kwargs):
if t < 0.5:
return ease_in(t * 2, *args, **kwargs) / 2
return ease_out((t - 0.5) * 2, *args, **kwargs) / 2 + 0.5
return real_symmetric
class Symmetric(object):
def __init__(self, ease_in = None, ease_out = None):
self.ease_in = ease_in or inverse(ease_out)
self.ease_out = ease_out or inverse(ease_in)
self.ease_in_out = symmetric(self.ease_in, self.ease_out)
class Easing(object):
"""Class containing easing classes to use together with the tweener.
All of the classes have :func:`ease_in`, :func:`ease_out` and
:func:`ease_in_out` functions."""
Linear = Symmetric(lambda t: t, lambda t: t)
Quad = Symmetric(lambda t: t*t)
Cubic = Symmetric(lambda t: t*t*t)
Quart = Symmetric(lambda t: t*t*t*t)
Quint = Symmetric(lambda t: t*t*t*t*t)
Strong = Quint #oh i wonder why but the ported code is the same as in Quint
Circ = Symmetric(lambda t: 1 - math.sqrt(1 - t * t))
Sine = Symmetric(lambda t: 1 - math.cos(t * (math.pi / 2)))
def _back_in(t, s=1.70158):
return t * t * ((s + 1) * t - s)
Back = Symmetric(_back_in)
def _bounce_out(t):
if t < 1 / 2.75:
return 7.5625 * t * t
elif t < 2 / 2.75:
t = t - 1.5 / 2.75
return 7.5625 * t * t + 0.75
elif t < 2.5 / 2.75:
t = t - 2.25 / 2.75
return 7.5625 * t * t + .9375
else:
t = t - 2.625 / 2.75
return 7.5625 * t * t + 0.984375
Bounce = Symmetric(ease_out = _bounce_out)
def _elastic_in(t, springiness = 0, wave_length = 0):
if t in(0, 1):
return t
wave_length = wave_length or (1 - t) * 0.3
if springiness <= 1:
springiness = t
s = wave_length / 4
else:
s = wave_length / (2 * math.pi) * math.asin(t / springiness)
t = t - 1
return -(springiness * math.pow(2, 10 * t) * math.sin((t - s) * (2 * math.pi) / wave_length))
Elastic = Symmetric(_elastic_in)
def _expo_in(t):
if t in (0, 1): return t
return math.pow(2, 10 * t) * 0.001
Expo = Symmetric(_expo_in)
class _Dummy(object):
def __init__(self, a, b, c):
self.a = a
self.b = b
self.c = c
if __name__ == "__main__":
import datetime as dt
tweener = Tweener()
objects = []
object_count, update_times = 1000, 100
for i in range(object_count):
objects.append(_Dummy(i-100, i-100, i-100))
total = dt.datetime.now()
t = dt.datetime.now()
print "Adding %d tweens..." % object_count
for i, o in enumerate(objects):
tweener.add_tween(o, a = i,
b = i,
c = i,
duration = 0.1 * update_times,
easing=Easing.Circ.ease_in_out)
print dt.datetime.now() - t
t = dt.datetime.now()
print "Updating %d times......" % update_times
for i in range(update_times): #update 1000 times
tweener.update(0.1)
print dt.datetime.now() - t
<|code_end|>
Copy this precisely.
|
# pyTweener
#
# Tweening functions for python
#
# Heavily based on caurina Tweener: http://code.google.com/p/tweener/
#
# Released under M.I.T License - see above url
# Python version by Ben Harling 2009
# All kinds of slashing and dashing by Toms Baugis 2010, 2014
import math
import collections
import datetime as dt
import time
import re
class Tweener(object):
def __init__(self, default_duration = None, tween = None):
"""Tweener
This class manages all active tweens, and provides a factory for
creating and spawning tween motions."""
self.current_tweens = collections.defaultdict(set)
self.default_easing = tween or Easing.Cubic.ease_in_out
self.default_duration = default_duration or 1.0
def has_tweens(self):
return len(self.current_tweens) > 0
def add_tween(self, obj, duration = None, easing = None, on_complete = None,
on_update = None, round = False, delay = None, **kwargs):
"""
Add tween for the object to go from current values to set ones.
Example: add_tween(sprite, x = 500, y = 200, duration = 0.4)
This will move the sprite to coordinates (500, 200) in 0.4 seconds.
For parameter "easing" you can use one of the pytweener.Easing
functions, or specify your own.
The tweener can handle numbers, dates and color strings in hex ("#ffffff").
This function performs overwrite-style conflict solving: if a previous
tween operates on the same attributes, the attributes in question are
removed from that tween.
"""
if duration is None:
duration = self.default_duration
easing = easing or self.default_easing
tw = Tween(obj, duration, delay, easing, on_complete, on_update, round, **kwargs )
if obj in self.current_tweens:
for current_tween in tuple(self.current_tweens[obj]):
prev_keys = set((key for (key, tweenable) in current_tween.tweenables))
dif = prev_keys & set(kwargs.keys())
for key, tweenable in tuple(current_tween.tweenables):
if key in dif:
current_tween.tweenables.remove((key, tweenable))
if not current_tween.tweenables:
current_tween.finish()
self.current_tweens[obj].remove(current_tween)
self.current_tweens[obj].add(tw)
return tw
def get_tweens(self, obj):
"""Get a list of all tweens acting on the specified object
Useful for manipulating tweens on the fly"""
return self.current_tweens.get(obj, None)
def kill_tweens(self, obj = None):
"""Stop tweening an object, without completing the motion or firing the
on_complete"""
if obj is not None:
try:
del self.current_tweens[obj]
except:
pass
else:
self.current_tweens = collections.defaultdict(set)
def remove_tween(self, tween):
""""remove given tween without completing the motion or firing the on_complete"""
if tween.target in self.current_tweens and tween in self.current_tweens[tween.target]:
self.current_tweens[tween.target].remove(tween)
if not self.current_tweens[tween.target]:
del self.current_tweens[tween.target]
def finish(self):
"""jump the the last frame of all tweens"""
for obj in self.current_tweens:
for tween in self.current_tweens[obj]:
tween.finish()
self.current_tweens = {}
def update(self, delta_seconds):
"""update tweeners. delta_seconds is time in seconds since last frame"""
for obj in tuple(self.current_tweens):
for tween in tuple(self.current_tweens[obj]):
done = tween.update(delta_seconds)
if done:
self.current_tweens[obj].remove(tween)
if tween.on_complete: tween.on_complete(tween.target)
if not self.current_tweens[obj]:
del self.current_tweens[obj]
return self.current_tweens
class Tween(object):
__slots__ = ('tweenables', 'target', 'delta', 'duration', 'delay',
'ease', 'complete', 'round',
'on_complete', 'on_update')
def __init__(self, obj, duration, delay, easing, on_complete, on_update, round,
**kwargs):
"""Tween object use Tweener.add_tween( ... ) to create"""
#: should the tween values be truncated to integers or not. Default is False.
self.round = round
#: duration of the tween
self.duration = duration
#: delay before the animation should be started
self.delay = delay or 0
self.target = obj
#: easing function
self.ease = easing
# set of (property, Tweenable) pairs
self.tweenables = set()
for key, value in kwargs.items():
self.tweenables.add((key, Tweenable(getattr(self.target, key), value)))
self.delta = 0
#: callback to execute on complete
self.on_complete = on_complete
#: callback to execute on update
self.on_update = on_update
self.complete = False
def finish(self):
self.update(self.duration)
def update(self, ptime):
"""Update tween with the time since the last frame"""
delta = self.delta + ptime
total_duration = self.delay + self.duration
if delta > total_duration:
delta = total_duration
if delta < self.delay:
pass
elif delta == total_duration:
for key, tweenable in self.tweenables:
setattr(self.target, key, tweenable.target_value)
else:
fraction = self.ease((delta - self.delay) / (total_duration - self.delay))
for key, tweenable in self.tweenables:
res = tweenable.update(fraction)
if isinstance(res, float) and self.round:
res = int(res)
setattr(self.target, key, res)
if delta == total_duration or len(self.tweenables) == 0:
self.complete = True
self.delta = delta
if self.on_update:
self.on_update(self.target)
return self.complete
class Tweenable(object):
"""a single attribute that has to be tweened from start to target"""
__slots__ = ('start_value', 'change', 'decode_func', 'target_value', 'update')
hex_color_normal = re.compile("#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})")
hex_color_short = re.compile("#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])")
def __init__(self, start_value, target_value):
self.decode_func = lambda x: x
self.target_value = target_value
def float_update(fraction):
return self.start_value + self.change * fraction
def date_update(fraction):
return dt.date.fromtimestamp(self.start_value + self.change * fraction)
def datetime_update(fraction):
return dt.datetime.fromtimestamp(self.start_value + self.change * fraction)
def color_update(fraction):
val = [max(min(self.start_value[i] + self.change[i] * fraction, 255), 0) for i in range(3)]
return "#%02x%02x%02x" % (val[0], val[1], val[2])
if isinstance(start_value, int) or isinstance(start_value, float):
self.start_value = start_value
self.change = target_value - start_value
self.update = float_update
else:
if isinstance(start_value, dt.datetime) or isinstance(start_value, dt.date):
if isinstance(start_value, dt.datetime):
self.update = datetime_update
else:
self.update = date_update
self.decode_func = lambda x: time.mktime(x.timetuple())
self.start_value = self.decode_func(start_value)
self.change = self.decode_func(target_value) - self.start_value
elif isinstance(start_value, basestring) \
and (self.hex_color_normal.match(start_value) or self.hex_color_short.match(start_value)):
self.update = color_update
if self.hex_color_normal.match(start_value):
self.decode_func = lambda val: [int(match, 16)
for match in self.hex_color_normal.match(val).groups()]
elif self.hex_color_short.match(start_value):
self.decode_func = lambda val: [int(match + match, 16)
for match in self.hex_color_short.match(val).groups()]
if self.hex_color_normal.match(target_value):
target_value = [int(match, 16)
for match in self.hex_color_normal.match(target_value).groups()]
else:
target_value = [int(match + match, 16)
for match in self.hex_color_short.match(target_value).groups()]
self.start_value = self.decode_func(start_value)
self.change = [target - start for start, target in zip(self.start_value, target_value)]
"""Robert Penner's classes stripped from the repetetive c,b,d mish-mash
(discovery of Patryk Zawadzki). This way we do the math once and apply to
all the tweenables instead of repeating it for each attribute
"""
def inverse(method):
def real_inverse(t, *args, **kwargs):
t = 1 - t
return 1 - method(t, *args, **kwargs)
return real_inverse
def symmetric(ease_in, ease_out):
def real_symmetric(t, *args, **kwargs):
if t < 0.5:
return ease_in(t * 2, *args, **kwargs) / 2
return ease_out((t - 0.5) * 2, *args, **kwargs) / 2 + 0.5
return real_symmetric
class Symmetric(object):
def __init__(self, ease_in = None, ease_out = None):
self.ease_in = ease_in or inverse(ease_out)
self.ease_out = ease_out or inverse(ease_in)
self.ease_in_out = symmetric(self.ease_in, self.ease_out)
class Easing(object):
"""Class containing easing classes to use together with the tweener.
All of the classes have :func:`ease_in`, :func:`ease_out` and
:func:`ease_in_out` functions."""
Linear = Symmetric(lambda t: t, lambda t: t)
Quad = Symmetric(lambda t: t*t)
Cubic = Symmetric(lambda t: t*t*t)
Quart = Symmetric(lambda t: t*t*t*t)
Quint = Symmetric(lambda t: t*t*t*t*t)
Strong = Quint #oh i wonder why but the ported code is the same as in Quint
Circ = Symmetric(lambda t: 1 - math.sqrt(1 - t * t))
Sine = Symmetric(lambda t: 1 - math.cos(t * (math.pi / 2)))
def _back_in(t, s=1.70158):
return t * t * ((s + 1) * t - s)
Back = Symmetric(_back_in)
def _bounce_out(t):
if t < 1 / 2.75:
return 7.5625 * t * t
elif t < 2 / 2.75:
t = t - 1.5 / 2.75
return 7.5625 * t * t + 0.75
elif t < 2.5 / 2.75:
t = t - 2.25 / 2.75
return 7.5625 * t * t + .9375
else:
t = t - 2.625 / 2.75
return 7.5625 * t * t + 0.984375
Bounce = Symmetric(ease_out = _bounce_out)
def _elastic_in(t, springiness = 0, wave_length = 0):
if t in(0, 1):
return t
wave_length = wave_length or (1 - t) * 0.3
if springiness <= 1:
springiness = t
s = wave_length / 4
else:
s = wave_length / (2 * math.pi) * math.asin(t / springiness)
t = t - 1
return -(springiness * math.pow(2, 10 * t) * math.sin((t - s) * (2 * math.pi) / wave_length))
Elastic = Symmetric(_elastic_in)
def _expo_in(t):
if t in (0, 1): return t
return math.pow(2, 10 * t) * 0.001
Expo = Symmetric(_expo_in)
class _Dummy(object):
def __init__(self, a, b, c):
self.a = a
self.b = b
self.c = c
if __name__ == "__main__":
import datetime as dt
tweener = Tweener()
objects = []
object_count, update_times = 1000, 100
for i in range(object_count):
objects.append(_Dummy(i-100, i-100, i-100))
total = dt.datetime.now()
t = dt.datetime.now()
print "Adding %d tweens..." % object_count
for i, o in enumerate(objects):
tweener.add_tween(o, a = i,
b = i,
c = i,
duration = 0.1 * update_times,
easing=Easing.Circ.ease_in_out)
print dt.datetime.now() - t
t = dt.datetime.now()
print "Updating %d times......" % update_times
for i in range(update_times): #update 1000 times
tweener.update(0.1)
print dt.datetime.now() - t
|
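The __main__ block above doubles as a benchmark; a smaller usage sketch of the same Tweener API, driving one object through a fixed-step frame loop:
class Sprite(object):
    def __init__(self):
        self.x, self.y = 0.0, 0.0
sprite = Sprite()
tweener = Tweener()
# Move to (500, 200) over 0.4s using the default Cubic ease_in_out.
tweener.add_tween(sprite, x=500, y=200, duration=0.4)
while tweener.has_tweens():
    tweener.update(0.1)      # pretend each frame takes 100 ms
print(sprite.x)              # -> 500 once the tween completes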
Copy verbatim:
<|code_start|>"""
kombu.transport.pika
====================
Pika transport.
:copyright: (c) 2009 - 2012 by Ask Solem.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import socket
from operator import attrgetter
from kombu.exceptions import StdChannelError
from . import base
import pika
from pika import spec
from pika.adapters import blocking_connection as blocking
from pika import exceptions
DEFAULT_PORT = 5672
BASIC_PROPERTIES = ("content_type", "content_encoding",
"headers", "delivery_mode", "priority",
"correlation_id", "reply_to", "expiration",
"message_id", "timestamp", "type", "user_id",
"app_id", "cluster_id")
class Message(base.Message):
def __init__(self, channel, amqp_message, **kwargs):
channel_id, method, props, body = amqp_message
propdict = dict(zip(BASIC_PROPERTIES,
attrgetter(*BASIC_PROPERTIES)(props)))
kwargs.update({"body": body,
"delivery_tag": method.delivery_tag,
"content_type": props.content_type,
"content_encoding": props.content_encoding,
"headers": props.headers,
"properties": propdict,
"delivery_info": dict(
consumer_tag=getattr(method, "consumer_tag", None),
routing_key=method.routing_key,
delivery_tag=method.delivery_tag,
redelivered=method.redelivered,
exchange=method.exchange)})
super(Message, self).__init__(channel, **kwargs)
class Channel(blocking.BlockingChannel, base.StdChannel):
Message = Message
def basic_get(self, queue, no_ack):
method = super(Channel, self).basic_get(self, queue=queue,
no_ack=no_ack)
# pika returns semi-predicates (GetEmpty/GetOk).
if isinstance(method, spec.Basic.GetEmpty):
return
return None, method, method._properties, method._body
def queue_purge(self, queue=None, nowait=False):
return super(Channel, self).queue_purge(queue=queue,
nowait=nowait).message_count
def basic_publish(self, message, exchange, routing_key, mandatory=False,
immediate=False):
body, properties = message
try:
return super(Channel, self).basic_publish(exchange,
routing_key,
body,
properties,
mandatory,
immediate)
finally:
# Pika does not automatically flush the outbound buffer
# TODO async: Needs to support `nowait`.
self.connection._flush_outbound()
def basic_consume(self, queue, no_ack=False, consumer_tag=None,
callback=None, nowait=False):
# Kombu callbacks only take a single `message` argument,
# but pika applies the callback with 4 arguments, so we need
# to wrap these into a single tuple.
def _callback_decode(channel, method, header, body):
return callback((channel, method, header, body))
return super(Channel, self).basic_consume(
_callback_decode, queue, no_ack, False, consumer_tag)
def prepare_message(self, body, priority=None,
content_type=None, content_encoding=None, headers=None,
properties=None):
properties = spec.BasicProperties(priority=priority,
content_type=content_type,
content_encoding=content_encoding,
headers=headers,
**properties)
return body, properties
def message_to_python(self, raw_message):
return self.Message(channel=self, amqp_message=raw_message)
def basic_qos(self, prefetch_size, prefetch_count, a_global=False):
return super(Channel, self).basic_qos(prefetch_size=prefetch_size,
prefetch_count=prefetch_count,
global_=a_global)
def __enter__(self):
return self
def __exit__(self, *exc_info):
self.close()
def close(self, *args):
super(Channel, self).close(*args)
self.connection = None
if getattr(self, "handler", None):
if getattr(self.handler, "connection", None):
self.handler.connection.channels.pop(
self.handler.channel_number, None)
self.handler.connection = None
self.handler = None
@property
def channel_id(self):
return self.channel_number
class Connection(blocking.BlockingConnection):
Channel = Channel
def __init__(self, client, *args, **kwargs):
self.client = client
super(Connection, self).__init__(*args, **kwargs)
def channel(self):
self._channel_open = False
cid = self._next_channel_number()
self.callbacks.add(cid, spec.Channel.CloseOk, self._on_channel_close)
transport = blocking.BlockingChannelTransport(self, cid)
channel = self._channels[cid] = self.Channel(self, cid, transport)
channel.connection = self
return channel
def drain_events(self, timeout=None):
if timeout:
prev = self.socket.gettimeout()
self.socket.settimeout(timeout)
try:
self._handle_read()
finally:
if timeout:
self.socket.settimeout(prev)
self._flush_outbound()
def close(self, *args):
self.client = None
super(Connection, self).close(*args)
AuthenticationError = getattr(exceptions, "AuthenticationError",
getattr(exceptions, "LoginError"))
class Transport(base.Transport):
default_port = DEFAULT_PORT
connection_errors = (socket.error,
exceptions.ConnectionClosed,
exceptions.ChannelClosed,
AuthenticationError,
exceptions.NoFreeChannels,
exceptions.DuplicateConsumerTag,
exceptions.UnknownConsumerTag,
exceptions.RecursiveOperationDetected,
exceptions.ProtocolSyntaxError)
channel_errors = (StdChannelError,
exceptions.ChannelClosed,
exceptions.DuplicateConsumerTag,
exceptions.UnknownConsumerTag,
exceptions.ProtocolSyntaxError)
Message = Message
Connection = Connection
def __init__(self, client, **kwargs):
self.client = client
self.default_port = kwargs.get("default_port", self.default_port)
def create_channel(self, connection):
return connection.channel()
def drain_events(self, connection, **kwargs):
return connection.drain_events(**kwargs)
def establish_connection(self):
"""Establish connection to the AMQP broker."""
conninfo = self.client
for name, default_value in self.default_connection_params.items():
if not getattr(conninfo, name, None):
setattr(conninfo, name, default_value)
credentials = pika.PlainCredentials(conninfo.userid,
conninfo.password)
return self.Connection(self.client,
pika.ConnectionParameters(
conninfo.hostname, port=conninfo.port,
virtual_host=conninfo.virtual_host,
credentials=credentials))
def close_connection(self, connection):
"""Close the AMQP broker connection."""
connection.close()
@property
def default_connection_params(self):
return {"hostname": "localhost", "port": self.default_port,
"userid": "guest", "password": "guest"}
<|code_end|>
|
Every character preserved:
"""
kombu.transport.pika
====================
Pika transport.
:copyright: (c) 2009 - 2012 by Ask Solem.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import socket
from operator import attrgetter
from kombu.exceptions import StdChannelError
from . import base
import pika
from pika import spec
from pika.adapters import blocking_connection as blocking
from pika import exceptions
DEFAULT_PORT = 5672
BASIC_PROPERTIES = ("content_type", "content_encoding",
"headers", "delivery_mode", "priority",
"correlation_id", "reply_to", "expiration",
"message_id", "timestamp", "type", "user_id",
"app_id", "cluster_id")
class Message(base.Message):
def __init__(self, channel, amqp_message, **kwargs):
channel_id, method, props, body = amqp_message
propdict = dict(zip(BASIC_PROPERTIES,
attrgetter(*BASIC_PROPERTIES)(props)))
kwargs.update({"body": body,
"delivery_tag": method.delivery_tag,
"content_type": props.content_type,
"content_encoding": props.content_encoding,
"headers": props.headers,
"properties": propdict,
"delivery_info": dict(
consumer_tag=getattr(method, "consumer_tag", None),
routing_key=method.routing_key,
delivery_tag=method.delivery_tag,
redelivered=method.redelivered,
exchange=method.exchange)})
super(Message, self).__init__(channel, **kwargs)
class Channel(blocking.BlockingChannel, base.StdChannel):
Message = Message
def basic_get(self, queue, no_ack):
method = super(Channel, self).basic_get(self, queue=queue,
no_ack=no_ack)
# pika returns semi-predicates (GetEmpty/GetOk).
if isinstance(method, spec.Basic.GetEmpty):
return
return None, method, method._properties, method._body
def queue_purge(self, queue=None, nowait=False):
return super(Channel, self).queue_purge(queue=queue,
nowait=nowait).message_count
def basic_publish(self, message, exchange, routing_key, mandatory=False,
immediate=False):
body, properties = message
try:
return super(Channel, self).basic_publish(exchange,
routing_key,
body,
properties,
mandatory,
immediate)
finally:
# Pika does not automatically flush the outbound buffer
# TODO async: Needs to support `nowait`.
self.connection._flush_outbound()
def basic_consume(self, queue, no_ack=False, consumer_tag=None,
callback=None, nowait=False):
# Kombu callbacks only take a single `message` argument,
# but pika applies the callback with 4 arguments, so we need
# to wrap these into a single tuple.
def _callback_decode(channel, method, header, body):
return callback((channel, method, header, body))
return super(Channel, self).basic_consume(
_callback_decode, queue, no_ack, False, consumer_tag)
def prepare_message(self, body, priority=None,
content_type=None, content_encoding=None, headers=None,
properties=None):
properties = spec.BasicProperties(priority=priority,
content_type=content_type,
content_encoding=content_encoding,
headers=headers,
**properties)
return body, properties
def message_to_python(self, raw_message):
return self.Message(channel=self, amqp_message=raw_message)
def basic_qos(self, prefetch_size, prefetch_count, a_global=False):
return super(Channel, self).basic_qos(prefetch_size=prefetch_size,
prefetch_count=prefetch_count,
global_=a_global)
def __enter__(self):
return self
def __exit__(self, *exc_info):
self.close()
def close(self, *args):
super(Channel, self).close(*args)
self.connection = None
if getattr(self, "handler", None):
if getattr(self.handler, "connection", None):
self.handler.connection.channels.pop(
self.handler.channel_number, None)
self.handler.connection = None
self.handler = None
@property
def channel_id(self):
return self.channel_number
class Connection(blocking.BlockingConnection):
Channel = Channel
def __init__(self, client, *args, **kwargs):
self.client = client
super(Connection, self).__init__(*args, **kwargs)
def channel(self):
self._channel_open = False
cid = self._next_channel_number()
self.callbacks.add(cid, spec.Channel.CloseOk, self._on_channel_close)
transport = blocking.BlockingChannelTransport(self, cid)
channel = self._channels[cid] = self.Channel(self, cid, transport)
channel.connection = self
return channel
def drain_events(self, timeout=None):
if timeout:
prev = self.socket.gettimeout()
self.socket.settimeout(timeout)
try:
self._handle_read()
finally:
if timeout:
self.socket.settimeout(prev)
self._flush_outbound()
def close(self, *args):
self.client = None
super(Connection, self).close(*args)
AuthenticationError = getattr(exceptions, "AuthenticationError",
getattr(exceptions, "LoginError"))
class Transport(base.Transport):
default_port = DEFAULT_PORT
connection_errors = (socket.error,
exceptions.ConnectionClosed,
exceptions.ChannelClosed,
AuthenticationError,
exceptions.NoFreeChannels,
exceptions.DuplicateConsumerTag,
exceptions.UnknownConsumerTag,
exceptions.RecursiveOperationDetected,
exceptions.ProtocolSyntaxError)
channel_errors = (StdChannelError,
exceptions.ChannelClosed,
exceptions.DuplicateConsumerTag,
exceptions.UnknownConsumerTag,
exceptions.ProtocolSyntaxError)
Message = Message
Connection = Connection
def __init__(self, client, **kwargs):
self.client = client
self.default_port = kwargs.get("default_port", self.default_port)
def create_channel(self, connection):
return connection.channel()
def drain_events(self, connection, **kwargs):
return connection.drain_events(**kwargs)
def establish_connection(self):
"""Establish connection to the AMQP broker."""
conninfo = self.client
for name, default_value in self.default_connection_params.items():
if not getattr(conninfo, name, None):
setattr(conninfo, name, default_value)
credentials = pika.PlainCredentials(conninfo.userid,
conninfo.password)
return self.Connection(self.client,
pika.ConnectionParameters(
conninfo.hostname, port=conninfo.port,
virtual_host=conninfo.virtual_host,
credentials=credentials))
def close_connection(self, connection):
"""Close the AMQP broker connection."""
connection.close()
@property
def default_connection_params(self):
return {"hostname": "localhost", "port": self.default_port,
"userid": "guest", "password": "guest"}
|
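Message.__init__ above packs pika's BasicProperties into a plain dict with a single attrgetter call; the same stdlib trick in isolation (Props is a stand-in, not pika's class):
from operator import attrgetter
FIELDS = ("content_type", "priority", "reply_to")
class Props(object):       # stand-in for pika's BasicProperties
    content_type = "application/json"
    priority = 0
    reply_to = None
# attrgetter with several names returns a tuple of values, which zips
# back against the names to give {field: value}.
propdict = dict(zip(FIELDS, attrgetter(*FIELDS)(Props())))
print(propdict["content_type"])      # -> application/json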
Echo the following:
<|code_start|># coding: utf-8
from __future__ import (
absolute_import, division, print_function, unicode_literals)
import copy
import os
from functools import partial
from itertools import chain, count
import click
from ._pip_compat import install_req_from_line
from .cache import DependencyCache
from .logging import log
from .utils import (
UNSAFE_PACKAGES, first, format_requirement, format_specifier, full_groupby,
get_pinned_version, is_pinned_requirement, is_vcs_link, key_from_ireq)
green = partial(click.style, fg='green')
magenta = partial(click.style, fg='magenta')
class RequirementSummary(object):
"""
Summary of a requirement's properties for comparison purposes.
"""
def __init__(self, ireq):
self.req = ireq.req
self.key = key_from_ireq(ireq)
self.extras = str(sorted(ireq.extras))
self.specifier = str(ireq.specifier)
def __eq__(self, other):
return str(self) == str(other)
def __hash__(self):
return hash(str(self))
def __str__(self):
return repr([self.key, self.specifier, self.extras])
class Resolver(object):
def __init__(self, constraints, repository, cache=None, prereleases=False, clear_caches=False, allow_unsafe=False):
"""
This class resolves a given set of constraints (a collection of
InstallRequirement objects) by consulting the given Repository and the
DependencyCache.
"""
self.our_constraints = set(x for x in constraints if not x.constraint)
self.limiters = set(x for x in constraints if x.constraint)
self.their_constraints = set()
self.repository = repository
if cache is None:
cache = DependencyCache()
self.dependency_cache = cache
self.prereleases = prereleases
self.clear_caches = clear_caches
self.allow_unsafe = allow_unsafe
self.unsafe_constraints = set()
self._prepare_ireqs(self.our_constraints)
self._prepare_ireqs(self.limiters)
def _prepare_ireqs(self, constraints):
"""
Prepare install requirements for analysis.
:type constraints: Iterable[pip.req.InstallRequirement]
"""
for constraint in constraints:
if constraint.link and not constraint.prepared:
os.environ[str('PIP_EXISTS_ACTION')] = str('i')
self.repository.prepare_ireq(constraint)
del os.environ[str('PIP_EXISTS_ACTION')]
@property
def constraints(self):
grouped = self._group_constraints(chain(
self.our_constraints, self.their_constraints, self.limiters))
return set(ireq for ireq in grouped if not ireq.constraint)
def resolve_hashes(self, ireqs):
"""
Finds acceptable hashes for all of the given InstallRequirements.
"""
return {ireq: self.repository.get_hashes(ireq) for ireq in ireqs}
def resolve(self, max_rounds=10):
"""
Finds concrete package versions for all the given InstallRequirements
and their recursive dependencies. The end result is a flat list of
(name, version) tuples. (Or an editable package.)
Resolves constraints one round at a time, until they don't change
anymore. Protects against infinite loops by breaking out after a max
number of rounds.
"""
if self.clear_caches:
self.dependency_cache.clear()
self.repository.clear_caches()
self.check_constraints(chain(self.our_constraints,
self.their_constraints))
log.debug('Limiting constraints:')
for constraint in sorted(self.limiters, key=key_from_ireq):
log.debug(' {}'.format(constraint))
# Ignore existing packages
os.environ[str('PIP_EXISTS_ACTION')] = str('i') # NOTE: str() wrapping necessary for Python 2/3 compat
for current_round in count(start=1):
if current_round > max_rounds:
raise RuntimeError('No stable configuration of concrete packages '
'could be found for the given constraints after '
'%d rounds of resolving.\n'
'This is likely a bug.' % max_rounds)
log.debug('')
log.debug(magenta('{:^60}'.format('ROUND {}'.format(current_round))))
has_changed, best_matches = self._resolve_one_round()
log.debug('-' * 60)
log.debug('Result of round {}: {}'.format(current_round,
'not stable' if has_changed else 'stable, done'))
if not has_changed:
break
# If a package version (foo==2.0) was built in a previous round,
# and in this round a different version of foo needs to be built
# (i.e. foo==1.0), the directory will exist already, which will
# cause a pip build failure. The trick is to start with a new
# build cache dir for every round, so this can never happen.
self.repository.freshen_build_caches()
del os.environ['PIP_EXISTS_ACTION']
# Only include hard requirements and not pip constraints
return {req for req in best_matches if not req.constraint}
@staticmethod
def check_constraints(constraints):
pass
def _group_constraints(self, constraints):
"""
Groups constraints (remember, InstallRequirements!) by their key name,
combining their SpecifierSets into a single InstallRequirement per
package. For example, given the following constraints:
Django<1.9,>=1.4.2
django~=1.5
Flask~=0.7
This will be combined into a single entry per package:
django~=1.5,<1.9,>=1.4.2
flask~=0.7
"""
for _, ireqs in full_groupby(constraints, key=key_from_ireq):
ireqs = list(ireqs)
exception_ireq = first(
x for x in ireqs if x.editable or is_vcs_link(x))
if exception_ireq:
yield exception_ireq # ignore all the other specs: the editable/vcs one is the one that counts
continue
ireqs = iter(ireqs)
# deepcopy the accumulator so as to not modify the self.our_constraints invariant
combined_ireq = copy.deepcopy(next(ireqs))
combined_ireq.comes_from = None
for ireq in ireqs:
# NOTE we may be losing some info on dropped reqs here
combined_ireq.req.specifier &= ireq.req.specifier
combined_ireq.constraint &= ireq.constraint
# Return a sorted, de-duped tuple of extras
combined_ireq.extras = tuple(sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras))))
pinned_version = get_pinned_version(combined_ireq)
if pinned_version: # Simplify combined_ireq to single version
specset = type(combined_ireq.specifier)('==' + pinned_version)
combined_ireq.req.specifier = specset
yield combined_ireq
def _resolve_one_round(self): # noqa: C901 (too complex)
"""
Resolves one level of the current constraints, by finding the best
match for each package in the repository and adding all requirements
for those best package versions. Some of these constraints may be new
or updated.
Returns whether new constraints appeared in this round. If no
constraints were added or changed, this indicates a stable
configuration.
"""
# Sort this list for readability of terminal output
constraints = sorted(self.constraints, key=key_from_ireq)
unsafe_constraints = []
original_constraints = copy.copy(constraints)
if not self.allow_unsafe:
for constraint in original_constraints:
if constraint.name in UNSAFE_PACKAGES:
constraints.remove(constraint)
constraint.req.specifier = type(constraint.req.specifier)()
unsafe_constraints.append(constraint)
log.debug('Current constraints:')
for constraint in constraints:
log.debug(' {}'.format(constraint))
log.debug('')
log.debug('Finding the best candidates:')
best_matches = {self.get_best_match(ireq) for ireq in constraints}
# Find the new set of secondary dependencies
log.debug('')
log.debug('Finding secondary dependencies:')
safe_constraints = list(self.limiters)
for best_match in best_matches:
for dep in self._iter_dependencies(best_match):
if self.allow_unsafe or dep.name not in UNSAFE_PACKAGES:
safe_constraints.append(dep)
else:
dep.req.specifier = type(dep.req.specifier)()
unsafe_constraints.append(dep)
unsafe_constraints = list(
self._group_constraints(unsafe_constraints))
# Grouping constraints to make clean diff between rounds
theirs = set(
ireq for ireq in self._group_constraints(safe_constraints)
if not ireq.constraint)
# NOTE: We need to compare RequirementSummary objects, since
# InstallRequirement does not define equality
diff = {RequirementSummary(t) for t in theirs} - {RequirementSummary(t) for t in self.their_constraints}
removed = ({RequirementSummary(t) for t in self.their_constraints} -
{RequirementSummary(t) for t in theirs})
unsafe = ({RequirementSummary(t) for t in unsafe_constraints} -
{RequirementSummary(t) for t in self.unsafe_constraints})
has_changed = len(diff) > 0 or len(removed) > 0 or len(unsafe) > 0
if has_changed:
log.debug('')
log.debug('New dependencies found in this round:')
for new_dependency in sorted(diff, key=lambda ireq: key_from_ireq(ireq)):
log.debug(' adding {}'.format(new_dependency))
log.debug('Removed dependencies in this round:')
for removed_dependency in sorted(removed, key=lambda ireq: key_from_ireq(ireq)):
log.debug(' removing {}'.format(removed_dependency))
log.debug('Unsafe dependencies in this round:')
for unsafe_dependency in sorted(unsafe, key=lambda ireq: key_from_ireq(ireq)):
log.debug(' remembering unsafe {}'.format(unsafe_dependency))
# Store the last round's results in the their_constraints
self.their_constraints = theirs
# Store the last round's unsafe constraints
self.unsafe_constraints = unsafe_constraints
return has_changed, best_matches
def get_best_match(self, ireq):
"""
Returns a (pinned or editable) InstallRequirement, indicating the best
match to use for the given InstallRequirement (in the form of an
InstallRequirement).
Example:
Given the constraint Flask>=0.10, may return Flask==0.10.1 at
a certain moment in time.
Pinned requirements will always return themselves, i.e.
Flask==0.10.1 => Flask==0.10.1
"""
if ireq.editable or is_vcs_link(ireq):
# NOTE: it's much quicker to immediately return instead of
# hitting the index server
best_match = ireq
elif is_pinned_requirement(ireq):
# NOTE: it's much quicker to immediately return instead of
# hitting the index server
best_match = ireq
else:
best_match = self.repository.find_best_match(ireq, prereleases=self.prereleases)
# Format the best match
log.debug(' found candidate {} (constraint was {})'.format(format_requirement(best_match),
format_specifier(ireq)))
return best_match
def _iter_dependencies(self, ireq):
"""
Given a pinned or editable InstallRequirement, collects all the
secondary dependencies for them, either by looking them up in a local
cache, or by reaching out to the repository.
Editable requirements will never be looked up, as they may have
changed at any time.
"""
if not is_pinned_requirement(ireq) and not ireq.editable:
raise TypeError('Expected pinned or editable requirement, got {}'.format(ireq))
# Now, either get the dependencies from the dependency cache (for
# speed), or reach out to the external repository to
# download and inspect the package version and get dependencies
# from there
if ireq not in self.dependency_cache:
log.debug(' {} not in cache, need to check index'.format(format_requirement(ireq)), fg='yellow')
dependencies = self.repository.get_dependencies(ireq)
self.dependency_cache[ireq] = sorted(str(ireq.req) for ireq in dependencies)
# Example: ['Werkzeug>=0.9', 'Jinja2>=2.4']
dependency_strings = self.dependency_cache[ireq]
log.debug(' {:25} requires {}'.format(format_requirement(ireq),
', '.join(sorted(dependency_strings, key=lambda s: s.lower())) or '-'))
for dependency_string in dependency_strings:
yield install_req_from_line(
dependency_string, constraint=ireq.constraint)
def reverse_dependencies(self, ireqs):
return self.dependency_cache.reverse_dependencies(ireqs)
<|code_end|>
|
# coding: utf-8
from __future__ import (
absolute_import, division, print_function, unicode_literals)
import copy
import os
from functools import partial
from itertools import chain, count
import click
from ._pip_compat import install_req_from_line
from .cache import DependencyCache
from .logging import log
from .utils import (
UNSAFE_PACKAGES, first, format_requirement, format_specifier, full_groupby,
get_pinned_version, is_pinned_requirement, is_vcs_link, key_from_ireq)
green = partial(click.style, fg='green')
magenta = partial(click.style, fg='magenta')
class RequirementSummary(object):
"""
Summary of a requirement's properties for comparison purposes.
"""
def __init__(self, ireq):
self.req = ireq.req
self.key = key_from_ireq(ireq)
self.extras = str(sorted(ireq.extras))
self.specifier = str(ireq.specifier)
def __eq__(self, other):
return str(self) == str(other)
def __hash__(self):
return hash(str(self))
def __str__(self):
return repr([self.key, self.specifier, self.extras])
class Resolver(object):
def __init__(self, constraints, repository, cache=None, prereleases=False, clear_caches=False, allow_unsafe=False):
"""
This class resolves a given set of constraints (a collection of
InstallRequirement objects) by consulting the given Repository and the
DependencyCache.
"""
self.our_constraints = set(x for x in constraints if not x.constraint)
self.limiters = set(x for x in constraints if x.constraint)
self.their_constraints = set()
self.repository = repository
if cache is None:
cache = DependencyCache()
self.dependency_cache = cache
self.prereleases = prereleases
self.clear_caches = clear_caches
self.allow_unsafe = allow_unsafe
self.unsafe_constraints = set()
self._prepare_ireqs(self.our_constraints)
self._prepare_ireqs(self.limiters)
def _prepare_ireqs(self, constraints):
"""
Prepare install requirements for analysis.
:type constraints: Iterable[pip.req.InstallRequirement]
"""
for constraint in constraints:
if constraint.link and not constraint.prepared:
os.environ[str('PIP_EXISTS_ACTION')] = str('i')
self.repository.prepare_ireq(constraint)
del os.environ[str('PIP_EXISTS_ACTION')]
@property
def constraints(self):
grouped = self._group_constraints(chain(
self.our_constraints, self.their_constraints, self.limiters))
return set(ireq for ireq in grouped if not ireq.constraint)
def resolve_hashes(self, ireqs):
"""
Finds acceptable hashes for all of the given InstallRequirements.
"""
return {ireq: self.repository.get_hashes(ireq) for ireq in ireqs}
def resolve(self, max_rounds=10):
"""
Finds concrete package versions for all the given InstallRequirements
and their recursive dependencies. The end result is a flat list of
(name, version) tuples. (Or an editable package.)
Resolves constraints one round at a time, until they don't change
anymore. Protects against infinite loops by breaking out after a max
number of rounds.
"""
if self.clear_caches:
self.dependency_cache.clear()
self.repository.clear_caches()
self.check_constraints(chain(self.our_constraints,
self.their_constraints))
log.debug('Limiting constraints:')
for constraint in sorted(self.limiters, key=key_from_ireq):
log.debug(' {}'.format(constraint))
# Ignore existing packages
os.environ[str('PIP_EXISTS_ACTION')] = str('i') # NOTE: str() wrapping necessary for Python 2/3 compat
for current_round in count(start=1):
if current_round > max_rounds:
raise RuntimeError('No stable configuration of concrete packages '
'could be found for the given constraints after '
'%d rounds of resolving.\n'
'This is likely a bug.' % max_rounds)
log.debug('')
log.debug(magenta('{:^60}'.format('ROUND {}'.format(current_round))))
has_changed, best_matches = self._resolve_one_round()
log.debug('-' * 60)
log.debug('Result of round {}: {}'.format(current_round,
'not stable' if has_changed else 'stable, done'))
if not has_changed:
break
# If a package version (foo==2.0) was built in a previous round,
# and in this round a different version of foo needs to be built
# (i.e. foo==1.0), the directory will exist already, which will
# cause a pip build failure. The trick is to start with a new
# build cache dir for every round, so this can never happen.
self.repository.freshen_build_caches()
del os.environ['PIP_EXISTS_ACTION']
# Only include hard requirements and not pip constraints
return {req for req in best_matches if not req.constraint}
@staticmethod
def check_constraints(constraints):
pass
def _group_constraints(self, constraints):
"""
Groups constraints (remember, InstallRequirements!) by their key name,
and combines their SpecifierSets into a single InstallRequirement per
package. For example, given the following constraints:
Django<1.9,>=1.4.2
django~=1.5
Flask~=0.7
This will be combined into a single entry per package:
django~=1.5,<1.9,>=1.4.2
flask~=0.7
"""
for _, ireqs in full_groupby(constraints, key=key_from_ireq):
ireqs = list(ireqs)
exception_ireq = first(
x for x in ireqs if x.editable or is_vcs_link(x))
if exception_ireq:
yield exception_ireq # ignore all the other specs: the editable/vcs one is the one that counts
continue
ireqs = iter(ireqs)
# deepcopy the accumulator so as to not modify the self.our_constraints invariant
combined_ireq = copy.deepcopy(next(ireqs))
combined_ireq.comes_from = None
for ireq in ireqs:
# NOTE we may be losing some info on dropped reqs here
combined_ireq.req.specifier &= ireq.req.specifier
combined_ireq.constraint &= ireq.constraint
# Return a sorted, de-duped tuple of extras
combined_ireq.extras = tuple(sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras))))
pinned_version = get_pinned_version(combined_ireq)
if pinned_version: # Simplify combined_ireq to single version
specset = type(combined_ireq.specifier)('==' + pinned_version)
combined_ireq.req.specifier = specset
yield combined_ireq
def _resolve_one_round(self): # noqa: C901 (too complex)
"""
Resolves one level of the current constraints, by finding the best
match for each package in the repository and adding all requirements
for those best package versions. Some of these constraints may be new
or updated.
Returns whether new constraints appeared in this round. If no
constraints were added or changed, this indicates a stable
configuration.
"""
# Sort this list for readability of terminal output
constraints = sorted(self.constraints, key=key_from_ireq)
unsafe_constraints = []
original_constraints = copy.copy(constraints)
if not self.allow_unsafe:
for constraint in original_constraints:
if constraint.name in UNSAFE_PACKAGES:
constraints.remove(constraint)
constraint.req.specifier = type(constraint.req.specifier)()
unsafe_constraints.append(constraint)
log.debug('Current constraints:')
for constraint in constraints:
log.debug(' {}'.format(constraint))
log.debug('')
log.debug('Finding the best candidates:')
best_matches = {self.get_best_match(ireq) for ireq in constraints}
# Find the new set of secondary dependencies
log.debug('')
log.debug('Finding secondary dependencies:')
safe_constraints = list(self.limiters)
for best_match in best_matches:
for dep in self._iter_dependencies(best_match):
if self.allow_unsafe or dep.name not in UNSAFE_PACKAGES:
safe_constraints.append(dep)
else:
dep.req.specifier = type(dep.req.specifier)()
unsafe_constraints.append(dep)
unsafe_constraints = list(
self._group_constraints(unsafe_constraints))
# Grouping constraints to make clean diff between rounds
theirs = set(
ireq for ireq in self._group_constraints(safe_constraints)
if not ireq.constraint)
# NOTE: We need to compare RequirementSummary objects, since
# InstallRequirement does not define equality
diff = {RequirementSummary(t) for t in theirs} - {RequirementSummary(t) for t in self.their_constraints}
removed = ({RequirementSummary(t) for t in self.their_constraints} -
{RequirementSummary(t) for t in theirs})
unsafe = ({RequirementSummary(t) for t in unsafe_constraints} -
{RequirementSummary(t) for t in self.unsafe_constraints})
has_changed = len(diff) > 0 or len(removed) > 0 or len(unsafe) > 0
if has_changed:
log.debug('')
log.debug('New dependencies found in this round:')
for new_dependency in sorted(diff, key=lambda ireq: key_from_ireq(ireq)):
log.debug(' adding {}'.format(new_dependency))
log.debug('Removed dependencies in this round:')
for removed_dependency in sorted(removed, key=lambda ireq: key_from_ireq(ireq)):
log.debug(' removing {}'.format(removed_dependency))
log.debug('Unsafe dependencies in this round:')
for unsafe_dependency in sorted(unsafe, key=lambda ireq: key_from_ireq(ireq)):
log.debug(' remembering unsafe {}'.format(unsafe_dependency))
# Store the last round's results in the their_constraints
self.their_constraints = theirs
# Store the last round's unsafe constraints
self.unsafe_constraints = unsafe_constraints
return has_changed, best_matches
def get_best_match(self, ireq):
"""
Returns a (pinned or editable) InstallRequirement, indicating the best
match to use for the given InstallRequirement (in the form of an
InstallRequirement).
Example:
Given the constraint Flask>=0.10, may return Flask==0.10.1 at
a certain moment in time.
Pinned requirements will always return themselves, i.e.
Flask==0.10.1 => Flask==0.10.1
"""
if ireq.editable or is_vcs_link(ireq):
# NOTE: it's much quicker to immediately return instead of
# hitting the index server
best_match = ireq
elif is_pinned_requirement(ireq):
# NOTE: it's much quicker to immediately return instead of
# hitting the index server
best_match = ireq
else:
best_match = self.repository.find_best_match(ireq, prereleases=self.prereleases)
# Format the best match
log.debug(' found candidate {} (constraint was {})'.format(format_requirement(best_match),
format_specifier(ireq)))
return best_match
def _iter_dependencies(self, ireq):
"""
Given a pinned or editable InstallRequirement, collects all the
secondary dependencies for it, either by looking them up in a local
cache, or by reaching out to the repository.
Editable requirements will never be looked up, as they may have
changed at any time.
"""
if not is_pinned_requirement(ireq) and not ireq.editable:
raise TypeError('Expected pinned or editable requirement, got {}'.format(ireq))
# Now, either get the dependencies from the dependency cache (for
# speed), or reach out to the external repository to
# download and inspect the package version and get dependencies
# from there
if ireq not in self.dependency_cache:
log.debug(' {} not in cache, need to check index'.format(format_requirement(ireq)), fg='yellow')
dependencies = self.repository.get_dependencies(ireq)
self.dependency_cache[ireq] = sorted(str(ireq.req) for ireq in dependencies)
# Example: ['Werkzeug>=0.9', 'Jinja2>=2.4']
dependency_strings = self.dependency_cache[ireq]
log.debug(' {:25} requires {}'.format(format_requirement(ireq),
', '.join(sorted(dependency_strings, key=lambda s: s.lower())) or '-'))
for dependency_string in dependency_strings:
yield install_req_from_line(
dependency_string, constraint=ireq.constraint)
def reverse_dependencies(self, ireqs):
return self.dependency_cache.reverse_dependencies(ireqs)
|
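The `_group_constraints` method in the completed file above intersects per-package `SpecifierSet`s with `&=`. A minimal standalone sketch of that intersection, assuming the `packaging` library (which pip vendors); the versions below mirror the docstring's Django example and are illustrative only:
from packaging.specifiers import SpecifierSet

# Combine Django<1.9,>=1.4.2 with django~=1.5, as _group_constraints would.
combined = SpecifierSet(">=1.4.2,<1.9") & SpecifierSet("~=1.5")

# Membership tests evaluate a candidate version against every specifier at once.
print("1.8.1" in combined)  # True: satisfies >=1.4.2, <1.9 and ~=1.5
print("1.9" in combined)    # False: rejected by <1.9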
Here is a snippet: <|code_start|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openengsb.labs.paxexam.karaf.regression;
@RunWith(JUnit4TestRunner.class)
@ExamReactorStrategy(AllConfinedStagedReactorFactory.class)
public class BootDelegationOptionTest {
@Configuration
public Option[] config() {
return new Option[]{<|code_end|>
. Write the next line using the current file imports:
import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory;
import static junit.framework.Assert.assertEquals;
import static org.openengsb.labs.paxexam.karaf.options.KarafDistributionOption.karafDistributionConfiguration;
import static org.ops4j.pax.exam.CoreOptions.bootDelegationPackage;
import java.io.FileInputStream;
import java.util.Properties;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.junit.Configuration;
import org.ops4j.pax.exam.junit.ExamReactorStrategy;
import org.ops4j.pax.exam.junit.JUnit4TestRunner;
and context from other files:
// Path: options/src/main/java/org/openengsb/labs/paxexam/karaf/options/KarafDistributionOption.java
// public static KarafDistributionBaseConfigurationOption karafDistributionConfiguration(String frameworkURL,
// String name, String karafVersion) {
// return new KarafDistributionConfigurationOption(frameworkURL, name, karafVersion);
// }
, which may include functions, classes, or code. Output only the next line.
|
karafDistributionConfiguration("mvn:org.apache.karaf/apache-karaf/2.2.5/zip", "karaf", "2.2.5"),
|
Can you complete this?
<|code_start|>import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-easy-api',
version='0.3.0',
packages=['easy_api'],
install_requires=[''],
include_package_data=True,
license='MIT License',
description='Create an API for your Django project with only a single line of code!',
long_description=README,
url='https://github.com/Miserlou/django-<|code_end|>
|
easy-api',
author='Rich Jones',
author_email='[email protected]',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
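The setup.py completed above reads README.md without ever closing the file handle. A tidier, hypothetical variant of that read (not part of the original package), using only the standard library:
import io
import os

here = os.path.abspath(os.path.dirname(__file__))
# Context manager closes the file; io.open keeps Python 2/3 compatibility.
with io.open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
    README = f.read()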
Given snippet: <|code_start|>
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^$', IndexView.as_view(), name='index'),
url(r'^properties/$', listing_views.ListingList.as_view(), name='all_properties'),
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from django.conf.urls import *
from django.conf import settings
from django.contrib import admin
from django.views.generic import TemplateView
from realestate.listing import views as listing_views
from realestate.api.urls import router as api_router
from realestate.home.views import ContactView, IndexView, ListingFeed
from realestate.listing import sitemap
and context:
# Path: realestate/listing/views.py
# class ListingList(ListView):
# class ListingForSaleList(ListView):
# class ListingForRentList(ListView):
# class ListingView(FormMixin, DetailView):
# class MapView(JSONResponseMixin, AjaxResponseMixin, View):
# class AgentList(ListView):
# class AgentListing(ListView):
# def get_queryset(self):
# def get_context_data(self, **kwargs):
# def get_queryset(self):
# def get_context_data(self, **kwargs):
# def get_queryset(self):
# def get_context_data(self, **kwargs):
# def post(self, request, *args, **kwargs):
# def form_valid(self, form):
# def get_ajax(self, request, *args, **kwargs):
# def get_queryset(self):
# def get_queryset(self):
#
# Path: realestate/api/urls.py
#
# Path: realestate/home/views.py
# class ContactView(FormView):
# template_name = 'home/contact-us.html'
# form_class = ContactForm
# success_url = reverse_lazy('thank-you')
#
# def form_valid(self, form):
# form.send_email()
# return super(ContactView, self).form_valid(form)
#
# class IndexView(TemplateView):
# template_name = 'index.html'
#
# def get_context_data(self, **kwargs):
# context = super(IndexView, self).get_context_data(**kwargs)
# recentp = Listing.objects.active().order_by('-created_at')[:config.RECENTLY_ADDED]
# context['recent'] = recentp
# return context
#
# class ListingFeed(Feed):
# title = "Recent Listing Feed"
# link = "/rss/"
# description = "Recent Listing Feed"
# description_template = "home/rss-item-description.html"
#
# def items(self):
# return Listing.objects.order_by('-last_modified')[:10]
#
# def item_title(self, item):
# return item.title
#
# Path: realestate/listing/sitemap.py
# class ListingSitemap(Sitemap):
# def items(self):
# def lastmod(self, obj):
which might include code, classes, or functions. Output only the next line.
|
url(r'^sale/$', listing_views.ListingForSaleList.as_view(), name='properties_for_sale'),
|
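The predicted line wires `ListingForSaleList` to the `properties_for_sale` route name. A hypothetical sanity check with `reverse()`, assuming a Django version contemporary with the `patterns()` syntax shown and a configured URLconf; the resolved paths follow from the regexes in the snippet:
from django.core.urlresolvers import reverse  # django.urls.reverse on modern Django

reverse('all_properties')       # -> '/properties/'
reverse('properties_for_sale')  # -> '/sale/'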
Continue the code snippet: <|code_start|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package koper.demo.main;
/**
* @author caie
* @since 1.2
*/
public class SendMsgDemo {
public static void main(String[] args) {
final ApplicationContext context =
new ClassPathXmlApplicationContext("classpath:kafka/context-data-producer.xml");
final MemberService memberService = context.getBean(MemberService.class);
<|code_end|>
. Use current file imports:
import koper.demo.message.entity.Member;
import koper.demo.message.service.MemberService;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
and context (classes, functions, or code) from other files:
// Path: koper-demo/src/main/java/koper/demo/message/entity/Member.java
// public class Member {
//
// private Integer id;
//
// private String name;
//
// private String phoneNo;
//
// public Integer getId() {
// return id;
// }
//
// public void setId(Integer id) {
// this.id = id;
// }
//
// public String getName() {
// return name;
// }
//
// public void setName(String name) {
// this.name = name;
// }
//
// public String getPhoneNo() {
// return phoneNo;
// }
//
// public void setPhoneNo(String phoneNo) {
// this.phoneNo = phoneNo;
// }
// }
//
// Path: koper-demo/src/main/java/koper/demo/message/service/MemberService.java
// public interface MemberService {
//
// void signup(Member member);
//
// }
. Output only the next line.
|
Member member = new Member();
|
Implement the remainder:
<|code_start|># Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Buildgen transitive dependencies
This takes the list of libs, node_modules, and targets from our
yaml dictionary, and adds to each the transitive closure
of the list of dependencies.
"""
def transitive_deps(lib_map, node):
"""Returns a list of transitive dependencies from node.
Recursively iterate all dependent node in a depth-first fashion and
list a result using a topological sorting.
"""
result = []
seen = set()
start = node
def recursive_helper(node):
if node is None:
<|code_end|>
|
Writing completion:
return
for dep in node.get("deps", []):
if dep not in seen:
seen.add(dep)
next_node = lib_map.get(dep)
recursive_helper(next_node)
if node is not start:
result.insert(0, node["name"])
recursive_helper(node)
return result
def mako_plugin(dictionary):
"""The exported plugin code for transitive_dependencies.
Iterate over each list and check each item for a deps list. We add a
transitive_deps property to each with the transitive closure of those
dependency lists. The result list is sorted in a topological ordering.
"""
lib_map = {lib['name']: lib for lib in dictionary.get('libs')}
for target_name, target_list in dictionary.items():
for target in target_list:
if isinstance(target, dict) and 'deps' in target:
target['transitive_deps'] = transitive_deps(lib_map, target)
python_dependencies = dictionary.get('python_dependencies')
python_dependencies['transitive_deps'] = transitive_deps(
lib_map, python_dependencies)
|
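A small, hypothetical driver for the `transitive_deps` helper completed above, showing the depth-first collection on a toy dependency map (the lib names are made up):
lib_map = {
    'a': {'name': 'a', 'deps': ['b', 'c']},
    'b': {'name': 'b', 'deps': ['c']},
    'c': {'name': 'c'},
}
# 'a' pulls in 'b', which pulls in 'c'; the start node itself is excluded.
print(transitive_deps(lib_map, lib_map['a']))  # ['b', 'c']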
Given snippet: <|code_start|>
all_supported_versions[guid] = (
app_versions[min_ver_pos:max_ver_pos + 1])
err.metadata['applications'][APPLICATIONS[guid]] = {
'min': min_version,
'max': max_version,
}
# Test whether it's a FF4 addon
# NOTE: This should probably also be extrapolated for
# Thunderbird and the like when they get up to speed. The tests
# will likely be the same down the line, so we can keep the
# "ff4" resource as a legacy thing and worry about it later.
if is_firefox:
ff4_pos = app_versions.index(FF4_MIN)
if max_ver_pos >= ff4_pos:
err.save_resource('ff4', True)
no_duplicate_targets = set(used_targets)
if len(used_targets) != len(no_duplicate_targets):
err.error(('testcases_targetapplication',
'test_targetedapplications',
'duplicate_targetapps'),
'Found duplicate target application elements.',
'Multiple target application elements were found in the '
'manifest file that refer to the same application GUID. '
'There should not be duplicate target applications '
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import validator.constants
from validator import decorator
from validator.constants import FF4_MIN, APPLICATIONS
and context:
# Path: validator/decorator.py
# TEST_TIERS = {}
# CLEANUP_FUNCTIONS = []
# def register_test(tier=1, expected_type=None, simple=False, versions=None):
# def wrap(function):
# def register_cleanup(cleanup):
# def cleanup():
# def get_tiers():
# def get_tests(tier, type_=None):
# def version_range(guid, version, before=None, app_versions=None):
which might include code, classes, or functions. Output only the next line.
|
'entries.',
|
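The duplicate-target test in the prompt above relies on comparing a list's length against its `set`; the same idiom in isolation (the GUID strings here are made up):
used_targets = ['{guid-one}', '{guid-two}', '{guid-one}']
if len(used_targets) != len(set(used_targets)):
    print('Found duplicate target application elements.')  # fires: guid-one repeats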
Support Python-level Device ctor and comparison
#include "xchainer/python/device.h"
#include <sstream>
#include "xchainer/device.h"
namespace xchainer {
namespace py = pybind11; // standard convention
void InitXchainerDevice(pybind11::module& m) {
py::class_<Device>(m, "Device").def("__repr__", [](Device device) {
std::ostringstream os;
os << "<Device " << device.name << ">";
return os.str();
});
m.def("get_current_device", []() { return GetCurrentDevice(); });
m.def("set_current_device", [](const Device& device) { SetCurrentDevice(device); });
m.def("set_current_device", [](const std::string& name) { SetCurrentDevice(name); });
}
} // namespace xchainer
|
#include "xchainer/python/device.h"
#include <sstream>
#include "xchainer/device.h"
namespace xchainer {
namespace py = pybind11; // standard convention
void InitXchainerDevice(pybind11::module& m) {
py::class_<Device>(m, "Device")
.def(py::init(&MakeDevice))
.def("__eq__", py::overload_cast<const Device&, const Device&>(&operator==))
.def("__ne__", py::overload_cast<const Device&, const Device&>(&operator!=))
.def("__repr__",
[](Device device) {
std::ostringstream os;
os << "<Device " << device.name << ">";
return os.str();
});
m.def("get_current_device", []() { return GetCurrentDevice(); });
m.def("set_current_device", [](const Device& device) { SetCurrentDevice(device); });
m.def("set_current_device", [](const std::string& name) { SetCurrentDevice(name); });
}
} // namespace xchainer
|
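Hypothetical Python-side use of the pybind11 binding added in the diff above, assuming the extension imports as `xchainer` and that `MakeDevice` accepts a device name (both assumptions; neither is confirmed by the snippet):
import xchainer

dev = xchainer.Device('cpu')                 # ctor bound via py::init(&MakeDevice); 'cpu' is a made-up name
xchainer.set_current_device(dev)
assert xchainer.get_current_device() == dev  # __eq__/__ne__ come from the overload_cast bindings
print(dev)                                   # <Device cpu>, per the bound __repr__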
<|code_start|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
# Generic check string for checking if a user is authorized on a particular
# project, specifically with the member role.
PROJECT_MEMBER = 'role:member and project_id:%(project_id)s'
# Generic check string for checking if a user is authorized on a particular
# project but with read-only access. For example, this persona would be able to
# list private images owned by a project but cannot make any writeable changes
# to those images.
PROJECT_READER = 'role:reader and project_id:%(project_id)s'
# Make sure the member_id of the supplied target matches the project_id from
# the context object, which is derived from keystone tokens.
IMAGE_MEMBER_CHECK = 'project_id:%(member_id)s'
# Check if the visibility of the image supplied in the target matches
# "community"
COMMUNITY_VISIBILITY_CHECK = '"community":%(visibility)s'
# Check if the visibility of the image supplied in the target matches "public"
PUBLIC_VISIBILITY_CHECK = '"public":%(visibility)s'
PROJECT_MEMBER_OR_IMAGE_MEMBER_OR_COMMUNITY_OR_PUBLIC = (
f'role:member and (project_id:%(project_id)s or {IMAGE_MEMBER_CHECK} '
f'or {COMMUNITY_VISIBILITY_CHECK} or {PUBLIC_VISIBILITY_CHECK})'
)
PROJECT_READER_OR_IMAGE_MEMBER_OR_COMMUNITY_OR_PUBLIC = (
f'role:reader and (project_id:%(project_id)s or {IMAGE_MEMBER_CHECK} '
f'or {COMMUNITY_VISIBILITY_CHECK} or {PUBLIC_VISIBILITY_CHECK})'
)
# FIXME(lbragstad): These are composite check strings that represents glance's
# authorization code, some of which is implemented in the authorization wrapper
# and some is in the database driver.
#
# These check strings do not support tenancy with the `admin` role. This means
# anyone with the `admin` role on any project can execute a policy, which is
# typical in OpenStack services. Eventually, these check strings will be
# super<|code_end|>
Logic incomplete - finish.
|
Completing the second half:
ceded by check strings that implement scope checking and system-scope
# for applicable APIs (e.g., making an image public). But, we have a lot of
# cleanup to do in different parts of glance to sweep all the authorization
# code into a single layer before we can safely consume system-scope and
# implement scope checking. This refactoring also needs significant API testing
# to ensure we don't leave doors open to unintended users, or expose
# authoritative regressions. In the mean time, we can use the following check
# strings to offer formal support for project membership and a read-only
# variant consistent with other OpenStack services.
ADMIN_OR_PROJECT_MEMBER = f'role:admin or ({PROJECT_MEMBER})'
ADMIN_OR_PROJECT_READER = f'role:admin or ({PROJECT_READER})'
ADMIN_OR_PROJECT_READER_GET_IMAGE = (
f'role:admin or ({PROJECT_READER_OR_IMAGE_MEMBER_OR_COMMUNITY_OR_PUBLIC})'
)
ADMIN_OR_PROJECT_MEMBER_DOWNLOAD_IMAGE = (
f'role:admin or ({PROJECT_MEMBER_OR_IMAGE_MEMBER_OR_COMMUNITY_OR_PUBLIC})'
)
rules = [
policy.RuleDefault(name='default', check_str='',
description='Defines the default rule used for '
'policies that historically had an empty '
'policy in the supplied policy.json file.',
deprecated_rule=policy.DeprecatedRule(
name='default',
check_str='role:admin',
deprecated_reason='In order to allow operators to '
'accept the default policies from code by not '
'defining them in the policy file, while still '
'working with old policy files that rely on the '
'``default`` rule for policies that are '
'not specified in the policy file, the ``default`` '
'rule must now be explicitly set to '
'``"role:admin"`` when that is the desired default '
'for unspecified rules.',
deprecated_since='Ussuri')),
policy.RuleDefault(name='context_is_admin', check_str='role:admin',
description='Defines the rule for the is_admin:True '
'check.'),
]
def list_rules():
return rules
|
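A minimal sketch of enforcing one of the composite check strings above through oslo.policy's public `Enforcer` API; the rule name and credential values are illustrative:
from oslo_config import cfg
from oslo_policy import policy as oslo_policy

enforcer = oslo_policy.Enforcer(cfg.CONF)
enforcer.register_default(
    oslo_policy.RuleDefault('demo_rule', ADMIN_OR_PROJECT_MEMBER))

# A plain member acting inside their own project satisfies
# 'role:admin or (role:member and project_id:%(project_id)s)'.
creds = {'roles': ['member'], 'project_id': 'p1'}
target = {'project_id': 'p1'}
print(enforcer.enforce('demo_rule', target, creds))  # True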
Add HttpCache to web application
<?php
namespace Fyuze\Kernel\Application;
use Fyuze\Http\Kernel;
use Fyuze\Http\Request;
use Fyuze\Http\Response;
use Fyuze\Kernel\Fyuze;
use Symfony\Component\HttpKernel\Controller\ControllerResolver;
use Symfony\Component\Routing\Matcher\UrlMatcher;
use Symfony\Component\Routing\RequestContext;
class Web extends Fyuze
{
/**
* @return Response
*/
public function boot($request = null)
{
$request = $this->getRegistry()->make('Fyuze\Http\Request');
$routes = include $this->path . '/routes.php';
$context = new RequestContext();
$matcher = new UrlMatcher($routes, $context);
$resolver = new ControllerResolver();
$kernel = new Kernel($matcher, $resolver);
return $kernel->handle($request);
}
}
|
<?php
namespace Fyuze\Kernel\Application;
use Fyuze\Http\Kernel;
use Fyuze\Http\Request;
use Fyuze\Http\Response;
use Fyuze\Kernel\Fyuze;
use Symfony\Component\HttpKernel\Controller\ControllerResolver;
use Symfony\Component\HttpKernel\HttpCache\HttpCache;
use Symfony\Component\HttpKernel\HttpCache\Store;
use Symfony\Component\Routing\Matcher\UrlMatcher;
use Symfony\Component\Routing\RequestContext;
class Web extends Fyuze
{
/**
* @return Response
*/
public function boot($request = null)
{
$request = $this->getRegistry()->make('Fyuze\Http\Request');
$routes = include $this->path . '/routes.php';
$context = new RequestContext();
$matcher = new UrlMatcher($routes, $context);
$resolver = new ControllerResolver();
$kernel = new Kernel($matcher, $resolver);
$kernel = new HttpCache($kernel, new Store($this->path.'/app/cache'));
return $kernel->handle($request);
}
}
|
Move the v_videocompile repo from yro to edx
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://openedx.atlassian.net/wiki/display/OpenOPS
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
##
# Defaults for veda_ffmpeg role
#
VEDA_FFMPEG_GIT_URL: 'https://github.com/yro/v_videocompile'
# used /opt path to clone the repo and then install ffmpeg using the cloned repo
veda_ffmpeg_root: '/var/tmp/veda_ffmpeg'
VEDA_FFMPEG_VERSION: 'master'
|
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://openedx.atlassian.net/wiki/display/OpenOPS
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
##
# Defaults for veda_ffmpeg role
#
VEDA_FFMPEG_GIT_URL: 'https://github.com/edx/v_videocompile'
# used /opt path to clone the repo and then install ffmpeg using the cloned repo
veda_ffmpeg_root: '/var/tmp/veda_ffmpeg'
VEDA_FFMPEG_VERSION: 'master'
|