| input (string, lengths 0–2.17k) | instruction (string, lengths 18–2.94k) | output (string, lengths 47–3.36k) |
|---|---|---|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_makevers import create_update_dir
import os
def test_mk_dirs(create_update_dir):
    """Test that ensures that the downloads directory is created properly.

    Uses the ``create_update_dir`` fixture to guarantee a clean state;
    ``_reset_update_files`` is expected to (re)create ``Launcher.updatedir``.
    """
    # Directory must not exist before the reset call.
    assert not os.path.isdir(Launcher.updatedir)
    launch = Launcher('MUST_HAVE_SOMETHING', 'urlurlurl')
    launch._reset_update_files()
    # The reset call must have created the downloads directory.
    assert os.path.isdir(Launcher.updatedir)
| Remove Launcher.updatedir after mkdirs test
Should go into fixture later
| from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_makevers import create_update_dir
import os
def test_mk_dirs(create_update_dir):
    """Test that ensures that the downloads directory is created properly.

    Uses the ``create_update_dir`` fixture to guarantee a clean state;
    ``_reset_update_files`` is expected to (re)create ``Launcher.updatedir``.
    """
    # Directory must not exist before the reset call.
    assert not os.path.isdir(Launcher.updatedir)
    launch = Launcher('MUST_HAVE_SOMETHING', 'urlurlurl')
    launch._reset_update_files()
    assert os.path.isdir(Launcher.updatedir)
    # Clean up so later tests see no leftover directory.
    # TODO: should move into the fixture's teardown.
    os.rmdir(Launcher.updatedir)
|
Add new skipUnlessBuiltin function for testing
| # Copyright (C) 2017 Martin Packman <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""Base classes and helpers for testing paramiko."""
import unittest
from paramiko.py3compat import (
builtins,
)
def skipUnlessBuiltin(name):
    """Decorator factory: skip the wrapped test unless builtin *name* exists."""
    builtin_present = getattr(builtins, name, None) is not None
    if builtin_present:
        # Builtin exists: decorate by returning the function unchanged.
        return lambda func: func
    skip = getattr(unittest, "skip", None)
    if skip is not None:
        return skip("No builtin " + repr(name))
    # Python 2.6 pseudo-skip
    return lambda func: None
|
|
# -*- coding: utf-8 -*-
import os, sys
sys.path.insert(0, '..')
PROJECT_ROOT = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test',
'USER': 'test',
'PASSWORD': 'test',
'HOST': 'localhost',
'PORT': '',
}
}
MIDDLEWARE_CLASSES = [
"django.middleware.common.CommonMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
]
TIME_ZONE = 'UTC'
LANGUAGE_CODE = 'en'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'
INSTALLED_APPS = (
'pg_json_fields',
)
#TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
| Use default user/password for testing
| # -*- coding: utf-8 -*-
import os, sys
sys.path.insert(0, '..')
PROJECT_ROOT = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test',
'USER': '',
'PASSWORD': '',
'HOST': 'localhost',
'PORT': '',
}
}
MIDDLEWARE_CLASSES = [
"django.middleware.common.CommonMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
]
TIME_ZONE = 'UTC'
LANGUAGE_CODE = 'en'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'
INSTALLED_APPS = (
'pg_json_fields',
)
#TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
|
# -*- coding: utf-8 -*-
"""
This module contains Service implementation of Archive.org.
http://archive.org
"""
from . import Service, JSONMixin
from six import text_type
from tornado.httpclient import HTTPRequest
##############################################################################
class ArchiveOrg(JSONMixin, Service):
"""
Implementation of Service which is intended to parse Archive.org.
"""
def generate_request(self):
resource = self.url.rstrip("/").rpartition("/")[-1]
return HTTPRequest(
"http://archive.org/metadata/" + resource + "/files/",
use_gzip=True,
headers=dict(Accept="application/json")
)
def parse(self, response):
converted_response = self.convert_response(response)
tracks = {}
required_fields = ("title", "track", "length", "album")
for file_ in converted_response["result"]:
if file_.get("source") != "original":
continue
if not all(field in file_ for field in required_fields):
continue
track = int(file_["track"])
title = text_type(file_["title"])
length = text_type(file_["length"])
if ":" not in length:
length = int(float(length))
length = self.second_to_timestamp(length)
length = self.normalize_track_length(length)
tracks[track] = (title, length)
if not tracks:
raise Exception("Empty list")
return tuple(data for track, data in sorted(tracks.iteritems())) | Put weaker requirements on Archive.org service
| # -*- coding: utf-8 -*-
"""
This module contains Service implementation of Archive.org.
http://archive.org
"""
from . import Service, JSONMixin
from six import text_type
from tornado.httpclient import HTTPRequest
##############################################################################
class ArchiveOrg(JSONMixin, Service):
"""
Implementation of Service which is intended to parse Archive.org.
"""
def generate_request(self):
resource = self.url.rstrip("/").rpartition("/")[-1]
return HTTPRequest(
"http://archive.org/metadata/" + resource + "/files/",
use_gzip=True,
headers=dict(Accept="application/json")
)
def parse(self, response):
converted_response = self.convert_response(response)
tracks = {}
required_fields = ("title", "track", "album")
for file_ in converted_response["result"]:
if file_.get("source") != "original":
continue
if not all(field in file_ for field in required_fields):
continue
track = int(file_["track"])
title = text_type(file_["title"])
length = text_type(file_.get("length", ""))
if length and ":" not in length:
length = int(float(length))
length = self.second_to_timestamp(length)
length = self.normalize_track_length(length)
tracks[track] = (title, length)
if not tracks:
raise Exception("Empty list")
return tuple(data for track, data in sorted(tracks.iteritems())) |
RESOURCE_METHODS = ['GET', 'POST', 'DELETE']
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
schema = {
'name': {
'type': 'string',
'minlength': 3,
'maxlength': 50,
'required': True,
},
'date': {
'type': 'datetime',
},
'reference': {
'type': 'string',
'minlength': 2,
'maxlength': 50,
'required': True,
},
'details': {
'type': 'string',
'minlength': 0,
'maxlength': 300,
'required': False
},
'reporter': {
'type': 'string',
'minlength': 3,
'maxlength': 20,
'required': True,
},
}
event = {
'item_title': 'event',
'additional_lookup': {
'url': 'regex("[\w]+")',
'field': 'name',
},
'cache_control': 'max-age=10, must-revalidate',
'cache_expires': 10,
'resource_methods': ['GET', 'POST'],
'schema': schema
}
DOMAIN = {
'event': event,
}
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_USERNAME = ''
MONGO_PASSWORD = ''
MONGO_DBNAME = 'historia'
| Change name of date to datetime
| RESOURCE_METHODS = ['GET', 'POST', 'DELETE']
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
schema = {
'name': {
'type': 'string',
'minlength': 3,
'maxlength': 50,
'required': True,
},
'datetime': {
'type': 'datetime',
},
'reference': {
'type': 'string',
'minlength': 2,
'maxlength': 50,
'required': True,
},
'details': {
'type': 'string',
'minlength': 0,
'maxlength': 300,
'required': False
},
'reporter': {
'type': 'string',
'minlength': 3,
'maxlength': 20,
'required': True,
},
}
event = {
'item_title': 'event',
'additional_lookup': {
'url': 'regex("[\w]+")',
'field': 'name',
},
'cache_control': 'max-age=10, must-revalidate',
'cache_expires': 10,
'resource_methods': ['GET', 'POST'],
'schema': schema
}
DOMAIN = {
'event': event,
}
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_USERNAME = ''
MONGO_PASSWORD = ''
MONGO_DBNAME = 'historia'
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Session Handling for SQLAlchemy backend
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from nova import flags
FLAGS = flags.FLAGS
_ENGINE = None
_MAKER = None
def get_session(autocommit=True, expire_on_commit=False):
    """Helper method to grab a SQLAlchemy session.

    Lazily creates the module-level engine and session factory on first
    use; subsequent calls reuse them.

    :param autocommit: passed through to ``sessionmaker``
    :param expire_on_commit: passed through to ``sessionmaker``
    :returns: a new session bound to the shared engine
    """
    global _ENGINE
    global _MAKER
    if not _MAKER:
        if not _ENGINE:
            # pool_recycle=3600 recycles pooled connections after one hour,
            # per SQLAlchemy's recommendation, so connections the database
            # server has already timed out are not handed back to callers.
            _ENGINE = create_engine(FLAGS.sql_connection,
                                    pool_recycle=3600,
                                    echo=False)
        _MAKER = sessionmaker(bind=_ENGINE,
                              autocommit=autocommit,
                              expire_on_commit=expire_on_commit)
    session = _MAKER()
    return session
| Add the pool_recycle setting to enable connection pooling features for the sql engine. The setting is hard-coded to 3600 seconds (one hour) per the recommendation provided on sqlalchemy's site | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Session Handling for SQLAlchemy backend
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from nova import flags
FLAGS = flags.FLAGS
_ENGINE = None
_MAKER = None
def get_session(autocommit=True, expire_on_commit=False):
"""Helper method to grab session"""
global _ENGINE
global _MAKER
if not _MAKER:
if not _ENGINE:
_ENGINE = create_engine(FLAGS.sql_connection, pool_recycle=3600, echo=False)
_MAKER = (sessionmaker(bind=_ENGINE,
autocommit=autocommit,
expire_on_commit=expire_on_commit))
session = _MAKER()
return session
|
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from Function import Function
from GlobalVars import *
import re
class Instantiate(Function):
Help = 'Responds to greetings and such'
def GetResponse(self, message):
if message.Type != 'PRIVMSG':
return
match = re.search("^(?P<greeting>(wa+s+|')?so?u+p|hi(ya)?|hey|hello|'?lo|mornin[g']?|greetings|bonjour|salut|howdy|'?yo|o?hai|mojn|hej|dongs|ahoy( hoy)?|salutations|g'?day|hola|bye|night|herrow)( there)?,?[ ]%s([^a-zA-Z0-9_\|`\[\]\^-]|$)" % CurrentNick,
message.MessageString,
re.IGNORECASE)
if match:
return IRCResponse(ResponseType.Say,
'%s %s' % (match.group('greeting'), message.User.Name),
message.ReplyTo) | Add more greetings to the needs-multilining regex | from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from Function import Function
from GlobalVars import *
import re
class Instantiate(Function):
Help = 'Responds to greetings and such'
def GetResponse(self, message):
if message.Type != 'PRIVMSG':
return
match = re.search("^(?P<greeting>(wa+s+|')?so?u+p|hi(ya)?|hey|hello|'?lo|(good |g'?)?((mornin|evenin)[g']?|ni(ght|ni))|greetings|bonjour|salut|howdy|'?yo|o?hai|mojn|hej|dongs|ahoy( hoy)?|salutations|g'?day|hola|bye|herrow)( there)?,?[ ]%s([^a-zA-Z0-9_\|`\[\]\^-]|$)" % CurrentNick,
message.MessageString,
re.IGNORECASE)
if match:
return IRCResponse(ResponseType.Say,
'%s %s' % (match.group('greeting'), message.User.Name),
message.ReplyTo)
|
#!/usr/bin/env python
import os
import distutils.core
import sys
try:
import setuptools
except ImportError:
pass
try:
license = open('LICENSE').read()
except:
license = None
def read(fname):
    """Return the contents of *fname*, resolved relative to this file.

    The file is opened via a context manager so the handle is closed
    promptly instead of leaking until garbage collection.
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
def desc():
    """Return the package long description: README plus changelog if present."""
    info = read('README.rst')
    try:
        return info + '\n\n' + read('doc/changelog.rst')
    except IOError:
        # Changelog missing (e.g. an sdist without docs): README alone.
        return info
distutils.core.setup(
name='sockjs-tornado',
version='1.0.0',
author='Serge S. Koval',
author_email='[email protected]',
packages=['sockjs', 'sockjs.tornado', 'sockjs.tornado.transports'],
namespace_packages=['sockjs'],
scripts=[],
url='http://github.com/mrjoes/sockjs-tornado/',
license=license,
description='SockJS python server implementation on top of Tornado framework',
long_description=desc(),
requires=['tornado'],
install_requires=[
'tornado >= 2.1.1'
]
)
| Add the Python version and license classifiers
Signed-off-by: Tomas Sedovic <[email protected]>
| #!/usr/bin/env python
import os
import distutils.core
import sys
try:
import setuptools
except ImportError:
pass
try:
license = open('LICENSE').read()
except:
license = None
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def desc():
info = read('README.rst')
try:
return info + '\n\n' + read('doc/changelog.rst')
except IOError:
return info
distutils.core.setup(
name='sockjs-tornado',
version='1.0.0',
author='Serge S. Koval',
author_email='[email protected]',
packages=['sockjs', 'sockjs.tornado', 'sockjs.tornado.transports'],
namespace_packages=['sockjs'],
scripts=[],
url='http://github.com/mrjoes/sockjs-tornado/',
license=license,
description='SockJS python server implementation on top of Tornado framework',
long_description=desc(),
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
],
requires=['tornado'],
install_requires=[
'tornado >= 2.1.1'
]
)
|
import os
import subprocess
def load_variables_from_env(prefix="XII_INTEGRATION_"):
    """Collect environment variables whose name starts with *prefix*.

    :param prefix: name prefix to filter on (stripped from result keys)
    :returns: dict mapping the stripped names to their environment values
    """
    return {
        name[len(prefix):]: value
        for name, value in os.environ.items()
        if name.startswith(prefix)
    }
def run_xii(deffile, cmd, variables=None, gargs=None, cargs=None):
    """Run the ``xii`` command line tool against *deffile*.

    :param deffile: path to the definition file
    :param cmd: xii sub-command to run
    :param variables: mapping exported to the child as ``XII_<key>`` env vars
    :param gargs: optional global argument (omitted from argv when None)
    :param cargs: optional command argument (omitted from argv when None)
    :raises RuntimeError: if xii exits with a non-zero status
    """
    # Avoid the shared mutable-default-argument pitfall.
    variables = variables or {}
    xii_env = os.environ.copy()
    for key, value in variables.items():
        print("=> XII_" + key + " defined")
        xii_env["XII_" + key] = value
    # Drop unset optional arguments: Popen requires every argv element to
    # be a string, so None placeholders must not reach it.
    call = [arg for arg in
            ["xii", "--no-parallel", "--deffile", deffile, gargs, cmd, cargs]
            if arg is not None]
    print("calling `{}`".format(" ".join(call)))
    process = subprocess.Popen(call, stdout=subprocess.PIPE, env=xii_env)
    for line in process.stdout:
        print("> " + line.rstrip(os.linesep))
    # returncode is only populated after the child is reaped; without
    # wait() it is still None here and the failure check never fires.
    process.wait()
    if process.returncode != 0:
        raise RuntimeError("running xii failed")
| Make cargs and gargs truly optional
| import os
import subprocess
def load_variables_from_env(prefix="XII_INTEGRATION_"):
    """Collect environment variables whose name starts with *prefix*.

    :param prefix: name prefix to filter on (stripped from result keys)
    :returns: dict mapping the stripped names to their environment values
    """
    length = len(prefix)
    vars = {}
    for var in filter(lambda x: x.startswith(prefix), os.environ):
        vars[var[length:]] = os.environ[var]
    return vars
def run_xii(deffile, cmd, variables=None, gargs=None, cargs=None):
    """Run the ``xii`` command line tool against *deffile*.

    :param deffile: path to the definition file
    :param cmd: xii sub-command to run
    :param variables: mapping exported to the child as ``XII_<key>`` env vars
    :param gargs: unused; retained for call-site compatibility
    :param cargs: unused; retained for call-site compatibility
    :raises RuntimeError: if xii exits with a non-zero status
    """
    # Avoid the shared mutable-default-argument pitfall.
    variables = variables or {}
    xii_env = os.environ.copy()
    for key, value in variables.items():
        print("=> XII_" + key + " defined")
        xii_env["XII_" + key] = value
    call = ["xii", "--no-parallel", "--deffile", deffile, cmd]
    print("calling `{}`".format(" ".join(call)))
    process = subprocess.Popen(call, stdout=subprocess.PIPE, env=xii_env)
    for line in process.stdout:
        print("> " + line.rstrip(os.linesep))
    # returncode is only populated after the child is reaped; without
    # wait() it is still None here and the failure check never fires.
    process.wait()
    if process.returncode != 0:
        raise RuntimeError("running xii failed")
|
#!/usr/bin/env python
# php2python.py - Converts PHP to Python using codegen.py
# Usage: php2python.py < input.php > output.py
import sys
sys.path.append('..')
from phply.phpparse import parser
from phply import pythonast
from ast import Module
from unparse import Unparser
input = sys.stdin
output = sys.stdout
body = [pythonast.from_phpast(ast) for ast in parser.parse(input.read())]
Unparser(body, output)
| Update comment to reflect switch to unparse.py
| #!/usr/bin/env python
# php2python.py - Converts PHP to Python using unparse.py
# Usage: php2python.py < input.php > output.py
import sys
sys.path.append('..')
from phply.phpparse import parser
from phply import pythonast
from ast import Module
from unparse import Unparser
input = sys.stdin
output = sys.stdout
body = [pythonast.from_phpast(ast) for ast in parser.parse(input.read())]
Unparser(body, output)
|
from __future__ import absolute_import
from django.conf.urls import url
from admin.meetings import views
urlpatterns = [
url(r'^$', views.MeetingListView.as_view(), name='list'),
url(r'^create/$', views.MeetingCreateFormView.as_view(), name='create'),
url(r'^(?P<endpoint>[a-zA-Z0-9]+)/$', views.MeetingFormView.as_view(),
name='detail'),
]
| Allow underscores in meeting endpoints
See https://www.flowdock.com/app/cos/osf-dev/threads/oX7kE29lhFd8quQWt-hbFpyVriH
| from __future__ import absolute_import
from django.conf.urls import url
from admin.meetings import views
urlpatterns = [
url(r'^$', views.MeetingListView.as_view(), name='list'),
url(r'^create/$', views.MeetingCreateFormView.as_view(), name='create'),
url(r'^(?P<endpoint>[a-zA-Z0-9_]+)/$', views.MeetingFormView.as_view(),
name='detail'),
]
|
from .atflex import AtfLexer
from .atfyacc import AtfParser
from mako.template import Template
class AtfFile(object):
    """Parses ATF *content* into a text object and can serialize it back."""

    template = Template("${text.serialize()}")

    def __init__(self, content):
        self.content = content
        # Use endswith rather than content[-1] so empty input does not
        # raise IndexError; the parser requires a trailing newline.
        if not content.endswith('\n'):
            content += "\n"
        lexer = AtfLexer().lexer
        parser = AtfParser().parser
        self.text = parser.parse(content, lexer=lexer)

    def __str__(self):
        return AtfFile.template.render_unicode(**vars(self))

    def serialize(self):
        """Render the parsed text back to its ATF string form."""
        return AtfFile.template.render_unicode(**vars(self))
def _debug_lex_and_yac_file(file, debug=0):
    """Development helper: lex *file*, print every token, then parse it.

    :param file: path to a UTF-8 (BOM tolerated) ATF file
    :param debug: passed to ``AtfLexer`` to control lexer debug output
    """
    import codecs
    text = codecs.open(file, encoding='utf-8-sig').read()
    from pyoracc.atf.atffile import AtfLexer
    lexer = AtfLexer(debug=debug).lexer
    lexer.input(text)
    for tok in lexer:
        print(tok)
    print("Lexed file")
    # Fresh lexer: the first one was exhausted by the token loop above.
    lexer = AtfLexer().lexer
    parser = AtfParser().parser
    parser.parse(text, lexer=lexer)
    print("Parsed file")
| Add skip invalid to debug
| from .atflex import AtfLexer
from .atfyacc import AtfParser
from mako.template import Template
class AtfFile(object):
template = Template("${text.serialize()}")
def __init__(self, content):
self.content = content
if content[-1] != '\n':
content += "\n"
lexer = AtfLexer().lexer
parser = AtfParser().parser
self.text = parser.parse(content, lexer=lexer)
def __str__(self):
return AtfFile.template.render_unicode(**vars(self))
def serialize(self):
return AtfFile.template.render_unicode(**vars(self))
def _debug_lex_and_yac_file(file, debug=0, skipinvalid=False):
import codecs
text = codecs.open(file, encoding='utf-8-sig').read()
from pyoracc.atf.atffile import AtfLexer
lexer = AtfLexer(debug=debug, skipinvalid=skipinvalid).lexer
lexer.input(text)
for tok in lexer:
print(tok)
print("Lexed file")
lexer = AtfLexer().lexer
parser = AtfParser().parser
parser.parse(text, lexer=lexer)
print("Parsed file")
|
class Scope(object):
    """Lexical scope tracking declarations, global names and a dotted prefix."""

    tmp_index = 0

    def __init__(self, parent=None):
        self.parent = parent
        self.prefix = []
        self.declarations = {}
        self.globals = set()
        self.inherited = True

    def prefixed(self, name):
        """Return *name* joined onto this scope's dotted prefix."""
        return '.'.join(self.prefix + [name])

    def declare(self, name, var=True):
        """Record *name* as declared in this scope."""
        self.declarations[name] = var

    def get_scope(self, name, inherit=False):
        """Return the nearest scope declaring *name*, or None if unknown."""
        declared_here = name in self.declarations
        if declared_here and (self.inherited or not inherit):
            return self
        if self.parent is None:
            return None
        return self.parent.get_scope(name, True)

    def declare_global(self, name):
        """Mark *name* as global in this scope."""
        self.globals.add(name)

    def is_global(self, name):
        """Return True if *name* was declared global here."""
        return name in self.globals

    def get_global_scope(self):
        """Walk up the parent chain to the root scope."""
        scope = self
        while scope.parent:
            scope = scope.parent
        return scope

    @classmethod
    def alloc_temp(cls):
        """Return a fresh, unique temporary variable name."""
        cls.tmp_index += 1
        return '__jpx_tmp_%i' % cls.tmp_index
| Remove temp var allocation code.
class Scope(object):
    """Lexical scope tracking declarations, global names and a dotted prefix."""
    def __init__(self, parent=None):
        # parent: enclosing scope, or None for the root/global scope
        self.parent = parent
        self.prefix = []
        self.declarations = {}
        self.globals = set()
        self.inherited = True
    def prefixed(self, name):
        """Return *name* joined onto this scope's dotted prefix."""
        return '.'.join(self.prefix + [name])
    def declare(self, name, var=True):
        """Record *name* as declared in this scope."""
        self.declarations[name] = var
    def get_scope(self, name, inherit=False):
        """Return the nearest scope that declares *name*, or None.

        Parent lookups always pass inherit=True, so a non-inherited scope
        only matches on a direct (inherit=False) query.
        """
        if name in self.declarations and (not inherit or self.inherited):
            return self
        elif self.parent is not None:
            return self.parent.get_scope(name, True)
        else:
            return None
    def declare_global(self, name):
        """Mark *name* as global in this scope."""
        self.globals.add(name)
    def is_global(self, name):
        """Return True if *name* was declared global here."""
        return name in self.globals
    def get_global_scope(self):
        """Walk up the parent chain to the root scope."""
        if self.parent:
            return self.parent.get_global_scope()
        else:
            return self
|
from django.conf.urls import patterns, url
# for voting
from voting.views import vote_on_object
from bookmarks.models import Bookmark
urlpatterns = patterns('',
url(r'^$', 'bookmarks.views.bookmarks', name="all_bookmarks"),
url(r'^your_bookmarks/$', 'bookmarks.views.your_bookmarks', name="your_bookmarks"),
url(r'^add/$', 'bookmarks.views.add', name="add_bookmark"),
url(r'^(\d+)/delete/$', 'bookmarks.views.delete', name="delete_bookmark_instance"),
# for voting
(r'^(?P<object_id>\d+)/(?P<direction>up|down|clear)vote/?$',
vote_on_object, dict(
model=Bookmark,
template_object_name='bookmark',
template_name='kb/link_confirm_vote.html',
allow_xmlhttprequest=True)),
)
| Disable csrf checks for voting
| from django.conf.urls import patterns, url
from django.views.decorators.csrf import csrf_exempt
# for voting
from voting.views import vote_on_object
from bookmarks.models import Bookmark
urlpatterns = patterns('',
url(r'^$', 'bookmarks.views.bookmarks', name="all_bookmarks"),
url(r'^your_bookmarks/$', 'bookmarks.views.your_bookmarks', name="your_bookmarks"),
url(r'^add/$', 'bookmarks.views.add', name="add_bookmark"),
url(r'^(\d+)/delete/$', 'bookmarks.views.delete', name="delete_bookmark_instance"),
# for voting
(r'^(?P<object_id>\d+)/(?P<direction>up|down|clear)vote/?$',
csrf_exempt(vote_on_object), dict(
model=Bookmark,
template_object_name='bookmark',
template_name='kb/link_confirm_vote.html',
allow_xmlhttprequest=True)),
)
|
from poliastro.bodies import (
Earth,
Jupiter,
Mars,
Mercury,
Neptune,
Saturn,
Uranus,
Venus,
)
from poliastro.plotting.core import OrbitPlotter2D, OrbitPlotter3D
from poliastro.twobody import Orbit
def plot_solar_system(outer=True, epoch=None, use_3d=False):
"""
Plots the whole solar system in one single call.
.. versionadded:: 0.9.0
Parameters
------------
outer : bool, optional
Whether to print the outer Solar System, default to True.
epoch: ~astropy.time.Time, optional
Epoch value of the plot, default to J2000.
"""
bodies = [Mercury, Venus, Earth, Mars]
if outer:
bodies.extend([Jupiter, Saturn, Uranus, Neptune])
if use_3d:
op = OrbitPlotter3D()
else:
op = OrbitPlotter2D()
for body in bodies:
orb = Orbit.from_body_ephem(body, epoch)
op.plot(orb, label=str(body))
# Sets frame to the orbit of the Earth by default
# TODO: Wait until https://github.com/poliastro/poliastro/issues/316
# op.set_frame(*Orbit.from_body_ephem(Earth, epoch).pqw())
return op
| Set frame only when using 2D
| from typing import Union
from poliastro.bodies import (
Earth,
Jupiter,
Mars,
Mercury,
Neptune,
Saturn,
Uranus,
Venus,
)
from poliastro.plotting.core import OrbitPlotter2D, OrbitPlotter3D
from poliastro.twobody import Orbit
def plot_solar_system(outer=True, epoch=None, use_3d=False):
"""
Plots the whole solar system in one single call.
.. versionadded:: 0.9.0
Parameters
------------
outer : bool, optional
Whether to print the outer Solar System, default to True.
epoch : ~astropy.time.Time, optional
Epoch value of the plot, default to J2000.
use_3d : bool, optional
Produce 3D plot, default to False.
"""
bodies = [Mercury, Venus, Earth, Mars]
if outer:
bodies.extend([Jupiter, Saturn, Uranus, Neptune])
if use_3d:
op = OrbitPlotter3D() # type: Union[OrbitPlotter3D, OrbitPlotter2D]
else:
op = OrbitPlotter2D()
op.set_frame(*Orbit.from_body_ephem(Earth, epoch).pqw()) # type: ignore
for body in bodies:
orb = Orbit.from_body_ephem(body, epoch)
op.plot(orb, label=str(body))
return op
|
from django.db import models
from experiment_session.models import ExperimentSession
from django.core.validators import MinValueValidator
class Experiment(models.Model):
LIGHTOFF_FIXED = 'fixed'
LIGHTOFF_WAITING = 'waiting'
_LIGHTOFF_CHOICES = (
(LIGHTOFF_FIXED, 'Fixed'),
(LIGHTOFF_WAITING, 'Waiting')
)
AUDIO_NONE = 'none'
AUDIO_BEEP = 'beep'
_AUDIO_CHOICES = (
(AUDIO_NONE, 'None'),
(AUDIO_BEEP, 'Audible beep on error')
)
name = models.CharField(unique=True, max_length=255)
lightoffmode = models.CharField(
choices=_LIGHTOFF_CHOICES,
max_length=30
)
lightofftimeout = models.IntegerField(validators=(MinValueValidator(0),))
audiomode = models.CharField(
choices=_AUDIO_CHOICES,
max_length=30
)
repeatscount = models.IntegerField(
validators=(
MinValueValidator(1),
)
)
createdon = models.DateTimeField(auto_now_add=True, editable=False)
traininglength = models.IntegerField(validators=(MinValueValidator(0),))
instructions = models.CharField(max_length=10000, default='')
def __str__(self):
return self.name
| Allow empty strings as instructions
| from django.db import models
from experiment_session.models import ExperimentSession
from django.core.validators import MinValueValidator
class Experiment(models.Model):
LIGHTOFF_FIXED = 'fixed'
LIGHTOFF_WAITING = 'waiting'
_LIGHTOFF_CHOICES = (
(LIGHTOFF_FIXED, 'Fixed'),
(LIGHTOFF_WAITING, 'Waiting')
)
AUDIO_NONE = 'none'
AUDIO_BEEP = 'beep'
_AUDIO_CHOICES = (
(AUDIO_NONE, 'None'),
(AUDIO_BEEP, 'Audible beep on error')
)
name = models.CharField(unique=True, max_length=255)
lightoffmode = models.CharField(
choices=_LIGHTOFF_CHOICES,
max_length=30
)
lightofftimeout = models.IntegerField(validators=(MinValueValidator(0),))
audiomode = models.CharField(
choices=_AUDIO_CHOICES,
max_length=30
)
repeatscount = models.IntegerField(
validators=(
MinValueValidator(1),
)
)
createdon = models.DateTimeField(auto_now_add=True, editable=False)
traininglength = models.IntegerField(validators=(MinValueValidator(0),))
instructions = models.CharField(max_length=10000, blank=True)
def __str__(self):
return self.name
|
class Database:
    """Thin wrapper around a DB-API connection with query/insert helpers."""

    def __init__(self, db):
        self.db = db
        self.cursor = db.cursor()

    def disconnect(self):
        """Close the cursor, then the underlying connection."""
        self.cursor.close()
        self.db.close()

    def query(self, sql):
        """Execute *sql* and return every fetched row."""
        self.cursor.execute(sql)
        rows = self.cursor.fetchall()
        return rows

    def insert(self, sql):
        """Execute *sql* and commit the transaction."""
        self.cursor.execute(sql)
        self.db.commit()
| Reconnect if the connection times out.
| import pymysql
class Database:
def __init__(self, db):
self.db = db
self.cursor = db.cursor()
def disconnect(self):
self.cursor.close()
self.db.close()
def query(self, sql):
try:
self.cursor.execute(sql)
return self.cursor.fetchall()
except pymysql.OperationalError:
self.db.ping()
self.cursor.execute(sql)
return self.cursor.fetchall()
def insert(self, sql):
try:
self.cursor.execute(sql)
self.db.commit()
except pymysql.OperationalError:
self.db.ping()
self.cursor.execute(sql)
self.db.commit()
|
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2013, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
from pyramid.events import NewRequest
def add_cors_headers(request, response):
    """Response callback that appends CORS headers from registry settings.

    Matches the request's ``Origin`` header — not ``request.host``, which is
    only the host part of the request URL — against the configured
    allow-list, so cross-origin callers get their own origin echoed back.
    """
    settings = request.registry.settings
    acac = settings['cors.access_control_allow_credentials']
    acao = settings['cors.access_control_allow_origin'].split()
    acah = settings['cors.access_control_allow_headers']
    acam = settings['cors.access_control_allow_methods']
    if acac:
        response.headerlist.append(
            ('Access-Control-Allow-Credentials', acac))
    if acao:
        origin = request.headers.get('Origin')
        if origin in acao:
            response.headerlist.append(
                ('Access-Control-Allow-Origin', origin))
        else:
            # Unknown or absent origin: fall back to the first configured value.
            response.headerlist.append(
                ('Access-Control-Allow-Origin', acao[0]))
    if acah:
        response.headerlist.append(
            ('Access-Control-Allow-Headers', acah))
    if acam:
        response.headerlist.append(
            ('Access-Control-Allow-Methods', acam))
def new_request_subscriber(event):
    """NewRequest subscriber: attach the CORS header callback to every response."""
    request = event.request
    request.add_response_callback(add_cors_headers)
def main(config):
    """Pyramid configuration hook: register the CORS subscriber."""
    config.add_subscriber(new_request_subscriber, NewRequest)
| Fix Access-Control-Allow-Origin to return the request origin
request.host is the host part of the request url. For example, if
webview is trying to access http://localhost:8080/users/profile,
request. It's the Origin field in the headers that we should be
matching.
| # -*- coding: utf-8 -*-
# ###
# Copyright (c) 2013, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
from pyramid.events import NewRequest
def add_cors_headers(request, response):
settings = request.registry.settings
acac = settings['cors.access_control_allow_credentials']
acao = settings['cors.access_control_allow_origin'].split()
acah = settings['cors.access_control_allow_headers']
acam = settings['cors.access_control_allow_methods']
if acac:
response.headerlist.append(
('Access-Control-Allow-Credentials', acac))
if acao:
if request.headers.get('Origin') in acao:
response.headerlist.append(
('Access-Control-Allow-Origin', request.headers.get('Origin')))
else:
response.headerlist.append(
('Access-Control-Allow-Origin', acao[0]))
if acah:
response.headerlist.append(
('Access-Control-Allow-Headers', acah))
if acam:
response.headerlist.append(
('Access-Control-Allow-Methods', acam))
def new_request_subscriber(event):
request = event.request
request.add_response_callback(add_cors_headers)
def main(config):
config.add_subscriber(new_request_subscriber, NewRequest)
|
import sys
import time
from infrastructure.models import Server
from jobs.models import Job
TIMEOUT = 600
def is_reachable(server):
"""
:type server: Server
"""
instance_id = server.ec2serverinfo.instance_id
ec2_region = server.ec2serverinfo.ec2_region
rh = server.resource_handler.cast()
rh.connect_ec2(ec2_region)
wc = rh.resource_technology.work_class
instance = wc.get_instance(instance_id)
conn = instance.connection
status = conn.get_all_instance_status(instance_id)
return True if status[0].instance_status.details[u'reachability'] == u'passed' else False
def run(job, logger=None):
assert isinstance(job, Job)
assert job.type == u'provision'
server = job.server_set.first()
timeout = time.time() + TIMEOUT
while True:
if is_reachable(server):
job.set_progress("EC2 instance is reachable.")
break
elif time.time() > timeout:
job.set_progress("Waited {} seconds. Continuing...".format(TIMEOUT))
break
else:
time.sleep(2)
return "", "", ""
if __name__ == '__main__':
if len(sys.argv) != 2:
print ' Usage: {} <job_id>'.format(sys.argv[0])
sys.exit(1)
print run(Job.objects.get(id=sys.argv[1]))
| Clean up poll for init complete script
| import time
from jobs.models import Job
TIMEOUT = 600
def is_reachable(server):
instance_id = server.ec2serverinfo.instance_id
ec2_region = server.ec2serverinfo.ec2_region
rh = server.resource_handler.cast()
rh.connect_ec2(ec2_region)
wc = rh.resource_technology.work_class
instance = wc.get_instance(instance_id)
status = instance.connection.get_all_instance_status(instance_id)
return True if status[0].instance_status.details[u'reachability'] == u'passed' else False
def run(job, logger=None, **kwargs):
assert isinstance(job, Job) and job.type == u'provision'
server = job.server_set.first()
timeout = time.time() + TIMEOUT
while True:
if is_reachable(server):
job.set_progress("EC2 instance is reachable.")
break
elif time.time() > timeout:
job.set_progress("Waited {} seconds. Continuing...".format(TIMEOUT))
break
else:
time.sleep(2)
return "", "", ""
|
#!/usr/bin/env python
# encoding: utf-8
import json
import re
from .resource import Resource
class Response(Resource):
"""Represents an HTTP response that is hopefully a HAL document."""
def __init__(self, response):
"""Pass it a Requests response object.
:response: A response object from the Requests library.
"""
self._response = response
self._hal_regex = re.compile(r"application/hal\+json")
self._parsed_hal = None
def is_hal(self):
"""Test if a response was a HAL document or not.
:returns: True or False
"""
return bool(self._hal_regex.match(self._response.headers['content-type']))
@property
def _hal(self):
"""Returns the parsed HAL body of the response
:returns: A parsed HAL body (dicts and lists) or an empty dictionary.
"""
if self._parsed_hal != None: return self._parsed_hal
self._parsed_hal = self._parse_hal()
return self._parsed_hal
def _parse_hal(self):
"""Parses the JSON body of the response.
:returns: A parsed JSON body (dicts and lists) or an empty dictionary.
"""
if not self.is_hal(): return {}
try:
return json.loads(self._response.content)
except ValueError, e:
return {}
| Move _hal_regex to class scope.
| #!/usr/bin/env python
# encoding: utf-8
import json
import re
from .resource import Resource
class Response(Resource):
"""Represents an HTTP response that is hopefully a HAL document."""
_hal_regex = re.compile(r"application/hal\+json")
def __init__(self, response):
"""Pass it a Requests response object.
:response: A response object from the Requests library.
"""
self._response = response
self._parsed_hal = None
def is_hal(self):
"""Test if a response was a HAL document or not.
:returns: True or False
"""
return bool(self._hal_regex.match(self._response.headers['content-type']))
@property
def _hal(self):
"""Returns the parsed HAL body of the response
:returns: A parsed HAL body (dicts and lists) or an empty dictionary.
"""
if self._parsed_hal != None: return self._parsed_hal
self._parsed_hal = self._parse_hal()
return self._parsed_hal
def _parse_hal(self):
"""Parses the JSON body of the response.
:returns: A parsed JSON body (dicts and lists) or an empty dictionary.
"""
if not self.is_hal(): return {}
try:
return json.loads(self._response.content)
except ValueError, e:
return {}
|
import logging, urllib2
from lighter.util import merge, build_request
class HipChat(object):
def __init__(self, url, token):
self._url = url or 'https://api.hipchat.com'
self._token = token
self_rooms = []
self._sender = 'Lighter'
self._message_attribs = {
'from': 'Lighter',
'color': 'green',
'notify': True,
'message_format': 'html'
}
def rooms(self, ids):
self._rooms = ids
return self
def notify(self, message):
for room in self._rooms:
self._call('/v2/room/%s/notification' % room, merge({'message': message}, self._message_attribs))
def _call(self, endpoint, data):
if self._url is None or self._token is None:
logging.debug('HipChat is not enabled')
return
try:
url = self._url.rstrip('/') + '/' + endpoint + '?auth_token=' + self._token
logging.debug('Calling HipChat endpoint %s', endpoint)
response = urllib2.urlopen(build_request(url, data, {}, 'POST'))
content = response.read()
except urllib2.URLError, e:
logging.warn(str(e))
return {}
| Remove extra label for user
| import logging, urllib2
from lighter.util import merge, build_request
class HipChat(object):
def __init__(self, url, token):
self._url = url or 'https://api.hipchat.com'
self._token = token
self_rooms = []
self._sender = 'Lighter'
self._message_attribs = {
'color': 'green',
'notify': True,
'message_format': 'html'
}
def rooms(self, ids):
self._rooms = ids
return self
def notify(self, message):
for room in self._rooms:
self._call('/v2/room/%s/notification' % room, merge({'message': message}, self._message_attribs))
def _call(self, endpoint, data):
if self._url is None or self._token is None:
logging.debug('HipChat is not enabled')
return
try:
url = self._url.rstrip('/') + '/' + endpoint + '?auth_token=' + self._token
logging.debug('Calling HipChat endpoint %s', endpoint)
response = urllib2.urlopen(build_request(url, data, {}, 'POST'))
content = response.read()
except urllib2.URLError, e:
logging.warn(str(e))
return {}
|
import urlparse
from django.test import TestCase, override_settings
from mock import patch, Mock
from opendebates.context_processors import global_vars
from opendebates.tests.factories import SubmissionFactory
class NumberOfVotesTest(TestCase):
def test_number_of_votes(self):
mock_request = Mock()
with patch('opendebates.utils.cache') as mock_cache:
mock_cache.get.return_value = 2
context = global_vars(mock_request)
self.assertEqual(2, int(context['NUMBER_OF_VOTES']))
class ThemeTests(TestCase):
def setUp(self):
self.idea = SubmissionFactory()
@override_settings(SITE_THEME={'HASHTAG': 'TestHashtag'})
def test_email_url(self):
email_url = self.idea.email_url()
fields = urlparse.parse_qs(urlparse.urlparse(email_url).query)
self.assertTrue('subject' in fields, fields)
self.assertTrue('#TestHashtag' in fields['subject'][0], fields['subject'][0])
| Fix test_email_url() after changes to email templating for sharing emails
| import urlparse
from django.test import TestCase, override_settings
from mock import patch, Mock
from opendebates.context_processors import global_vars
from opendebates.tests.factories import SubmissionFactory
class NumberOfVotesTest(TestCase):
def test_number_of_votes(self):
mock_request = Mock()
with patch('opendebates.utils.cache') as mock_cache:
mock_cache.get.return_value = 2
context = global_vars(mock_request)
self.assertEqual(2, int(context['NUMBER_OF_VOTES']))
class ThemeTests(TestCase):
def setUp(self):
self.idea = SubmissionFactory()
@override_settings(SITE_THEME={
'EMAIL_SUBJECT': 'THE EMAIL SUBJECT',
'EMAIL_BODY': 'THE EMAIL BODY\nAND SECOND LINE',
})
def test_email_url(self):
email_url = self.idea.email_url()
fields = urlparse.parse_qs(urlparse.urlparse(email_url).query)
self.assertTrue('subject' in fields, fields)
self.assertEqual('THE EMAIL SUBJECT', fields['subject'][0], fields['subject'][0])
self.assertEqual('THE EMAIL BODY\nAND SECOND LINE', fields['body'][0], fields['body'][0])
|
"""
Public model API
"""
from .article import search_for_article
from .article import get_available_articles
from .article import read_article
from .article import save_article
from .article import delete_article
from .article import branch_article
from .article import branch_or_save_article
from .article import get_articles_for_author
from .article import get_public_articles_for_author
from .article import save_article_meta_data
from .article import find_article_by_title
from .article import change_article_stack
from .file import read_file
from .file import read_redirects
from .file import update_article_listing
from .file import published_articles
from .file import in_review_articles
from .file import draft_articles
from .user import find_user
from .email_list import add_subscriber
from .image import save_image
from .lib import to_json
| Remove some functions from exported model API that are not used outside model layer
- Just some refactoring to trim down the number of things exported that aren't
necessary at this time.
| """
Public model API
"""
from .article import search_for_article
from .article import get_available_articles
from .article import read_article
from .article import save_article
from .article import delete_article
from .article import branch_article
from .article import branch_or_save_article
from .article import get_articles_for_author
from .article import get_public_articles_for_author
from .article import find_article_by_title
from .article import change_article_stack
from .file import read_file
from .file import read_redirects
from .file import update_article_listing
from .user import find_user
from .email_list import add_subscriber
from .image import save_image
from .lib import to_json
|
import fnmatch
import re
from urllib.parse import urlparse
# Hardcoded URL patterns where client is assumed to be embedded.
#
# Only the hostname and path are included in the pattern. The path must be
# specified; use "example.com/*" to match all URLs on a particular domain.
#
# Patterns are shell-style wildcards ('*' matches any number of chars, '?'
# matches a single char).
PATTERNS = [
"h.readthedocs.io/*",
"web.hypothes.is/blog/*",
]
COMPILED_PATTERNS = [re.compile(fnmatch.translate(pat)) for pat in PATTERNS]
def url_embeds_client(url):
"""
Test whether ``url`` is known to embed the client.
This currently just tests the URL against the pattern list ``PATTERNS``.
Only the hostname and path of the URL are tested. Returns false for non-HTTP
URLs.
:return: True if the URL matches a pattern.
"""
parsed_url = urlparse(url)
if not parsed_url.scheme.startswith("http"):
return False
path = parsed_url.path
if not path:
path = "/"
netloc_and_path = parsed_url.netloc + path
for pat in COMPILED_PATTERNS:
if pat.fullmatch(netloc_and_path):
return True
return False
| Add APA websites to URL patterns where client is known to be embedded.
URL patterns provided by Kadidra McCloud at APA.
Fixes https://github.com/hypothesis/product-backlog/issues/814
| import fnmatch
import re
from urllib.parse import urlparse
# Hardcoded URL patterns where client is assumed to be embedded.
#
# Only the hostname and path are included in the pattern. The path must be
# specified; use "example.com/*" to match all URLs on a particular domain.
#
# Patterns are shell-style wildcards ('*' matches any number of chars, '?'
# matches a single char).
PATTERNS = [
# Hypothesis websites.
"h.readthedocs.io/*",
"web.hypothes.is/blog/*",
# Publisher partners:
# American Psychological Organization.
"psycnet.apa.org/fulltext/*",
"awspntest.apa.org/fulltext/*",
]
COMPILED_PATTERNS = [re.compile(fnmatch.translate(pat)) for pat in PATTERNS]
def url_embeds_client(url):
"""
Test whether ``url`` is known to embed the client.
This currently just tests the URL against the pattern list ``PATTERNS``.
Only the hostname and path of the URL are tested. Returns false for non-HTTP
URLs.
:return: True if the URL matches a pattern.
"""
parsed_url = urlparse(url)
if not parsed_url.scheme.startswith("http"):
return False
path = parsed_url.path
if not path:
path = "/"
netloc_and_path = parsed_url.netloc + path
for pat in COMPILED_PATTERNS:
if pat.fullmatch(netloc_and_path):
return True
return False
|
from django import template
register = template.Library()
NO_PLACE_ORGS = ('parliament', 'national-assembly', )
MEMBER_ORGS = ('parliament', 'national-assembly', )
@register.assignment_tag()
def should_display_place(organisation):
return organisation.slug not in NO_PLACE_ORGS
@register.assignment_tag()
def should_display_position(organisation, position_title):
should_display = True
if organisation.slug in MEMBER_ORGS and unicode(position_title) in (u'Member',):
should_display = False
if 'ncop' == organisation.slug and unicode(position_title) in (u'Delegate',):
should_display = False
return should_display
| [ZA] Fix display of people on constituency office page
This template tag was being called without an organisation, so in
production it was just silently failing, but in development it was
raising an exception.
This adds an extra check so that if there is no organisation then we
just short circuit and return `True`.
| from django import template
register = template.Library()
NO_PLACE_ORGS = ('parliament', 'national-assembly', )
MEMBER_ORGS = ('parliament', 'national-assembly', )
@register.assignment_tag()
def should_display_place(organisation):
if not organisation:
return True
return organisation.slug not in NO_PLACE_ORGS
@register.assignment_tag()
def should_display_position(organisation, position_title):
should_display = True
if organisation.slug in MEMBER_ORGS and unicode(position_title) in (u'Member',):
should_display = False
if 'ncop' == organisation.slug and unicode(position_title) in (u'Delegate',):
should_display = False
return should_display
|
#!/bin/env python3
# Prosite regular expressions matcher
# Copyright (c) 2014 Tomasz Truszkowski
# All rights reserved.
import prosite_matcher
if __name__ == '__main__':
print("\n Hi, this is Prosite Matcher! \n")
sequence = input("Sequence: ")
regex = input("Regular expression: ")
prositeMatcher = prosite_matcher.PrositeMatcher()
prositeMatcher.compile(regex)
matches, ranges = prositeMatcher.get_matches(sequence)
print("Found patterns: ", end="")
if (len(matches) > 0):
print(sequence[ 0 : ranges[0][0] ], end="")
for i in range(0, len(matches)):
print("\033[91m", end="")
print(sequence[ ranges[i][0] : ranges[i][1] ], end="")
print("\033[0m", end="")
if (i < len(matches) - 1):
print(sequence[ ranges[i][1] : ranges[i + 1][0] ], end="")
print(sequence[ ranges[len(ranges) - 1][1] : len(sequence)])
else:
print(sequence)
print("")
for elem in list(zip(matches, ranges)):
print(elem[0], end=" ")
print(elem[1])
print("")
| Add check for empty sequence or regex.
| #!/bin/env python3
# Prosite regular expressions matcher
# Copyright (c) 2014 Tomasz Truszkowski
# All rights reserved.
import prosite_matcher
if __name__ == '__main__':
print("\n Hi, this is Prosite Matcher! \n")
sequence = input("Sequence: ")
regex = input("Regular expression: ")
if sequence != None and sequence != "" and regex != None and regex != "":
prositeMatcher = prosite_matcher.PrositeMatcher()
prositeMatcher.compile(regex)
matches, ranges = prositeMatcher.get_matches(sequence)
print("Found patterns: ", end="")
if (len(matches) > 0):
print(sequence[ 0 : ranges[0][0] ], end="")
for i in range(0, len(matches)):
print("\033[91m", end="")
print(sequence[ ranges[i][0] : ranges[i][1] ], end="")
print("\033[0m", end="")
if (i < len(matches) - 1):
print(sequence[ ranges[i][1] : ranges[i + 1][0] ], end="")
print(sequence[ ranges[len(ranges) - 1][1] : len(sequence)])
else:
print(sequence)
print("")
for elem in list(zip(matches, ranges)):
print(elem[0], end=" ")
print(elem[1])
print("")
else:
print("Sequence and regular expression can't be empty.") |
# A simple demo for working with SparkSQL and Tweets
from pyspark import SparkContext, SparkConf
from pyspark.sql import HiveContext, Row, IntegerType
import json
import sys
if __name__ == "__main__":
inputFile = sys.argv[1]
conf = SparkConf().setAppName("SparkSQLTwitter")
sc = SparkContext()
hiveCtx = HiveContext(sc)
print "Loading tweets from " + inputFile
input = hiveCtx.jsonFile(inputFile)
input.registerTempTable("tweets")
topTweets = hiveCtx.sql("SELECT text, retweetCount FROM tweets ORDER BY retweetCount LIMIT 10")
print topTweets.collect()
topTweetText = topTweets.map(lambda row : row.text)
print topTweetText.collect()
# Make a happy person row
happyPeopleRDD = sc.parallelize([Row(name="holden", favouriteBeverage="coffee")])
happyPeopleSchemaRDD = hiveCtx.inferSchema(happyPeopleRDD)
happyPeopleSchemaRDD.registerTempTable("happy_people")
# Make a UDF to tell us how long some text is
hiveCtx.registerFunction("strLenPython", lambda x: len(x), IntegerType())
lengthSchemaRDD = hiveCtx.sql("SELECT strLenPython('text') FROM tweets LIMIT 10")
print lengthSchemaRDD.collect()
sc.stop()
| Fix IntegerType import for Spark SQL
| # A simple demo for working with SparkSQL and Tweets
from pyspark import SparkContext, SparkConf
from pyspark.sql import HiveContext, Row
from pyspark.sql.types import IntegerType
import json
import sys
if __name__ == "__main__":
inputFile = sys.argv[1]
conf = SparkConf().setAppName("SparkSQLTwitter")
sc = SparkContext()
hiveCtx = HiveContext(sc)
print "Loading tweets from " + inputFile
input = hiveCtx.jsonFile(inputFile)
input.registerTempTable("tweets")
topTweets = hiveCtx.sql("SELECT text, retweetCount FROM tweets ORDER BY retweetCount LIMIT 10")
print topTweets.collect()
topTweetText = topTweets.map(lambda row : row.text)
print topTweetText.collect()
# Make a happy person row
happyPeopleRDD = sc.parallelize([Row(name="holden", favouriteBeverage="coffee")])
happyPeopleSchemaRDD = hiveCtx.inferSchema(happyPeopleRDD)
happyPeopleSchemaRDD.registerTempTable("happy_people")
# Make a UDF to tell us how long some text is
hiveCtx.registerFunction("strLenPython", lambda x: len(x), IntegerType())
lengthSchemaRDD = hiveCtx.sql("SELECT strLenPython('text') FROM tweets LIMIT 10")
print lengthSchemaRDD.collect()
sc.stop()
|
from django.test.client import Client
from pytest_django.client import RequestFactory
pytest_plugins = ['pytester']
def test_params(testdir):
testdir.makeconftest("""
import os, sys
import pytest_django as plugin
sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(plugin.__file__), '../')))
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
pytest_plugins = ['django']
""")
p = testdir.makepyfile("""
import py
@py.test.params([dict(arg1=1, arg2=1), dict(arg1=1, arg2=2)])
def test_myfunc(arg1, arg2):
assert arg1 == arg2
""")
result = testdir.runpytest("-v", p)
assert result.stdout.fnmatch_lines([
"*test_myfunc*0*PASS*",
"*test_myfunc*1*FAIL*",
"*1 failed, 1 passed*"
])
def test_client(client):
assert isinstance(client, Client)
def test_rf(rf):
assert isinstance(rf, RequestFactory)
| Disable params test for now
| from django.test.client import Client
from pytest_django.client import RequestFactory
import py
pytest_plugins = ['pytester']
def test_params(testdir):
# Setting up the path isn't working - plugin.__file__ points to the wrong place
return
testdir.makeconftest("""
import os, sys
import pytest_django as plugin
sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(plugin.__file__), '../')))
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
pytest_plugins = ['django']
""")
p = testdir.makepyfile("""
import py
@py.test.params([dict(arg1=1, arg2=1), dict(arg1=1, arg2=2)])
def test_myfunc(arg1, arg2):
assert arg1 == arg2
""")
result = testdir.runpytest("-v", p)
assert result.stdout.fnmatch_lines([
"*test_myfunc*0*PASS*",
"*test_myfunc*1*FAIL*",
"*1 failed, 1 passed*"
])
def test_client(client):
assert isinstance(client, Client)
def test_rf(rf):
assert isinstance(rf, RequestFactory)
|
from bush.aws.session import create_session
class AWSBase:
# USAGE = ""
# SUB_COMMANDS = []
def __init__(self, options, resource_name):
self.name = resource_name
self.options = options
self.session = create_session(options)
self.resource = self.session.resource(resource_name)
self.client = self.session.client(resource_name)
| Set resource and client when it is needed
| from bush.aws.session import create_session
class AWSBase:
# USAGE = ""
# SUB_COMMANDS = []
def __init__(self, options, resource_name):
self.name = resource_name
self.options = options
self.session = create_session(options)
@property
def resource(self):
if not hasattr(self, '__resource'):
self.__set_resource()
return self.__resource
@property
def client(self):
if not hasattr(self, '__client'):
self.__set_client()
return self.__client
def __set_resource(self):
self.__resource = self.session.resource(self.name)
def __set_client(self):
self.__client = self.session.client(self.name)
|
from config_secret import SecretConfig
class Config:
SECRET_KEY = SecretConfig.SECRET_KEY
# MongoDB config
MONGO_DBNAME = SecretConfig.MONGO_DBNAME
MONGO_HOST = SecretConfig.MONGO_HOST
MONGO_PORT = SecretConfig.MONGO_PORT
# Cloning config
CLONE_TMP_DIR = 'tmp'
CLONE_TIMEOUT = 15
| Increase the cloning timeout limit
| from config_secret import SecretConfig
class Config:
SECRET_KEY = SecretConfig.SECRET_KEY
# MongoDB config
MONGO_DBNAME = SecretConfig.MONGO_DBNAME
MONGO_HOST = SecretConfig.MONGO_HOST
MONGO_PORT = SecretConfig.MONGO_PORT
# Cloning config
CLONE_TMP_DIR = 'tmp'
CLONE_TIMEOUT = 30
|
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
from http_test_suite import HTTPTestSuite
from mozdef_util.utilities.dot_dict import DotDict
import mock
from configlib import OptionParser
class RestTestDict(DotDict):
@property
def __dict__(self):
return self
class RestTestSuite(HTTPTestSuite):
def setup(self):
sample_config = RestTestDict()
sample_config.configfile = os.path.join(os.path.dirname(__file__), 'index.conf')
OptionParser.parse_args = mock.Mock(return_value=(sample_config, {}))
from rest import index
self.application = index.application
super(RestTestSuite, self).setup()
| Fix import path for rest plugins
| import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
from http_test_suite import HTTPTestSuite
from mozdef_util.utilities.dot_dict import DotDict
import mock
from configlib import OptionParser
import importlib
class RestTestDict(DotDict):
@property
def __dict__(self):
return self
class RestTestSuite(HTTPTestSuite):
def setup(self):
sample_config = RestTestDict()
sample_config.configfile = os.path.join(os.path.dirname(__file__), 'index.conf')
OptionParser.parse_args = mock.Mock(return_value=(sample_config, {}))
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../../rest"))
import plugins
importlib.reload(plugins)
from rest import index
self.application = index.application
super(RestTestSuite, self).setup()
|
import unittest
from pydirections.route_requester import DirectionsRequest
from pydirections.exceptions import InvalidModeError, InvalidAPIKeyError, InvalidAlternativeError
class TestOptionalParameters(unittest.TestCase):
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA")
def test_invalid_mode(self):
"""
Tests the is_valid_mode function for an invalid input
"""
with self.assertRaises(InvalidModeError):
requester.set_mode("flying")
def test_invalid_alternative(self):
"""
Tests for error handling when an invalid value is provided to
the set_alternative function
"""
with self.assertRaises(InvalidAlternativeError):
requester.set_alternatives('False')
class TestAPIKey(unittest.TestCase):
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA")
def test_invalid_api_key(self):
invalid_key = 123456
with self.assertRaises(InvalidAPIKeyError):
requester.set_api_key(invalid_key)
if __name__ == '__main__':
unittest.main() | Fix bug in unit tests
| import unittest
from pydirections.route_requester import DirectionsRequest
from pydirections.exceptions import InvalidModeError, InvalidAPIKeyError, InvalidAlternativeError
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA")
class TestOptionalParameters(unittest.TestCase):
def test_invalid_mode(self):
"""
Tests the is_valid_mode function for an invalid input
"""
with self.assertRaises(InvalidModeError):
requester.set_mode("flying")
def test_invalid_alternative(self):
"""
Tests for error handling when an invalid value is provided to
the set_alternative function
"""
with self.assertRaises(InvalidAlternativeError):
requester.set_alternatives('False')
class TestAPIKey(unittest.TestCase):
def test_invalid_api_key(self):
invalid_key = 123456
with self.assertRaises(InvalidAPIKeyError):
requester.set_api_key(invalid_key)
if __name__ == '__main__':
unittest.main() |
from heutagogy import app
import heutagogy.persistence
import os
from datetime import timedelta
app.config.from_object(__name__)
app.config.update(dict(
USERS={
'myuser': {'password': 'mypassword'},
'user2': {'password': 'pass2'},
},
JWT_AUTH_URL_RULE='/api/v1/login',
JWT_EXPIRATION_DELTA=timedelta(seconds=2592000), # 1 month
DATABASE=os.path.join(app.root_path, 'heutagogy.sqlite3'),
DEBUG=True))
app.config.from_envvar('HEUTAGOGY_SETTINGS', silent=True)
if not app.config['SECRET_KEY']:
app.config['SECRET_KEY'] = 'super-secret'
@app.cli.command('initdb')
def initdb_command():
"""Creates the database tables."""
heutagogy.persistence.initialize()
| Initialize database if it does not exist
| from heutagogy import app
import heutagogy.persistence
import os
from datetime import timedelta
app.config.from_object(__name__)
app.config.update(dict(
USERS={
'myuser': {'password': 'mypassword'},
'user2': {'password': 'pass2'},
},
JWT_AUTH_URL_RULE='/api/v1/login',
JWT_EXPIRATION_DELTA=timedelta(seconds=2592000), # 1 month
DATABASE=os.path.join(app.root_path, 'heutagogy.sqlite3'),
DEBUG=True))
app.config.from_envvar('HEUTAGOGY_SETTINGS', silent=True)
if not app.config['SECRET_KEY']:
app.config['SECRET_KEY'] = 'super-secret'
@app.cli.command('initdb')
def initdb_command():
"""Creates the database tables."""
heutagogy.persistence.initialize()
with app.app_context():
if not os.path.isfile(app.config['DATABASE']):
heutagogy.persistence.initialize()
|
import os
from nxdrive.tests.common import IntegrationTestCase
from nxdrive.client import LocalClient
class TestIntegrationCopy(IntegrationTestCase):
def test_synchronize_remote_copy(self):
# Get local and remote clients
local = LocalClient(os.path.join(self.local_nxdrive_folder_1,
self.workspace_title))
remote = self.remote_document_client_1
# Bind the server and root workspace
self.bind_server(self.ndrive_1, self.user_1, self.nuxeo_url, self.local_nxdrive_folder_1, self.password_1)
#TODO: allow use of self.bind_root(self.ndrive_1, self.workspace, self.local_nxdrive_folder_1)
remote.register_as_root(self.workspace)
# Create a file and a folder in the remote root workspace
remote.make_file('/', 'test.odt', 'Some content.')
remote.make_folder('/', 'Test folder')
# Copy the file to the folder remotely
remote.copy('/test.odt', '/Test folder')
# Launch ndrive, expecting 4 synchronized items
self.ndrive(self.ndrive_1, quit_timeout=300)
self.assertTrue(local.exists('/'))
self.assertTrue(local.exists('/Test folder'))
self.assertTrue(local.exists('/test.odt'))
self.assertTrue(local.exists('/Test folder/test.odt'))
self.assertEquals(local.get_content('/Test folder/test.odt'),
'Some content.')
| NXDRIVE-170: Remove long timeout to make file blacklisting bug appear, waiting for the fix
| import os
from nxdrive.tests.common import IntegrationTestCase
from nxdrive.client import LocalClient
class TestIntegrationCopy(IntegrationTestCase):
def test_synchronize_remote_copy(self):
# Get local and remote clients
local = LocalClient(os.path.join(self.local_nxdrive_folder_1,
self.workspace_title))
remote = self.remote_document_client_1
# Bind the server and root workspace
self.bind_server(self.ndrive_1, self.user_1, self.nuxeo_url, self.local_nxdrive_folder_1, self.password_1)
#TODO: allow use of self.bind_root(self.ndrive_1, self.workspace, self.local_nxdrive_folder_1)
remote.register_as_root(self.workspace)
# Create a file and a folder in the remote root workspace
remote.make_file('/', 'test.odt', 'Some content.')
remote.make_folder('/', 'Test folder')
# Copy the file to the folder remotely
remote.copy('/test.odt', '/Test folder')
# Launch ndrive, expecting 4 synchronized items
self.ndrive(self.ndrive_1)
self.assertTrue(local.exists('/'))
self.assertTrue(local.exists('/Test folder'))
self.assertTrue(local.exists('/test.odt'))
self.assertTrue(local.exists('/Test folder/test.odt'))
self.assertEquals(local.get_content('/Test folder/test.odt'),
'Some content.')
|
##
# Copyright (c) 2013 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
# Import languages to register them
from metamagic.utils.lang import yaml, python, javascript
| utils.lang.js: Switch class system to use JPlus types machinery
| ##
# Copyright (c) 2013 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
# Import languages to register them
from metamagic.utils.lang import yaml, python, javascript, jplus
|
import kerberos
import logging
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
logger = logging.getLogger(__name__)
class KrbBackend(ModelBackend):
"""
Django Authentication backend using Kerberos for password checking.
"""
def authenticate(self, username=None, password=None):
UserModel = get_user_model()
if username is None:
username = kwargs.get(UserModel.USERNAME_FIELD)
if not self.check_password(username, password):
return None
UserModel = get_user_model()
user, created = UserModel.objects.get_or_create(**{
UserModel.USERNAME_FIELD: username
})
return user
def check_password(self, username, password):
"""The actual password checking logic. Separated from the authenticate code from Django for easier updating"""
try:
kerberos.checkPassword(username.lower(), password, settings.KRB5_SERVICE, settings.KRB5_REALM)
return True
except kerberos.BasicAuthError:
if getattr(settings, "KRB5_DEBUG", False):
logger.exception("Failure during authentication")
return False
| Make user query case insensitive | import kerberos
import logging
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
logger = logging.getLogger(__name__)
class KrbBackend(ModelBackend):
"""
Django Authentication backend using Kerberos for password checking.
"""
def authenticate(self, username=None, password=None):
UserModel = get_user_model()
if username is None:
username = kwargs.get(UserModel.USERNAME_FIELD)
if not self.check_password(username, password):
return None
UserModel = get_user_model()
user, created = UserModel.objects.get_or_create(**{
UserModel.USERNAME_FIELD+"__iexact": username,
defaults: { UserModel.USERNAME_FIELD: username }
})
return user
def check_password(self, username, password):
"""The actual password checking logic. Separated from the authenticate code from Django for easier updating"""
try:
kerberos.checkPassword(username.lower(), password, settings.KRB5_SERVICE, settings.KRB5_REALM)
return True
except kerberos.BasicAuthError:
if getattr(settings, "KRB5_DEBUG", False):
logger.exception("Failure during authentication")
return False
|
from decimal import Decimal as D
from decimal import InvalidOperation
from babel.numbers import format_currency
from django import template
from django.conf import settings
from django.utils.translation import get_language, to_locale
register = template.Library()
@register.filter(name='currency')
def currency(value, currency=None):
"""
Format decimal value as currency
"""
try:
value = D(value)
except (TypeError, InvalidOperation):
return ""
# Using Babel's currency formatting
# http://babel.pocoo.org/en/latest/api/numbers.html#babel.numbers.format_currency
OSCAR_CURRENCY_FORMAT = getattr(settings, 'OSCAR_CURRENCY_FORMAT', None)
kwargs = {
'currency': currency or settings.OSCAR_DEFAULT_CURRENCY,
'locale': to_locale(get_language() or settings.LANGUAGE_CODE)
}
if isinstance(OSCAR_CURRENCY_FORMAT, dict):
kwargs.update(OSCAR_CURRENCY_FORMAT.get(currency, {}))
else:
kwargs['format'] = OSCAR_CURRENCY_FORMAT
return format_currency(value, **kwargs)
| Fix for missing default in currency filter
| from decimal import Decimal as D
from decimal import InvalidOperation
from babel.numbers import format_currency
from django import template
from django.conf import settings
from django.utils.translation import get_language, to_locale
register = template.Library()
@register.filter(name='currency')
def currency(value, currency=None):
"""
Format decimal value as currency
"""
if currency is None:
currency = settings.OSCAR_DEFAULT_CURRENCY
try:
value = D(value)
except (TypeError, InvalidOperation):
return ""
# Using Babel's currency formatting
# http://babel.pocoo.org/en/latest/api/numbers.html#babel.numbers.format_currency
OSCAR_CURRENCY_FORMAT = getattr(settings, 'OSCAR_CURRENCY_FORMAT', None)
kwargs = {
'currency': currency,
'locale': to_locale(get_language() or settings.LANGUAGE_CODE)
}
if isinstance(OSCAR_CURRENCY_FORMAT, dict):
kwargs.update(OSCAR_CURRENCY_FORMAT.get(currency, {}))
else:
kwargs['format'] = OSCAR_CURRENCY_FORMAT
return format_currency(value, **kwargs)
|
from kokki import *
if env.config.mdadm.arrays:
Package("mdadm")
Execute("mdadm-update-conf",
action = "nothing",
command = ("("
"echo DEVICE partitions > /etc/mdadm/mdadm.conf"
"; mdadm --detail --scan >> /etc/mdadm/mdadm.conf"
")"
))
for array in env.config.mdadm.arrays:
env.cookbooks.mdadm.Array(**array)
if array.get('fstype'):
if array['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % dict(fstype=array['fstype'], device=array['name']),
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % dict(device=array['name']))
if array.get('mount_point'):
Mount(array['mount_point'],
device = array['name'],
fstype = array['fstype'],
options = array['fsoptions'] if array.get('fsoptions') is not None else ["noatime"],
action = ["mount", "enable"])
| Fix to mounting mdadm raid arrays
|
from kokki import *
if env.config.mdadm.arrays:
Package("mdadm")
Execute("mdadm-update-conf",
action = "nothing",
command = ("("
"echo DEVICE partitions > /etc/mdadm/mdadm.conf"
"; mdadm --detail --scan >> /etc/mdadm/mdadm.conf"
")"
))
for array in env.config.mdadm.arrays:
fstype = array.pop('fstype', None)
fsoptions = array.pop('fsoptions', None)
mount_point = array.pop('mount_point', None)
env.cookbooks.mdadm.Array(**array)
if fstype:
if fstype == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % dict(fstype=fstype, device=array['name']),
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % dict(device=array['name']))
if mount_point:
Mount(mount_point,
device = array['name'],
fstype = fstype,
options = fsoptions if fsoptions is not None else ["noatime"],
action = ["mount", "enable"])
|
import subprocess
def run(command):
try:
subprocess.check_call(command, shell=True)
except:
pass
| Set expected exception class in "except" block
Fix issue E722 of flake8
| import subprocess
def run(command):
try:
subprocess.check_call(command, shell=True)
except Exception:
pass
|
from setuptools import setup
setup(name="Demo",
packages=["orangedemo"],
package_data={"orangedemo": ["icons/*.svg"]},
classifiers=["Example :: Invalid"],
# Declare orangedemo package to contain widgets for the "Demo" category
entry_points={"orange.widgets": ("Demo = orangedemo")},
) | Remove tuple, since string is sufficient here
As discovered in [discussion around issue #1184](https://github.com/biolab/orange3/issues/1184#issuecomment-214215811), the tuple is not necessary here. Only a single string is necessary, as is documented in the setuptools official [entry_points example](https://pythonhosted.org/setuptools/setuptools.html#dynamic-discovery-of-services-and-plugins). | from setuptools import setup
setup(name="Demo",
packages=["orangedemo"],
package_data={"orangedemo": ["icons/*.svg"]},
classifiers=["Example :: Invalid"],
# Declare orangedemo package to contain widgets for the "Demo" category
entry_points={"orange.widgets": "Demo = orangedemo"},
)
|
from django.urls import reverse
from six.moves.urllib.parse import urljoin
from kolibri.utils.conf import OPTIONS
def reverse_remote(
baseurl, viewname, urlconf=None, args=None, kwargs=None, current_app=None
):
# Get the reversed URL
reversed_url = reverse(
viewname, urlconf=urlconf, args=args, kwargs=kwargs, current_app=current_app
)
# Remove any configured URL prefix from the URL that is specific to this deployment
reversed_url = reversed_url.replace(OPTIONS["Deployment"]["URL_PATH_PREFIX"], "")
# Join the URL to baseurl, but remove any leading "/" to ensure that if there is a path prefix on baseurl
# it doesn't get ignored by the urljoin (which it would if the reversed_url had a leading '/',
# as it would be read as an absolute path)
return urljoin(baseurl, reversed_url.lstrip("/"))
| Truncate rather than replace to prevent erroneous substitutions.
| from django.urls import reverse
from six.moves.urllib.parse import urljoin
from kolibri.utils.conf import OPTIONS
def reverse_remote(
baseurl, viewname, urlconf=None, args=None, kwargs=None, current_app=None
):
# Get the reversed URL
reversed_url = reverse(
viewname, urlconf=urlconf, args=args, kwargs=kwargs, current_app=current_app
)
# Remove any configured URL prefix from the URL that is specific to this deployment
prefix_length = len(OPTIONS["Deployment"]["URL_PATH_PREFIX"])
reversed_url = reversed_url[prefix_length:]
# Join the URL to baseurl, but remove any leading "/" to ensure that if there is a path prefix on baseurl
# it doesn't get ignored by the urljoin (which it would if the reversed_url had a leading '/',
# as it would be read as an absolute path)
return urljoin(baseurl, reversed_url.lstrip("/"))
|
from module_interface import Module, ModuleType
from message import IRCResponse, ResponseType
from pyheufybot import globalvars
class NickServIdentify(Module):
def __init__(self):
self.moduleType = ModuleType.PASSIVE
self.messageTypes = ["USER"]
self.helpText = "Attempts to log into NickServ with the password in the config"
def execute(self, message, serverInfo):
config = globalvars.botHandler.factories[serverInfo.name].config
passwordType = config.getSettingWithDefault("passwordType", None)
password = config.getSettingWithDefault("password", "")
if passwordType == "NickServ":
return [ IRCResponse("NickServ", password, responseType.MESSAGE) ]
else:
return []
| Fix the syntax for NickServ logins
| from pyheufybot.module_interface import Module, ModuleType
from pyheufybot.message import IRCResponse, ResponseType
from pyheufybot import globalvars
class NickServIdentify(Module):
def __init__(self):
self.moduleType = ModuleType.PASSIVE
self.messageTypes = ["USER"]
self.helpText = "Attempts to log into NickServ with the password in the config"
def execute(self, message, serverInfo):
config = globalvars.botHandler.factories[serverInfo.name].config
passwordType = config.getSettingWithDefault("passwordType", None)
password = config.getSettingWithDefault("password", "")
if passwordType == "NickServ":
return [ IRCResponse("NickServ", "IDENTIFY " + password, responseType.MESSAGE) ]
else:
return []
|
#deletes wahji content
import os, shutil, platform
def rem(loc):
os.chdir(loc)
print "deleting content"
"""delete them folder and its contents"""
shutil.rmtree("themes")
"""delete .wahji file"""
os.remove(".wahji")
"""delete 4040.html file"""
os.remove("404.html")
"""delete content folder"""
shutil.rmtree("content")
| Remove now asks for site file to be deleted
| #deletes wahji content
import os, shutil, platform
def rem(loc):
os.chdir(loc)
site = raw_input("Input site folder: ")
print "Are you sure you want to delete", site, "Y/N: "
confirm = raw_input()
if confirm == "Y" or confirm == "y":
"""delete site folder"""
shutil.rmtree(site)
print "Deleting site"
elif confirm == "N" or confirm == "n":
print "Site folder was not deleted"
|
import sys
# Detect Python 3
PY3 = (sys.hexversion >= 0x03000000)
if PY3:
types_not_to_encode = (int, str)
string_type = str
from urllib.parse import urlparse
else:
types_not_to_encode = (int, long, basestring)
string_type = basestring
from urllib2 import urlparse
| Fix the urlparse import in Python 2. (This is used for saving/loading tokens in files.) The urlparse that you can import from urllib2 is actually the urlparse module, but what we want is the urlparse function inside that module.
| import sys
# Detect Python 3
PY3 = (sys.hexversion >= 0x03000000)
if PY3:
types_not_to_encode = (int, str)
string_type = str
from urllib.parse import urlparse
else:
types_not_to_encode = (int, long, basestring)
string_type = basestring
from urlparse import urlparse
|
# -*- coding: utf-8 -*-
from openerp import fields,models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6,2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string='Attendees')
| [REF] openacademy: Add domain ir and ilike
| # -*- coding: utf-8 -*-
from openerp import fields,models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6,2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain = ['|',
('instructor', '=', True),
('category_id.name', 'ilike', "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string='Attendees')
|
import json
import webapp2
from controllers.api.api_base_controller import ApiBaseController
from helpers.model_to_dict import ModelToDict
from models.match import Match
class ApiMatchControllerBase(ApiBaseController):
CACHE_KEY_FORMAT = "apiv2_match_controller_{}" # (match_key)
CACHE_VERSION = 2
CACHE_HEADER_LENGTH = 60 * 60
def __init__(self, *args, **kw):
super(ApiMatchControllerBase, self).__init__(*args, **kw)
self.match_key = self.request.route_kwargs["match_key"]
self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.match_key)
@property
def _validators(self):
return [("match_id_validator", self.match_key)]
def _set_match(self, match_key):
self.match = Match.get_by_id(match_key)
if self.match is None:
self._errors = json.dumps({"404": "%s match not found" % self.match_key})
self.abort(404)
class ApiMatchController(ApiMatchControllerBase):
def _track_call(self, match_key):
self._track_call_defer('match', match_key)
def _render(self, match_key):
self._set_match(match_key)
match_dict = ModelToDict.matchConverter(self.match)
return json.dumps(match_dict, ensure_ascii=True)
| Move cache key out of base class
| import json
import webapp2
from controllers.api.api_base_controller import ApiBaseController
from helpers.model_to_dict import ModelToDict
from models.match import Match
class ApiMatchControllerBase(ApiBaseController):
def __init__(self, *args, **kw):
super(ApiMatchControllerBase, self).__init__(*args, **kw)
self.match_key = self.request.route_kwargs["match_key"]
@property
def _validators(self):
return [("match_id_validator", self.match_key)]
def _set_match(self, match_key):
self.match = Match.get_by_id(match_key)
if self.match is None:
self._errors = json.dumps({"404": "%s match not found" % self.match_key})
self.abort(404)
class ApiMatchController(ApiMatchControllerBase):
CACHE_KEY_FORMAT = "apiv2_match_controller_{}" # (match_key)
CACHE_VERSION = 2
CACHE_HEADER_LENGTH = 60 * 60
def __init__(self, *args, **kw):
super(ApiMatchController, self).__init__(*args, **kw)
self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.match_key)
def _track_call(self, match_key):
self._track_call_defer('match', match_key)
def _render(self, match_key):
self._set_match(match_key)
match_dict = ModelToDict.matchConverter(self.match)
return json.dumps(match_dict, ensure_ascii=True)
|
from .rest import RestClient
class DeviceCredentials(object):
"""Auth0 connection endpoints
Args:
domain (str): Your Auth0 domain, e.g: 'username.auth0.com'
jwt_token (str): An API token created with your account's global
keys. You can create one by using the token generator in the
API Explorer: https://auth0.com/docs/api/v2
"""
def __init__(self, domain, jwt_token):
self.domain = domain
self.client = RestClient(jwt=jwt_token)
def _url(self, id=None):
url = 'https://%s/api/v2/device-credentials' % self.domain
if id is not None:
return url + '/' + id
return url
def get(self, user_id=None, client_id=None, type=None,
fields=[], include_fields=True):
params = {
'fields': ','.join(fields) or None,
'include_fields': str(include_fields).lower(),
'user_id': user_id,
'client_id': client_id,
'type': type,
}
return self.client.get(self._url(), params=params)
def create(self, body):
return self.client.post(self._url(), data=body)
def delete(self, id):
return self.client.delete(self._url(id))
| Remove default arguments for user_id, client_id and type
| from .rest import RestClient
class DeviceCredentials(object):
"""Auth0 connection endpoints
Args:
domain (str): Your Auth0 domain, e.g: 'username.auth0.com'
jwt_token (str): An API token created with your account's global
keys. You can create one by using the token generator in the
API Explorer: https://auth0.com/docs/api/v2
"""
def __init__(self, domain, jwt_token):
self.domain = domain
self.client = RestClient(jwt=jwt_token)
def _url(self, id=None):
url = 'https://%s/api/v2/device-credentials' % self.domain
if id is not None:
return url + '/' + id
return url
def get(self, user_id, client_id, type, fields=[], include_fields=True):
params = {
'fields': ','.join(fields) or None,
'include_fields': str(include_fields).lower(),
'user_id': user_id,
'client_id': client_id,
'type': type,
}
return self.client.get(self._url(), params=params)
def create(self, body):
return self.client.post(self._url(), data=body)
def delete(self, id):
return self.client.delete(self._url(id))
|
from flask_restful import Api
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
from app import app
from app import db
from app.auth import Register, Login
from app.bucketlist_api import BucketList, BucketListEntry
from app.bucketlist_items import BucketListItems, BucketListItemSingle
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
api = Api(app)
api.add_resource(Register, '/auth/register')
api.add_resource(Login, '/auth/login')
api.add_resource(BucketList, '/bucketlists')
api.add_resource(BucketListEntry, '/bucketlists/<int:bucketlist_id>')
api.add_resource(BucketListItems, '/bucketlists/<int:bucketlist_id>/items')
api.add_resource(BucketListItemSingle,
'/bucketlists/<int:bucketlist_id>/items/<int:item_id>')
if __name__ == '__main__':
manager.run()
| Set urls for bucketlist items endpoints
| from flask_restful import Api
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
from app import app, db
from app.auth import Register, Login
from app.bucketlist_api import BucketLists, BucketListSingle
from app.bucketlist_items import BucketListItems, BucketListItemSingle
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
api = Api(app)
api.add_resource(Register, '/auth/register')
api.add_resource(Login, '/auth/login')
api.add_resource(BucketLists, '/bucketlists')
api.add_resource(BucketListSingle, '/bucketlists/<int:bucketlist_id>')
api.add_resource(BucketListItems, '/bucketlists/<int:bucketlist_id>/items')
api.add_resource(BucketListItemSingle,
'/bucketlists/<int:bucketlist_id>/items/<int:item_id>')
if __name__ == '__main__':
manager.run()
|
from tests.scoring_engine.engine.checks.check_test import CheckTest
class TestHTTPSCheck(CheckTest):
check_name = 'HTTPSCheck'
required_properties = ['useragent', 'vhost', 'uri']
properties = {
'useragent': 'testagent',
'vhost': 'www.example.com',
'uri': '/index.html'
}
cmd = "curl -s -S -4 -v -L -k --ssl-reqd -A 'testagent' 'https://www.example.com:100/index.html'" | Fix pep8 new line in test https check
Signed-off-by: Brandon Myers <[email protected]>
| from tests.scoring_engine.engine.checks.check_test import CheckTest
class TestHTTPSCheck(CheckTest):
check_name = 'HTTPSCheck'
required_properties = ['useragent', 'vhost', 'uri']
properties = {
'useragent': 'testagent',
'vhost': 'www.example.com',
'uri': '/index.html'
}
cmd = "curl -s -S -4 -v -L -k --ssl-reqd -A 'testagent' 'https://www.example.com:100/index.html'"
|
# Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import sys
import re
import subprocess
symbol_regex = re.compile("D \*UND\*\t(.*) (.*)$")
allowed_symbols = frozenset(['unshare', 'malloc_usable_size'])
actual_symbols = set()
objdump_output = subprocess.check_output([
'arm-linux-androideabi-objdump',
'-T',
'target/arm-linux-androideabi/debug/libservo.so']
).split('\n')
for line in objdump_output:
m = symbol_regex.search(line)
if m is not None:
actual_symbols.add(m.group(2))
difference = actual_symbols - allowed_symbols
if len(difference) > 0:
human_readable_difference = ", ".join(str(s) for s in difference)
print("Unexpected dynamic symbols in binary: {0}".format(human_readable_difference))
sys.exit(-1)
| Add Python 3 compatibility to Android symbol checker
Make the script that checks for undefined Android symbols compatible
with both Python 2 and Python 3, to allow for future updates to the
default system Python on our build machines.
I'd like to land this before https://github.com/servo/saltfs/pull/249.
We currently use Ubuntu 14.04 (an LTS release); Ubuntu is aiming for
Python 3 as the default Python in the next LTS release, 16.04, and
I'd like to have any scripts be ready for the transition.
| # Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import sys
import re
import subprocess
symbol_regex = re.compile(b"D \*UND\*\t(.*) (.*)$")
allowed_symbols = frozenset([b'unshare', b'malloc_usable_size'])
actual_symbols = set()
objdump_output = subprocess.check_output([
'arm-linux-androideabi-objdump',
'-T',
'target/arm-linux-androideabi/debug/libservo.so']
).split(b'\n')
for line in objdump_output:
m = symbol_regex.search(line)
if m is not None:
actual_symbols.add(m.group(2))
difference = actual_symbols - allowed_symbols
if len(difference) > 0:
human_readable_difference = ", ".join(str(s) for s in difference)
print("Unexpected dynamic symbols in binary: {0}".format(human_readable_difference))
sys.exit(-1)
|
from .base import *
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.abspath(os.path.join(
BASE_DIR, '..', 'data', 'db.sqlite3')),
}
}
ALLOWED_HOSTS = [
'127.0.0.1'
]
# email settings
EMAIL_HOST = 'localhost'
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_REPLY_TO = 'donotreply@'
# Logging
LOG_ENV = 'travis'
EXAMPLES_LOG_FILE = '{}/{}-examples.log'.format(LOG_DIR, LOG_ENV)
DCOLUMNS_LOG_FILE = '{}/{}-dcolumn.log'.format(LOG_DIR, LOG_ENV)
LOGGING.get('handlers', {}).get(
'examples_file', {})['filename'] = EXAMPLES_LOG_FILE
LOGGING.get('handlers', {}).get(
'dcolumns_file', {})['filename'] = DCOLUMNS_LOG_FILE
LOGGING.get('loggers', {}).get('django.request', {})['level'] = 'DEBUG'
LOGGING.get('loggers', {}).get('examples', {})['level'] = 'DEBUG'
LOGGING.get('loggers', {}).get('dcolumns', {})['level'] = 'DEBUG'
| Add the creation of the data dir for the sqlite file.
| from .base import *
DEBUG = False
# Make data dir
DATA_DIR = os.path.abspath(os.path.join(BASE_DIR, '..', 'data'))
not os.path.isdir(DATA_DIR) and os.mkdir(DATA_DIR, 0o0775)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.abspath(os.path.join(
BASE_DIR, '..', 'data', 'db.sqlite3')),
}
}
ALLOWED_HOSTS = [
'127.0.0.1'
]
# email settings
EMAIL_HOST = 'localhost'
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_REPLY_TO = 'donotreply@'
# Logging
LOG_ENV = 'travis'
EXAMPLES_LOG_FILE = '{}/{}-examples.log'.format(LOG_DIR, LOG_ENV)
DCOLUMNS_LOG_FILE = '{}/{}-dcolumn.log'.format(LOG_DIR, LOG_ENV)
LOGGING.get('handlers', {}).get(
'examples_file', {})['filename'] = EXAMPLES_LOG_FILE
LOGGING.get('handlers', {}).get(
'dcolumns_file', {})['filename'] = DCOLUMNS_LOG_FILE
LOGGING.get('loggers', {}).get('django.request', {})['level'] = 'DEBUG'
LOGGING.get('loggers', {}).get('examples', {})['level'] = 'DEBUG'
LOGGING.get('loggers', {}).get('dcolumns', {})['level'] = 'DEBUG'
|
# SymPy symbols to exclude
exclude = ('C', 'O', 'S', 'N', 'E', 'E1', 'Q')
# Aliases for SymPy symbols
aliases = {'delta': 'DiracDelta', 'step': 'Heaviside', 'u': 'Heaviside',
'j': 'I'}
# String replacements when printing as LaTeX. For example, SymPy uses
# theta for Heaviside's step.
latex_string_map = {r'\theta\left': r'u\left'}
import sympy as sym
print_expr_map = {sym.I: 'j'}
# Hack to print i as j
from sympy.printing.pretty.pretty_symbology import atoms_table
atoms_table['ImaginaryUnit'] = '\u2149'
| Exclude beta, gamma, zeta functions
| # SymPy symbols to exclude
exclude = ('C', 'O', 'S', 'N', 'E', 'E1', 'Q', 'beta', 'gamma', 'zeta')
# Aliases for SymPy symbols
aliases = {'delta': 'DiracDelta', 'step': 'Heaviside', 'u': 'Heaviside',
'j': 'I'}
# String replacements when printing as LaTeX. For example, SymPy uses
# theta for Heaviside's step.
latex_string_map = {r'\theta\left': r'u\left'}
import sympy as sym
print_expr_map = {sym.I: 'j'}
# Hack to pretty print i as j
from sympy.printing.pretty.pretty_symbology import atoms_table
atoms_table['ImaginaryUnit'] = '\u2149'
|
from abc import ABCMeta, abstractmethod
class Driver(metaclass=ABCMeta):
@abstractmethod
def create(self):
pass
@abstractmethod
def resize(self, id, quota):
pass
@abstractmethod
def clone(self, id):
pass
@abstractmethod
def remove(self, id):
pass
@abstractmethod
def expose(self, id):
pass | Fix inconsistency in parameters with base class
| from abc import ABCMeta, abstractmethod
class Driver(metaclass=ABCMeta):
@abstractmethod
def create(self, requirements):
pass
@abstractmethod
def _set_quota(self, id, quota):
pass
@abstractmethod
def resize(self, id, quota):
pass
@abstractmethod
def clone(self, id):
pass
@abstractmethod
def remove(self, id):
pass
@abstractmethod
def expose(self, id, host, permissions):
pass
|
import os
import pwd
import subprocess
from .exceptions import CommandFailed
def get_user_shell():
return pwd.getpwuid(os.getuid()).pw_shell
def execute_command_assert_success(cmd, **kw):
returned = execute_command(cmd, **kw)
if returned.returncode != 0:
raise CommandFailed("Command {0!r} failed with exit code {1}".format(cmd, returned))
return returned
def execute_command(cmd, **kw):
returned = subprocess.Popen(cmd, shell=True, **kw)
returned.wait()
return returned
| Fix execute_command_assert_success return code logging
| import os
import pwd
import subprocess
from .exceptions import CommandFailed
def get_user_shell():
return pwd.getpwuid(os.getuid()).pw_shell
def execute_command_assert_success(cmd, **kw):
returned = execute_command(cmd, **kw)
if returned.returncode != 0:
raise CommandFailed("Command {0!r} failed with exit code {1}".format(cmd, returned.returncode))
return returned
def execute_command(cmd, **kw):
returned = subprocess.Popen(cmd, shell=True, **kw)
returned.wait()
return returned
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'purepython.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
| Add url to access the index view.
| from django.conf.urls import patterns, include, url
from django.contrib import admin
from fb.views import index
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', index),
url(r'^admin/', include(admin.site.urls)),
)
|
import os
def env_or_default(var, default=None):
"""Get environment variable or provide default.
Args:
var (str): environment variable to search for
default (optional(str)): default to return
"""
if var in os.environ:
return os.environ[var]
return default
| Add new helper function to encrypt for KMS
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from future.utils import bytes_to_native_str as n
from base64 import b64encode
import os
import boto3
def env_or_default(var, default=None):
"""Get environment variable or provide default.
Args:
var (str): environment variable to search for
default (optional(str)): default to return
"""
if var in os.environ:
return os.environ[var]
return default
def kms_encrypt(value, key, aws_config=None):
"""Encrypt and value with KMS key.
Args:
value (str): value to encrypt
key (str): key id or alias
aws_config (optional[dict]): aws credentials
dict of arguments passed into boto3 session
example:
aws_creds = {'aws_access_key_id': aws_access_key_id,
'aws_secret_access_key': aws_secret_access_key,
'region_name': 'us-east-1'}
Returns:
str: encrypted cipher text
"""
aws_config = aws_config or {}
aws = boto3.session.Session(**aws_config)
client = aws.client('kms')
enc_res = client.encrypt(KeyId=key,
Plaintext=value)
return n(b64encode(enc_res['CiphertextBlob']))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from args import get_args
from subprocess import Popen, PIPE
if __name__ == '__main__':
# Get the arguments passed by user
args = get_args()
# TODO: Check whether Vim is available
# TODO: Arguments validation
# TODO: Test the inupt/output directories
# TODO: Call Vim to do the conversion
| Check whether Vim is available
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from args import get_args
from subprocess import Popen, PIPE
if __name__ == '__main__':
# Get the arguments passed by user
args = get_args()
# Check whether Vim is available
p1 = Popen(["vim", "--version"], stdout=PIPE)
p2 = Popen(["grep", "IMproved"], stdin=p1.stdout, stdout=PIPE)
vim_header = p2.communicate()[0].strip('\n')
if vim_header:
pass # Vim detected
else:
sys.exit(u'ERROR: Vim is not yet installed on this system, aborted.')
# TODO: Arguments validation
# TODO: Test the inupt/output directories
# TODO: Call Vim to do the conversion
|
import hashlib
import itertools
import os
import time
from django.conf import settings
from django.core.files.storage import FileSystemStorage
from storages.backends.s3boto3 import S3Boto3Storage
DjangoStorage = S3Boto3Storage if settings.AWS_ACCESS_KEY_ID else FileSystemStorage
class RenameFileStorage(DjangoStorage):
"""Subclass Django's file system storage to add our file naming
conventions."""
def get_available_name(self, name):
dir_name, file_name = os.path.split(name)
file_root, file_ext = os.path.splitext(file_name)
# Set file_root to something we like: clean and all ascii
md5_sub = hashlib.md5(file_root.encode('utf8')).hexdigest()[0:6]
file_root = time.strftime('%Y-%m-%d-%H-%M-%S-',
time.localtime()) + md5_sub
name = os.path.join(dir_name, file_root + file_ext)
# If the filename already exists, add an underscore and a number
# (before the file extension, if one exists) to the filename until
# the generated filename doesn't exist.
count = itertools.count(1)
while self.exists(name):
# file_ext includes the dot.
name = os.path.join(dir_name, "%s_%s%s" %
(file_root, count.next(), file_ext))
return name
| Update RenameFileStorage method to be 1.11 compatible
| import hashlib
import itertools
import os
import time
from django.conf import settings
from django.core.files.storage import FileSystemStorage
from storages.backends.s3boto3 import S3Boto3Storage
DjangoStorage = S3Boto3Storage if settings.AWS_ACCESS_KEY_ID else FileSystemStorage
class RenameFileStorage(DjangoStorage):
"""Subclass Django's file system storage to add our file naming
conventions."""
def get_available_name(self, name, max_length=None):
dir_name, file_name = os.path.split(name)
file_root, file_ext = os.path.splitext(file_name)
# Set file_root to something we like: clean and all ascii
md5_sub = hashlib.md5(file_root.encode('utf8')).hexdigest()[0:6]
file_root = time.strftime('%Y-%m-%d-%H-%M-%S-',
time.localtime()) + md5_sub
name = os.path.join(dir_name, file_root + file_ext)
# If the filename already exists, add an underscore and a number
# (before the file extension, if one exists) to the filename until
# the generated filename doesn't exist.
count = itertools.count(1)
while self.exists(name):
# file_ext includes the dot.
name = os.path.join(dir_name, "%s_%s%s" %
(file_root, count.next(), file_ext))
return name
|
"""Configuration for Django system."""
__version__ = "0.8.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
| Increment version number to 0.8.2
| """Configuration for Django system."""
__version__ = "0.8.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.1', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
| Update stable channel builders to the 1.2 branch
Review URL: https://codereview.chromium.org/179723002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@253135 0039d316-1c4b-4281-b951-d872f2087c98
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.2', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-11 17:28
from __future__ import unicode_literals
from django.db import migrations
def forwards_func(apps, schema_editor):
User = apps.get_model('auth', 'User')
old_password_patterns = (
'sha1$',
# RTD's production database doesn't have any of these
# but they are included for completeness
'md5$',
'crypt$',
)
for pattern in old_password_patterns:
users = User.objects.filter(password__startswith=pattern)
for user in users:
user.set_unusable_password()
user.save()
class Migration(migrations.Migration):
dependencies = [
('core', '0004_ad-opt-out'),
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.RunPython(forwards_func),
]
| Migrate old passwords without "set_unusable_password"
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-11 17:28
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth.hashers import make_password
def forwards_func(apps, schema_editor):
User = apps.get_model('auth', 'User')
old_password_patterns = (
'sha1$',
# RTD's production database doesn't have any of these
# but they are included for completeness
'md5$',
'crypt$',
)
for pattern in old_password_patterns:
users = User.objects.filter(password__startswith=pattern)
for user in users:
user.password = make_password(None)
user.save()
class Migration(migrations.Migration):
dependencies = [
('core', '0004_ad-opt-out'),
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.RunPython(forwards_func),
]
|
import argparse
import sys
def main(raw_args=sys.argv[1:]):
"""
A tool to automatically request, renew and distribute Let's Encrypt
certificates for apps running on Marathon and served by marathon-lb.
"""
parser = argparse.ArgumentParser(
description='Automatically manage ACME certificates for Marathon apps')
parser.add_argument('-a', '--acme',
help='The address for the ACME Directory Resource '
'(default: %(default)s)',
default=(
'https://acme-v01.api.letsencrypt.org/directory'))
parser.add_argument('-m', '--marathon', nargs='+',
help='The address for the Marathon HTTP API (default: '
'%(default)s)',
default='http://marathon.mesos:8080')
parser.add_argument('-l', '--lb', nargs='+',
help='The address for the marathon-lb HTTP API '
'(default: %(default)s)',
default='http://marathon-lb.marathon.mesos:9090')
parser.add_argument('storage-dir',
help='Path to directory for storing certificates')
args = parser.parse_args(raw_args) # noqa
if __name__ == '__main__':
main()
| Add --group option to CLI
| import argparse
import sys
def main(raw_args=sys.argv[1:]):
"""
A tool to automatically request, renew and distribute Let's Encrypt
certificates for apps running on Marathon and served by marathon-lb.
"""
parser = argparse.ArgumentParser(
description='Automatically manage ACME certificates for Marathon apps')
parser.add_argument('-a', '--acme',
help='The address for the ACME Directory Resource '
'(default: %(default)s)',
default=(
'https://acme-v01.api.letsencrypt.org/directory'))
parser.add_argument('-m', '--marathon', nargs='+',
help='The address for the Marathon HTTP API (default: '
'%(default)s)',
default='http://marathon.mesos:8080')
parser.add_argument('-l', '--lb', nargs='+',
help='The address for the marathon-lb HTTP API '
'(default: %(default)s)',
default='http://marathon-lb.marathon.mesos:9090')
parser.add_argument('-g', '--group',
help='The marathon-lb group to issue certificates for '
'(default: %(default)s)',
default='external')
parser.add_argument('storage-dir',
help='Path to directory for storing certificates')
args = parser.parse_args(raw_args) # noqa
if __name__ == '__main__':
main()
|
from redsolutioncms.make import BaseMake
from redsolutioncms.models import CMSSettings
class Make(BaseMake):
def make(self):
super(Make, self).make()
cms_settings = CMSSettings.objects.get_settings()
cms_settings.render_to('settings.py', 'chunks/redsolutioncms/settings.pyt')
cms_settings.render_to(['..', 'templates', 'base_chunks.html'],
'chunks/redsolutioncms/base_chunks.html', {
}, 'w')
cms_settings.render_to('urls.py', 'chunks/redsolutioncms/urls.pyt')
cms_settings.render_to(['..', 'templates', 'robots.txt'],
'chunks/redsolutioncms/robots.txt')
cms_settings.base_template = 'base_chunks.html'
cms_settings.save()
make = Make()
| Rewrite robots.txt file in setup
| from redsolutioncms.make import BaseMake
from redsolutioncms.models import CMSSettings
class Make(BaseMake):
def make(self):
super(Make, self).make()
cms_settings = CMSSettings.objects.get_settings()
cms_settings.render_to('settings.py', 'chunks/redsolutioncms/settings.pyt')
cms_settings.render_to(['..', 'templates', 'base_chunks.html'],
'chunks/redsolutioncms/base_chunks.html', {
}, 'w')
cms_settings.render_to('urls.py', 'chunks/redsolutioncms/urls.pyt')
cms_settings.render_to(['..', 'templates', 'robots.txt'],
'chunks/redsolutioncms/robots.txt', {}, 'w')
cms_settings.base_template = 'base_chunks.html'
cms_settings.save()
make = Make()
|
# -*- coding: utf-8 -*-
from django.core.checks import Warning, register
def warn_1_3_changes(app_configs, **kwargs):
return [
Warning(
'cmsplugin-contact-plus >= 1.3 has renamed the "input" field. Do not forget to migrate your '
'database and update your templates',
hint=None,
obj=None,
id='cmsplugin_contact_plus.W001',
)
]
def register_checks():
for check in [
warn_1_3_changes,
]:
register(check)
| Comment out warning for renamed field
| # -*- coding: utf-8 -*-
from django.core.checks import Warning, register
def warn_1_3_changes(app_configs, **kwargs):
return [
Warning(
'cmsplugin-contact-plus >= 1.3 has renamed the "input" field. Do not forget to migrate your '
'database and update your templates',
hint=None,
obj=None,
id='cmsplugin_contact_plus.W001',
)
]
def register_checks():
for check in [
# warn_1_3_changes, # Might be more annoying than useful
]:
register(check)
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationStatsPermissionTest(PermissionTestCase):
def setUp(self):
super(OrganizationStatsPermissionTest, self).setUp()
self.path = reverse('sentry-organization-stats', args=[self.organization.slug])
def test_teamless_admin_cannot_load(self):
self.assert_teamless_admin_cannot_access(self.path)
def test_org_member_cannot_load(self):
self.assert_org_member_cannot_access(self.path)
def test_org_admin_can_load(self):
self.assert_org_admin_can_access(self.path)
class OrganizationStatsTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team_1 = self.create_team(name='foo', organization=organization)
team_2 = self.create_team(name='bar', organization=organization)
path = reverse('sentry-organization-stats', args=[organization.slug])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-stats.html')
assert resp.context['organization'] == organization
| Correct permission tests for organization stats
| from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationStatsPermissionTest(PermissionTestCase):
def setUp(self):
super(OrganizationStatsPermissionTest, self).setUp()
self.path = reverse('sentry-organization-stats', args=[self.organization.slug])
def test_teamless_admin_cannot_load(self):
self.assert_teamless_admin_cannot_access(self.path)
def test_org_member_can_load(self):
self.assert_org_member_can_access(self.path)
class OrganizationStatsTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team_1 = self.create_team(name='foo', organization=organization)
team_2 = self.create_team(name='bar', organization=organization)
path = reverse('sentry-organization-stats', args=[organization.slug])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-stats.html')
assert resp.context['organization'] == organization
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import twitter_rss
import time
import subprocess
import config
# Launch web server
p = subprocess.Popen(['/usr/bin/python2', config.INSTALL_DIR + 'server.py'])
# Update the feeds
try:
while 1:
print 'Updating ALL THE FEEDS!'
try:
with open(config.XML_DIR + 'user/user.txt', 'r') as usernames:
for user in usernames:
twitter_rss.UserTweetGetter(user)
usernames.close()
with open(config.XML_DIR + 'htag/htag.txt', 'r') as hashtags:
for htag in hashtags:
twitter_rss.HashtagTweetGetter(user)
hashtags.close()
except IOError:
print 'File could not be read'
time.sleep(config.TIMER)
except (KeyboardInterrupt, SystemExit):
p.kill() # kill the subprocess
print '\nKeyboardInterrupt catched -- Finishing program.' | Use sys.executable instead of harcoded python path
Fixes issue when running in a virtualenv and in non-standard python
installations.
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import twitter_rss
import time
import subprocess
import config
import sys
# Launch web server
p = subprocess.Popen([sys.executable, config.INSTALL_DIR + 'server.py'])
# Update the feeds
try:
while 1:
print 'Updating ALL THE FEEDS!'
try:
with open(config.XML_DIR + 'user/user.txt', 'r') as usernames:
for user in usernames:
twitter_rss.UserTweetGetter(user)
usernames.close()
with open(config.XML_DIR + 'htag/htag.txt', 'r') as hashtags:
for htag in hashtags:
twitter_rss.HashtagTweetGetter(user)
hashtags.close()
except IOError:
print 'File could not be read'
time.sleep(config.TIMER)
except (KeyboardInterrupt, SystemExit):
p.kill() # kill the subprocess
print '\nKeyboardInterrupt catched -- Finishing program.'
|
from mock import patch
from django.test.testcases import TestCase
from device_notifications import settings
from device_notifications.models import AbstractBaseDevice
from device_notifications.models import InvalidDeviceType
class ConcreteTestDevice(AbstractBaseDevice):
pass
@patch.object(settings, 'get_device_model', return_value=ConcreteTestDevice)
class AbstractBaseDeviceTests(TestCase):
@patch('device_notifications.models.gcm_send_message_task')
def test_send_message(self, gcm_send_message_task):
device = ConcreteTestDevice(
pk=1,
device_type='android')
message = 'Hello World'
device.send_message(message)
gcm_send_message_task.apply_async.assert_called_with(
args=[device.pk, message])
@patch('device_notifications.models.gcm_send_message_task')
def test_send_message_bad_device_type(self, gcm_send_message_task):
device = ConcreteTestDevice(
pk=1,
device_type='windows_phone')
self.assertRaises(InvalidDeviceType, device.send_message, 'Hi')
| Patch get_device_model in the setUp and tearDown methods so that we don't send the mock object to each test method. | from mock import patch
from django.test.testcases import TestCase
from device_notifications import settings
from device_notifications.models import AbstractBaseDevice
from device_notifications.models import InvalidDeviceType
class ConcreteTestDevice(AbstractBaseDevice):
pass
class AbstractBaseDeviceTests(TestCase):
def setUp(self):
self.get_device_model_patcher = patch.object(
settings,
'get_device_model',
return_value=ConcreteTestDevice)
self.get_device_model_patcher.start()
super(AbstractBaseDeviceTests, self).setUp()
def tearDown(self):
super(AbstractBaseDeviceTests, self).tearDown()
self.get_device_model_patcher.stop()
@patch('device_notifications.models.gcm_send_message_task')
def test_send_message(self, gcm_send_message_task):
device = ConcreteTestDevice(
pk=1,
device_type='android')
message = 'Hello World'
device.send_message(message)
gcm_send_message_task.apply_async.assert_called_with(
args=[device.pk, message])
@patch('device_notifications.models.gcm_send_message_task')
def test_send_message_bad_device_type(self, gcm_send_message_task):
device = ConcreteTestDevice(
pk=1,
device_type='windows_phone')
self.assertRaises(InvalidDeviceType, device.send_message, 'Hi')
|
# Copyright (c) 2013-2014 Molly White
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software
# and associated documentation files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from plugins.util import command
from random import choice
@command("8ball", "8-ball")
def eightball(m):
"""Returns 8-ball advice."""
with open(m.bot.base_path + '/plugins/responses/8ball.txt', 'r') as replies:
lines = replies.read().splitlines()
m.bot.private_message(m.location, choice(lines)) | Add documentation for 8ball command
| # Copyright (c) 2013-2014 Molly White
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software
# and associated documentation files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from plugins.util import command
from random import choice
@command("8ball", "8-ball")
def eightball(m):
"""Returns 8-ball advice."""
#- !8ball [question]
#-
#- ```irc
#- < GorillaWarfare> !8ball
#- < GorillaBot> Most likely.
#- ```
#-
#- Returns a magic 8 ball response.
with open(m.bot.base_path + '/plugins/responses/8ball.txt', 'r') as replies:
lines = replies.read().splitlines()
m.bot.private_message(m.location, choice(lines)) |
import pytest
from ethereum import blocks
from ethereum.db import DB
from ethereum.config import Env
from pyethapp.utils import merge_dict
from pyethapp.utils import update_config_from_genesis_json
import pyethapp.config as konfig
from pyethapp.profiles import PROFILES
def check_genesis(profile):
config = dict(eth=dict())
# Set config values based on profile selection
merge_dict(config, PROFILES[profile])
# Load genesis config
update_config_from_genesis_json(config, config['eth']['genesis'])
konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}})
print config['eth'].keys()
bc = config['eth']['block']
print bc.keys()
env = Env(DB(), bc)
genesis = blocks.genesis(env)
print 'genesis.hash', genesis.hash.encode('hex')
print 'expected', config['eth']['genesis_hash']
assert genesis.hash == config['eth']['genesis_hash'].decode('hex')
@pytest.mark.xfail # FIXME
def test_olympic():
check_genesis('olympic')
def test_frontier():
check_genesis('frontier')
if __name__ == '__main__':
test_genesis()
| Fix & cleanup profile genesis tests
| from pprint import pprint
import pytest
from ethereum import blocks
from ethereum.db import DB
from ethereum.config import Env
from pyethapp.utils import merge_dict
from pyethapp.utils import update_config_from_genesis_json
import pyethapp.config as konfig
from pyethapp.profiles import PROFILES
@pytest.mark.parametrize('profile', PROFILES.keys())
def test_profile(profile):
config = dict(eth=dict())
konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}})
# Set config values based on profile selection
merge_dict(config, PROFILES[profile])
# Load genesis config
update_config_from_genesis_json(config, config['eth']['genesis'])
bc = config['eth']['block']
pprint(bc)
env = Env(DB(), bc)
genesis = blocks.genesis(env)
assert genesis.hash.encode('hex') == config['eth']['genesis_hash']
|
from django.contrib.auth.forms import AuthenticationForm
from django import forms
from django.core.validators import MinLengthValidator
from .models import PoolingUser
from users.forms import UserCreationForm
class LoginForm(AuthenticationForm):
username = forms.CharField(widget=forms.EmailInput(attrs={'placeholder': 'Email',
'class': 'form-control',
}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'placeholder': 'Password',
'class': 'form-control',
}))
class SearchTrip(forms.Form):
"""
Pay attention that id fields are meant to be hidden, since we suppose they come from
an autocomplete AJAX request via an another CharField.
"""
origin_id = forms.IntegerField()
destination_id = forms.IntegerField()
datetime = forms.DateTimeField()
class PoolingUserForm(forms.ModelForm):
class Meta:
model = PoolingUser
# Exclude the one-to-one relation with User
fields = ['birth_date', 'driving_license', 'cellphone_number']
class UserForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
fields = ('email', 'first_name', 'last_name')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for field_name in self.Meta.fields:
self[field_name].field.required = True
self['password1'].field.validators = [MinLengthValidator(6)]
| Add Trip and Step ModelForms
| from django.contrib.auth.forms import AuthenticationForm
from django import forms
from django.core.validators import MinLengthValidator
from .models import PoolingUser, Trip, Step
from users.forms import UserCreationForm
class LoginForm(AuthenticationForm):
username = forms.CharField(widget=forms.EmailInput(attrs={'placeholder': 'Email',
'class': 'form-control',
}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'placeholder': 'Password',
'class': 'form-control',
}))
class SearchTrip(forms.Form):
"""
Pay attention that id fields are meant to be hidden, since we suppose they come from
an autocomplete AJAX request via an another CharField.
"""
origin_id = forms.IntegerField()
destination_id = forms.IntegerField()
datetime = forms.DateTimeField()
class PoolingUserForm(forms.ModelForm):
class Meta:
model = PoolingUser
# Exclude the one-to-one relation with User
fields = ['birth_date', 'driving_license', 'cellphone_number']
class TripForm(forms.ModelForm):
class Meta:
model = Trip
fields = ['date_origin', 'max_num_passengers']
class StepForm(forms.ModelForm):
class Meta:
model = Step
fields = ['origin', 'destination', 'hour_origin', 'hour_destination', 'max_price']
class UserForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
fields = ('email', 'first_name', 'last_name')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for field_name in self.Meta.fields:
self[field_name].field.required = True
self['password1'].field.validators = [MinLengthValidator(6)]
|
"""A module containing tests for the library implementation of accessing utilities."""
from lxml import etree
import iati.core.resources
import iati.core.utilities
class TestUtilities(object):
"""A container for tests relating to utilities"""
def test_convert_to_schema(self):
"""Check that an etree can be converted to a schema."""
path = iati.core.resources.path_schema('iati-activities-schema')
tree = iati.core.resources.load_as_tree(path)
if not tree:
assert False
schema = iati.core.utilities.convert_to_schema(tree)
assert isinstance(schema, etree.XMLSchema)
def test_log(self):
pass
def test_log_error(self):
pass
| Add more logging test stubs
| """A module containing tests for the library implementation of accessing utilities."""
from lxml import etree
import iati.core.resources
import iati.core.utilities
class TestUtilities(object):
"""A container for tests relating to utilities"""
def test_convert_to_schema(self):
"""Check that an etree can be converted to a schema."""
path = iati.core.resources.path_schema('iati-activities-schema')
tree = iati.core.resources.load_as_tree(path)
if not tree:
assert False
schema = iati.core.utilities.convert_to_schema(tree)
assert isinstance(schema, etree.XMLSchema)
def test_log(self):
pass
def test_log_error(self):
pass
def test_log_exception(self):
pass
def test_log_warning(self):
pass
|
# coding=utf-8
import logging
from django.core.management.base import BaseCommand
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "This runs the MCCB1sSLA report"
def handle(self, *args, **options):
self.create_report()
def create_report():
print("stuff goes here")
# '{"action": "Export", "csrfmiddlewaretoken": "PQk4Pt55CL0NBapx9hSqZTJkSn6tL6TL", "date_from": "08/05/2021", "date_to": "10/05/2021"}'
# report_data = json_stuff_goes_here
# ExportTask().delay(user_person.pk, filename_of_report, mi_cb1_extract_agilisys, report_data)
| Send weekly report to aws
| # coding=utf-8
import logging
from django.core.management.base import BaseCommand
from reports.tasks import ExportTask
from core.models import get_web_user
from django.views.decorators.csrf import csrf_exempt
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "This runs the MCCB1sSLA report"
def handle(self, *args, **options):
self.create_report()
@csrf_exempt
def create_report(self):
report_data = '{"action": "Export", "csrfmiddlewaretoken": "PQk4Pt55CL0NBapx9hSqZTJkSn6tL6TL", "date_from": "2021-05-08", "date_to": "2021-05-10"}'
# report_data = json_stuff_goes_here
web_user = get_web_user()
filename_of_report = "WEEKLY-REPORT-TEST.csv"
ExportTask().delay(web_user.pk, filename_of_report, "MICB1Extract", report_data)
|
# -*- coding: utf-8 -*-
import pkg_resources
import skyfield
from skyfield.api import load
from skyfield.functions import load_bundled_npy
def main():
print('Skyfield version: {0}'.format(skyfield.__version__))
print('jplephem version: {0}'.format(version_of('jplephem')))
print('sgp4 version: {0}'.format(version_of('sgp4')))
ts = load.timescale()
fmt = '%Y-%m-%d'
final_leap = (ts._leap_tai[-1] - 1) / (24 * 60 * 60)
print('Built-in leap seconds table ends with leap second at: {0}'
.format(ts.tai_jd(final_leap).utc_strftime()))
arrays = load_bundled_npy('iers.npz')
tt, delta_t = arrays['delta_t_recent']
start = ts.tt_jd(tt[0])
end = ts.tt_jd(tt[-1])
print('Built-in ∆T table from finals2000A.all covers: {0} to {1}'
.format(start.utc_strftime(fmt), end.utc_strftime(fmt)))
def version_of(distribution):
try:
d = pkg_resources.get_distribution(distribution)
except pkg_resources.DistributionNotFound:
return 'Unknown'
else:
return d.version
main()
| Fix “python -m skyfield” following ∆T array rename
| # -*- coding: utf-8 -*-
import pkg_resources
import numpy as np
import skyfield
from skyfield.api import load
from skyfield.functions import load_bundled_npy
def main():
print('Skyfield version: {0}'.format(skyfield.__version__))
print('jplephem version: {0}'.format(version_of('jplephem')))
print('sgp4 version: {0}'.format(version_of('sgp4')))
ts = load.timescale()
fmt = '%Y-%m-%d'
final_leap = (ts._leap_tai[-1] - 1) / (24 * 60 * 60)
print('Built-in leap seconds table ends with leap second at: {0}'
.format(ts.tai_jd(final_leap).utc_strftime()))
arrays = load_bundled_npy('iers.npz')
daily_tt = arrays['tt_jd_minus_arange']
daily_tt += np.arange(len(daily_tt))
start = ts.tt_jd(daily_tt[0])
end = ts.tt_jd(daily_tt[-1])
print('Built-in ∆T table from finals2000A.all covers: {0} to {1}'
.format(start.utc_strftime(fmt), end.utc_strftime(fmt)))
def version_of(distribution):
try:
d = pkg_resources.get_distribution(distribution)
except pkg_resources.DistributionNotFound:
return 'Unknown'
else:
return d.version
main()
|
'''
Implementation of Stack data structure
'''
class Stack:
def __init__(self):
'''
Initialize stack
'''
self.items = []
def is_empty(self):
'''
Return True if stack if empty else False
'''
return self.item == []
def push(self, item):
'''
Push item to stack
'''
self.items.append(item)
def pop(self):
'''
Pop item from stack
'''
return self.items.pop()
def peek(self):
'''
Return value of item on top of stack
'''
return self.items[-1]
def size(self):
'''
Return number of items in stack
'''
return len(self.items)
| Convert old-style class to new-style class
| '''
Implementation of Stack data structure
'''
class Stack(object):
def __init__(self):
'''
Initialize stack
'''
self.items = []
def is_empty(self):
'''
Return True if stack if empty else False
'''
return self.item == []
def push(self, item):
'''
Push item to stack
'''
self.items.append(item)
def pop(self):
'''
Pop item from stack
'''
return self.items.pop()
def peek(self):
'''
Return value of item on top of stack
'''
return self.items[-1]
def size(self):
'''
Return number of items in stack
'''
return len(self.items)
|
# encoding: utf-8
import mimetypes
import re
from django.core.urlresolvers import reverse
def order_name(name):
"""order_name -- Limit a text to 20 chars length, if necessary strips the
middle of the text and substitute it for an ellipsis.
name -- text to be limited.
"""
name = re.sub(r'^.*/', '', name)
if len(name) <= 20:
return name
return name[:10] + "..." + name[-7:]
def serialize(instance, file_attr='file'):
"""serialize -- Serialize a Picture instance into a dict.
instance -- Picture instance
file_attr -- attribute name that contains the FileField or ImageField
"""
obj = getattr(instance, file_attr)
return {
'resource_id': instance.pk,
'url': obj.url,
'file_type': obj.file_type,
'name': order_name(obj.name),
'type': mimetypes.guess_type(obj.path)[0] or 'image/png',
'thumbnailUrl': obj.url,
'size': obj.size,
'deleteUrl': reverse('upload-delete', args=[instance.pk]),
'deleteType': 'DELETE',
}
| FIX add links to reports generated v2
| # encoding: utf-8
import mimetypes
import re
from django.core.urlresolvers import reverse
def order_name(name):
"""order_name -- Limit a text to 20 chars length, if necessary strips the
middle of the text and substitute it for an ellipsis.
name -- text to be limited.
"""
name = re.sub(r'^.*/', '', name)
if len(name) <= 20:
return name
return name[:10] + "..." + name[-7:]
def serialize(instance, file_attr='file'):
"""serialize -- Serialize a Picture instance into a dict.
instance -- Picture instance
file_attr -- attribute name that contains the FileField or ImageField
"""
obj = getattr(instance, file_attr)
return {
'resource_id': instance.pk,
'url': obj.url,
'file_type': instance.file_type,
'name': order_name(obj.name),
'type': mimetypes.guess_type(obj.path)[0] or 'image/png',
'thumbnailUrl': obj.url,
'size': obj.size,
'deleteUrl': reverse('upload-delete', args=[instance.pk]),
'deleteType': 'DELETE',
}
|
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class SendDialogueAction(ConversationAction):
action_name = 'send_dialogue'
action_display_name = 'Send Dialogue'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting(generic_sends=True):
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'send_survey', batch_id=self._conv.get_latest_batch_key(),
msg_options={}, is_client_initiated=False,
delivery_class=self._conv.delivery_class)
class DownloadUserDataAction(ConversationAction):
action_name = 'download_user_data'
action_display_name = 'Download User Data'
redirect_to = 'user_data'
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'dialogue'
actions = (
SendDialogueAction,
DownloadUserDataAction,
)
| Fix name of command sent by send_dialogue action.
| from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class SendDialogueAction(ConversationAction):
action_name = 'send_dialogue'
action_display_name = 'Send Dialogue'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting(generic_sends=True):
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'send_dialogue', batch_id=self._conv.get_latest_batch_key(),
msg_options={}, is_client_initiated=False,
delivery_class=self._conv.delivery_class)
class DownloadUserDataAction(ConversationAction):
action_name = 'download_user_data'
action_display_name = 'Download User Data'
redirect_to = 'user_data'
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'dialogue'
actions = (
SendDialogueAction,
DownloadUserDataAction,
)
|
import time
from itest_utils import wait_for_marathon
from itest_utils import print_container_logs
def before_all(context):
wait_for_marathon()
def after_scenario(context, scenario):
"""If a marathon client object exists in our context, delete any apps in Marathon and wait until they die."""
if scenario.status != 'passed':
print "Zookeeper container logs:"
print_container_logs('zookeeper')
print "Marathon container logs:"
print_container_logs('marathon')
if context.client:
while True:
apps = context.client.list_apps()
if not apps:
break
for app in apps:
context.client.delete_app(app.id, force=True)
time.sleep(0.5)
while context.client.list_deployments():
time.sleep(0.5)
| Move log print to after_step
| import time
from itest_utils import wait_for_marathon
from itest_utils import print_container_logs
def before_all(context):
wait_for_marathon()
def after_scenario(context, scenario):
"""If a marathon client object exists in our context, delete any apps in Marathon and wait until they die."""
if context.client:
while True:
apps = context.client.list_apps()
if not apps:
break
for app in apps:
context.client.delete_app(app.id, force=True)
time.sleep(0.5)
while context.client.list_deployments():
time.sleep(0.5)
def after_step(context, step):
if step.status == "failed":
print "Zookeeper container logs:"
print_container_logs('zookeeper')
print "Marathon container logs:"
print_container_logs('marathon')
|
import time
from mycroft.messagebus.message import Message
class AudioService():
def __init__(self, emitter):
self.emitter = emitter
self.emitter.on('MycroftAudioServiceTrackInfoReply', self._track_info)
self.info = None
def _track_info(self, message=None):
self.info = message.data
def play(self, tracks=[], utterance=''):
self.emitter.emit(Message('MycroftAudioServicePlay',
data={'tracks': tracks,
'utterance': utterance}))
def track_info(self):
self.info = None
self.emitter.emit(Message('MycroftAudioServiceTrackInfo'))
while self.info is None:
time.sleep(0.1)
return self.info
| Add check for valid type of tracks
| import time
from mycroft.messagebus.message import Message
class AudioService():
def __init__(self, emitter):
self.emitter = emitter
self.emitter.on('MycroftAudioServiceTrackInfoReply', self._track_info)
self.info = None
def _track_info(self, message=None):
self.info = message.data
def play(self, tracks=[], utterance=''):
if isinstance(tracks, basestring):
tracks = [tracks]
elif not isinstance(tracks, list):
raise ValueError
self.emitter.emit(Message('MycroftAudioServicePlay',
data={'tracks': tracks,
'utterance': utterance}))
def track_info(self):
self.info = None
self.emitter.emit(Message('MycroftAudioServiceTrackInfo'))
while self.info is None:
time.sleep(0.1)
return self.info
|
import sys
fix_url = sys.argv[1]
for line in sys.stdin:
e = line.strip().split(" ")
if e[0].startswith("_:"):
e[0] = "<%s>" % e[0].replace("_:",fix_url)
if e[2].startswith("_:"):
e[2] = "<%s>" % e[2].replace("_:",fix_url)
print(" ".join(e))
| Fix datathub DCT uris to DC
| import sys
fix_url = sys.argv[1]
dct = "<http://purl.org/dc/terms/"
dcelems = ["contributor", "coverage>", "creator>", "date>", "description>",
"format>", "identifier>", "language>", "publisher>", "relation>",
"rights>", "source>", "subject>", "title>", "type>"]
for line in sys.stdin:
e = line.strip().split(" ")
if e[0].startswith("_:"):
e[0] = "<%s>" % e[0].replace("_:",fix_url)
if e[1].startswith(dct) and e[1][len(dct):] in dcelems:
e[1] = "<http://purl.org/dc/elements/1.1/" + e[1][len(dct):]
if e[2].startswith("_:"):
e[2] = "<%s>" % e[2].replace("_:",fix_url)
print(" ".join(e))
|
from __future__ import unicode_literals
from django.core.mail.backends.smtp import EmailBackend as SMTPBackend
from bandit.backends.base import HijackBackendMixin, LogOnlyBackendMixin
class HijackSMTPBackend(HijackBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages drops them to a single email
address.
"""
pass
class LogOnlySMTPBackend(LogOnlyBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages and logs them, allowing
only messages destined for ADMINS to be sent via SMTP.
"""
pass
| Update LogOnlySMTPBackend docstring.
Not only admin emails are allowed, all approved emails are still sent.
| from __future__ import unicode_literals
from django.core.mail.backends.smtp import EmailBackend as SMTPBackend
from bandit.backends.base import HijackBackendMixin, LogOnlyBackendMixin
class HijackSMTPBackend(HijackBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages drops them to a single email
address.
"""
pass
class LogOnlySMTPBackend(LogOnlyBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages and logs them, allowing
only messages destined for ADMINS, BANDIT_EMAIL, SERVER_EMAIL, or
BANDIT_WHITELIST to be sent via SMTP.
"""
pass
|
# -*- coding: utf-8 -*-
DEBUG = True
SECRET_KEY = '##CHANGEME##'
SQLALCHEMY_DATABASE_URI = 'postgresql://massa:secret@localhost/massa'
SQLALCHEMY_ECHO = False
| Disable debug mode in default configuration. | # -*- coding: utf-8 -*-
DEBUG = False
SECRET_KEY = '##CHANGEME##'
SQLALCHEMY_DATABASE_URI = 'postgresql://massa:secret@localhost/massa'
SQLALCHEMY_ECHO = False
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from monascaclient.openstack.common.apiclient import exceptions as monascacli
from openstack_dashboard.test.test_data import exceptions
def data(TEST):
TEST.exceptions = exceptions.data
monitoring_exception = monascacli.ClientException
TEST.exceptions.monitoring = exceptions.create_stubbed_exception(
monitoring_exception)
| Adjust tests for python-monascaclient >= 1.3.0
the exceptions module was moved out of the openstack.common namespace,
so try to import the new location first and fall back to the old
one if it doesn't exist.
Change-Id: I3305775baaab15dca8d5e7e5cfc0932f94d4d153
| #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(dmllr): Remove me when we require monascaclient >= 1.3.0
try:
from monascaclient.apiclient import exceptions as monascacli
except ImportError:
from monascaclient.openstack.common.apiclient import exceptions as monascacli
from openstack_dashboard.test.test_data import exceptions
def data(TEST):
TEST.exceptions = exceptions.data
monitoring_exception = monascacli.ClientException
TEST.exceptions.monitoring = exceptions.create_stubbed_exception(
monitoring_exception)
|
from django.core.exceptions import PermissionDenied
from ...order import models
from ..utils import get_node
from .types import Order
def resolve_orders(info):
user = info.context.user
if user.is_anonymous:
raise PermissionDenied('You have no permission to see this')
if user.get_all_permissions() & {'order.view_order', 'order.edit_order'}:
return models.Order.objects.all().distinct().prefetch_related('lines')
return user.orders.confirmed().distinct().prefetch_related('lines')
def resolve_order(info, id):
"""Return order only for user assigned to it or proper staff user."""
order = get_node(info, id, only_type=Order)
user = info.context.user
if (order.user == user or user.get_all_permissions() & {
'order.view_order', 'order.edit_order'}):
return order
| Add more info to the permission denied exception
| from django.core.exceptions import PermissionDenied
from ...order import models
from ..utils import get_node
from .types import Order
def resolve_orders(info):
user = info.context.user
if user.is_anonymous:
raise PermissionDenied('You have no permission to see this order.')
if user.get_all_permissions() & {'order.view_order', 'order.edit_order'}:
return models.Order.objects.all().distinct().prefetch_related('lines')
return user.orders.confirmed().distinct().prefetch_related('lines')
def resolve_order(info, id):
"""Return order only for user assigned to it or proper staff user."""
order = get_node(info, id, only_type=Order)
user = info.context.user
if (order.user == user or user.get_all_permissions() & {
'order.view_order', 'order.edit_order'}):
return order
|
from django.test import TestCase
from django.contrib.auth.models import User
from mks.models import Member
from .models import Suggestion
class SuggestionsTests(TestCase):
def setUp(self):
self.member = Member.objects.create(name='mk_1')
self.regular_user = User.objects.create_user('reg_user')
def test_simple_text_suggestion(self):
MK_SITE = 'http://mk1.example.com'
suggestion = Suggestion.objects.create_suggestion(
suggested_by=self.regular_user,
content_object=self.member,
suggestion_action=Suggestion.UPDATE,
suggested_field='website',
suggested_text=MK_SITE
)
self.assertIsNone(self.member.website)
suggestion.auto_apply()
mk = Member.objects.get(pk=self.member.pk)
self.assertEqual(mk.website, MK_SITE)
| Undo member changes in test
| from django.test import TestCase
from django.contrib.auth.models import User
from mks.models import Member
from .models import Suggestion
class SuggestionsTests(TestCase):
def setUp(self):
self.member = Member.objects.create(name='mk_1')
self.regular_user = User.objects.create_user('reg_user')
def test_simple_text_suggestion(self):
MK_SITE = 'http://mk1.example.com'
suggestion = Suggestion.objects.create_suggestion(
suggested_by=self.regular_user,
content_object=self.member,
suggestion_action=Suggestion.UPDATE,
suggested_field='website',
suggested_text=MK_SITE
)
self.assertIsNone(self.member.website)
suggestion.auto_apply()
mk = Member.objects.get(pk=self.member.pk)
self.assertEqual(mk.website, MK_SITE)
# cleanup
mk.website = None
mk.save()
self.member = mk
|
# Given a singly linked list of integers l and an integer k, remove all elements from list l that have a value equal to k
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
class LinkedList(object):
def __init__(self, head=None):
self.head = head
def add(self, new_node):
current_node = self.head
if self.head:
while current_node.next:
current_node = current_node.next
current_node.next = new_node
else:
self.head = new_node
def __repr__(self):
current_node = self.head
output_arr = []
while current_node:
output_arr.append(str(current_node.data))
current_node = current_node.next
return "->".join(output_arr)
| Remove linked list class and implement algorithm just using single method
| # Given a singly linked list of integers l and an integer k, remove all elements from list l that have a value equal to k
class Node(object):
def __init__(self, value):
self.value = value
self.next = None
def remove_k_from_list(l, k):
fake_head = Node(None)
fake_head.next = l
current_node = fake_head
while current_node:
while current_node.next and current_node.next.value == k:
current_node.next = current_node.next.next
current_node = current_node.next
return fake_head.next
|
from enum import Enum, EnumMeta
from functools import total_ordering
class _MultiValueMeta(EnumMeta):
def __init__(self, cls, bases, classdict):
# make sure we only have tuple values, not single values
for member in self.__members__.values():
if not isinstance(member.value, tuple):
raise TypeError('{} = {!r}, should be tuple!'
.format(member.name, member.value))
def __call__(cls, value):
"""Return the appropriate instance with any of the values listed."""
for member in cls:
if value in member.value:
return member
# raise ValueError otherwise
return super().__call__(value)
class MultiValueEnum(Enum, metaclass=_MultiMeta):
"""Enum subclass where members are declared as tuples."""
@total_ordering
class OrderableMixin:
"""Mixin for comparable Enums. The order is the definition order
from smaller to bigger.
"""
def __eq__(self, other):
if self.__class__ is other.__class__:
return self.value == other.value
return NotImplemented
def __lt__(self, other):
if self.__class__ is other.__class__:
names = self.__class__._member_names_
return names.index(self.name) < names.index(other.name)
return NotImplemented
| Raise ValueError explicitly from __call__ rather than with super()
because super() would make another lookup, but we already know the value isn't there.
| from enum import Enum, EnumMeta
from functools import total_ordering
class _MultiValueMeta(EnumMeta):
def __init__(self, cls, bases, classdict):
# make sure we only have tuple values, not single values
for member in self.__members__.values():
if not isinstance(member.value, tuple):
raise TypeError('{} = {!r}, should be tuple!'
.format(member.name, member.value))
def __call__(cls, value):
"""Return the appropriate instance with any of the values listed."""
for member in cls:
if value in member.value:
return member
else:
raise ValueError("%s is not a valid %s" % (value, cls.__name__))
class MultiValueEnum(Enum, metaclass=_MultiMeta):
"""Enum subclass where members are declared as tuples."""
@total_ordering
class OrderableMixin:
"""Mixin for comparable Enums. The order is the definition order
from smaller to bigger.
"""
def __eq__(self, other):
if self.__class__ is other.__class__:
return self.value == other.value
return NotImplemented
def __lt__(self, other):
if self.__class__ is other.__class__:
names = self.__class__._member_names_
return names.index(self.name) < names.index(other.name)
return NotImplemented
|
# This module contains built-in introspector plugins for various common
# Django apps.
# These imports trigger the lower-down files
import south.introspection_plugins.geodjango
import south.introspection_plugins.django_tagging
import south.introspection_plugins.django_taggit
import south.introspection_plugins.django_objectpermissions
| Add import of django-annoying patch
| # This module contains built-in introspector plugins for various common
# Django apps.
# These imports trigger the lower-down files
import south.introspection_plugins.geodjango
import south.introspection_plugins.django_tagging
import south.introspection_plugins.django_taggit
import south.introspection_plugins.django_objectpermissions
import south.introspection_plugins.annoying_autoonetoone
|
from buildbot.status.web.hooks.github import GitHubEventHandler
from twisted.python import log
import abconfig
class AutobuilderGithubEventHandler(GitHubEventHandler):
def handle_push(self, payload):
# This field is unused:
user = None
# user = payload['pusher']['name']
repo = payload['repository']['name']
repo_url = payload['repository']['url']
# NOTE: what would be a reasonable value for project?
# project = request.args.get('project', [''])[0]
project = abconfig.get_project_for_url(repo_url,
default_if_not_found=payload['repository']['full_name'])
changes = self._process_change(payload, user, repo, repo_url, project)
log.msg("Received %d changes from github" % len(changes))
return changes, 'git'
| Add a codebase generator to the Github web hoook handler,
to map the URL to the repo name for use as the codebase.
| from buildbot.status.web.hooks.github import GitHubEventHandler
from twisted.python import log
import abconfig
def codebasemap(payload):
return abconfig.get_project_for_url(payload['repository']['url'])
class AutobuilderGithubEventHandler(GitHubEventHandler):
def __init__(self, secret, strict codebase=None):
if codebase is None:
codebase = codebasemap
GitHubEventHandler.__init__(self, secret, strict, codebase)
def handle_push(self, payload):
# This field is unused:
user = None
# user = payload['pusher']['name']
repo = payload['repository']['name']
repo_url = payload['repository']['url']
# NOTE: what would be a reasonable value for project?
# project = request.args.get('project', [''])[0]
project = abconfig.get_project_for_url(repo_url,
default_if_not_found=payload['repository']['full_name'])
changes = self._process_change(payload, user, repo, repo_url, project)
log.msg("Received %d changes from github" % len(changes))
return changes, 'git'
|
import sys;s='%s: lines %d, RaSa: %d'
def u(z):
r=I=0;b=1
for m in open(z):
r+=1
for k in m:
if '{'==k:b+=1
if ';'==k:I+=b
if '}'==k:b-=1
return(r,I)
c=D=0
for z in sys.argv[1:]:
r,I=u(z);c+=r;D+=I;print s%(z,r,I)
print s%('total',c,D) | Revert to try cleanest solution
| #!/usr/bin/env python
import argparse
def calculate_file_rasa(file_path):
row_count = 0
multiplier = 1
rasa = 0
for line in open(file_path):
row_count += 1
for char in line:
if char == '{':
multiplier += 1
if char == ';':
rasa += multiplier
if char == '}':
multiplier -= 1
return (row_count, rasa)
def main(args):
total_rows = 0
total_rasa = 0
for file_path in args.argument:
row_count, rasa = calculate_file_rasa(file_path)
total_rows += row_count
total_rasa += rasa
print '%s: lines %d, RaSa: %d' % (file_path, row_count, rasa)
print 'total: lines %d, RaSa: %d' % (total_rows, total_rasa)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('argument', nargs='*')
main(parser.parse_args())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pkg_resources
pkg_resources.declare_namespace(__name__)
VERSION = (0, 2, 1)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Open Source Content Management Platform - CMS for the "
u"magazines, newspappers websites and portals with "
u"high-traffic, using the Django Framework."
__author__ = u"Thiago Avelino"
__credits__ = ['Bruno Rocha']
__email__ = u"[email protected]"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Project"
| Set new developer version `0.2.2`
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pkg_resources
pkg_resources.declare_namespace(__name__)
VERSION = (0, 2, 2)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Open Source Content Management Platform - CMS for the "
u"magazines, newspappers websites and portals with "
u"high-traffic, using the Django Framework."
__author__ = u"Thiago Avelino"
__credits__ = ['Bruno Rocha']
__email__ = u"[email protected]"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Project"
|
#!/usr/bin/env python
# Andre Anjos <[email protected]>
# Thu 23 Jun 20:22:28 2011 CEST
# vim: set fileencoding=utf-8 :
"""The db package contains simplified APIs to access data for various databases
that can be used in Biometry, Machine Learning or Pattern Classification."""
import pkg_resources
__version__ = pkg_resources.require(__name__)[0].version
def get_config():
"""Returns a string containing the configuration information.
"""
import bob.extension
return bob.extension.get_config(__name__)
from . import utils, driver
from .file import File
from .database import Database, SQLiteDatabase
# gets sphinx autodoc done right - don't remove it
__all__ = [_ for _ in dir() if not _.startswith('_')]
| Add a low-level database API
| #!/usr/bin/env python
# Andre Anjos <[email protected]>
# Thu 23 Jun 20:22:28 2011 CEST
# vim: set fileencoding=utf-8 :
"""The db package contains simplified APIs to access data for various databases
that can be used in Biometry, Machine Learning or Pattern Classification."""
import pkg_resources
from . import utils, driver
from .file import File
from .database import Database, SQLiteDatabase
__version__ = pkg_resources.require(__name__)[0].version
def get_config():
"""Returns a string containing the configuration information.
"""
import bob.extension
return bob.extension.get_config(__name__)
# gets sphinx autodoc done right - don't remove it
__all__ = [_ for _ in dir() if not _.startswith('_')]
|
# Adapted from http://stackoverflow.com/questions/110803/dirty-fields-in-django
from django.db.models.signals import post_save
class DirtyFieldsMixin(object):
def __init__(self, *args, **kwargs):
super(DirtyFieldsMixin, self).__init__(*args, **kwargs)
post_save.connect(reset_state, sender=self.__class__,
dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__)
reset_state(sender=self.__class__, instance=self)
def _as_dict(self):
return dict([(f.name, getattr(self, f.name)) for f in self._meta.local_fields if not f.rel])
def get_dirty_fields(self):
new_state = self._as_dict()
return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]])
def is_dirty(self):
# in order to be dirty we need to have been saved at least once, so we
# check for a primary key and we need our dirty fields to not be empty
if not self.pk:
return True
return {} != self.get_dirty_fields()
def reset_state(sender, instance, **kwargs):
instance._original_state = instance._as_dict()
| Use field.to_python to do django type conversions on the field before checking if dirty.
This solves issues where you might have a decimal field that you write a string to, eg:
>>> m = MyModel.objects.get(id=1)
>>> m.my_decimal_field
Decimal('1.00')
>>> m.my_decimal_field = u'1.00' # from a form or something
>>> m.is_dirty() # currently evaluates to True, should evaluate to False
False
This pull request could probably use some unit testing, but it should be safe as the base class for django fields defines to_python as:
def to_python(self, value):
return value
So, any field type that does not have an explicit to_python method will behave as before this change.
| # Adapted from http://stackoverflow.com/questions/110803/dirty-fields-in-django
from django.db.models.signals import post_save
class DirtyFieldsMixin(object):
def __init__(self, *args, **kwargs):
super(DirtyFieldsMixin, self).__init__(*args, **kwargs)
post_save.connect(reset_state, sender=self.__class__,
dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__)
reset_state(sender=self.__class__, instance=self)
def _as_dict(self):
return dict([(f.name, f.to_python(getattr(self, f.name))) for f in self._meta.local_fields if not f.rel])
def get_dirty_fields(self):
new_state = self._as_dict()
return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]])
def is_dirty(self):
# in order to be dirty we need to have been saved at least once, so we
# check for a primary key and we need our dirty fields to not be empty
if not self.pk:
return True
return {} != self.get_dirty_fields()
def reset_state(sender, instance, **kwargs):
instance._original_state = instance._as_dict()
|
# coding: utf-8
from django.core.management.base import BaseCommand
from ...models import Poll, Option
class Command(BaseCommand):
def handle(self, *args, **kwargs):
Poll.objects.filter(id=1).delete()
Option.objects.filter(id__in=[1, 2, 3, 4]).delete()
question = Poll.objects.create(id=1, title="Quem deve ser o vencedor")
option1 = Option.objects.create(id=1, name="Mario", pool=question, votes=0)
option2 = Option.objects.create(id=2, name="Luigi", pool=question, votes=0)
option3 = Option.objects.create(id=3, name="Yoshi", pool=question, votes=0)
option4 = Option.objects.create(id=4, name="Princesa", pool=question, votes=0)
question.save()
option1.save()
option2.save()
option3.save()
option4.save()
print "Pesquisa e Opções cadastradas com sucesso"
| Remove chaves do redis referentes a votaçãoi
| # coding: utf-8
from django.core.management.base import BaseCommand
from ...models import Poll, Option
import redis
cache = redis.StrictRedis(host='127.0.0.1', port=6379, db=0)
class Command(BaseCommand):
def handle(self, *args, **kwargs):
options = [1, 2, 3, 4]
Poll.objects.filter(id=1).delete()
Option.objects.filter(id__in=options).delete()
[cache.delete('votacao:option:{}'.format(opt)) for opt in options]
question = Poll.objects.create(id=1, title="Quem deve ser o vencedor")
option1 = Option.objects.create(id=1, name="Mario", pool=question, votes=0)
option2 = Option.objects.create(id=2, name="Luigi", pool=question, votes=0)
option3 = Option.objects.create(id=3, name="Yoshi", pool=question, votes=0)
option4 = Option.objects.create(id=4, name="Princesa", pool=question, votes=0)
question.save()
option1.save()
option2.save()
option3.save()
option4.save()
print "Pesquisa e Opções cadastradas com sucesso"
|
from django.conf.urls import url, include
from rest_framework import routers
from . import views
from .views import PlayerViewSet, AdventureViewSet, RoomViewSet, ArtifactViewSet, EffectViewSet, MonsterViewSet
router = routers.DefaultRouter(trailing_slash=False)
router.register(r'players', PlayerViewSet)
router.register(r'adventures', AdventureViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/rooms$', RoomViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/artifacts$', ArtifactViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/effects$', EffectViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/monsters$', MonsterViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^$', views.index, name='index'),
url(r'^adventure/(?P<adventure_id>[\w-]+)/$', views.adventure, name='adventure'),
# this route is a catch-all for compatibility with the Angular routes. It must be last in the list.
# NOTE: non-existent URLs won't 404 with this in place. They will be sent into the Angular app.
url(r'^(?P<path>.*)/$', views.index),
]
| Update Django catch-all URL path to not catch URLs with a . in them.
This makes missing JS files 404 properly instead of returning the HTML 404 page which confuses the parser.
| from django.conf.urls import url, include
from rest_framework import routers
from . import views
from .views import PlayerViewSet, AdventureViewSet, RoomViewSet, ArtifactViewSet, EffectViewSet, MonsterViewSet
router = routers.DefaultRouter(trailing_slash=False)
router.register(r'players', PlayerViewSet)
router.register(r'adventures', AdventureViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/rooms$', RoomViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/artifacts$', ArtifactViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/effects$', EffectViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/monsters$', MonsterViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^$', views.index, name='index'),
url(r'^adventure/(?P<adventure_id>[\w-]+)/$', views.adventure, name='adventure'),
# this route is a catch-all for compatibility with the Angular routes. It must be last in the list.
# NOTE: this currently matches URLs without a . in them, so .js files and broken images will still 404.
# NOTE: non-existent URLs won't 404 with this in place. They will be sent into the Angular app.
url(r'^(?P<path>[^\.]*)/$', views.index),
]
|
from __future__ import unicode_literals
registry = {}
bound_registry = {}
def register(cls):
registry[cls.api_type] = cls
return cls
def bind(parent=None, resource=None):
def wrapper(endpointset):
if parent is not None:
endpointset.parent = parent
endpointset.url.parent = parent.url
if resource is not None:
BoundResource = type(
str("Bound{}".format(resource.__class__.__name__)),
(resource,),
{"endpointset": endpointset},
)
endpointset.resource_class = BoundResource
# override registry with bound resource (typically what we want)
registry[resource.api_type] = BoundResource
endpointset.relationships = getattr(endpointset, "relationships", {})
return endpointset
return wrapper
| Attach as_jsonapi to models for easy serialization
| from __future__ import unicode_literals
registry = {}
bound_registry = {}
def register(cls):
registry[cls.api_type] = cls
def as_jsonapi(self):
return cls(self).serialize()
cls.model.as_jsonapi = as_jsonapi
return cls
def bind(parent=None, resource=None):
def wrapper(endpointset):
if parent is not None:
endpointset.parent = parent
endpointset.url.parent = parent.url
if resource is not None:
BoundResource = type(
str("Bound{}".format(resource.__class__.__name__)),
(resource,),
{"endpointset": endpointset},
)
endpointset.resource_class = BoundResource
# override registry with bound resource (typically what we want)
registry[resource.api_type] = BoundResource
endpointset.relationships = getattr(endpointset, "relationships", {})
return endpointset
return wrapper
|
import os
import moderngl_window as mglw
class Example(mglw.WindowConfig):
gl_version = (3, 3)
title = "ModernGL Example"
window_size = (1280, 720)
aspect_ratio = 16 / 9
resizable = False
resource_dir = os.path.normpath(os.path.join(__file__, '../../data'))
def __init__(self, **kwargs):
super().__init__(**kwargs)
@classmethod
def run(cls):
mglw.run_window_config(cls)
| Make examples resizable by default
| import os
import moderngl_window as mglw
class Example(mglw.WindowConfig):
gl_version = (3, 3)
title = "ModernGL Example"
window_size = (1280, 720)
aspect_ratio = 16 / 9
resizable = True
resource_dir = os.path.normpath(os.path.join(__file__, '../../data'))
def __init__(self, **kwargs):
super().__init__(**kwargs)
@classmethod
def run(cls):
mglw.run_window_config(cls)
|
from models import Protein, Mutation
class SearchResult:
def __init__(self, protein, mutation, is_mutation_novel, type, **kwargs):
self.protein = protein
self.mutation = mutation
self.is_mutation_novel = is_mutation_novel
self.type = type
self.meta_user = None
self.__dict__.update(kwargs)
def __getstate__(self):
state = self.__dict__.copy()
state['protein_refseq'] = self.protein.refseq
del state['protein']
state['mutation_kwargs'] = {
'position': self.mutation.position,
'alt': self.mutation.alt
}
del state['mutation']
state['meta_user'].mutation = None
return state
def __setstate__(self, state):
state['protein'] = Protein.query.filter_by(
refseq=state['protein_refseq']
).one()
del state['protein_refseq']
state['mutation'] = Mutation.query.filter_by(
protein=state['protein'],
**state['mutation_kwargs']
).one()
del state['mutation_kwargs']
state['meta_user'].mutation = state['mutation']
state['mutation'].meta_user = state['meta_user']
self.__dict__.update(state)
| Fix result loading for novel mutations
| from models import Protein, Mutation
from database import get_or_create
class SearchResult:
def __init__(self, protein, mutation, is_mutation_novel, type, **kwargs):
self.protein = protein
self.mutation = mutation
self.is_mutation_novel = is_mutation_novel
self.type = type
self.meta_user = None
self.__dict__.update(kwargs)
def __getstate__(self):
state = self.__dict__.copy()
state['protein_refseq'] = self.protein.refseq
del state['protein']
state['mutation_kwargs'] = {
'position': self.mutation.position,
'alt': self.mutation.alt
}
del state['mutation']
state['meta_user'].mutation = None
return state
def __setstate__(self, state):
state['protein'] = Protein.query.filter_by(
refseq=state['protein_refseq']
).one()
del state['protein_refseq']
state['mutation'], created = get_or_create(
Mutation,
protein=state['protein'],
**state['mutation_kwargs']
)
del state['mutation_kwargs']
state['meta_user'].mutation = state['mutation']
state['mutation'].meta_user = state['meta_user']
self.__dict__.update(state)
|
import gi
import gnupg # Requires python3-gnupg
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
class MainWindow(Gtk.Window):
def __init__(self):
Gtk.Window.__init__(self, title="EZ GPG")
self.connect("delete-event", Gtk.main_quit)
self.set_border_width(30)
gpg_keys_list = Gtk.ListStore(str, str)
for key in self._get_gpg_keys():
gpg_keys_list.append([key['keyid'], "%s %s" % (key['keyid'], key['uids'][0])])
gpg_key_combo_box = Gtk.ComboBox.new_with_model_and_entry(gpg_keys_list)
gpg_key_combo_box.set_entry_text_column(1)
self.add(gpg_key_combo_box)
def _get_gpg_keys(self):
gpg = gnupg.GPG()
return gpg.list_keys()
class EzGpg(Gtk.Window):
def launch(self):
MainWindow().show_all()
Gtk.main()
| Split out gpg key list into its own class
This will make it easy to break out into a module when we need it. In
the process, window was also set to be in the center of the user's
screen.
| import gi
import gnupg # Requires python3-gnupg
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
class GpgKeyList(Gtk.ComboBox):
def __init__(self):
Gtk.ComboBox.__init__(self)
gpg_keys_list = Gtk.ListStore(str, str)
for key in self._get_gpg_keys():
key_id = key['keyid']
key_name = "%s %s" % (key['keyid'], key['uids'][0])
gpg_keys_list.append([key_id, key_name])
cell = Gtk.CellRendererText()
self.pack_start(cell, True)
self.add_attribute(cell, 'text', 1)
self.set_model(gpg_keys_list)
self.set_entry_text_column(1)
def _get_gpg_keys(self):
gpg = gnupg.GPG()
return gpg.list_keys()
class MainWindow(Gtk.Window):
def __init__(self):
Gtk.Window.__init__(self, title="EZ GPG")
self.connect("delete-event", Gtk.main_quit)
self.set_border_width(30)
self.set_position(Gtk.WindowPosition.CENTER)
gpg_key_combo = GpgKeyList()
self.add(gpg_key_combo)
class EzGpg(Gtk.Window):
def launch(self):
MainWindow().show_all()
Gtk.main()
|
import os.path
from flask import url_for
from npactflask import app
# TODO: I think this is more simply a template_global:
# http://flask.pocoo.org/docs/0.10/api/#flask.Flask.template_global
@app.context_processor
def vSTATIC():
def STATICV(filename):
if app.config['DEBUG']:
vnum = os.path.getmtime(os.path.join(app.static_folder, filename))
else:
vnum = app.config['VERSION']
return (url_for('static', filename=filename, vnum=vnum))
return dict(vSTATIC=STATICV)
| Disable vSTATIC version during DEBUG
when DEBUG is set we serve static files with max-age of zero which is
hopefully good enough. Adding to the querystring is messing up chrome
development workflow (breakpoints are associated with full uri,
including querystring).
|
from flask import url_for
from npactflask import app
@app.template_global()
def vSTATIC(filename):
if app.config['DEBUG']:
return url_for('static', filename=filename)
else:
return url_for('static',
filename=filename, vnum=app.config['VERSION'])
|
#!/usr/bin/env python
"""Test for parsing arff headers only."""
import os
from scipy.testing import *
from scipy.io.arff.arffread import read_header, MetaData
data_path = os.path.join(os.path.dirname(__file__), 'data')
test1 = os.path.join(data_path, 'test1.arff')
class HeaderTest(TestCase):
def test_trivial1(self):
"""Parsing trivial header with nothing."""
ofile = open(test1)
rel, attrs = read_header(ofile)
# Test relation
assert rel == 'test1'
# Test numerical attributes
assert len(attrs) == 5
for i in range(4):
assert attrs[i][0] == 'attr%d' % i
assert attrs[i][1] == 'REAL'
classes = attrs[4][1]
# Test nominal attribute
assert attrs[4][0] == 'class'
assert attrs[4][1] == '{class0, class1, class2, class3}'
if __name__ == "__main__":
nose.run(argv=['', __file__])
| Change name for arff read test.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@4095 d6536bca-fef9-0310-8506-e4c0a848fbcf
| #!/usr/bin/env python
"""Test for parsing arff headers only."""
import os
from scipy.testing import *
from scipy.io.arff.arffread import read_header, MetaData
data_path = os.path.join(os.path.dirname(__file__), 'data')
test1 = os.path.join(data_path, 'test1.arff')
class HeaderTest(TestCase):
def test_fullheader1(self):
"""Parsing trivial header with nothing."""
ofile = open(test1)
rel, attrs = read_header(ofile)
# Test relation
assert rel == 'test1'
# Test numerical attributes
assert len(attrs) == 5
for i in range(4):
assert attrs[i][0] == 'attr%d' % i
assert attrs[i][1] == 'REAL'
classes = attrs[4][1]
# Test nominal attribute
assert attrs[4][0] == 'class'
assert attrs[4][1] == '{class0, class1, class2, class3}'
if __name__ == "__main__":
nose.run(argv=['', __file__])
|
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.