commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos | ndiff | instruction | content | fuzzy_diff
---|---|---|---|---|---|---|---|---|---|---|---|---|---
50ead4fe13eec7ad9760f0f577212beb8e8a51be | pombola/info/views.py | pombola/info/views.py | from django.views.generic import DetailView
from models import InfoPage
class InfoPageView(DetailView):
"""Show the page, or 'index' if no slug"""
model = InfoPage
| from django.views.generic import DetailView
from models import InfoPage
class InfoPageView(DetailView):
"""Show the page for the given slug"""
model = InfoPage
queryset = InfoPage.objects.filter(kind=InfoPage.KIND_PAGE)
| Use a queryset to display only kind=page | Use a queryset to display only kind=page
| Python | agpl-3.0 | mysociety/pombola,mysociety/pombola,geoffkilpin/pombola,hzj123/56th,ken-muturi/pombola,mysociety/pombola,patricmutwiri/pombola,patricmutwiri/pombola,ken-muturi/pombola,ken-muturi/pombola,hzj123/56th,ken-muturi/pombola,ken-muturi/pombola,hzj123/56th,mysociety/pombola,hzj123/56th,geoffkilpin/pombola,geoffkilpin/pombola,hzj123/56th,ken-muturi/pombola,patricmutwiri/pombola,mysociety/pombola,geoffkilpin/pombola,patricmutwiri/pombola,patricmutwiri/pombola,mysociety/pombola,geoffkilpin/pombola,hzj123/56th,patricmutwiri/pombola,geoffkilpin/pombola | from django.views.generic import DetailView
from models import InfoPage
class InfoPageView(DetailView):
- """Show the page, or 'index' if no slug"""
+ """Show the page for the given slug"""
model = InfoPage
+ queryset = InfoPage.objects.filter(kind=InfoPage.KIND_PAGE)
| Use a queryset to display only kind=page | ## Code Before:
from django.views.generic import DetailView
from models import InfoPage
class InfoPageView(DetailView):
"""Show the page, or 'index' if no slug"""
model = InfoPage
## Instruction:
Use a queryset to display only kind=page
## Code After:
from django.views.generic import DetailView
from models import InfoPage
class InfoPageView(DetailView):
"""Show the page for the given slug"""
model = InfoPage
queryset = InfoPage.objects.filter(kind=InfoPage.KIND_PAGE)
| ...
class InfoPageView(DetailView):
"""Show the page for the given slug"""
model = InfoPage
queryset = InfoPage.objects.filter(kind=InfoPage.KIND_PAGE)
... |
6689858b2364a668b362a5f00d4c86e57141dc37 | numba/cuda/models.py | numba/cuda/models.py | from llvmlite import ir
from numba.core.datamodel.registry import register_default
from numba.core.extending import register_model, models
from numba.core import types
from numba.cuda.types import Dim3, GridGroup, CUDADispatcher
@register_model(Dim3)
class Dim3Model(models.StructModel):
def __init__(self, dmm, fe_type):
members = [
('x', types.int32),
('y', types.int32),
('z', types.int32)
]
super().__init__(dmm, fe_type, members)
@register_model(GridGroup)
class GridGroupModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
be_type = ir.IntType(64)
super().__init__(dmm, fe_type, be_type)
@register_default(types.Float)
class FloatModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
if fe_type == types.float32:
be_type = ir.FloatType()
elif fe_type == types.float16:
be_type = ir.IntType(16)
elif fe_type == types.float64:
be_type = ir.DoubleType()
else:
raise NotImplementedError(fe_type)
super(FloatModel, self).__init__(dmm, fe_type, be_type)
register_model(CUDADispatcher)(models.OpaqueModel)
| from llvmlite import ir
from numba.core.datamodel.registry import register_default
from numba.core.extending import register_model, models
from numba.core import types
from numba.cuda.types import Dim3, GridGroup, CUDADispatcher
@register_model(Dim3)
class Dim3Model(models.StructModel):
def __init__(self, dmm, fe_type):
members = [
('x', types.int32),
('y', types.int32),
('z', types.int32)
]
super().__init__(dmm, fe_type, members)
@register_model(GridGroup)
class GridGroupModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
be_type = ir.IntType(64)
super().__init__(dmm, fe_type, be_type)
@register_default(types.Float)
class FloatModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
if fe_type == types.float16:
be_type = ir.IntType(16)
elif fe_type == types.float32:
be_type = ir.FloatType()
elif fe_type == types.float64:
be_type = ir.DoubleType()
else:
raise NotImplementedError(fe_type)
super(FloatModel, self).__init__(dmm, fe_type, be_type)
register_model(CUDADispatcher)(models.OpaqueModel)
| Reorder FloatModel checks in ascending order | CUDA: Reorder FloatModel checks in ascending order
| Python | bsd-2-clause | cpcloud/numba,numba/numba,numba/numba,seibert/numba,cpcloud/numba,cpcloud/numba,seibert/numba,seibert/numba,cpcloud/numba,numba/numba,IntelLabs/numba,numba/numba,IntelLabs/numba,cpcloud/numba,seibert/numba,IntelLabs/numba,IntelLabs/numba,seibert/numba,IntelLabs/numba,numba/numba | from llvmlite import ir
from numba.core.datamodel.registry import register_default
from numba.core.extending import register_model, models
from numba.core import types
from numba.cuda.types import Dim3, GridGroup, CUDADispatcher
@register_model(Dim3)
class Dim3Model(models.StructModel):
def __init__(self, dmm, fe_type):
members = [
('x', types.int32),
('y', types.int32),
('z', types.int32)
]
super().__init__(dmm, fe_type, members)
@register_model(GridGroup)
class GridGroupModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
be_type = ir.IntType(64)
super().__init__(dmm, fe_type, be_type)
@register_default(types.Float)
class FloatModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
+ if fe_type == types.float16:
+ be_type = ir.IntType(16)
- if fe_type == types.float32:
+ elif fe_type == types.float32:
be_type = ir.FloatType()
- elif fe_type == types.float16:
- be_type = ir.IntType(16)
elif fe_type == types.float64:
be_type = ir.DoubleType()
else:
raise NotImplementedError(fe_type)
super(FloatModel, self).__init__(dmm, fe_type, be_type)
register_model(CUDADispatcher)(models.OpaqueModel)
| Reorder FloatModel checks in ascending order | ## Code Before:
from llvmlite import ir
from numba.core.datamodel.registry import register_default
from numba.core.extending import register_model, models
from numba.core import types
from numba.cuda.types import Dim3, GridGroup, CUDADispatcher
@register_model(Dim3)
class Dim3Model(models.StructModel):
def __init__(self, dmm, fe_type):
members = [
('x', types.int32),
('y', types.int32),
('z', types.int32)
]
super().__init__(dmm, fe_type, members)
@register_model(GridGroup)
class GridGroupModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
be_type = ir.IntType(64)
super().__init__(dmm, fe_type, be_type)
@register_default(types.Float)
class FloatModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
if fe_type == types.float32:
be_type = ir.FloatType()
elif fe_type == types.float16:
be_type = ir.IntType(16)
elif fe_type == types.float64:
be_type = ir.DoubleType()
else:
raise NotImplementedError(fe_type)
super(FloatModel, self).__init__(dmm, fe_type, be_type)
register_model(CUDADispatcher)(models.OpaqueModel)
## Instruction:
Reorder FloatModel checks in ascending order
## Code After:
from llvmlite import ir
from numba.core.datamodel.registry import register_default
from numba.core.extending import register_model, models
from numba.core import types
from numba.cuda.types import Dim3, GridGroup, CUDADispatcher
@register_model(Dim3)
class Dim3Model(models.StructModel):
def __init__(self, dmm, fe_type):
members = [
('x', types.int32),
('y', types.int32),
('z', types.int32)
]
super().__init__(dmm, fe_type, members)
@register_model(GridGroup)
class GridGroupModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
be_type = ir.IntType(64)
super().__init__(dmm, fe_type, be_type)
@register_default(types.Float)
class FloatModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
if fe_type == types.float16:
be_type = ir.IntType(16)
elif fe_type == types.float32:
be_type = ir.FloatType()
elif fe_type == types.float64:
be_type = ir.DoubleType()
else:
raise NotImplementedError(fe_type)
super(FloatModel, self).__init__(dmm, fe_type, be_type)
register_model(CUDADispatcher)(models.OpaqueModel)
| ...
def __init__(self, dmm, fe_type):
if fe_type == types.float16:
be_type = ir.IntType(16)
elif fe_type == types.float32:
be_type = ir.FloatType()
elif fe_type == types.float64:
... |
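A note on the reordered branches above: FloatModel maps Numba's front-end float types onto LLVM IR backing types, and float16 is deliberately carried as a 16-bit integer (a raw bit pattern) rather than an LLVM half type. A minimal sketch of that mapping in isolation, using llvmlite only; the dictionary and function names here are illustrative, not part of Numba:

from llvmlite import ir

# Front-end float width -> LLVM backing type, mirroring FloatModel's
# branches in ascending order; 16-bit floats travel as raw i16 bits.
_BE_TYPES = {16: ir.IntType(16), 32: ir.FloatType(), 64: ir.DoubleType()}

def be_type_for(width):
    try:
        return _BE_TYPES[width]
    except KeyError:
        raise NotImplementedError(width)

assert str(be_type_for(16)) == 'i16'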
2ebbe2f9f23621d10a70d0817d83da33b002299e | rest_surveys/urls.py | rest_surveys/urls.py | from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from rest_framework_bulk.routes import BulkRouter
from rest_surveys.views import (
SurveyViewSet,
SurveyResponseViewSet,
)
# API
# With trailing slash appended:
router = BulkRouter()
router.register(r'surveys', SurveyViewSet, base_name='survey')
router.register(r'survey-responses', SurveyResponseViewSet,
base_name='survey-response')
slashless_router = BulkRouter(trailing_slash=False)
slashless_router.registry = router.registry[:]
urlpatterns = [
url(r'^{api_path}'.format(api_path=settings.REST_SURVEYS['API_PATH']),
include(router.urls)),
url(r'^{api_path}'.format(api_path=settings.REST_SURVEYS['API_PATH']),
include(slashless_router.urls)),
]
| from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from rest_framework_bulk.routes import BulkRouter
from rest_surveys.views import (
SurveyViewSet,
SurveyResponseViewSet,
)
# API
# With trailing slash appended:
router = BulkRouter()
router.register(r'surveys', SurveyViewSet, base_name='survey')
router.register(r'survey-responses', SurveyResponseViewSet,
base_name='survey-response')
slashless_router = BulkRouter(trailing_slash=False)
slashless_router.registry = router.registry[:]
urlpatterns = [
url(r'^{api_path}'.format(
api_path=settings.REST_SURVEYS.get('API_PATH', 'api/')),
include(router.urls)),
url(r'^{api_path}'.format(
api_path=settings.REST_SURVEYS.get('API_PATH', 'api/')),
include(slashless_router.urls)),
]
| Set a default api path | Set a default api path
| Python | mit | danxshap/django-rest-surveys | from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from rest_framework_bulk.routes import BulkRouter
from rest_surveys.views import (
SurveyViewSet,
SurveyResponseViewSet,
)
# API
# With trailing slash appended:
router = BulkRouter()
router.register(r'surveys', SurveyViewSet, base_name='survey')
router.register(r'survey-responses', SurveyResponseViewSet,
base_name='survey-response')
slashless_router = BulkRouter(trailing_slash=False)
slashless_router.registry = router.registry[:]
urlpatterns = [
- url(r'^{api_path}'.format(api_path=settings.REST_SURVEYS['API_PATH']),
+ url(r'^{api_path}'.format(
+ api_path=settings.REST_SURVEYS.get('API_PATH', 'api/')),
include(router.urls)),
- url(r'^{api_path}'.format(api_path=settings.REST_SURVEYS['API_PATH']),
+ url(r'^{api_path}'.format(
+ api_path=settings.REST_SURVEYS.get('API_PATH', 'api/')),
include(slashless_router.urls)),
]
| Set a default api path | ## Code Before:
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from rest_framework_bulk.routes import BulkRouter
from rest_surveys.views import (
SurveyViewSet,
SurveyResponseViewSet,
)
# API
# With trailing slash appended:
router = BulkRouter()
router.register(r'surveys', SurveyViewSet, base_name='survey')
router.register(r'survey-responses', SurveyResponseViewSet,
base_name='survey-response')
slashless_router = BulkRouter(trailing_slash=False)
slashless_router.registry = router.registry[:]
urlpatterns = [
url(r'^{api_path}'.format(api_path=settings.REST_SURVEYS['API_PATH']),
include(router.urls)),
url(r'^{api_path}'.format(api_path=settings.REST_SURVEYS['API_PATH']),
include(slashless_router.urls)),
]
## Instruction:
Set a default api path
## Code After:
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from rest_framework_bulk.routes import BulkRouter
from rest_surveys.views import (
SurveyViewSet,
SurveyResponseViewSet,
)
# API
# With trailing slash appended:
router = BulkRouter()
router.register(r'surveys', SurveyViewSet, base_name='survey')
router.register(r'survey-responses', SurveyResponseViewSet,
base_name='survey-response')
slashless_router = BulkRouter(trailing_slash=False)
slashless_router.registry = router.registry[:]
urlpatterns = [
url(r'^{api_path}'.format(
api_path=settings.REST_SURVEYS.get('API_PATH', 'api/')),
include(router.urls)),
url(r'^{api_path}'.format(
api_path=settings.REST_SURVEYS.get('API_PATH', 'api/')),
include(slashless_router.urls)),
]
| # ... existing code ...
urlpatterns = [
url(r'^{api_path}'.format(
api_path=settings.REST_SURVEYS.get('API_PATH', 'api/')),
include(router.urls)),
url(r'^{api_path}'.format(
api_path=settings.REST_SURVEYS.get('API_PATH', 'api/')),
include(slashless_router.urls)),
# ... rest of the code ... |
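The substance of the fix above is replacing the hard `settings.REST_SURVEYS['API_PATH']` lookup, which raises `KeyError` at import time when the setting is absent, with `.get()` and a fallback of `'api/'`. A minimal sketch of the difference, with a plain dict standing in for the settings mapping:

REST_SURVEYS = {}  # 'API_PATH' deliberately left unconfigured

# REST_SURVEYS['API_PATH'] would raise KeyError here; .get() falls back.
api_path = REST_SURVEYS.get('API_PATH', 'api/')
assert api_path == 'api/'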
116d9565050ad69888cb38c302f8ae0d9232eec3 | gameserver/utils.py | gameserver/utils.py | import random as orig_random
from decorator import decorator
import binascii
import struct
import hashlib
from gameserver.database import db
db_session = db.session
random = orig_random.Random()
random.seed()
def node_to_dict(node):
connections = []
for edge in node.lower_edges:
connections.append(
{"to_id": edge.higher_node.id,
"from_id": node.id,
"weight": "{:.2f}".format(edge.weight),
}
)
data = {"id": node.id,
"name": node.name,
"short_name": node.short_name,
"group": node.group,
"leakage": "{:.2f}".format(node.leak),
"max_amount": "{:.2f}".format(node.max_level),
"activation_amount": "{:.2f}".format(node.activation),
"balance": "{:.2f}".format(node.balance),
"connections": connections
}
return data
def pack_amount(value):
return binascii.hexlify(struct.pack("f", value)).decode('ascii')
def unpack_amount(value):
return struct.unpack("f", binascii.unhexlify(value))[0]
def checksum(seller_id, policy_id, price, salt):
input = "{}{}{}{}".format(seller_id, policy_id, pack_amount(price), salt)
return hashlib.sha1(input).hexdigest()
| import random as orig_random
from decorator import decorator
import binascii
import struct
import hashlib
from gameserver.database import db
db_session = db.session
random = orig_random.Random()
random.seed()
def node_to_dict(node):
connections = []
for edge in node.lower_edges:
connections.append(
{"to_id": edge.higher_node.id,
"from_id": node.id,
"weight": "{:.2f}".format(edge.weight),
}
)
data = {"id": node.id,
"name": node.name,
"short_name": node.short_name,
"group": node.group,
"leakage": "{:.2f}".format(node.leak),
"max_amount": "{:.2f}".format(node.max_level),
"activation_amount": "{:.2f}".format(node.activation),
"active_level": "{:.2f}".format(node.active_level),
"balance": "{:.2f}".format(node.balance),
"connections": connections
}
return data
def pack_amount(value):
return binascii.hexlify(struct.pack("f", value)).decode('ascii')
def unpack_amount(value):
return struct.unpack("f", binascii.unhexlify(value))[0]
def checksum(seller_id, policy_id, price, salt):
input = "{}{}{}{}".format(seller_id, policy_id, pack_amount(price), salt)
return hashlib.sha1(input).hexdigest()
| Add in the active level to json output for the mobile game to use | Add in the active level to json output for the mobile game to use
| Python | apache-2.0 | hammertoe/didactic-spork,hammertoe/didactic-spork,hammertoe/didactic-spork,hammertoe/didactic-spork | import random as orig_random
from decorator import decorator
import binascii
import struct
import hashlib
from gameserver.database import db
db_session = db.session
random = orig_random.Random()
random.seed()
def node_to_dict(node):
connections = []
for edge in node.lower_edges:
connections.append(
{"to_id": edge.higher_node.id,
"from_id": node.id,
"weight": "{:.2f}".format(edge.weight),
}
)
data = {"id": node.id,
"name": node.name,
"short_name": node.short_name,
"group": node.group,
"leakage": "{:.2f}".format(node.leak),
"max_amount": "{:.2f}".format(node.max_level),
"activation_amount": "{:.2f}".format(node.activation),
+ "active_level": "{:.2f}".format(node.active_level),
"balance": "{:.2f}".format(node.balance),
"connections": connections
}
return data
def pack_amount(value):
return binascii.hexlify(struct.pack("f", value)).decode('ascii')
def unpack_amount(value):
return struct.unpack("f", binascii.unhexlify(value))[0]
def checksum(seller_id, policy_id, price, salt):
input = "{}{}{}{}".format(seller_id, policy_id, pack_amount(price), salt)
return hashlib.sha1(input).hexdigest()
| Add in the active level to json output for the mobile game to use | ## Code Before:
import random as orig_random
from decorator import decorator
import binascii
import struct
import hashlib
from gameserver.database import db
db_session = db.session
random = orig_random.Random()
random.seed()
def node_to_dict(node):
connections = []
for edge in node.lower_edges:
connections.append(
{"to_id": edge.higher_node.id,
"from_id": node.id,
"weight": "{:.2f}".format(edge.weight),
}
)
data = {"id": node.id,
"name": node.name,
"short_name": node.short_name,
"group": node.group,
"leakage": "{:.2f}".format(node.leak),
"max_amount": "{:.2f}".format(node.max_level),
"activation_amount": "{:.2f}".format(node.activation),
"balance": "{:.2f}".format(node.balance),
"connections": connections
}
return data
def pack_amount(value):
return binascii.hexlify(struct.pack("f", value)).decode('ascii')
def unpack_amount(value):
return struct.unpack("f", binascii.unhexlify(value))[0]
def checksum(seller_id, policy_id, price, salt):
input = "{}{}{}{}".format(seller_id, policy_id, pack_amount(price), salt)
return hashlib.sha1(input).hexdigest()
## Instruction:
Add in the active level to json output for the mobile game to use
## Code After:
import random as orig_random
from decorator import decorator
import binascii
import struct
import hashlib
from gameserver.database import db
db_session = db.session
random = orig_random.Random()
random.seed()
def node_to_dict(node):
connections = []
for edge in node.lower_edges:
connections.append(
{"to_id": edge.higher_node.id,
"from_id": node.id,
"weight": "{:.2f}".format(edge.weight),
}
)
data = {"id": node.id,
"name": node.name,
"short_name": node.short_name,
"group": node.group,
"leakage": "{:.2f}".format(node.leak),
"max_amount": "{:.2f}".format(node.max_level),
"activation_amount": "{:.2f}".format(node.activation),
"active_level": "{:.2f}".format(node.active_level),
"balance": "{:.2f}".format(node.balance),
"connections": connections
}
return data
def pack_amount(value):
return binascii.hexlify(struct.pack("f", value)).decode('ascii')
def unpack_amount(value):
return struct.unpack("f", binascii.unhexlify(value))[0]
def checksum(seller_id, policy_id, price, salt):
input = "{}{}{}{}".format(seller_id, policy_id, pack_amount(price), salt)
return hashlib.sha1(input).hexdigest()
| // ... existing code ...
"activation_amount": "{:.2f}".format(node.activation),
"active_level": "{:.2f}".format(node.active_level),
"balance": "{:.2f}".format(node.balance),
// ... rest of the code ... |
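Like the other numeric fields emitted by `node_to_dict`, the new `active_level` entry is serialized as a two-decimal string for the client. A tiny sketch of that formatting convention; the value is illustrative:

active_level = 0.756
# Same "{:.2f}" formatting used for balance, leakage and the rest.
assert "{:.2f}".format(active_level) == "0.76"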
4848baf76e4972401530b624816ba48cb08d9398 | appconf/utils.py | appconf/utils.py | import sys
def import_attribute(import_path, exception_handler=None):
from django.utils.importlib import import_module
module_name, object_name = import_path.rsplit('.', 1)
try:
module = import_module(module_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
try:
return getattr(module, object_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
| import sys
def import_attribute(import_path, exception_handler=None):
try:
from importlib import import_module
except ImportError:
from django.utils.importlib import import_module
module_name, object_name = import_path.rsplit('.', 1)
try:
module = import_module(module_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
try:
return getattr(module, object_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
| Use import_module from standard library if exists | Use import_module from standard library if exists
Django 1.8+ drops `django.utils.importlib`. I imagine that is because an older version of Python (2.5 and/or 2.6) is being dropped. I haven't checked older versions, but `importlib` exists in Python 2.7. | Python | bsd-3-clause | diox/django-appconf,carltongibson/django-appconf,django-compressor/django-appconf,jezdez/django-appconf,jessehon/django-appconf,treyhunner/django-appconf,jezdez-archive/django-appconf |
def import_attribute(import_path, exception_handler=None):
+ try:
+ from importlib import import_module
+ except ImportError:
- from django.utils.importlib import import_module
+ from django.utils.importlib import import_module
module_name, object_name = import_path.rsplit('.', 1)
try:
module = import_module(module_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
try:
return getattr(module, object_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
| Use import_module from standard library if exists | ## Code Before:
import sys
def import_attribute(import_path, exception_handler=None):
from django.utils.importlib import import_module
module_name, object_name = import_path.rsplit('.', 1)
try:
module = import_module(module_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
try:
return getattr(module, object_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
## Instruction:
Use import_module from standard library if exists
## Code After:
import sys
def import_attribute(import_path, exception_handler=None):
try:
from importlib import import_module
except ImportError:
from django.utils.importlib import import_module
module_name, object_name = import_path.rsplit('.', 1)
try:
module = import_module(module_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
try:
return getattr(module, object_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
| ...
def import_attribute(import_path, exception_handler=None):
try:
from importlib import import_module
except ImportError:
from django.utils.importlib import import_module
module_name, object_name = import_path.rsplit('.', 1)
... |
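`import_attribute` above takes a dotted path plus an optional handler that receives the path and the unpacked `sys.exc_info()` whenever the module import or the attribute lookup fails. A minimal usage sketch of the function defined in this record; the handler name is illustrative:

def on_import_error(import_path, exctype, excvalue, tb):
    # Swallow the failure and signal it with None instead of raising.
    return None

join = import_attribute('os.path.join')  # resolves normally
missing = import_attribute('os.path.no_such_name', on_import_error)
assert missing is None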
78b2978c3e0e56c4c75a3a6b532e02c995ca69ed | openedx/core/djangoapps/user_api/permissions/views.py | openedx/core/djangoapps/user_api/permissions/views.py | from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework import permissions
from django.db import transaction
from django.utils.translation import ugettext as _
from openedx.core.lib.api.authentication import (
SessionAuthenticationAllowInactiveUser,
OAuth2AuthenticationAllowInactiveUser,
)
from openedx.core.lib.api.parsers import MergePatchParser
from openedx.core.lib.api.permissions import IsUserInUrlOrStaff
from ..errors import UserNotFound, UserNotAuthorized
class PermissionsView(APIView):
authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser)
parser_classes = (MergePatchParser,)
def get(self, request):
"""
GET /api/user/v1/
"""
try:
is_staff = request.user.is_staff
except UserNotAuthorized:
return Response(status=status.HTTP_403_FORBIDDEN)
except UserNotFound:
return Response(status=status.HTTP_404_NOT_FOUND)
return Response(is_staff)
| from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from openedx.core.lib.api.authentication import (
SessionAuthenticationAllowInactiveUser,
OAuth2AuthenticationAllowInactiveUser,
)
from openedx.core.lib.api.parsers import MergePatchParser
from ..errors import UserNotFound, UserNotAuthorized
class PermissionsView(APIView):
authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser)
parser_classes = (MergePatchParser,)
def get(self, request):
"""
GET /api/user/v1/
"""
try:
is_staff = request.user.is_staff
except UserNotAuthorized:
return Response(status=status.HTTP_403_FORBIDDEN)
except UserNotFound:
return Response(status=status.HTTP_404_NOT_FOUND)
return Response(is_staff)
| Remove unused import and redundant comment | Remove unused import and redundant comment
| Python | agpl-3.0 | mbareta/edx-platform-ft,mbareta/edx-platform-ft,mbareta/edx-platform-ft,mbareta/edx-platform-ft | from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
- from rest_framework import permissions
-
- from django.db import transaction
- from django.utils.translation import ugettext as _
from openedx.core.lib.api.authentication import (
SessionAuthenticationAllowInactiveUser,
OAuth2AuthenticationAllowInactiveUser,
)
from openedx.core.lib.api.parsers import MergePatchParser
- from openedx.core.lib.api.permissions import IsUserInUrlOrStaff
from ..errors import UserNotFound, UserNotAuthorized
class PermissionsView(APIView):
authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser)
parser_classes = (MergePatchParser,)
def get(self, request):
"""
GET /api/user/v1/
"""
try:
is_staff = request.user.is_staff
except UserNotAuthorized:
return Response(status=status.HTTP_403_FORBIDDEN)
except UserNotFound:
return Response(status=status.HTTP_404_NOT_FOUND)
return Response(is_staff)
| Remove unused import and redundant comment | ## Code Before:
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework import permissions
from django.db import transaction
from django.utils.translation import ugettext as _
from openedx.core.lib.api.authentication import (
SessionAuthenticationAllowInactiveUser,
OAuth2AuthenticationAllowInactiveUser,
)
from openedx.core.lib.api.parsers import MergePatchParser
from openedx.core.lib.api.permissions import IsUserInUrlOrStaff
from ..errors import UserNotFound, UserNotAuthorized
class PermissionsView(APIView):
authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser)
parser_classes = (MergePatchParser,)
def get(self, request):
"""
GET /api/user/v1/
"""
try:
is_staff = request.user.is_staff
except UserNotAuthorized:
return Response(status=status.HTTP_403_FORBIDDEN)
except UserNotFound:
return Response(status=status.HTTP_404_NOT_FOUND)
return Response(is_staff)
## Instruction:
Remove unused import and redundant comment
## Code After:
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from openedx.core.lib.api.authentication import (
SessionAuthenticationAllowInactiveUser,
OAuth2AuthenticationAllowInactiveUser,
)
from openedx.core.lib.api.parsers import MergePatchParser
from ..errors import UserNotFound, UserNotAuthorized
class PermissionsView(APIView):
authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser)
parser_classes = (MergePatchParser,)
def get(self, request):
"""
GET /api/user/v1/
"""
try:
is_staff = request.user.is_staff
except UserNotAuthorized:
return Response(status=status.HTTP_403_FORBIDDEN)
except UserNotFound:
return Response(status=status.HTTP_404_NOT_FOUND)
return Response(is_staff)
| ...
from rest_framework import status
...
from openedx.core.lib.api.parsers import MergePatchParser
from ..errors import UserNotFound, UserNotAuthorized
... |
b9dde5e9fc56feaea581cecca3f919f4e053044d | brumecli/config.py | brumecli/config.py | import os
import yaml
from subprocess import check_output, CalledProcessError
from colors import red
from jinja2 import Template
class Config():
@staticmethod
def load(config_file='brume.yml'):
"""Return the YAML configuration for a project based on the `config_file` template."""
template_functions = {}
def env(key):
"""Return the value of the `key` environment variable."""
try:
return os.environ[key]
except KeyError:
print(red('[ERROR] No environment variable with key {}'.format(key)))
exit(1)
template_functions['env'] = env
if os.path.isdir('.git'):
def git_commit():
"""Return the SHA1 of the latest Git commit (HEAD)."""
try:
return check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
except CalledProcessError:
print(red('[ERROR] Current directory is not a Git repository'))
exit(1)
def git_branch():
"""Return the name of the current Git branch."""
try:
return check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
except CalledProcessError:
print(red('[ERROR] Current directory is not a Git repository'))
exit(1)
template_functions['git_commit'] = git_commit()
template_functions['git_branch'] = git_branch()
template = Template(open(config_file, 'r').read())
return yaml.load(
template.render(**template_functions)
)
| import os
import yaml
from subprocess import check_output, CalledProcessError
from colors import red
from jinja2 import Template
class Config():
@staticmethod
def env(key):
"""Return the value of the `key` environment variable."""
try:
return os.environ[key]
except KeyError:
print(red('[ERROR] No environment variable with key {}'.format(key)))
exit(1)
@staticmethod
def git_commit():
"""Return the SHA1 of the latest Git commit (HEAD)."""
try:
return check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
except CalledProcessError:
print(red('[ERROR] Current directory is not a Git repository'))
exit(1)
@staticmethod
def git_branch():
"""Return the name of the current Git branch."""
try:
return check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
except CalledProcessError:
print(red('[ERROR] Current directory is not a Git repository'))
exit(1)
@staticmethod
def load(config_file='brume.yml'):
"""
Return the YAML configuration for a project based on the `config_file` template.
By default, the template exposes the `env` function.
The `git_branch` and `git_commit` values are exposed only when a `.git` folder
exists in the current directory
"""
template_functions = {}
template_functions['env'] = Config.env
if os.path.isdir('.git'):
template_functions['git_commit'] = Config.git_commit()
template_functions['git_branch'] = Config.git_branch()
template = Template(open(config_file, 'r').read())
return yaml.load(
template.render(**template_functions)
)
| Move template functions out of `Config.load()` | Move template functions out of `Config.load()`
| Python | mit | flou/brume,geronimo-iia/brume | import os
import yaml
from subprocess import check_output, CalledProcessError
from colors import red
from jinja2 import Template
class Config():
@staticmethod
+ def env(key):
+ """Return the value of the `key` environment variable."""
+ try:
+ return os.environ[key]
+ except KeyError:
+ print(red('[ERROR] No environment variable with key {}'.format(key)))
+ exit(1)
+
+ @staticmethod
+ def git_commit():
+ """Return the SHA1 of the latest Git commit (HEAD)."""
+ try:
+ return check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
+ except CalledProcessError:
+ print(red('[ERROR] Current directory is not a Git repository'))
+ exit(1)
+
+ @staticmethod
+ def git_branch():
+ """Return the name of the current Git branch."""
+ try:
+ return check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
+ except CalledProcessError:
+ print(red('[ERROR] Current directory is not a Git repository'))
+ exit(1)
+
+ @staticmethod
def load(config_file='brume.yml'):
+ """
- """Return the YAML configuration for a project based on the `config_file` template."""
+ Return the YAML configuration for a project based on the `config_file` template.
+
+ By default, the template exposes the `env` function.
+ The `git_branch` and `git_commit` values are exposed only when a `.git` folder
+ exists in the current directory
+ """
template_functions = {}
-
- def env(key):
- """Return the value of the `key` environment variable."""
- try:
- return os.environ[key]
- except KeyError:
- print(red('[ERROR] No environment variable with key {}'.format(key)))
- exit(1)
- template_functions['env'] = env
+ template_functions['env'] = Config.env
if os.path.isdir('.git'):
- def git_commit():
- """Return the SHA1 of the latest Git commit (HEAD)."""
- try:
- return check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
- except CalledProcessError:
- print(red('[ERROR] Current directory is not a Git repository'))
- exit(1)
-
- def git_branch():
- """Return the name of the current Git branch."""
- try:
- return check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
- except CalledProcessError:
- print(red('[ERROR] Current directory is not a Git repository'))
- exit(1)
- template_functions['git_commit'] = git_commit()
+ template_functions['git_commit'] = Config.git_commit()
- template_functions['git_branch'] = git_branch()
+ template_functions['git_branch'] = Config.git_branch()
template = Template(open(config_file, 'r').read())
return yaml.load(
template.render(**template_functions)
)
| Move template functions out of `Config.load()` | ## Code Before:
import os
import yaml
from subprocess import check_output, CalledProcessError
from colors import red
from jinja2 import Template
class Config():
@staticmethod
def load(config_file='brume.yml'):
"""Return the YAML configuration for a project based on the `config_file` template."""
template_functions = {}
def env(key):
"""Return the value of the `key` environment variable."""
try:
return os.environ[key]
except KeyError:
print(red('[ERROR] No environment variable with key {}'.format(key)))
exit(1)
template_functions['env'] = env
if os.path.isdir('.git'):
def git_commit():
"""Return the SHA1 of the latest Git commit (HEAD)."""
try:
return check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
except CalledProcessError:
print(red('[ERROR] Current directory is not a Git repository'))
exit(1)
def git_branch():
"""Return the name of the current Git branch."""
try:
return check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
except CalledProcessError:
print(red('[ERROR] Current directory is not a Git repository'))
exit(1)
template_functions['git_commit'] = git_commit()
template_functions['git_branch'] = git_branch()
template = Template(open(config_file, 'r').read())
return yaml.load(
template.render(**template_functions)
)
## Instruction:
Move template functions out of `Config.load()`
## Code After:
import os
import yaml
from subprocess import check_output, CalledProcessError
from colors import red
from jinja2 import Template
class Config():
@staticmethod
def env(key):
"""Return the value of the `key` environment variable."""
try:
return os.environ[key]
except KeyError:
print(red('[ERROR] No environment variable with key {}'.format(key)))
exit(1)
@staticmethod
def git_commit():
"""Return the SHA1 of the latest Git commit (HEAD)."""
try:
return check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
except CalledProcessError:
print(red('[ERROR] Current directory is not a Git repository'))
exit(1)
@staticmethod
def git_branch():
"""Return the name of the current Git branch."""
try:
return check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
except CalledProcessError:
print(red('[ERROR] Current directory is not a Git repository'))
exit(1)
@staticmethod
def load(config_file='brume.yml'):
"""
Return the YAML configuration for a project based on the `config_file` template.
By default, the template exposes the `env` function.
The `git_branch` and `git_commit` values are exposed only when a `.git` folder
exists in the current directory
"""
template_functions = {}
template_functions['env'] = Config.env
if os.path.isdir('.git'):
template_functions['git_commit'] = Config.git_commit()
template_functions['git_branch'] = Config.git_branch()
template = Template(open(config_file, 'r').read())
return yaml.load(
template.render(**template_functions)
)
| // ... existing code ...
@staticmethod
def env(key):
"""Return the value of the `key` environment variable."""
try:
return os.environ[key]
except KeyError:
print(red('[ERROR] No environment variable with key {}'.format(key)))
exit(1)
@staticmethod
def git_commit():
"""Return the SHA1 of the latest Git commit (HEAD)."""
try:
return check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
except CalledProcessError:
print(red('[ERROR] Current directory is not a Git repository'))
exit(1)
@staticmethod
def git_branch():
"""Return the name of the current Git branch."""
try:
return check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
except CalledProcessError:
print(red('[ERROR] Current directory is not a Git repository'))
exit(1)
@staticmethod
def load(config_file='brume.yml'):
"""
Return the YAML configuration for a project based on the `config_file` template.
By default, the template exposes the `env` function.
The `git_branch` and `git_commit` values are exposed only when a `.git` folder
exists in the current directory
"""
template_functions = {}
template_functions['env'] = Config.env
// ... modified code ...
if os.path.isdir('.git'):
template_functions['git_commit'] = Config.git_commit()
template_functions['git_branch'] = Config.git_branch()
// ... rest of the code ... |
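For context on the refactor above: `Config.load` renders `brume.yml` as a Jinja2 template before YAML-parsing it, so the exposed helpers are called from the config file itself. A minimal sketch of that rendering step, exercising only the `env` helper; the template text is illustrative and assumes a `HOME` environment variable is set (`Config.env` exits otherwise):

from jinja2 import Template

# Mirrors the Template(...).render(**template_functions) call in Config.load.
text = "home_dir: {{ env('HOME') }}"
rendered = Template(text).render(env=Config.env)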
56528264cdc76dc1b00804b7f67908d3bb1b1b0e | flask_appconfig/docker.py | flask_appconfig/docker.py |
import os
from six.moves.urllib_parse import urlparse
def from_docker_envvars(config):
# linked postgres database (link name 'pg' or 'postgres')
if 'PG_PORT' in os.environ:
pg_url = urlparse(os.environ['PG_PORT'])
if not pg_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for postgres')
host, port = pg_url.netloc.split(':')
uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(
user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'),
password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''),
host=host,
port=port,
database=os.environ.get('PG_ENV_POSTGRES_DB'))
config['SQLALCHEMY_DATABASE_URI'] = uri
if 'REDIS_PORT' in os.environ:
redis_url = urlparse(os.environ['REDIS_PORT'])
if not redis_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for redis')
host, port = redis_url.netloc.split(':')
uri = 'redis://{host}:{port}/0'.format(host=host, port=port, )
config['REDIS_URL'] = uri
config['REDIS_HOST'] = host
config['REDIS_PORT'] = int(port)
|
import os
from six.moves.urllib_parse import urlparse
def from_docker_envvars(config):
# linked postgres database (link name 'pg' or 'postgres')
if 'PG_PORT' in os.environ:
pg_url = urlparse(os.environ['PG_PORT'])
if not pg_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for postgres')
host, port = pg_url.netloc.split(':')
uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(
user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'),
password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''),
host=host,
port=port,
database=os.environ.get('PG_ENV_POSTGRES_DB', 'postgres'))
config['SQLALCHEMY_DATABASE_URI'] = uri
if 'REDIS_PORT' in os.environ:
redis_url = urlparse(os.environ['REDIS_PORT'])
if not redis_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for redis')
host, port = redis_url.netloc.split(':')
uri = 'redis://{host}:{port}/0'.format(host=host, port=port, )
config['REDIS_URL'] = uri
config['REDIS_HOST'] = host
config['REDIS_PORT'] = int(port)
| Use correct database name instead of None when not supplied. | Use correct database name instead of None when not supplied.
| Python | mit | mbr/flask-appconfig |
import os
from six.moves.urllib_parse import urlparse
def from_docker_envvars(config):
# linked postgres database (link name 'pg' or 'postgres')
if 'PG_PORT' in os.environ:
pg_url = urlparse(os.environ['PG_PORT'])
if not pg_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for postgres')
host, port = pg_url.netloc.split(':')
uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(
user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'),
password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''),
host=host,
port=port,
- database=os.environ.get('PG_ENV_POSTGRES_DB'))
+ database=os.environ.get('PG_ENV_POSTGRES_DB', 'postgres'))
config['SQLALCHEMY_DATABASE_URI'] = uri
if 'REDIS_PORT' in os.environ:
redis_url = urlparse(os.environ['REDIS_PORT'])
if not redis_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for redis')
host, port = redis_url.netloc.split(':')
uri = 'redis://{host}:{port}/0'.format(host=host, port=port, )
config['REDIS_URL'] = uri
config['REDIS_HOST'] = host
config['REDIS_PORT'] = int(port)
| Use correct database name instead of None when not supplied. | ## Code Before:
import os
from six.moves.urllib_parse import urlparse
def from_docker_envvars(config):
# linked postgres database (link name 'pg' or 'postgres')
if 'PG_PORT' in os.environ:
pg_url = urlparse(os.environ['PG_PORT'])
if not pg_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for postgres')
host, port = pg_url.netloc.split(':')
uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(
user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'),
password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''),
host=host,
port=port,
database=os.environ.get('PG_ENV_POSTGRES_DB'))
config['SQLALCHEMY_DATABASE_URI'] = uri
if 'REDIS_PORT' in os.environ:
redis_url = urlparse(os.environ['REDIS_PORT'])
if not redis_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for redis')
host, port = redis_url.netloc.split(':')
uri = 'redis://{host}:{port}/0'.format(host=host, port=port, )
config['REDIS_URL'] = uri
config['REDIS_HOST'] = host
config['REDIS_PORT'] = int(port)
## Instruction:
Use correct database name instead of None when not supplied.
## Code After:
import os
from six.moves.urllib_parse import urlparse
def from_docker_envvars(config):
# linked postgres database (link name 'pg' or 'postgres')
if 'PG_PORT' in os.environ:
pg_url = urlparse(os.environ['PG_PORT'])
if not pg_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for postgres')
host, port = pg_url.netloc.split(':')
uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(
user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'),
password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''),
host=host,
port=port,
database=os.environ.get('PG_ENV_POSTGRES_DB', 'postgres'))
config['SQLALCHEMY_DATABASE_URI'] = uri
if 'REDIS_PORT' in os.environ:
redis_url = urlparse(os.environ['REDIS_PORT'])
if not redis_url.scheme == 'tcp':
raise ValueError('Only tcp scheme supported for redis')
host, port = redis_url.netloc.split(':')
uri = 'redis://{host}:{port}/0'.format(host=host, port=port, )
config['REDIS_URL'] = uri
config['REDIS_HOST'] = host
config['REDIS_PORT'] = int(port)
| // ... existing code ...
port=port,
database=os.environ.get('PG_ENV_POSTGRES_DB', 'postgres'))
// ... rest of the code ... |
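A sketch of the fixed behaviour above against synthetic Docker link variables; the address and user are illustrative:

import os

os.environ['PG_PORT'] = 'tcp://172.17.0.2:5432'
os.environ['PG_ENV_POSTGRES_USER'] = 'app'  # PG_ENV_POSTGRES_DB left unset

config = {}
from_docker_envvars(config)
# Before the fix the URI ended in '/None'; now it falls back to '/postgres'.
assert config['SQLALCHEMY_DATABASE_URI'] == 'postgres://app:@172.17.0.2:5432/postgres'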
e65a8c057d9dbd156222542a0e544d294292de00 | thinglang/lexer/symbols/base.py | thinglang/lexer/symbols/base.py | from thinglang.utils.type_descriptors import ValueType
from thinglang.lexer.symbols import LexicalSymbol
class LexicalQuote(LexicalSymbol): # "
EMITTABLE = False
@classmethod
def next_operator_set(cls, current, original):
if current is original:
return {'"': LexicalQuote}
return original
class LexicalParenthesesOpen(LexicalSymbol):
pass # (
class LexicalParenthesesClose(LexicalSymbol):
pass # )
class LexicalBracketOpen(LexicalSymbol):
pass # [
class LexicalBracketClose(LexicalSymbol):
pass # ]
class LexicalSeparator(LexicalSymbol):
pass # ,
class LexicalIndent(LexicalSymbol):
pass # <TAB>
class LexicalAccess(LexicalSymbol):
pass # a.b
class LexicalInlineComment(LexicalSymbol): pass
class LexicalAssignment(LexicalSymbol): pass
class LexicalIdentifier(LexicalSymbol, ValueType):
def __init__(self, value):
super(LexicalIdentifier, self).__init__(value)
self.value = value
def describe(self):
return self.value
def evaluate(self, stack):
return stack[self.value]
def __hash__(self):
return hash(self.value)
def __eq__(self, other):
return type(other) == type(self) and self.value == other.value
LexicalIdentifier.SELF = LexicalIdentifier("self") | from thinglang.utils.type_descriptors import ValueType
from thinglang.lexer.symbols import LexicalSymbol
class LexicalQuote(LexicalSymbol): # "
EMITTABLE = False
@classmethod
def next_operator_set(cls, current, original):
if current is original:
return {'"': LexicalQuote}
return original
class LexicalParenthesesOpen(LexicalSymbol):
pass # (
class LexicalParenthesesClose(LexicalSymbol):
pass # )
class LexicalBracketOpen(LexicalSymbol):
pass # [
class LexicalBracketClose(LexicalSymbol):
pass # ]
class LexicalSeparator(LexicalSymbol):
pass # ,
class LexicalIndent(LexicalSymbol):
pass # <TAB>
class LexicalAccess(LexicalSymbol):
pass # a.b
class LexicalInlineComment(LexicalSymbol): pass
class LexicalAssignment(LexicalSymbol): pass
class LexicalIdentifier(LexicalSymbol, ValueType):
def __init__(self, value):
super(LexicalIdentifier, self).__init__(value)
self.value = value
def describe(self):
return self.value
def evaluate(self, resolver):
return resolver.resolve(self)
def __hash__(self):
return hash(self.value)
def __eq__(self, other):
return type(other) == type(self) and self.value == other.value
LexicalIdentifier.SELF = LexicalIdentifier("self") | Use new resolver in LexicalID resolution | Use new resolver in LexicalID resolution
| Python | mit | ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang | from thinglang.utils.type_descriptors import ValueType
from thinglang.lexer.symbols import LexicalSymbol
class LexicalQuote(LexicalSymbol): # "
EMITTABLE = False
@classmethod
def next_operator_set(cls, current, original):
if current is original:
return {'"': LexicalQuote}
return original
class LexicalParenthesesOpen(LexicalSymbol):
pass # (
class LexicalParenthesesClose(LexicalSymbol):
pass # )
class LexicalBracketOpen(LexicalSymbol):
pass # [
class LexicalBracketClose(LexicalSymbol):
pass # ]
class LexicalSeparator(LexicalSymbol):
pass # ,
class LexicalIndent(LexicalSymbol):
pass # <TAB>
class LexicalAccess(LexicalSymbol):
pass # a.b
class LexicalInlineComment(LexicalSymbol): pass
class LexicalAssignment(LexicalSymbol): pass
class LexicalIdentifier(LexicalSymbol, ValueType):
+
def __init__(self, value):
super(LexicalIdentifier, self).__init__(value)
self.value = value
def describe(self):
return self.value
- def evaluate(self, stack):
+ def evaluate(self, resolver):
- return stack[self.value]
+ return resolver.resolve(self)
def __hash__(self):
return hash(self.value)
def __eq__(self, other):
return type(other) == type(self) and self.value == other.value
LexicalIdentifier.SELF = LexicalIdentifier("self") | Use new resolver in LexicalID resolution | ## Code Before:
from thinglang.utils.type_descriptors import ValueType
from thinglang.lexer.symbols import LexicalSymbol
class LexicalQuote(LexicalSymbol): # "
EMITTABLE = False
@classmethod
def next_operator_set(cls, current, original):
if current is original:
return {'"': LexicalQuote}
return original
class LexicalParenthesesOpen(LexicalSymbol):
pass # (
class LexicalParenthesesClose(LexicalSymbol):
pass # )
class LexicalBracketOpen(LexicalSymbol):
pass # [
class LexicalBracketClose(LexicalSymbol):
pass # ]
class LexicalSeparator(LexicalSymbol):
pass # ,
class LexicalIndent(LexicalSymbol):
pass # <TAB>
class LexicalAccess(LexicalSymbol):
pass # a.b
class LexicalInlineComment(LexicalSymbol): pass
class LexicalAssignment(LexicalSymbol): pass
class LexicalIdentifier(LexicalSymbol, ValueType):
def __init__(self, value):
super(LexicalIdentifier, self).__init__(value)
self.value = value
def describe(self):
return self.value
def evaluate(self, stack):
return stack[self.value]
def __hash__(self):
return hash(self.value)
def __eq__(self, other):
return type(other) == type(self) and self.value == other.value
LexicalIdentifier.SELF = LexicalIdentifier("self")
## Instruction:
Use new resolver in LexicalID resolution
## Code After:
from thinglang.utils.type_descriptors import ValueType
from thinglang.lexer.symbols import LexicalSymbol
class LexicalQuote(LexicalSymbol): # "
EMITTABLE = False
@classmethod
def next_operator_set(cls, current, original):
if current is original:
return {'"': LexicalQuote}
return original
class LexicalParenthesesOpen(LexicalSymbol):
pass # (
class LexicalParenthesesClose(LexicalSymbol):
pass # )
class LexicalBracketOpen(LexicalSymbol):
pass # [
class LexicalBracketClose(LexicalSymbol):
pass # ]
class LexicalSeparator(LexicalSymbol):
pass # ,
class LexicalIndent(LexicalSymbol):
pass # <TAB>
class LexicalAccess(LexicalSymbol):
pass # a.b
class LexicalInlineComment(LexicalSymbol): pass
class LexicalAssignment(LexicalSymbol): pass
class LexicalIdentifier(LexicalSymbol, ValueType):
def __init__(self, value):
super(LexicalIdentifier, self).__init__(value)
self.value = value
def describe(self):
return self.value
def evaluate(self, resolver):
return resolver.resolve(self)
def __hash__(self):
return hash(self.value)
def __eq__(self, other):
return type(other) == type(self) and self.value == other.value
LexicalIdentifier.SELF = LexicalIdentifier("self") | # ... existing code ...
class LexicalIdentifier(LexicalSymbol, ValueType):
def __init__(self, value):
# ... modified code ...
def evaluate(self, resolver):
return resolver.resolve(self)
# ... rest of the code ... |
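The refactor above decouples `LexicalIdentifier.evaluate` from a concrete stack: any object exposing `resolve(identifier)` now works. A minimal stand-in resolver; the class name is illustrative:

class DictResolver(object):
    """Resolve identifiers from a plain dict, mirroring the old stack lookup."""

    def __init__(self, values):
        self.values = values

    def resolve(self, identifier):
        return self.values[identifier.value]

ident = LexicalIdentifier('x')
assert ident.evaluate(DictResolver({'x': 42})) == 42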
7ceba1f2b83628a2b89ffbdd30e435970e8c5e91 | tests/test_kafka_streams.py | tests/test_kafka_streams.py |
import pytest
from winton_kafka_streams import kafka_config
from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError
from winton_kafka_streams.kafka_streams import KafkaStreams
from winton_kafka_streams.processor.processor import BaseProcessor
from winton_kafka_streams.processor.topology import TopologyBuilder
class MyTestProcessor(BaseProcessor):
pass
def test_Given_StreamAlreadyStarted_When_CallStartAgain_Then_RaiseError():
kafka_config.NUM_STREAM_THREADS = 0
topology_builder = TopologyBuilder()
topology_builder.source('my-source', ['my-input-topic-1'])
topology_builder.processor('my-processor', MyTestProcessor, 'my-source')
topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor')
topology = topology_builder.build()
kafka_streams = KafkaStreams(topology, kafka_config)
kafka_streams.start()
with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'):
kafka_streams.start()
|
import pytest
from winton_kafka_streams import kafka_config
from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError
from winton_kafka_streams.kafka_streams import KafkaStreams
from winton_kafka_streams.processor.processor import BaseProcessor
from winton_kafka_streams.processor.topology import TopologyBuilder
class MyTestProcessor(BaseProcessor):
pass
def test__given__stream_already_started__when__call_start_again__then__raise_error():
kafka_config.NUM_STREAM_THREADS = 0
topology_builder = TopologyBuilder()
topology_builder.source('my-source', ['my-input-topic-1'])
topology_builder.processor('my-processor', MyTestProcessor, 'my-source')
topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor')
topology = topology_builder.build()
kafka_streams = KafkaStreams(topology, kafka_config)
kafka_streams.start()
with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'):
kafka_streams.start()
| Use more Pythonic name for test. | Use more Pythonic name for test.
| Python | apache-2.0 | wintoncode/winton-kafka-streams |
import pytest
from winton_kafka_streams import kafka_config
from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError
from winton_kafka_streams.kafka_streams import KafkaStreams
from winton_kafka_streams.processor.processor import BaseProcessor
from winton_kafka_streams.processor.topology import TopologyBuilder
class MyTestProcessor(BaseProcessor):
pass
- def test_Given_StreamAlreadyStarted_When_CallStartAgain_Then_RaiseError():
+ def test__given__stream_already_started__when__call_start_again__then__raise_error():
kafka_config.NUM_STREAM_THREADS = 0
topology_builder = TopologyBuilder()
topology_builder.source('my-source', ['my-input-topic-1'])
topology_builder.processor('my-processor', MyTestProcessor, 'my-source')
topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor')
topology = topology_builder.build()
kafka_streams = KafkaStreams(topology, kafka_config)
kafka_streams.start()
with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'):
kafka_streams.start()
| Use more Pythonic name for test. | ## Code Before:
import pytest
from winton_kafka_streams import kafka_config
from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError
from winton_kafka_streams.kafka_streams import KafkaStreams
from winton_kafka_streams.processor.processor import BaseProcessor
from winton_kafka_streams.processor.topology import TopologyBuilder
class MyTestProcessor(BaseProcessor):
pass
def test_Given_StreamAlreadyStarted_When_CallStartAgain_Then_RaiseError():
kafka_config.NUM_STREAM_THREADS = 0
topology_builder = TopologyBuilder()
topology_builder.source('my-source', ['my-input-topic-1'])
topology_builder.processor('my-processor', MyTestProcessor, 'my-source')
topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor')
topology = topology_builder.build()
kafka_streams = KafkaStreams(topology, kafka_config)
kafka_streams.start()
with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'):
kafka_streams.start()
## Instruction:
Use more Pythonic name for test.
## Code After:
import pytest
from winton_kafka_streams import kafka_config
from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError
from winton_kafka_streams.kafka_streams import KafkaStreams
from winton_kafka_streams.processor.processor import BaseProcessor
from winton_kafka_streams.processor.topology import TopologyBuilder
class MyTestProcessor(BaseProcessor):
pass
def test__given__stream_already_started__when__call_start_again__then__raise_error():
kafka_config.NUM_STREAM_THREADS = 0
topology_builder = TopologyBuilder()
topology_builder.source('my-source', ['my-input-topic-1'])
topology_builder.processor('my-processor', MyTestProcessor, 'my-source')
topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor')
topology = topology_builder.build()
kafka_streams = KafkaStreams(topology, kafka_config)
kafka_streams.start()
with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'):
kafka_streams.start()
| ...
def test__given__stream_already_started__when__call_start_again__then__raise_error():
kafka_config.NUM_STREAM_THREADS = 0
... |
a3c68f6f70a2d4d1ecdcdb982eda9ec15fa4c127 | utils.py | utils.py | from google.appengine.api import users
from google.appengine.ext import db
from model import User
@db.transactional
def create_user(google_user):
user = User(
google_user=google_user
)
user.put()
return user
def get_current_user():
google_user = users.get_current_user()
user = get_user_model_for(google_user)
return user
def get_user_model_for(google_user=None):
return User.all().filter('google_user =', google_user).get()
def get_user_model_by_id_or_nick(id_or_nick):
if id_or_nick.isdigit():
return User.get_by_id(int(id_or_nick))
else:
return User.all().filter('nickname_lower = ', id_or_nick.lower()).get() | from google.appengine.api import users
from google.appengine.ext import db
from model import User
latest_signup = None
@db.transactional
def create_user(google_user):
global latest_signup
user = User(
google_user=google_user
)
user.put()
latest_signup = user
return user
def get_current_user():
google_user = users.get_current_user()
if latest_signup != None and google_user == latest_signup.google_user:
return latest_signup
user = get_user_model_for(google_user)
return user
def get_user_model_for(google_user=None):
return User.all().filter('google_user =', google_user).get()
def get_user_model_by_id_or_nick(id_or_nick):
if id_or_nick.isdigit():
return User.get_by_id(int(id_or_nick))
else:
return User.all().filter('nickname_lower = ', id_or_nick.lower()).get() | Fix bug where user could not be found | Fix bug where user could not be found
This problem only occurred when a request tried to find the user right
after it had been created.
| Python | mit | youtify/newscontrol,studyindenmark/newscontrol,studyindenmark/newscontrol,youtify/newscontrol | from google.appengine.api import users
from google.appengine.ext import db
from model import User
+ latest_signup = None
+
@db.transactional
def create_user(google_user):
+ global latest_signup
user = User(
google_user=google_user
)
user.put()
+ latest_signup = user
return user
def get_current_user():
google_user = users.get_current_user()
+
+ if latest_signup != None and google_user == latest_signup.google_user:
+ return latest_signup
+
user = get_user_model_for(google_user)
return user
def get_user_model_for(google_user=None):
return User.all().filter('google_user =', google_user).get()
def get_user_model_by_id_or_nick(id_or_nick):
if id_or_nick.isdigit():
return User.get_by_id(int(id_or_nick))
else:
return User.all().filter('nickname_lower = ', id_or_nick.lower()).get() | Fix bug where user could not be found | ## Code Before:
from google.appengine.api import users
from google.appengine.ext import db
from model import User
@db.transactional
def create_user(google_user):
user = User(
google_user=google_user
)
user.put()
return user
def get_current_user():
google_user = users.get_current_user()
user = get_user_model_for(google_user)
return user
def get_user_model_for(google_user=None):
return User.all().filter('google_user =', google_user).get()
def get_user_model_by_id_or_nick(id_or_nick):
if id_or_nick.isdigit():
return User.get_by_id(int(id_or_nick))
else:
return User.all().filter('nickname_lower = ', id_or_nick.lower()).get()
## Instruction:
Fix bug where user could not be found
## Code After:
from google.appengine.api import users
from google.appengine.ext import db
from model import User
latest_signup = None
@db.transactional
def create_user(google_user):
global latest_signup
user = User(
google_user=google_user
)
user.put()
latest_signup = user
return user
def get_current_user():
google_user = users.get_current_user()
if latest_signup != None and google_user == latest_signup.google_user:
return latest_signup
user = get_user_model_for(google_user)
return user
def get_user_model_for(google_user=None):
return User.all().filter('google_user =', google_user).get()
def get_user_model_by_id_or_nick(id_or_nick):
if id_or_nick.isdigit():
return User.get_by_id(int(id_or_nick))
else:
return User.all().filter('nickname_lower = ', id_or_nick.lower()).get() | ...
latest_signup = None
@db.transactional
...
def create_user(google_user):
global latest_signup
user = User(
...
user.put()
latest_signup = user
return user
...
google_user = users.get_current_user()
if latest_signup != None and google_user == latest_signup.google_user:
return latest_signup
user = get_user_model_for(google_user)
... |
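The bug this fixes comes from the eventual consistency of App Engine's High Replication Datastore: a non-ancestor query such as the google_user filter above can miss an entity that was put() only moments earlier, while lookups by key are strongly consistent. A sketch of the failure mode, reusing the functions defined above:

user = create_user(google_user)           # put() has committed the entity
found = get_user_model_for(google_user)   # global index may still be stale,
                                          # so this can return None right away
fresh = db.get(user.key())                # key lookup: strongly consistent

Caching the most recent signup in the latest_signup module global papers over that window; a key-based lookup or an ancestor query would be the other standard fix, and the cache only helps when the follow-up request lands on the same instance, which is usually true immediately after signup.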
8afbd0fe7f4732d8484a2a41b91451ec220fc2f8 | tools/perf/benchmarks/memory.py | tools/perf/benchmarks/memory.py | from telemetry import test
from measurements import memory
class Memory(test.Test):
test = memory.Memory
page_set = 'page_sets/top_25.json'
class Reload(test.Test):
test = memory.Memory
page_set = 'page_sets/2012Q3.json'
| from telemetry import test
from measurements import memory
class MemoryTop25(test.Test):
test = memory.Memory
page_set = 'page_sets/top_25.json'
class Reload2012Q3(test.Test):
test = memory.Memory
page_set = 'page_sets/2012Q3.json'
| Rename Memory benchmark to avoid conflict with Memory measurement. | [telemetry] Rename Memory benchmark to avoid conflict with Memory measurement.
Quick fix for now, but I may need to reconsider how run_measurement resolves name conflicts.
BUG=263511
TEST=None.
[email protected]
Review URL: https://chromiumcodereview.appspot.com/19915008
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@213290 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,dednal/chromium.src,jaruba/chromium.src,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,fujunwei/chromium-crosswalk,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,Just-D/chromium-1,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,ChromiumWebApps/chromium,ondra-novak/chromium.src,anirudhSK/chromium,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,ltilve/chromium,patrickm/chromium.src,Fireblend/chromium-crosswalk | from telemetry import test
from measurements import memory
- class Memory(test.Test):
+ class MemoryTop25(test.Test):
test = memory.Memory
page_set = 'page_sets/top_25.json'
- class Reload(test.Test):
+ class Reload2012Q3(test.Test):
test = memory.Memory
page_set = 'page_sets/2012Q3.json'
| Rename Memory benchmark to avoid conflict with Memory measurement. | ## Code Before:
from telemetry import test
from measurements import memory
class Memory(test.Test):
test = memory.Memory
page_set = 'page_sets/top_25.json'
class Reload(test.Test):
test = memory.Memory
page_set = 'page_sets/2012Q3.json'
## Instruction:
Rename Memory benchmark to avoid conflict with Memory measurement.
## Code After:
from telemetry import test
from measurements import memory
class MemoryTop25(test.Test):
test = memory.Memory
page_set = 'page_sets/top_25.json'
class Reload2012Q3(test.Test):
test = memory.Memory
page_set = 'page_sets/2012Q3.json'
| # ... existing code ...
class MemoryTop25(test.Test):
test = memory.Memory
# ... modified code ...
class Reload2012Q3(test.Test):
test = memory.Memory
# ... rest of the code ... |
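The rename matters because Telemetry's run_measurement discovers test classes by name, so a benchmark class called Memory collides with the Memory measurement it wraps. A simplified, hypothetical sketch of why a name-keyed registry cannot hold both:

def discover_by_name(classes):
    registry = {}
    for cls in classes:
        # benchmarks.memory.Memory and measurements.memory.Memory would
        # both hash to the key 'Memory' here.
        if cls.__name__ in registry:
            raise KeyError('duplicate test name: %s' % cls.__name__)
        registry[cls.__name__] = cls
    return registry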
d2f02fa4171cb490df87a4426c78ffc37560c5d6 | equadratures/distributions/__init__.py | equadratures/distributions/__init__.py | import equadratures.distributions.template
import equadratures.distributions.gaussian
import equadratures.distributions.truncated_gaussian
import equadratures.distributions.chebyshev
import equadratures.distributions.cauchy
import equadratures.distributions.chisquared
import equadratures.distributions.beta
import equadratures.distributions.gamma
import equadratures.distributions.rayleigh
import equadratures.distributions.uniform
import equadratures.distributions.weibull
import equadratures.distributions.chi
import equadratures.distributions.pareto
import equadratures.distributions.gumbel
import equadratures.distributions.studentst
import equadratures.distributions.lognormal | import equadratures.distributions.template
import equadratures.distributions.gaussian
import equadratures.distributions.truncated_gaussian
import equadratures.distributions.chebyshev
import equadratures.distributions.cauchy
import equadratures.distributions.chisquared
import equadratures.distributions.beta
import equadratures.distributions.gamma
import equadratures.distributions.rayleigh
import equadratures.distributions.uniform
import equadratures.distributions.triangular
import equadratures.distributions.weibull
import equadratures.distributions.chi
import equadratures.distributions.pareto
import equadratures.distributions.gumbel
import equadratures.distributions.studentst
import equadratures.distributions.lognormal | Add tri distribution import to init. | Add tri distribution import to init.
| Python | lgpl-2.1 | Effective-Quadratures/Effective-Quadratures | import equadratures.distributions.template
import equadratures.distributions.gaussian
import equadratures.distributions.truncated_gaussian
import equadratures.distributions.chebyshev
import equadratures.distributions.cauchy
import equadratures.distributions.chisquared
import equadratures.distributions.beta
import equadratures.distributions.gamma
import equadratures.distributions.rayleigh
import equadratures.distributions.uniform
+ import equadratures.distributions.triangular
import equadratures.distributions.weibull
import equadratures.distributions.chi
import equadratures.distributions.pareto
import equadratures.distributions.gumbel
import equadratures.distributions.studentst
import equadratures.distributions.lognormal | Add tri distribution import to init. | ## Code Before:
import equadratures.distributions.template
import equadratures.distributions.gaussian
import equadratures.distributions.truncated_gaussian
import equadratures.distributions.chebyshev
import equadratures.distributions.cauchy
import equadratures.distributions.chisquared
import equadratures.distributions.beta
import equadratures.distributions.gamma
import equadratures.distributions.rayleigh
import equadratures.distributions.uniform
import equadratures.distributions.weibull
import equadratures.distributions.chi
import equadratures.distributions.pareto
import equadratures.distributions.gumbel
import equadratures.distributions.studentst
import equadratures.distributions.lognormal
## Instruction:
Add tri distribution import to init.
## Code After:
import equadratures.distributions.template
import equadratures.distributions.gaussian
import equadratures.distributions.truncated_gaussian
import equadratures.distributions.chebyshev
import equadratures.distributions.cauchy
import equadratures.distributions.chisquared
import equadratures.distributions.beta
import equadratures.distributions.gamma
import equadratures.distributions.rayleigh
import equadratures.distributions.uniform
import equadratures.distributions.triangular
import equadratures.distributions.weibull
import equadratures.distributions.chi
import equadratures.distributions.pareto
import equadratures.distributions.gumbel
import equadratures.distributions.studentst
import equadratures.distributions.lognormal | ...
import equadratures.distributions.uniform
import equadratures.distributions.triangular
import equadratures.distributions.weibull
... |
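For context on why the one-line change is needed: Python never imports package submodules implicitly, so until __init__.py (or some other module) imports it, the new module is not reachable as an attribute of the package. A small illustration of the general rule:

import equadratures.distributions as dist

dist.triangular  # works only because __init__.py imported the submodule
# Without that line this attribute access raises AttributeError even though
# triangular.py exists on disk; an explicit
# 'import equadratures.distributions.triangular' would still succeed.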
63a539ff4a3a832286136c40a74b1a8b3db1a5c0 | falcom/api/uri/api_querier.py | falcom/api/uri/api_querier.py | from time import sleep
class APIQuerier:
def __init__ (self, uri, url_opener, sleep_time=300, max_tries=0):
self.uri = uri
self.url_opener = url_opener
self.sleep_time = sleep_time
self.max_tries = max_tries
def get (self, **kwargs):
try:
return self.__open_uri(kwargs)
except ConnectionError:
sleep(self.sleep_time)
i = 1
while i != self.max_tries:
i += 1
try:
return self.__open_uri(kwargs)
except ConnectionError:
sleep(self.sleep_time)
return b""
@staticmethod
def utf8 (str_or_bytes):
if isinstance(str_or_bytes, bytes):
return str_or_bytes.decode("utf_8")
else:
return str_or_bytes
def __open_uri (self, kwargs):
with self.url_opener(self.uri(**kwargs)) as response:
result = self.utf8(response.read())
return result
| from time import sleep
class APIQuerier:
def __init__ (self, uri, url_opener, sleep_time=300, max_tries=0):
self.uri = uri
self.url_opener = url_opener
self.sleep_time = sleep_time
self.max_tries = max_tries
def get (self, **kwargs):
class SpecialNull: pass
result = SpecialNull
i = 1
while result is SpecialNull:
try:
result = self.__open_uri(kwargs)
except ConnectionError:
sleep(self.sleep_time)
if i == self.max_tries:
result = b""
else:
i += 1
return result
@staticmethod
def utf8 (str_or_bytes):
if isinstance(str_or_bytes, bytes):
return str_or_bytes.decode("utf_8")
else:
return str_or_bytes
def __open_uri (self, kwargs):
with self.url_opener(self.uri(**kwargs)) as response:
result = self.utf8(response.read())
return result
| Rewrite get() to be less repetitive but still stupid | Rewrite get() to be less repetitive but still stupid
| Python | bsd-3-clause | mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation | from time import sleep
class APIQuerier:
def __init__ (self, uri, url_opener, sleep_time=300, max_tries=0):
self.uri = uri
self.url_opener = url_opener
self.sleep_time = sleep_time
self.max_tries = max_tries
def get (self, **kwargs):
- try:
- return self.__open_uri(kwargs)
+ class SpecialNull: pass
+ result = SpecialNull
+ i = 1
+ while result is SpecialNull:
- except ConnectionError:
- sleep(self.sleep_time)
-
- i = 1
- while i != self.max_tries:
- i += 1
-
try:
- return self.__open_uri(kwargs)
+ result = self.__open_uri(kwargs)
except ConnectionError:
sleep(self.sleep_time)
+ if i == self.max_tries:
+ result = b""
+
+ else:
+ i += 1
+
- return b""
+ return result
@staticmethod
def utf8 (str_or_bytes):
if isinstance(str_or_bytes, bytes):
return str_or_bytes.decode("utf_8")
else:
return str_or_bytes
def __open_uri (self, kwargs):
with self.url_opener(self.uri(**kwargs)) as response:
result = self.utf8(response.read())
return result
| Rewrite get() to be less repetitive but still stupid | ## Code Before:
from time import sleep
class APIQuerier:
def __init__ (self, uri, url_opener, sleep_time=300, max_tries=0):
self.uri = uri
self.url_opener = url_opener
self.sleep_time = sleep_time
self.max_tries = max_tries
def get (self, **kwargs):
try:
return self.__open_uri(kwargs)
except ConnectionError:
sleep(self.sleep_time)
i = 1
while i != self.max_tries:
i += 1
try:
return self.__open_uri(kwargs)
except ConnectionError:
sleep(self.sleep_time)
return b""
@staticmethod
def utf8 (str_or_bytes):
if isinstance(str_or_bytes, bytes):
return str_or_bytes.decode("utf_8")
else:
return str_or_bytes
def __open_uri (self, kwargs):
with self.url_opener(self.uri(**kwargs)) as response:
result = self.utf8(response.read())
return result
## Instruction:
Rewrite get() to be less repetitive but still stupid
## Code After:
from time import sleep
class APIQuerier:
def __init__ (self, uri, url_opener, sleep_time=300, max_tries=0):
self.uri = uri
self.url_opener = url_opener
self.sleep_time = sleep_time
self.max_tries = max_tries
def get (self, **kwargs):
class SpecialNull: pass
result = SpecialNull
i = 1
while result is SpecialNull:
try:
result = self.__open_uri(kwargs)
except ConnectionError:
sleep(self.sleep_time)
if i == self.max_tries:
result = b""
else:
i += 1
return result
@staticmethod
def utf8 (str_or_bytes):
if isinstance(str_or_bytes, bytes):
return str_or_bytes.decode("utf_8")
else:
return str_or_bytes
def __open_uri (self, kwargs):
with self.url_opener(self.uri(**kwargs)) as response:
result = self.utf8(response.read())
return result
| ...
def get (self, **kwargs):
class SpecialNull: pass
result = SpecialNull
i = 1
while result is SpecialNull:
try:
result = self.__open_uri(kwargs)
...
if i == self.max_tries:
result = b""
else:
i += 1
return result
... |
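The SpecialNull class in the rewrite is a hand-rolled sentinel: a value guaranteed not to collide with any real response, including b'' and None. The more common Python idiom is a module-level object() compared with 'is'; a sketch of the same method in that style, everything else unchanged:

_UNSET = object()  # unique sentinel; an 'is' check can never match real data

class APIQuerier:
    ...

    def get(self, **kwargs):
        result = _UNSET
        attempt = 1
        while result is _UNSET:
            try:
                result = self.__open_uri(kwargs)
            except ConnectionError:
                sleep(self.sleep_time)
                if attempt == self.max_tries:
                    result = b""
                else:
                    attempt += 1
        return result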
9ae4ebf7e95cb301321911886cbb4041fae1eff6 | bookmarks/search_indexes.py | bookmarks/search_indexes.py | from haystack.indexes import CharField, DateTimeField, RealTimeSearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(RealTimeSearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
| from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
| Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`. | Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`.
| Python | mit | incuna/incuna-bookmarks,incuna/incuna-bookmarks | - from haystack.indexes import CharField, DateTimeField, RealTimeSearchIndex, Indexable
+ from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
from models import Bookmark
- class BookmarkIndex(RealTimeSearchIndex, Indexable):
+ class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
| Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`. | ## Code Before:
from haystack.indexes import CharField, DateTimeField, RealTimeSearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(RealTimeSearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
## Instruction:
Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`.
## Code After:
from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
| # ... existing code ...
from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
# ... modified code ...
class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
# ... rest of the code ... |
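Context for the swap: Haystack 2.x removed RealTimeSearchIndex and moved the keep-the-index-fresh behaviour out of the index class and into a signal processor selected in settings. Assuming Haystack 2.x, the equivalent real-time updates are restored with a single setting:

# settings.py
HAYSTACK_SIGNAL_PROCESSOR = 'haystack.signals.RealtimeSignalProcessor'

Without it, a plain SearchIndex is only refreshed by the update_index management command, so this rename silently changes when new bookmarks become searchable.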
4f46fe7abf5efcd93bc161f2cfccc58df4ab1ee4 | whats_fresh/whats_fresh_api/tests/views/entry/test_list_preparations.py | whats_fresh/whats_fresh_api/tests/views/entry/test_list_preparations.py | from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
items[preparation.id-1]['description'], preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
| from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
items[preparation.id-1]['description'],
preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
| Rewrite preparations list test to get ID from URL | Rewrite preparations list test to get ID from URL
| Python | apache-2.0 | iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api | from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
- items[preparation.id-1]['description'], preparation.description)
+ items[preparation.id-1]['description'],
+ preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
+
+
+
+ | Rewrite preparations list test to get ID from URL | ## Code Before:
from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
items[preparation.id-1]['description'], preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
## Instruction:
Rewrite preparations list test to get ID from URL
## Code After:
from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
items[preparation.id-1]['description'],
preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
| ...
self.assertEqual(
items[preparation.id-1]['description'],
preparation.description)
self.assertEqual(
...
reverse('edit-preparation', kwargs={'id': preparation.id}))
... |
3b28a1fa47d4e2339f2219eaf688b88b5901afea | migrations/versions/0074_update_sms_rate.py | migrations/versions/0074_update_sms_rate.py |
import uuid
revision = '0074_update_sms_rate'
down_revision = '0072_add_dvla_orgs'
from alembic import op
def upgrade():
op.get_bind()
op.execute("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES ('{}', '2017-04-01 00:00:00', 1.58, "
"(SELECT id FROM provider_details WHERE identifier = 'mmg'))".format(uuid.uuid4())
)
def downgrade():
op.get_bind()
op.execute("DELETE FROM provider_rates where valid_from = '2017-04-01 00:00:00' "
"and provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg')") |
import uuid
revision = '0074_update_sms_rate'
down_revision = '0073_add_international_sms_flag'
from alembic import op
def upgrade():
op.get_bind()
op.execute("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES ('{}', '2017-04-01 00:00:00', 1.58, "
"(SELECT id FROM provider_details WHERE identifier = 'mmg'))".format(uuid.uuid4())
)
def downgrade():
op.get_bind()
op.execute("DELETE FROM provider_rates where valid_from = '2017-04-01 00:00:00' "
"and provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg')") | Fix db migration merge conflicts | Fix db migration merge conflicts
| Python | mit | alphagov/notifications-api,alphagov/notifications-api |
import uuid
revision = '0074_update_sms_rate'
- down_revision = '0072_add_dvla_orgs'
+ down_revision = '0073_add_international_sms_flag'
from alembic import op
def upgrade():
op.get_bind()
op.execute("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES ('{}', '2017-04-01 00:00:00', 1.58, "
"(SELECT id FROM provider_details WHERE identifier = 'mmg'))".format(uuid.uuid4())
)
def downgrade():
op.get_bind()
op.execute("DELETE FROM provider_rates where valid_from = '2017-04-01 00:00:00' "
"and provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg')") | Fix db migration merge conflicts | ## Code Before:
import uuid
revision = '0074_update_sms_rate'
down_revision = '0072_add_dvla_orgs'
from alembic import op
def upgrade():
op.get_bind()
op.execute("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES ('{}', '2017-04-01 00:00:00', 1.58, "
"(SELECT id FROM provider_details WHERE identifier = 'mmg'))".format(uuid.uuid4())
)
def downgrade():
op.get_bind()
op.execute("DELETE FROM provider_rates where valid_from = '2017-04-01 00:00:00' "
"and provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg')")
## Instruction:
Fix db migration merge conflicts
## Code After:
import uuid
revision = '0074_update_sms_rate'
down_revision = '0073_add_international_sms_flag'
from alembic import op
def upgrade():
op.get_bind()
op.execute("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES ('{}', '2017-04-01 00:00:00', 1.58, "
"(SELECT id FROM provider_details WHERE identifier = 'mmg'))".format(uuid.uuid4())
)
def downgrade():
op.get_bind()
op.execute("DELETE FROM provider_rates where valid_from = '2017-04-01 00:00:00' "
"and provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg')") | // ... existing code ...
revision = '0074_update_sms_rate'
down_revision = '0073_add_international_sms_flag'
// ... rest of the code ... |
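The merge conflict being fixed here lives in Alembic's revision graph: each migration's down_revision must point at the previous head, and two files sharing one parent create two heads, which alembic refuses to upgrade past. In sketch form:

# Broken after the merge -- two heads branch off 0072:
#   0072_add_dvla_orgs <- 0073_add_international_sms_flag
#   0072_add_dvla_orgs <- 0074_update_sms_rate
#
# Fixed -- a single linear chain again:
#   0072_add_dvla_orgs <- 0073_add_international_sms_flag <- 0074_update_sms_rate

revision = '0074_update_sms_rate'
down_revision = '0073_add_international_sms_flag'

The 'alembic heads' command lists the competing heads, and 'alembic merge' is the alternative fix when both branches should be preserved rather than re-parented.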
786bc416ca00c7021f5881e459d2634e8fcd8458 | src/vdb/src/_vdb/common.py | src/vdb/src/_vdb/common.py |
from typing import Collection, Mapping, Union
import ipaddress
SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface, dict, list, None]
# Convenience types
ResultDict = dict[str, SupportedTypes] # A result in dict form
ResultsDict = list[ResultDict] # A list of results
ValueParam = Mapping[str, SupportedTypes] # A parameter suitable for passing db values
WhereParam = Mapping[str, SupportedTypes] # A parameter suitable for WHERE
ParamDict = dict[str, SupportedTypes] # A concrete dict for values
OrderParam = Collection[str] # A parameter suitable for ORDER BY
class VDBError(Exception):
pass
|
from typing import Collection, Mapping, Union
import ipaddress
SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface,
ipaddress.IPv4Network, ipaddress.IPv6Network, dict, list, None]
# Convenience types
ResultDict = dict[str, SupportedTypes] # A result in dict form
ResultsDict = list[ResultDict] # A list of results
ValueParam = Mapping[str, SupportedTypes] # A parameter suitable for passing db values
WhereParam = Mapping[str, SupportedTypes] # A parameter suitable for WHERE
ParamDict = dict[str, SupportedTypes] # A concrete dict for values
OrderParam = Collection[str] # A parameter suitable for ORDER BY
class VDBError(Exception):
pass
| Add ipaddress.IPv[46]Network to the supported types | Add ipaddress.IPv[46]Network to the supported types
| Python | apache-2.0 | sharhalakis/vdns |
from typing import Collection, Mapping, Union
import ipaddress
- SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface, dict, list, None]
+ SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface,
+ ipaddress.IPv4Network, ipaddress.IPv6Network, dict, list, None]
# Convenience types
ResultDict = dict[str, SupportedTypes] # A result in dict form
ResultsDict = list[ResultDict] # A list of results
ValueParam = Mapping[str, SupportedTypes] # A parameter suitable for passing db values
WhereParam = Mapping[str, SupportedTypes] # A parameter suitable for WHERE
ParamDict = dict[str, SupportedTypes] # A concrete dict for values
OrderParam = Collection[str] # A parameter suitable for ORDER BY
class VDBError(Exception):
pass
| Add ipaddress.IPv[46]Network to the supported types | ## Code Before:
from typing import Collection, Mapping, Union
import ipaddress
SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface, dict, list, None]
# Convenience types
ResultDict = dict[str, SupportedTypes] # A result in dict form
ResultsDict = list[ResultDict] # A list of results
ValueParam = Mapping[str, SupportedTypes] # A parameter suitable for passing db values
WhereParam = Mapping[str, SupportedTypes] # A parameter suitable for WHERE
ParamDict = dict[str, SupportedTypes] # A concrete dict for values
OrderParam = Collection[str] # A parameter suitable for ORDER BY
class VDBError(Exception):
pass
## Instruction:
Add ipaddress.IPv[46]Network to the supported types
## Code After:
from typing import Collection, Mapping, Union
import ipaddress
SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface,
ipaddress.IPv4Network, ipaddress.IPv6Network, dict, list, None]
# Convenience types
ResultDict = dict[str, SupportedTypes] # A result in dict form
ResultsDict = list[ResultDict] # A list of results
ValueParam = Mapping[str, SupportedTypes] # A parameter suitable for passing db values
WhereParam = Mapping[str, SupportedTypes] # A parameter suitable for WHERE
ParamDict = dict[str, SupportedTypes] # A concrete dict for values
OrderParam = Collection[str] # A parameter suitable for ORDER BY
class VDBError(Exception):
pass
| // ... existing code ...
SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface,
ipaddress.IPv4Network, ipaddress.IPv6Network, dict, list, None]
// ... rest of the code ... |
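Since the new types are easy to conflate with the interface types already in the union: an IPv4Interface/IPv6Interface carries a host address together with its prefix, while an IPv4Network/IPv6Network is the subnet itself, whose host bits must be zero. A quick stdlib illustration:

import ipaddress

iface = ipaddress.ip_interface('192.0.2.7/24')  # IPv4Interface: host + prefix
net = ipaddress.ip_network('192.0.2.0/24')      # IPv4Network: the subnet

assert iface.network == net  # an interface knows its containing network
# ipaddress.ip_network('192.0.2.7/24') raises ValueError ('has host bits set')
# unless strict=False is passed.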
f7dd16abcab5d5e0134083267f21672de8e3d5e1 | hc/front/context_processors.py | hc/front/context_processors.py | from django.conf import settings
def branding(request):
return {
"site_name": settings.SITE_NAME,
"site_root": settings.SITE_ROOT,
"site_logo_url": settings.SITE_LOGO_URL,
}
| from django.conf import settings
def branding(request):
return {
"site_name": settings.SITE_NAME,
"site_logo_url": settings.SITE_LOGO_URL,
}
| Remove site_root from template context, it's never used | Remove site_root from template context, it's never used
| Python | bsd-3-clause | iphoting/healthchecks,iphoting/healthchecks,healthchecks/healthchecks,healthchecks/healthchecks,healthchecks/healthchecks,iphoting/healthchecks,healthchecks/healthchecks,iphoting/healthchecks | from django.conf import settings
def branding(request):
return {
"site_name": settings.SITE_NAME,
- "site_root": settings.SITE_ROOT,
"site_logo_url": settings.SITE_LOGO_URL,
}
| Remove site_root from template context, it's never used | ## Code Before:
from django.conf import settings
def branding(request):
return {
"site_name": settings.SITE_NAME,
"site_root": settings.SITE_ROOT,
"site_logo_url": settings.SITE_LOGO_URL,
}
## Instruction:
Remove site_root from template context, it's never used
## Code After:
from django.conf import settings
def branding(request):
return {
"site_name": settings.SITE_NAME,
"site_logo_url": settings.SITE_LOGO_URL,
}
| # ... existing code ...
"site_name": settings.SITE_NAME,
"site_logo_url": settings.SITE_LOGO_URL,
# ... rest of the code ... |
63fe76240a819a0211aab566c1cd36b31c49c5d9 | freepacktbook/pushover.py | freepacktbook/pushover.py | import json
import requests
class PushoverNotification(object):
def __init__(self, pushover_user, pushover_token):
self.pushover_api = 'https://api.pushover.net/1/messages.json'
self.pushover_user = pushover_user
self.pushover_token = pushover_token
def get_image_content(self, image_url):
return requests.get(image_url, stream=True).content
def notify(self, data):
if not all([self.pushover_user, self.pushover_token]):
return
payload = {
'user': self.pushover_user,
'token': self.pushover_token,
'title': data['title'],
'url': data['book_url'],
'url_title': data['title'],
'message': 'Today\'s Free eBook\n%s\n%s' % data['title'], data['description']
}
try:
image_content = get_image_content(data['image_url'].replace(' ', '%20'))
except Exception:
files = None
else:
files = {'attachment': ('cover.jpg', image_content)}
requests.post(
self.pushover_api,
data=payload,
files={
'attachment': ('cover.jpg', image_content)
}
)
| import json
import requests
class PushoverNotification(object):
def __init__(self, pushover_user, pushover_token):
self.pushover_api = 'https://api.pushover.net/1/messages.json'
self.pushover_user = pushover_user
self.pushover_token = pushover_token
def get_image_content(self, image_url):
return requests.get(image_url, stream=True).content
def notify(self, data):
if not all([self.pushover_user, self.pushover_token]):
return
payload = {
'user': self.pushover_user,
'token': self.pushover_token,
'title': data['title'],
'url': data['book_url'],
'url_title': data['title'],
'message': 'Today\'s Free eBook\n%s\n%s' % (
data['title'], data['description'])
}
try:
image_content = get_image_content(data['image_url'].replace(' ', '%20'))
except Exception:
files = None
else:
files = {'attachment': ('cover.jpg', image_content)}
requests.post(
self.pushover_api,
data=payload,
files=files
)
| Fix syntax error and reuse variable | Fix syntax error and reuse variable
| Python | mit | bogdal/freepacktbook | import json
import requests
class PushoverNotification(object):
def __init__(self, pushover_user, pushover_token):
self.pushover_api = 'https://api.pushover.net/1/messages.json'
self.pushover_user = pushover_user
self.pushover_token = pushover_token
def get_image_content(self, image_url):
return requests.get(image_url, stream=True).content
def notify(self, data):
if not all([self.pushover_user, self.pushover_token]):
return
payload = {
'user': self.pushover_user,
'token': self.pushover_token,
'title': data['title'],
'url': data['book_url'],
'url_title': data['title'],
- 'message': 'Today\'s Free eBook\n%s\n%s' % data['title'], data['description']
+ 'message': 'Today\'s Free eBook\n%s\n%s' % (
+ data['title'], data['description'])
}
try:
image_content = get_image_content(data['image_url'].replace(' ', '%20'))
except Exception:
files = None
else:
files = {'attachment': ('cover.jpg', image_content)}
requests.post(
self.pushover_api,
data=payload,
- files={
+ files=files
- 'attachment': ('cover.jpg', image_content)
- }
)
| Fix syntax error and reuse variable | ## Code Before:
import json
import requests
class PushoverNotification(object):
def __init__(self, pushover_user, pushover_token):
self.pushover_api = 'https://api.pushover.net/1/messages.json'
self.pushover_user = pushover_user
self.pushover_token = pushover_token
def get_image_content(self, image_url):
return requests.get(image_url, stream=True).content
def notify(self, data):
if not all([self.pushover_user, self.pushover_token]):
return
payload = {
'user': self.pushover_user,
'token': self.pushover_token,
'title': data['title'],
'url': data['book_url'],
'url_title': data['title'],
'message': 'Today\'s Free eBook\n%s\n%s' % data['title'], data['description']
}
try:
image_content = get_image_content(data['image_url'].replace(' ', '%20'))
except Exception:
files = None
else:
files = {'attachment': ('cover.jpg', image_content)}
requests.post(
self.pushover_api,
data=payload,
files={
'attachment': ('cover.jpg', image_content)
}
)
## Instruction:
Fix syntax error and reuse variable
## Code After:
import json
import requests
class PushoverNotification(object):
def __init__(self, pushover_user, pushover_token):
self.pushover_api = 'https://api.pushover.net/1/messages.json'
self.pushover_user = pushover_user
self.pushover_token = pushover_token
def get_image_content(self, image_url):
return requests.get(image_url, stream=True).content
def notify(self, data):
if not all([self.pushover_user, self.pushover_token]):
return
payload = {
'user': self.pushover_user,
'token': self.pushover_token,
'title': data['title'],
'url': data['book_url'],
'url_title': data['title'],
'message': 'Today\'s Free eBook\n%s\n%s' % (
data['title'], data['description'])
}
try:
image_content = get_image_content(data['image_url'].replace(' ', '%20'))
except Exception:
files = None
else:
files = {'attachment': ('cover.jpg', image_content)}
requests.post(
self.pushover_api,
data=payload,
files=files
)
| ...
'url_title': data['title'],
'message': 'Today\'s Free eBook\n%s\n%s' % (
data['title'], data['description'])
}
...
data=payload,
files=files
)
... |
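The 'syntax error' in the subject is the classic %-formatting pitfall: with two placeholders the right-hand side must be a single tuple. Outside a dict literal the bare comma merely binds wrong, so % runs with one argument and raises TypeError; inside a dict literal, as here, the stray trailing expression has no key and is invalid syntax outright. A minimal illustration:

title, description = 'Some Book', 'A blurb'

ok = 'Today\'s Free eBook\n%s\n%s' % (title, description)  # one tuple, two slots

# bad = 'Today\'s Free eBook\n%s\n%s' % title, description
# % binds tighter than the comma, so the format runs with only 'title' and
# raises TypeError: not enough arguments for format string.

The 'reuse variable' half of the change simply passes the prebuilt files dict through; requests treats files=None as 'no multipart upload', so the fallback branch needs no special casing.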
c28de968845f98c6590784df1fe5beff7b3d021e | workshops/templatetags/training_progress.py | workshops/templatetags/training_progress.py | from django import template
from django.utils.safestring import mark_safe
from workshops.models import TrainingProgress
register = template.Library()
@register.simple_tag
def progress_label(progress):
assert isinstance(progress, TrainingProgress)
if progress.discarded:
additional_label = 'default'
else:
switch = {
'n': 'warning',
'f': 'danger',
'p': 'success',
}
additional_label = switch[progress.state]
fmt = 'label label-{}'.format(additional_label)
return mark_safe(fmt)
@register.simple_tag
def progress_description(progress):
assert isinstance(progress, TrainingProgress)
text = '{discarded}{state} {type}<br />{evaluated_by}<br />on {day}.{notes}'.format(
discarded='discarded ' if progress.discarded else '',
state=progress.get_state_display(),
type=progress.requirement,
evaluated_by=('evaluated by {}'.format(
progress.evaluated_by.full_name)
if progress.evaluated_by is not None else 'submitted'),
day=progress.created_at.strftime('%A %d %B %Y at %H:%M'),
notes='<br />Notes: {}'.format(progress.notes) if progress.notes else '',
)
text = text[0].upper() + text[1:]
return mark_safe(text)
| from django import template
from django.template.defaultfilters import escape
from django.utils.safestring import mark_safe
from workshops.models import TrainingProgress
register = template.Library()
@register.simple_tag
def progress_label(progress):
assert isinstance(progress, TrainingProgress)
if progress.discarded:
additional_label = 'default'
else:
switch = {
'n': 'warning',
'f': 'danger',
'p': 'success',
}
additional_label = switch[progress.state]
fmt = 'label label-{}'.format(additional_label)
return mark_safe(fmt)
@register.simple_tag
def progress_description(progress):
assert isinstance(progress, TrainingProgress)
text = '{discarded}{state} {type}<br />{evaluated_by}<br />on {day}.{notes}'.format(
discarded='discarded ' if progress.discarded else '',
state=progress.get_state_display(),
type=progress.requirement,
evaluated_by=('evaluated by {}'.format(
progress.evaluated_by.full_name)
if progress.evaluated_by is not None else 'submitted'),
day=progress.created_at.strftime('%A %d %B %Y at %H:%M'),
notes='<br />Notes: {}'.format(escape(progress.notes)) if progress.notes else '',
)
text = text[0].upper() + text[1:]
return mark_safe(text)
| Fix unescaped content in training progress description templatetag | Fix unescaped content in training progress description templatetag
This template tag was using content from entry notes directly. For some
users this messed up the display of the label in the templates.
| Python | mit | pbanaszkiewicz/amy,pbanaszkiewicz/amy,swcarpentry/amy,swcarpentry/amy,pbanaszkiewicz/amy,swcarpentry/amy | from django import template
+ from django.template.defaultfilters import escape
from django.utils.safestring import mark_safe
from workshops.models import TrainingProgress
register = template.Library()
@register.simple_tag
def progress_label(progress):
assert isinstance(progress, TrainingProgress)
if progress.discarded:
additional_label = 'default'
else:
switch = {
'n': 'warning',
'f': 'danger',
'p': 'success',
}
additional_label = switch[progress.state]
fmt = 'label label-{}'.format(additional_label)
return mark_safe(fmt)
@register.simple_tag
def progress_description(progress):
assert isinstance(progress, TrainingProgress)
text = '{discarded}{state} {type}<br />{evaluated_by}<br />on {day}.{notes}'.format(
discarded='discarded ' if progress.discarded else '',
state=progress.get_state_display(),
type=progress.requirement,
evaluated_by=('evaluated by {}'.format(
progress.evaluated_by.full_name)
if progress.evaluated_by is not None else 'submitted'),
day=progress.created_at.strftime('%A %d %B %Y at %H:%M'),
- notes='<br />Notes: {}'.format(progress.notes) if progress.notes else '',
+ notes='<br />Notes: {}'.format(escape(progress.notes)) if progress.notes else '',
)
text = text[0].upper() + text[1:]
return mark_safe(text)
| Fix unescaped content in training progress description templatetag | ## Code Before:
from django import template
from django.utils.safestring import mark_safe
from workshops.models import TrainingProgress
register = template.Library()
@register.simple_tag
def progress_label(progress):
assert isinstance(progress, TrainingProgress)
if progress.discarded:
additional_label = 'default'
else:
switch = {
'n': 'warning',
'f': 'danger',
'p': 'success',
}
additional_label = switch[progress.state]
fmt = 'label label-{}'.format(additional_label)
return mark_safe(fmt)
@register.simple_tag
def progress_description(progress):
assert isinstance(progress, TrainingProgress)
text = '{discarded}{state} {type}<br />{evaluated_by}<br />on {day}.{notes}'.format(
discarded='discarded ' if progress.discarded else '',
state=progress.get_state_display(),
type=progress.requirement,
evaluated_by=('evaluated by {}'.format(
progress.evaluated_by.full_name)
if progress.evaluated_by is not None else 'submitted'),
day=progress.created_at.strftime('%A %d %B %Y at %H:%M'),
notes='<br />Notes: {}'.format(progress.notes) if progress.notes else '',
)
text = text[0].upper() + text[1:]
return mark_safe(text)
## Instruction:
Fix unescaped content in training progress description templatetag
## Code After:
from django import template
from django.template.defaultfilters import escape
from django.utils.safestring import mark_safe
from workshops.models import TrainingProgress
register = template.Library()
@register.simple_tag
def progress_label(progress):
assert isinstance(progress, TrainingProgress)
if progress.discarded:
additional_label = 'default'
else:
switch = {
'n': 'warning',
'f': 'danger',
'p': 'success',
}
additional_label = switch[progress.state]
fmt = 'label label-{}'.format(additional_label)
return mark_safe(fmt)
@register.simple_tag
def progress_description(progress):
assert isinstance(progress, TrainingProgress)
text = '{discarded}{state} {type}<br />{evaluated_by}<br />on {day}.{notes}'.format(
discarded='discarded ' if progress.discarded else '',
state=progress.get_state_display(),
type=progress.requirement,
evaluated_by=('evaluated by {}'.format(
progress.evaluated_by.full_name)
if progress.evaluated_by is not None else 'submitted'),
day=progress.created_at.strftime('%A %d %B %Y at %H:%M'),
notes='<br />Notes: {}'.format(escape(progress.notes)) if progress.notes else '',
)
text = text[0].upper() + text[1:]
return mark_safe(text)
| # ... existing code ...
from django import template
from django.template.defaultfilters import escape
from django.utils.safestring import mark_safe
# ... modified code ...
day=progress.created_at.strftime('%A %d %B %Y at %H:%M'),
notes='<br />Notes: {}'.format(escape(progress.notes)) if progress.notes else '',
)
# ... rest of the code ... |
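escape() closes the injection hole, but Django's format_html (available since Django 1.5) is the more idiomatic fix: it escapes every interpolated argument and returns a safe string in one step, with no hand-rolled mark_safe needed. A sketch of the notes handling in that style, same field as above:

from django.utils.html import format_html

# format_html escapes progress.notes automatically, while the literal
# <br /> markup in the format string is preserved.
notes = format_html('<br />Notes: {}', progress.notes) if progress.notes else ''

Nested format_html results pass through unescaped (they are already SafeString), so the whole description could be assembled the same way instead of str.format plus mark_safe.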
dc6f82bce52419c7c2153a33be15f3d811161d1d | flask_app.py | flask_app.py | from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/api/')
@cache.cached(timeout=3600)
def nbis_list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
def nbis_api_list_restaurants():
return jsonify({'restaurants': main.list_restaurants()})
@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
def nbis_api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
| from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/api/')
@cache.cached(timeout=3600)
def list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
def list_restaurants():
return jsonify({'restaurants': [entry['identifier'] for entry in main.list_restaurants()]})
@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
def get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
| Return a list of identifiers instead of almost all info | Return a list of identifiers instead of almost all info
| Python | bsd-3-clause | talavis/kimenu | from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/api/')
@cache.cached(timeout=3600)
- def nbis_list_entities():
+ def list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
- def nbis_api_list_restaurants():
+ def list_restaurants():
- return jsonify({'restaurants': main.list_restaurants()})
+ return jsonify({'restaurants': [entry['identifier'] for entry in main.list_restaurants()]})
@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
- def nbis_api_get_restaurant(name):
+ def get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
| Return a list of identifiers instead of almost all info | ## Code Before:
from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/api/')
@cache.cached(timeout=3600)
def nbis_list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
def nbis_api_list_restaurants():
return jsonify({'restaurants': main.list_restaurants()})
@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
def nbis_api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
## Instruction:
Return a list of identifiers instead of almost all info
## Code After:
from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/api/')
@cache.cached(timeout=3600)
def list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
def list_restaurants():
return jsonify({'restaurants': [entry['identifier'] for entry in main.list_restaurants()]})
@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
def get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
| # ... existing code ...
@cache.cached(timeout=3600)
def list_entities():
return jsonify({'entities': ['restaurant']})
# ... modified code ...
@cache.cached(timeout=3600)
def list_restaurants():
return jsonify({'restaurants': [entry['identifier'] for entry in main.list_restaurants()]})
...
@cache.cached(timeout=3600)
def get_restaurant(name):
data = main.get_restaurant(name)
# ... rest of the code ... |
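On the wire, the change trims each listing entry down to its identifier, leaving the details behind the per-restaurant endpoint. With hypothetical data, the payloads look like:

# Before: full objects leaked into the listing
# {"restaurants": [{"identifier": "glada", "name": "Glada", ...}, ...]}
#
# After: identifiers only
# {"restaurants": ["glada", "haga"]}

resp = app.test_client().get('/api/restaurant/')
print(resp.get_json())

This also quietly assumes every row from main.list_restaurants() carries an 'identifier' key; a missing key would now surface as a 500 from the list endpoint.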
7d7043560f26c31346472b6452e8b191729c54a3 | offsite_storage/settings.py | offsite_storage/settings.py | from django.conf import settings
AWS_ACCESS_KEY_ID = getattr(settings, 'AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = getattr(settings, 'AWS_SECRET_ACCESS_KEY')
AWS_STATIC_BUCKET_NAME = getattr(settings, 'AWS_STATIC_BUCKET_NAME')
AWS_MEDIA_ACCESS_KEY_ID = getattr(
settings, 'AWS_MEDIA_ACCESS_KEY_ID', AWS_ACCESS_KEY_ID)
AWS_MEDIA_SECRET_ACCESS_KEY = getattr(
settings, 'AWS_MEDIA_SECRET_ACCESS_KEY', AWS_SECRET_ACCESS_KEY)
AWS_MEDIA_BUCKET_NAME = getattr(settings, 'AWS_MEDIA_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = getattr(
settings, 'AWS_S3_ENDPOINT_URL', 's3.amazonaws.com')
AWS_HOST_URL = 'https://%(bucket_name)s.s3.amazonaws.com/'
AWS_POLICY = 'public-read'
IGNORE_FILES = getattr(settings, 'OFFSITE_STORAGE_IGNORE_FILES',
['*.less', '*.scss', '*.txt', 'components'])
| from django.conf import settings
AWS_ACCESS_KEY_ID = getattr(settings, 'AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = getattr(settings, 'AWS_SECRET_ACCESS_KEY')
AWS_STATIC_BUCKET_NAME = getattr(settings, 'AWS_STATIC_BUCKET_NAME')
AWS_MEDIA_ACCESS_KEY_ID = getattr(
settings, 'AWS_MEDIA_ACCESS_KEY_ID', AWS_ACCESS_KEY_ID)
AWS_MEDIA_SECRET_ACCESS_KEY = getattr(
settings, 'AWS_MEDIA_SECRET_ACCESS_KEY', AWS_SECRET_ACCESS_KEY)
AWS_MEDIA_BUCKET_NAME = getattr(settings, 'AWS_MEDIA_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = getattr(
settings, 'AWS_S3_ENDPOINT_URL', 's3.amazonaws.com')
AWS_HOST_URL = 'https://%%(bucket_name)s.%s/' % AWS_S3_ENDPOINT_URL
AWS_POLICY = 'public-read'
IGNORE_FILES = getattr(settings, 'OFFSITE_STORAGE_IGNORE_FILES',
['*.less', '*.scss', '*.txt', 'components'])
| Use custom endpoint url in AWS_HOST_URL variable | Use custom endpoint url in AWS_HOST_URL variable
| Python | bsd-3-clause | mirumee/django-offsite-storage | from django.conf import settings
AWS_ACCESS_KEY_ID = getattr(settings, 'AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = getattr(settings, 'AWS_SECRET_ACCESS_KEY')
AWS_STATIC_BUCKET_NAME = getattr(settings, 'AWS_STATIC_BUCKET_NAME')
AWS_MEDIA_ACCESS_KEY_ID = getattr(
settings, 'AWS_MEDIA_ACCESS_KEY_ID', AWS_ACCESS_KEY_ID)
AWS_MEDIA_SECRET_ACCESS_KEY = getattr(
settings, 'AWS_MEDIA_SECRET_ACCESS_KEY', AWS_SECRET_ACCESS_KEY)
AWS_MEDIA_BUCKET_NAME = getattr(settings, 'AWS_MEDIA_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = getattr(
settings, 'AWS_S3_ENDPOINT_URL', 's3.amazonaws.com')
- AWS_HOST_URL = 'https://%(bucket_name)s.s3.amazonaws.com/'
+ AWS_HOST_URL = 'https://%%(bucket_name)s.%s/' % AWS_S3_ENDPOINT_URL
AWS_POLICY = 'public-read'
IGNORE_FILES = getattr(settings, 'OFFSITE_STORAGE_IGNORE_FILES',
['*.less', '*.scss', '*.txt', 'components'])
| Use custom endpoint url in AWS_HOST_URL variable | ## Code Before:
from django.conf import settings
AWS_ACCESS_KEY_ID = getattr(settings, 'AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = getattr(settings, 'AWS_SECRET_ACCESS_KEY')
AWS_STATIC_BUCKET_NAME = getattr(settings, 'AWS_STATIC_BUCKET_NAME')
AWS_MEDIA_ACCESS_KEY_ID = getattr(
settings, 'AWS_MEDIA_ACCESS_KEY_ID', AWS_ACCESS_KEY_ID)
AWS_MEDIA_SECRET_ACCESS_KEY = getattr(
settings, 'AWS_MEDIA_SECRET_ACCESS_KEY', AWS_SECRET_ACCESS_KEY)
AWS_MEDIA_BUCKET_NAME = getattr(settings, 'AWS_MEDIA_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = getattr(
settings, 'AWS_S3_ENDPOINT_URL', 's3.amazonaws.com')
AWS_HOST_URL = 'https://%(bucket_name)s.s3.amazonaws.com/'
AWS_POLICY = 'public-read'
IGNORE_FILES = getattr(settings, 'OFFSITE_STORAGE_IGNORE_FILES',
['*.less', '*.scss', '*.txt', 'components'])
## Instruction:
Use custom endpoint url in AWS_HOST_URL variable
## Code After:
from django.conf import settings
AWS_ACCESS_KEY_ID = getattr(settings, 'AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = getattr(settings, 'AWS_SECRET_ACCESS_KEY')
AWS_STATIC_BUCKET_NAME = getattr(settings, 'AWS_STATIC_BUCKET_NAME')
AWS_MEDIA_ACCESS_KEY_ID = getattr(
settings, 'AWS_MEDIA_ACCESS_KEY_ID', AWS_ACCESS_KEY_ID)
AWS_MEDIA_SECRET_ACCESS_KEY = getattr(
settings, 'AWS_MEDIA_SECRET_ACCESS_KEY', AWS_SECRET_ACCESS_KEY)
AWS_MEDIA_BUCKET_NAME = getattr(settings, 'AWS_MEDIA_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = getattr(
settings, 'AWS_S3_ENDPOINT_URL', 's3.amazonaws.com')
AWS_HOST_URL = 'https://%%(bucket_name)s.%s/' % AWS_S3_ENDPOINT_URL
AWS_POLICY = 'public-read'
IGNORE_FILES = getattr(settings, 'OFFSITE_STORAGE_IGNORE_FILES',
['*.less', '*.scss', '*.txt', 'components'])
| ...
settings, 'AWS_S3_ENDPOINT_URL', 's3.amazonaws.com')
AWS_HOST_URL = 'https://%%(bucket_name)s.%s/' % AWS_S3_ENDPOINT_URL
AWS_POLICY = 'public-read'
... |
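The doubled percent sign is the point of the new AWS_HOST_URL: the first interpolation consumes %s and collapses %% to a literal %, leaving a %(bucket_name)s placeholder for later formatting. A runnable sketch with an illustrative endpoint value:

    AWS_S3_ENDPOINT_URL = 's3.eu-central-1.amazonaws.com'  # illustrative value
    AWS_HOST_URL = 'https://%%(bucket_name)s.%s/' % AWS_S3_ENDPOINT_URL
    assert AWS_HOST_URL == 'https://%(bucket_name)s.s3.eu-central-1.amazonaws.com/'
    print(AWS_HOST_URL % {'bucket_name': 'my-bucket'})
    # https://my-bucket.s3.eu-central-1.amazonaws.com/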
933fcfff7a9c63b03e13b0bb7756f0530603c556 | series.py | series.py | """Read and print an integer series."""
import sys
def read_series(filename):
f = open(filename, mode='rt', encoding='utf-8')
series = []
for line in f:
a = int(line.strip())
series.append(a)
f.close()
return series
def main(filename):
print(read_series(filename))
if __name__ == '__main__':
main(sys.argv[1])
| """Read and print an integer series."""
import sys
def read_series(filename):
try:
f = open(filename, mode='rt', encoding='utf-8')
return [int(line.strip()) for line in f]
finally:
f.close()
def main(filename):
print(read_series(filename))
if __name__ == '__main__':
main(sys.argv[1])
| Refactor to ensure closing and also use list comprehension | Refactor to ensure closing and also use list comprehension
| Python | mit | kentoj/python-fundamentals | """Read and print an integer series."""
import sys
def read_series(filename):
+ try:
- f = open(filename, mode='rt', encoding='utf-8')
+ f = open(filename, mode='rt', encoding='utf-8')
+ return [int(line.strip()) for line in f]
+ finally:
- series = []
- for line in f:
- a = int(line.strip())
- series.append(a)
- f.close()
+ f.close()
- return series
def main(filename):
print(read_series(filename))
if __name__ == '__main__':
main(sys.argv[1])
| Refactor to ensure closing and also use list comprehension | ## Code Before:
"""Read and print an integer series."""
import sys
def read_series(filename):
f = open(filename, mode='rt', encoding='utf-8')
series = []
for line in f:
a = int(line.strip())
series.append(a)
f.close()
return series
def main(filename):
print(read_series(filename))
if __name__ == '__main__':
main(sys.argv[1])
## Instruction:
Refactor to ensure closing and also use list comprehension
## Code After:
"""Read and print an integer series."""
import sys
def read_series(filename):
try:
f = open(filename, mode='rt', encoding='utf-8')
return [int(line.strip()) for line in f]
finally:
f.close()
def main(filename):
print(read_series(filename))
if __name__ == '__main__':
main(sys.argv[1])
| ...
def read_series(filename):
try:
f = open(filename, mode='rt', encoding='utf-8')
return [int(line.strip()) for line in f]
finally:
f.close()
... |
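One edge case the try/finally version still misses: if open() itself raises, f was never bound and the finally clause raises NameError on f.close(). A context-manager sketch closes the file when parsing fails and skips the close when the open fails:

    def read_series(filename):
        # with closes the file on any exit path, and only once it is open
        with open(filename, mode='rt', encoding='utf-8') as f:
            return [int(line.strip()) for line in f]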
b07964e8b243b151e64af86cb09a37e980f94eb1 | vantage/utils.py | vantage/utils.py | import binascii
import base64
import click
def to_base64(value):
value = base64.urlsafe_b64encode(value.encode("utf-8")).decode("utf-8")
return f"base64:{value}"
def from_base64(value):
if value.startswith("base64:"):
try:
value = base64.urlsafe_b64decode(value[7:]).decode("utf-8")
except binascii.Error:
pass
return value
def loquacious(line):
try:
env = click.get_current_context().obj
if env is not None and env.get("VG_VERBOSE"):
click.echo(f"VG-LOG: {line}")
except RuntimeError:
# This happens when there's no active click context so we can't get the
# env. In this case we default to not printing the verbose logs.
# This situation happens when you're trying to autocomplete
pass
| import binascii
import base64
import click
def to_base64(value):
value = base64.urlsafe_b64encode(value.encode("utf-8")).decode("utf-8")
return f"base64:{value}"
def from_base64(value):
if value.startswith("base64:"):
try:
value = base64.urlsafe_b64decode(value[7:]).decode("utf-8")
except binascii.Error:
pass
return value
def loquacious(line, env=None):
try:
env = env or click.get_current_context().obj
if env is not None and env.get("VG_VERBOSE"):
click.echo(f"VG-LOG: {line}")
except RuntimeError:
# This happens when there's no active click context so we can't get the
# env. In this case we default to not printing the verbose logs.
# This situation happens when you're trying to autocomplete
pass
| Add optional env kwargs to logging method | Add optional env kwargs to logging method
| Python | mit | vantage-org/vantage,vantage-org/vantage | import binascii
import base64
import click
def to_base64(value):
value = base64.urlsafe_b64encode(value.encode("utf-8")).decode("utf-8")
return f"base64:{value}"
def from_base64(value):
if value.startswith("base64:"):
try:
value = base64.urlsafe_b64decode(value[7:]).decode("utf-8")
except binascii.Error:
pass
return value
- def loquacious(line):
+ def loquacious(line, env=None):
try:
- env = click.get_current_context().obj
+ env = env or click.get_current_context().obj
if env is not None and env.get("VG_VERBOSE"):
click.echo(f"VG-LOG: {line}")
except RuntimeError:
# This happens when there's no active click context so we can't get the
# env. In this case we default to not printing the verbose logs.
# This situation happens when you're trying to autocomplete
pass
| Add optional env kwargs to logging method | ## Code Before:
import binascii
import base64
import click
def to_base64(value):
value = base64.urlsafe_b64encode(value.encode("utf-8")).decode("utf-8")
return f"base64:{value}"
def from_base64(value):
if value.startswith("base64:"):
try:
value = base64.urlsafe_b64decode(value[7:]).decode("utf-8")
except binascii.Error:
pass
return value
def loquacious(line):
try:
env = click.get_current_context().obj
if env is not None and env.get("VG_VERBOSE"):
click.echo(f"VG-LOG: {line}")
except RuntimeError:
# This happens when there's no active click context so we can't get the
# env. In this case we default to not printing the verbose logs.
# This situation happens when you're trying to autocomplete
pass
## Instruction:
Add optional env kwargs to logging method
## Code After:
import binascii
import base64
import click
def to_base64(value):
value = base64.urlsafe_b64encode(value.encode("utf-8")).decode("utf-8")
return f"base64:{value}"
def from_base64(value):
if value.startswith("base64:"):
try:
value = base64.urlsafe_b64decode(value[7:]).decode("utf-8")
except binascii.Error:
pass
return value
def loquacious(line, env=None):
try:
env = env or click.get_current_context().obj
if env is not None and env.get("VG_VERBOSE"):
click.echo(f"VG-LOG: {line}")
except RuntimeError:
# This happens when there's no active click context so we can't get the
# env. In this case we default to not printing the verbose logs.
# This situation happens when you're trying to autocomplete
pass
| ...
def loquacious(line, env=None):
try:
env = env or click.get_current_context().obj
if env is not None and env.get("VG_VERBOSE"):
... |
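Usage of the new keyword, assuming the function above is in scope and a VG_VERBOSE-style mapping as used in its body; the second call keeps the old behaviour by falling back to the click context:

    loquacious('starting task', env={'VG_VERBOSE': '1'})  # explicit env, no click context needed
    loquacious('starting task')  # env=None, falls back to click.get_current_context().obj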
f0e29748ff899d7e65d1f4169e890d3e3c4bda0e | icekit/project/settings/_test.py | icekit/project/settings/_test.py | from ._base import *
# DJANGO ######################################################################
DATABASE_NAME = 'test_%s' % DATABASES['default']['NAME']
DATABASES = {
'default': {
'NAME': DATABASE_NAME,
'TEST': {
'NAME': DATABASE_NAME,
# See: https://docs.djangoproject.com/en/1.7/ref/settings/#serialize
'SERIALIZE': False,
},
}
}
INSTALLED_APPS += ('icekit.tests', )
TEMPLATES_DJANGO['DIRS'].insert(
0, os.path.join(BASE_DIR, 'icekit', 'tests', 'templates')),
# ICEKIT ######################################################################
# RESPONSE_PAGE_PLUGINS = ['ImagePlugin', ]
# HAYSTACK ####################################################################
# HAYSTACK_CONNECTIONS = {
# 'default': {
# 'ENGINE': 'haystack.backends.simple_backend.SimpleEngine',
# },
# }
# TRAVIS ######################################################################
if 'TRAVIS' in os.environ:
NOSE_ARGS.remove('--with-progressive')
| from ._base import *
# DJANGO ######################################################################
DATABASE_NAME = 'test_%s' % DATABASES['default']['NAME']
DATABASES['default'].update({
'NAME': DATABASE_NAME,
'TEST': {
'NAME': DATABASE_NAME,
# See: https://docs.djangoproject.com/en/1.7/ref/settings/#serialize
'SERIALIZE': False,
},
})
INSTALLED_APPS += ('icekit.tests', )
TEMPLATES_DJANGO['DIRS'].insert(
0, os.path.join(BASE_DIR, 'icekit', 'tests', 'templates')),
# ICEKIT ######################################################################
# RESPONSE_PAGE_PLUGINS = ['ImagePlugin', ]
# HAYSTACK ####################################################################
# HAYSTACK_CONNECTIONS = {
# 'default': {
# 'ENGINE': 'haystack.backends.simple_backend.SimpleEngine',
# },
# }
# TRAVIS ######################################################################
if 'TRAVIS' in os.environ:
NOSE_ARGS.remove('--with-progressive')
| Update instead of overriding `DATABASES` setting in `test` settings. | Update instead of overriding `DATABASES` setting in `test` settings.
| Python | mit | ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit | from ._base import *
# DJANGO ######################################################################
DATABASE_NAME = 'test_%s' % DATABASES['default']['NAME']
- DATABASES = {
- 'default': {
+ DATABASES['default'].update({
+ 'NAME': DATABASE_NAME,
+ 'TEST': {
'NAME': DATABASE_NAME,
- 'TEST': {
- 'NAME': DATABASE_NAME,
- # See: https://docs.djangoproject.com/en/1.7/ref/settings/#serialize
+ # See: https://docs.djangoproject.com/en/1.7/ref/settings/#serialize
- 'SERIALIZE': False,
+ 'SERIALIZE': False,
- },
- }
+ },
- }
+ })
INSTALLED_APPS += ('icekit.tests', )
TEMPLATES_DJANGO['DIRS'].insert(
0, os.path.join(BASE_DIR, 'icekit', 'tests', 'templates')),
# ICEKIT ######################################################################
# RESPONSE_PAGE_PLUGINS = ['ImagePlugin', ]
# HAYSTACK ####################################################################
# HAYSTACK_CONNECTIONS = {
# 'default': {
# 'ENGINE': 'haystack.backends.simple_backend.SimpleEngine',
# },
# }
# TRAVIS ######################################################################
if 'TRAVIS' in os.environ:
NOSE_ARGS.remove('--with-progressive')
| Update instead of overriding `DATABASES` setting in `test` settings. | ## Code Before:
from ._base import *
# DJANGO ######################################################################
DATABASE_NAME = 'test_%s' % DATABASES['default']['NAME']
DATABASES = {
'default': {
'NAME': DATABASE_NAME,
'TEST': {
'NAME': DATABASE_NAME,
# See: https://docs.djangoproject.com/en/1.7/ref/settings/#serialize
'SERIALIZE': False,
},
}
}
INSTALLED_APPS += ('icekit.tests', )
TEMPLATES_DJANGO['DIRS'].insert(
0, os.path.join(BASE_DIR, 'icekit', 'tests', 'templates')),
# ICEKIT ######################################################################
# RESPONSE_PAGE_PLUGINS = ['ImagePlugin', ]
# HAYSTACK ####################################################################
# HAYSTACK_CONNECTIONS = {
# 'default': {
# 'ENGINE': 'haystack.backends.simple_backend.SimpleEngine',
# },
# }
# TRAVIS ######################################################################
if 'TRAVIS' in os.environ:
NOSE_ARGS.remove('--with-progressive')
## Instruction:
Update instead of overriding `DATABASES` setting in `test` settings.
## Code After:
from ._base import *
# DJANGO ######################################################################
DATABASE_NAME = 'test_%s' % DATABASES['default']['NAME']
DATABASES['default'].update({
'NAME': DATABASE_NAME,
'TEST': {
'NAME': DATABASE_NAME,
# See: https://docs.djangoproject.com/en/1.7/ref/settings/#serialize
'SERIALIZE': False,
},
})
INSTALLED_APPS += ('icekit.tests', )
TEMPLATES_DJANGO['DIRS'].insert(
0, os.path.join(BASE_DIR, 'icekit', 'tests', 'templates')),
# ICEKIT ######################################################################
# RESPONSE_PAGE_PLUGINS = ['ImagePlugin', ]
# HAYSTACK ####################################################################
# HAYSTACK_CONNECTIONS = {
# 'default': {
# 'ENGINE': 'haystack.backends.simple_backend.SimpleEngine',
# },
# }
# TRAVIS ######################################################################
if 'TRAVIS' in os.environ:
NOSE_ARGS.remove('--with-progressive')
| // ... existing code ...
DATABASES['default'].update({
'NAME': DATABASE_NAME,
'TEST': {
'NAME': DATABASE_NAME,
# See: https://docs.djangoproject.com/en/1.7/ref/settings/#serialize
'SERIALIZE': False,
},
})
// ... rest of the code ... |
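The reason update() beats reassigning DATABASES here is that keys inherited from the base settings (ENGINE, USER, and so on) survive. A small runnable sketch with illustrative base values:

    default = {'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'app'}  # illustrative
    default.update({'NAME': 'test_app', 'TEST': {'NAME': 'test_app', 'SERIALIZE': False}})
    assert default['ENGINE'].endswith('postgresql_psycopg2')  # inherited key kept
    assert default['NAME'] == 'test_app'                      # overridden key replaced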
72d119ef80c4c84ae3be65c93795832a7250fc51 | run.py | run.py | import data
import model
import numpy as np
from keras import optimizers
# Localize data through file system relative indexing method
path = 'hcp_olivier/102816/MNINonLinear/Results/rfMRI_REST1_LR/rfMRI_REST1_LR.npy'
# Use data loading library to load data
a, b, y = data.generate_learning_set(np.load(path))
# Generate the model
embedding_model, siamese_model = model.make_mlp_models(a.shape[1], embedding_dropout=0.2)
optimizer = optimizers.SGD(lr=0.00001, momentum=0.9, nesterov=True)
# optimizer = optimizers.Adam(lr=0.0001)
siamese_model.compile(optimizer=optimizer, loss='binary_crossentropy',
metrics=['accuracy'])
print(a.shape)
print(a[:10])
trace = siamese_model.fit([a, b], y, validation_split=0.2, epochs=30,
batch_size=16)
print(trace.history['acc'][-1])
print(trace.history['val_acc'][-1])
| import data
import model
import numpy as np
from keras import optimizers
# Localize data through file system relative indexing method
path = 'hcp_olivier/102816/MNINonLinear/Results/rfMRI_REST1_LR/rfMRI_REST1_LR.npy'
# Use data loading library to load data
a, b, y = data.generate_learning_set(np.load(path))
# Generate the model
embedding_model, siamese_model = model.make_linear_models(a.shape[1])
optimizer = optimizers.SGD(lr=0.00001, momentum=0.9, nesterov=True)
# optimizer = optimizers.Adam(lr=0.0001)
siamese_model.compile(optimizer=optimizer, loss='binary_crossentropy',
metrics=['accuracy'])
print("data shapes:")
print(a.shape)
print(b.shape)
print(y.shape)
trace = siamese_model.fit([a, b], y, validation_split=0.2, epochs=30,
batch_size=16, shuffle=True)
print(trace.history['acc'][-1])
print(trace.history['val_acc'][-1])
| Use linear models by default | Use linear models by default
| Python | mit | ogrisel/brain2vec | import data
import model
import numpy as np
from keras import optimizers
# Localize data through file system relative indexing method
path = 'hcp_olivier/102816/MNINonLinear/Results/rfMRI_REST1_LR/rfMRI_REST1_LR.npy'
# Use data loading library to load data
a, b, y = data.generate_learning_set(np.load(path))
# Generate the model
- embedding_model, siamese_model = model.make_mlp_models(a.shape[1], embedding_dropout=0.2)
+ embedding_model, siamese_model = model.make_linear_models(a.shape[1])
optimizer = optimizers.SGD(lr=0.00001, momentum=0.9, nesterov=True)
# optimizer = optimizers.Adam(lr=0.0001)
siamese_model.compile(optimizer=optimizer, loss='binary_crossentropy',
metrics=['accuracy'])
+ print("data shapes:")
print(a.shape)
- print(a[:10])
+ print(b.shape)
+ print(y.shape)
trace = siamese_model.fit([a, b], y, validation_split=0.2, epochs=30,
- batch_size=16)
+ batch_size=16, shuffle=True)
print(trace.history['acc'][-1])
print(trace.history['val_acc'][-1])
| Use linear models by default | ## Code Before:
import data
import model
import numpy as np
from keras import optimizers
# Localize data through file system relative indexing method
path = 'hcp_olivier/102816/MNINonLinear/Results/rfMRI_REST1_LR/rfMRI_REST1_LR.npy'
# Use data loading library to load data
a, b, y = data.generate_learning_set(np.load(path))
# Generate the model
embedding_model, siamese_model = model.make_mlp_models(a.shape[1], embedding_dropout=0.2)
optimizer = optimizers.SGD(lr=0.00001, momentum=0.9, nesterov=True)
# optimizer = optimizers.Adam(lr=0.0001)
siamese_model.compile(optimizer=optimizer, loss='binary_crossentropy',
metrics=['accuracy'])
print(a.shape)
print(a[:10])
trace = siamese_model.fit([a, b], y, validation_split=0.2, epochs=30,
batch_size=16)
print(trace.history['acc'][-1])
print(trace.history['val_acc'][-1])
## Instruction:
Use linear models by default
## Code After:
import data
import model
import numpy as np
from keras import optimizers
# Localize data through file system relative indexing method
path = 'hcp_olivier/102816/MNINonLinear/Results/rfMRI_REST1_LR/rfMRI_REST1_LR.npy'
# Use data loading library to load data
a, b, y = data.generate_learning_set(np.load(path))
# Generate the model
embedding_model, siamese_model = model.make_linear_models(a.shape[1])
optimizer = optimizers.SGD(lr=0.00001, momentum=0.9, nesterov=True)
# optimizer = optimizers.Adam(lr=0.0001)
siamese_model.compile(optimizer=optimizer, loss='binary_crossentropy',
metrics=['accuracy'])
print("data shapes:")
print(a.shape)
print(b.shape)
print(y.shape)
trace = siamese_model.fit([a, b], y, validation_split=0.2, epochs=30,
batch_size=16, shuffle=True)
print(trace.history['acc'][-1])
print(trace.history['val_acc'][-1])
| # ... existing code ...
# Generate the model
embedding_model, siamese_model = model.make_linear_models(a.shape[1])
# ... modified code ...
print("data shapes:")
print(a.shape)
print(b.shape)
print(y.shape)
...
trace = siamese_model.fit([a, b], y, validation_split=0.2, epochs=30,
batch_size=16, shuffle=True)
# ... rest of the code ... |
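One subtlety worth knowing about the new fit() call: in Keras, validation_split slices the last 20% of the arrays before any shuffling, and shuffle=True only reorders the remaining training rows between epochs. If the rows are ordered, shuffling once up front gives a fairer split. A sketch with stand-in arrays:

    import numpy as np
    a = np.random.rand(100, 8); b = np.random.rand(100, 8)   # stand-ins for the real a, b
    y = np.random.randint(0, 2, size=100)
    idx = np.random.permutation(len(y))
    a, b, y = a[idx], b[idx], y[idx]   # shuffle before fit so the tail 20% is random
    # siamese_model.fit([a, b], y, validation_split=0.2, epochs=30, batch_size=16)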
4b7e20c5640242a6e06392aaf9cbfe8e4ee8a498 | mangopaysdk/types/payinpaymentdetailscard.py | mangopaysdk/types/payinpaymentdetailscard.py | from mangopaysdk.types.payinpaymentdetails import PayInPaymentDetails
class PayInPaymentDetailsCard(PayInPaymentDetails):
"""Class represents Card type for mean of payment in PayIn entity."""
def __init__(self):
# CardType enum
self.CardType = None | from mangopaysdk.types.payinpaymentdetails import PayInPaymentDetails
class PayInPaymentDetailsCard(PayInPaymentDetails):
"""Class represents Card type for mean of payment in PayIn entity."""
def __init__(self):
# CardType enum
self.CardType = None
self.StatementDescriptor = None
| Add StatementDescriptor for card web payins | Add StatementDescriptor for card web payins | Python | mit | chocopoche/mangopay2-python-sdk,Mangopay/mangopay2-python-sdk | from mangopaysdk.types.payinpaymentdetails import PayInPaymentDetails
class PayInPaymentDetailsCard(PayInPaymentDetails):
"""Class represents Card type for mean of payment in PayIn entity."""
def __init__(self):
# CardType enum
self.CardType = None
+ self.StatementDescriptor = None
+ | Add StatementDescriptor for card web payins | ## Code Before:
from mangopaysdk.types.payinpaymentdetails import PayInPaymentDetails
class PayInPaymentDetailsCard(PayInPaymentDetails):
"""Class represents Card type for mean of payment in PayIn entity."""
def __init__(self):
# CardType enum
self.CardType = None
## Instruction:
Add StatementDescriptor for card web payins
## Code After:
from mangopaysdk.types.payinpaymentdetails import PayInPaymentDetails
class PayInPaymentDetailsCard(PayInPaymentDetails):
"""Class represents Card type for mean of payment in PayIn entity."""
def __init__(self):
# CardType enum
self.CardType = None
self.StatementDescriptor = None
| // ... existing code ...
self.CardType = None
self.StatementDescriptor = None
// ... rest of the code ... |
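A hypothetical call-site for the new field; the card type and descriptor values below are illustrative, and card schemes typically truncate long soft descriptors:

    from mangopaysdk.types.payinpaymentdetailscard import PayInPaymentDetailsCard
    details = PayInPaymentDetailsCard()
    details.CardType = 'CB_VISA_MASTERCARD'   # illustrative enum value
    details.StatementDescriptor = 'MYSHOP'    # shown on the payer's statement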
fa89ac95954502c14105857dfbb8dece271408e0 | fiesta/fiesta.py | fiesta/fiesta.py | import base64, json, urllib2
api_client_id = "To3-IKknn36qAAAA"
api_client_secret = "46d028xWl8zXGa3GCOYJMeXlr5pUebCNZcz3SCJj"
basic_auth = base64.b64encode("%s:%s" % (api_client_id, api_client_secret))
def _create_and_send_request(uri, api_inputs):
request = urllib2.Request(uri)
request.add_header("Authorization", "Basic %s" % (basic_auth))
request.add_header("Content-Type", "application/json")
request.add_data(json.dumps(api_inputs))
return urllib2.urlopen(request)
def create_group_trusted():
create_group_uri = "https://api.fiesta.cc/group"
api_inputs = {}
response = _create_and_send_request(create_group_uri, api_inputs)
json_response = json.loads(response.read())
group_id = json_response['data']['group_id']
def add_member_trusted(group_id, member_email, group_name):
add_member_uri = "https://api.fiesta.cc/membership/%s"
api_inputs = {'group_name': group_name,
'address': member_email}
_create_and_send_request(add_member_uri % group_id, api_inputs)
| import base64, json, urllib2
api_client_id = ""
api_client_secret = ""
basic_auth = base64.b64encode("%s:%s" % (api_client_id, api_client_secret))
def _create_and_send_request(uri, api_inputs):
request = urllib2.Request(uri)
request.add_header("Authorization", "Basic %s" % (basic_auth))
request.add_header("Content-Type", "application/json")
request.add_data(json.dumps(api_inputs))
return urllib2.urlopen(request)
def create_group_trusted():
create_group_uri = "https://api.fiesta.cc/group"
api_inputs = {}
response = _create_and_send_request(create_group_uri, api_inputs)
json_response = json.loads(response.read())
group_id = json_response['data']['group_id']
def add_member_trusted(group_id, member_email, group_name):
add_member_uri = "https://api.fiesta.cc/membership/%s"
api_inputs = {'group_name': group_name,
'address': member_email}
_create_and_send_request(add_member_uri % group_id, api_inputs)
| Remove accidental commit of credentials | Remove accidental commit of credentials
| Python | apache-2.0 | fiesta/fiesta-python | import base64, json, urllib2
- api_client_id = "To3-IKknn36qAAAA"
- api_client_secret = "46d028xWl8zXGa3GCOYJMeXlr5pUebCNZcz3SCJj"
+ api_client_id = ""
+ api_client_secret = ""
basic_auth = base64.b64encode("%s:%s" % (api_client_id, api_client_secret))
def _create_and_send_request(uri, api_inputs):
request = urllib2.Request(uri)
request.add_header("Authorization", "Basic %s" % (basic_auth))
request.add_header("Content-Type", "application/json")
request.add_data(json.dumps(api_inputs))
return urllib2.urlopen(request)
def create_group_trusted():
create_group_uri = "https://api.fiesta.cc/group"
api_inputs = {}
response = _create_and_send_request(create_group_uri, api_inputs)
json_response = json.loads(response.read())
group_id = json_response['data']['group_id']
def add_member_trusted(group_id, member_email, group_name):
add_member_uri = "https://api.fiesta.cc/membership/%s"
api_inputs = {'group_name': group_name,
'address': member_email}
_create_and_send_request(add_member_uri % group_id, api_inputs)
| Remove accidental commit of credentials | ## Code Before:
import base64, json, urllib2
api_client_id = "To3-IKknn36qAAAA"
api_client_secret = "46d028xWl8zXGa3GCOYJMeXlr5pUebCNZcz3SCJj"
basic_auth = base64.b64encode("%s:%s" % (api_client_id, api_client_secret))
def _create_and_send_request(uri, api_inputs):
request = urllib2.Request(uri)
request.add_header("Authorization", "Basic %s" % (basic_auth))
request.add_header("Content-Type", "application/json")
request.add_data(json.dumps(api_inputs))
return urllib2.urlopen(request)
def create_group_trusted():
create_group_uri = "https://api.fiesta.cc/group"
api_inputs = {}
response = _create_and_send_request(create_group_uri, api_inputs)
json_response = json.loads(response.read())
group_id = json_response['data']['group_id']
def add_member_trusted(group_id, member_email, group_name):
add_member_uri = "https://api.fiesta.cc/membership/%s"
api_inputs = {'group_name': group_name,
'address': member_email}
_create_and_send_request(add_member_uri % group_id, api_inputs)
## Instruction:
Remove accidental commit of credentials
## Code After:
import base64, json, urllib2
api_client_id = ""
api_client_secret = ""
basic_auth = base64.b64encode("%s:%s" % (api_client_id, api_client_secret))
def _create_and_send_request(uri, api_inputs):
request = urllib2.Request(uri)
request.add_header("Authorization", "Basic %s" % (basic_auth))
request.add_header("Content-Type", "application/json")
request.add_data(json.dumps(api_inputs))
return urllib2.urlopen(request)
def create_group_trusted():
create_group_uri = "https://api.fiesta.cc/group"
api_inputs = {}
response = _create_and_send_request(create_group_uri, api_inputs)
json_response = json.loads(response.read())
group_id = json_response['data']['group_id']
def add_member_trusted(group_id, member_email, group_name):
add_member_uri = "https://api.fiesta.cc/membership/%s"
api_inputs = {'group_name': group_name,
'address': member_email}
_create_and_send_request(add_member_uri % group_id, api_inputs)
| ...
api_client_id = ""
api_client_secret = ""
basic_auth = base64.b64encode("%s:%s" % (api_client_id, api_client_secret))
... |
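Blanking the strings keeps the secret out of history, but the module still needs values at runtime. One common follow-up is reading them from the environment; the variable names here are illustrative, not part of the Fiesta API:

    import os
    api_client_id = os.environ.get('FIESTA_CLIENT_ID', '')
    api_client_secret = os.environ.get('FIESTA_CLIENT_SECRET', '')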
dcf11d2d26519cdea10813530d2bde85f8fe8180 | Python/Tests/TestData/DjangoProject/Oar/views.py | Python/Tests/TestData/DjangoProject/Oar/views.py | from django.template import Context, loader
# Create your views here.
from django.http import HttpResponse
from Oar.models import Poll
from django.http import HttpResponse
def main(request):
return HttpResponse('<html><body>Hello world!</body></html>')
def index(request):
latest_poll_list = Poll.objects.all().order_by('-pub_date')[:5]
t = loader.get_template('polls/index.html')
c = Context({
'latest_poll_list': latest_poll_list,
})
return HttpResponse(t.render(c))
def loop(request):
t = loader.get_template('polls/loop.html')
c = Context({
'colors': ['red', 'blue', 'green']
})
return HttpResponse(t.render(c))
def loop_nobom(request):
t = loader.get_template('polls/loop_nobom.html')
c = Context({
'colors': ['red', 'blue', 'green']
})
return HttpResponse(t.render(c))
def loop2(request):
t = loader.get_template('polls/loop2.html')
c = Context({
'colors': ['red', 'blue', 'green']
})
return HttpResponse(t.render(c))
| from django.template import Context, loader
# Create your views here.
from django.http import HttpResponse
from Oar.models import Poll
from django.http import HttpResponse
def main(request):
return HttpResponse('<html><body>Hello world!</body></html>')
def index(request):
latest_poll_list = Poll.objects.all().order_by('-pub_date')[:5]
t = loader.get_template('polls/index.html')
c = {
'latest_poll_list': latest_poll_list,
}
return HttpResponse(t.render(c))
def loop(request):
t = loader.get_template('polls/loop.html')
c = {
'colors': ['red', 'blue', 'green']
}
return HttpResponse(t.render(c))
def loop_nobom(request):
t = loader.get_template('polls/loop_nobom.html')
c = {
'colors': ['red', 'blue', 'green']
}
return HttpResponse(t.render(c))
def loop2(request):
t = loader.get_template('polls/loop2.html')
c = {
'colors': ['red', 'blue', 'green']
}
return HttpResponse(t.render(c))
| Use a dict instead of Context instance in DjangoDebuggerTests test project, to avoid a TypeError in latest version of Django. | Use a dict instead of Context instance in DjangoDebuggerTests test project, to avoid a TypeError in latest version of Django.
| Python | apache-2.0 | int19h/PTVS,huguesv/PTVS,zooba/PTVS,Microsoft/PTVS,Microsoft/PTVS,zooba/PTVS,Microsoft/PTVS,zooba/PTVS,zooba/PTVS,int19h/PTVS,int19h/PTVS,Microsoft/PTVS,Microsoft/PTVS,Microsoft/PTVS,int19h/PTVS,zooba/PTVS,zooba/PTVS,huguesv/PTVS,huguesv/PTVS,int19h/PTVS,huguesv/PTVS,int19h/PTVS,huguesv/PTVS,huguesv/PTVS | from django.template import Context, loader
# Create your views here.
from django.http import HttpResponse
from Oar.models import Poll
from django.http import HttpResponse
def main(request):
return HttpResponse('<html><body>Hello world!</body></html>')
def index(request):
latest_poll_list = Poll.objects.all().order_by('-pub_date')[:5]
t = loader.get_template('polls/index.html')
- c = Context({
+ c = {
'latest_poll_list': latest_poll_list,
- })
+ }
return HttpResponse(t.render(c))
def loop(request):
t = loader.get_template('polls/loop.html')
- c = Context({
+ c = {
'colors': ['red', 'blue', 'green']
- })
+ }
return HttpResponse(t.render(c))
def loop_nobom(request):
t = loader.get_template('polls/loop_nobom.html')
- c = Context({
+ c = {
'colors': ['red', 'blue', 'green']
- })
+ }
return HttpResponse(t.render(c))
def loop2(request):
t = loader.get_template('polls/loop2.html')
- c = Context({
+ c = {
'colors': ['red', 'blue', 'green']
- })
+ }
return HttpResponse(t.render(c))
| Use a dict instead of Context instance in DjangoDebuggerTests test project, to avoid a TypeError in latest version of Django. | ## Code Before:
from django.template import Context, loader
# Create your views here.
from django.http import HttpResponse
from Oar.models import Poll
from django.http import HttpResponse
def main(request):
return HttpResponse('<html><body>Hello world!</body></html>')
def index(request):
latest_poll_list = Poll.objects.all().order_by('-pub_date')[:5]
t = loader.get_template('polls/index.html')
c = Context({
'latest_poll_list': latest_poll_list,
})
return HttpResponse(t.render(c))
def loop(request):
t = loader.get_template('polls/loop.html')
c = Context({
'colors': ['red', 'blue', 'green']
})
return HttpResponse(t.render(c))
def loop_nobom(request):
t = loader.get_template('polls/loop_nobom.html')
c = Context({
'colors': ['red', 'blue', 'green']
})
return HttpResponse(t.render(c))
def loop2(request):
t = loader.get_template('polls/loop2.html')
c = Context({
'colors': ['red', 'blue', 'green']
})
return HttpResponse(t.render(c))
## Instruction:
Use a dict instead of Context instance in DjangoDebuggerTests test project, to avoid a TypeError in latest version of Django.
## Code After:
from django.template import Context, loader
# Create your views here.
from django.http import HttpResponse
from Oar.models import Poll
from django.http import HttpResponse
def main(request):
return HttpResponse('<html><body>Hello world!</body></html>')
def index(request):
latest_poll_list = Poll.objects.all().order_by('-pub_date')[:5]
t = loader.get_template('polls/index.html')
c = {
'latest_poll_list': latest_poll_list,
}
return HttpResponse(t.render(c))
def loop(request):
t = loader.get_template('polls/loop.html')
c = {
'colors': ['red', 'blue', 'green']
}
return HttpResponse(t.render(c))
def loop_nobom(request):
t = loader.get_template('polls/loop_nobom.html')
c = {
'colors': ['red', 'blue', 'green']
}
return HttpResponse(t.render(c))
def loop2(request):
t = loader.get_template('polls/loop2.html')
c = {
'colors': ['red', 'blue', 'green']
}
return HttpResponse(t.render(c))
| ...
t = loader.get_template('polls/index.html')
c = {
'latest_poll_list': latest_poll_list,
}
return HttpResponse(t.render(c))
...
t = loader.get_template('polls/loop.html')
c = {
'colors': ['red', 'blue', 'green']
}
return HttpResponse(t.render(c))
...
t = loader.get_template('polls/loop_nobom.html')
c = {
'colors': ['red', 'blue', 'green']
}
return HttpResponse(t.render(c))
...
t = loader.get_template('polls/loop2.html')
c = {
'colors': ['red', 'blue', 'green']
}
return HttpResponse(t.render(c))
... |
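The TypeError mentioned in the instruction comes from the template backends introduced in Django 1.8: templates returned by loader.get_template() expect a plain dict, and newer Django versions reject a Context outright. A sketch of the call-site difference, assuming a configured project with the record's imports:

    t = loader.get_template('polls/loop.html')
    # t.render(Context({'colors': []}))  # newer Django: TypeError, context must be a dict
    t.render({'colors': []})             # a plain dict works across versions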
d24e31dbebc776524e0a2cd4b971c726bfcbfda5 | py_nist_beacon/nist_randomness_beacon.py | py_nist_beacon/nist_randomness_beacon.py | import requests
from requests.exceptions import RequestException
from py_nist_beacon.nist_randomness_beacon_value import (
NistRandomnessBeaconValue
)
class NistRandomnessBeacon(object):
NIST_BASE_URL = "https://beacon.nist.gov/rest/record"
@classmethod
def get_last_record(cls):
try:
r = requests.get("{}/last".format(cls.NIST_BASE_URL))
return NistRandomnessBeaconValue.from_xml(r.text)
except RequestException:
return None
| import requests
from requests.exceptions import RequestException
from py_nist_beacon.nist_randomness_beacon_value import (
NistRandomnessBeaconValue
)
class NistRandomnessBeacon(object):
NIST_BASE_URL = "https://beacon.nist.gov/rest/record"
@classmethod
def get_last_record(cls):
try:
r = requests.get("{}/last".format(cls.NIST_BASE_URL))
if r.status_code is requests.codes.OK:
return NistRandomnessBeaconValue.from_xml(r.text)
else:
return None
except RequestException:
return None
| Check status code before object | Check status code before object
| Python | apache-2.0 | urda/nistbeacon | import requests
from requests.exceptions import RequestException
from py_nist_beacon.nist_randomness_beacon_value import (
NistRandomnessBeaconValue
)
class NistRandomnessBeacon(object):
NIST_BASE_URL = "https://beacon.nist.gov/rest/record"
@classmethod
def get_last_record(cls):
try:
r = requests.get("{}/last".format(cls.NIST_BASE_URL))
+
+ if r.status_code is requests.codes.OK:
- return NistRandomnessBeaconValue.from_xml(r.text)
+ return NistRandomnessBeaconValue.from_xml(r.text)
+ else:
+ return None
except RequestException:
return None
| Check status code before object | ## Code Before:
import requests
from requests.exceptions import RequestException
from py_nist_beacon.nist_randomness_beacon_value import (
NistRandomnessBeaconValue
)
class NistRandomnessBeacon(object):
NIST_BASE_URL = "https://beacon.nist.gov/rest/record"
@classmethod
def get_last_record(cls):
try:
r = requests.get("{}/last".format(cls.NIST_BASE_URL))
return NistRandomnessBeaconValue.from_xml(r.text)
except RequestException:
return None
## Instruction:
Check status code before object
## Code After:
import requests
from requests.exceptions import RequestException
from py_nist_beacon.nist_randomness_beacon_value import (
NistRandomnessBeaconValue
)
class NistRandomnessBeacon(object):
NIST_BASE_URL = "https://beacon.nist.gov/rest/record"
@classmethod
def get_last_record(cls):
try:
r = requests.get("{}/last".format(cls.NIST_BASE_URL))
if r.status_code is requests.codes.OK:
return NistRandomnessBeaconValue.from_xml(r.text)
else:
return None
except RequestException:
return None
| // ... existing code ...
r = requests.get("{}/last".format(cls.NIST_BASE_URL))
if r.status_code is requests.codes.OK:
return NistRandomnessBeaconValue.from_xml(r.text)
else:
return None
except RequestException:
// ... rest of the code ... |
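A side note on the new comparison: requests.codes.OK is just the integer 200, so the identity check happens to hold in CPython only because small integers are interned; equality is the portable spelling. A runnable check (requires requests):

    import requests
    assert requests.codes.OK == 200
    status_code = 200
    assert status_code == requests.codes.OK   # prefer == over is for int comparisons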
2161910a53604bdc48027c5c4e71f9af4228cbaa | keras/backend/common.py | keras/backend/common.py | import numpy as np
# the type of float to use throughout the session.
_FLOATX = 'float32'
_EPSILON = 10e-8
def epsilon():
return _EPSILON
def set_epsilon(e):
global _EPSILON
_EPSILON = e
def floatx():
return _FLOATX
def set_floatx(floatx):
global _FLOATX
if floatx not in {'float32', 'float64'}:
raise Exception('Unknown floatx type: ' + str(floatx))
if isinstance(floatx, unicode):
floatx = floatx.encode('ascii')
_FLOATX = floatx
def cast_to_floatx(x):
'''Cast a Numpy array to floatx.
'''
return np.asarray(x, dtype=_FLOATX)
| import numpy as np
# the type of float to use throughout the session.
_FLOATX = 'float32'
_EPSILON = 10e-8
def epsilon():
return _EPSILON
def set_epsilon(e):
global _EPSILON
_EPSILON = e
def floatx():
return _FLOATX
def set_floatx(floatx):
global _FLOATX
if floatx not in {'float32', 'float64'}:
raise Exception('Unknown floatx type: ' + str(floatx))
floatx = floatx.encode('ascii')
_FLOATX = floatx
def cast_to_floatx(x):
'''Cast a Numpy array to floatx.
'''
return np.asarray(x, dtype=_FLOATX)
| Fix floatx encoding on Python3 | Fix floatx encoding on Python3 | Python | apache-2.0 | keras-team/keras,nebw/keras,daviddiazvico/keras,kemaswill/keras,DeepGnosis/keras,keras-team/keras,dolaameng/keras,relh/keras,kuza55/keras | import numpy as np
# the type of float to use throughout the session.
_FLOATX = 'float32'
_EPSILON = 10e-8
def epsilon():
return _EPSILON
def set_epsilon(e):
global _EPSILON
_EPSILON = e
def floatx():
return _FLOATX
def set_floatx(floatx):
global _FLOATX
if floatx not in {'float32', 'float64'}:
raise Exception('Unknown floatx type: ' + str(floatx))
- if isinstance(floatx, unicode):
- floatx = floatx.encode('ascii')
+ floatx = floatx.encode('ascii')
_FLOATX = floatx
def cast_to_floatx(x):
'''Cast a Numpy array to floatx.
'''
return np.asarray(x, dtype=_FLOATX)
| Fix floatx encoding on Python3 | ## Code Before:
import numpy as np
# the type of float to use throughout the session.
_FLOATX = 'float32'
_EPSILON = 10e-8
def epsilon():
return _EPSILON
def set_epsilon(e):
global _EPSILON
_EPSILON = e
def floatx():
return _FLOATX
def set_floatx(floatx):
global _FLOATX
if floatx not in {'float32', 'float64'}:
raise Exception('Unknown floatx type: ' + str(floatx))
if isinstance(floatx, unicode):
floatx = floatx.encode('ascii')
_FLOATX = floatx
def cast_to_floatx(x):
'''Cast a Numpy array to floatx.
'''
return np.asarray(x, dtype=_FLOATX)
## Instruction:
Fix floatx encoding on Python3
## Code After:
import numpy as np
# the type of float to use throughout the session.
_FLOATX = 'float32'
_EPSILON = 10e-8
def epsilon():
return _EPSILON
def set_epsilon(e):
global _EPSILON
_EPSILON = e
def floatx():
return _FLOATX
def set_floatx(floatx):
global _FLOATX
if floatx not in {'float32', 'float64'}:
raise Exception('Unknown floatx type: ' + str(floatx))
floatx = floatx.encode('ascii')
_FLOATX = floatx
def cast_to_floatx(x):
'''Cast a Numpy array to floatx.
'''
return np.asarray(x, dtype=_FLOATX)
| // ... existing code ...
raise Exception('Unknown floatx type: ' + str(floatx))
floatx = floatx.encode('ascii')
_FLOATX = floatx
// ... rest of the code ... |
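Removing the unicode check fixes the Python 3 NameError, but note that str.encode('ascii') returns bytes on Python 3, so _FLOATX ends up as b'float32' there. A sketch of a variant that keeps the value a str by normalising bytes input instead:

    def set_floatx(floatx):
        global _FLOATX
        if isinstance(floatx, bytes):          # normalise instead of encoding
            floatx = floatx.decode('ascii')
        if floatx not in {'float32', 'float64'}:
            raise Exception('Unknown floatx type: ' + str(floatx))
        _FLOATX = floatx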
ff725b4ae24c58cb126c1d49ce58a69d9b32d3b0 | app/soc/models/timeline.py | app/soc/models/timeline.py |
from google.appengine.ext import db
from django.utils.translation import ugettext
from soc.models import linkable
class Timeline(linkable.Linkable):
"""The Timeline Model, representing the timeline for a Program.
"""
program_start = db.DateTimeProperty(
verbose_name=ugettext('Program Start date'))
program_end = db.DateTimeProperty(
verbose_name=ugettext('Program End date'))
accepted_organization_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Organizations Announced Deadline'))
student_signup_start = db.DateTimeProperty(
verbose_name=ugettext('Student Signup Start date'))
student_signup_end = db.DateTimeProperty(
verbose_name=ugettext('Student Signup End date'))
|
from google.appengine.ext import db
from django.utils.translation import ugettext
from soc.models import linkable
class Timeline(linkable.Linkable):
"""The Timeline Model, representing the timeline for a Program.
"""
program_start = db.DateTimeProperty(
verbose_name=ugettext('Program Start date'))
program_end = db.DateTimeProperty(
verbose_name=ugettext('Program End date'))
program_end.help_text = ugettext(
'After this date no data (such as profiles and forms) can be changed.')
accepted_organization_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Organizations Announced Deadline'))
student_signup_start = db.DateTimeProperty(
verbose_name=ugettext('Student Signup Start date'))
student_signup_end = db.DateTimeProperty(
verbose_name=ugettext('Student Signup End date'))
| Add help text for program_end date. | Add help text for program_end date.
Fixes 1411.
| Python | apache-2.0 | rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son |
from google.appengine.ext import db
from django.utils.translation import ugettext
from soc.models import linkable
class Timeline(linkable.Linkable):
"""The Timeline Model, representing the timeline for a Program.
"""
program_start = db.DateTimeProperty(
verbose_name=ugettext('Program Start date'))
program_end = db.DateTimeProperty(
verbose_name=ugettext('Program End date'))
+ program_end.help_text = ugettext(
+ 'After this date no data (such as profiles and forms) can be changed.')
accepted_organization_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Organizations Announced Deadline'))
student_signup_start = db.DateTimeProperty(
verbose_name=ugettext('Student Signup Start date'))
student_signup_end = db.DateTimeProperty(
verbose_name=ugettext('Student Signup End date'))
| Add help text for program_end date. | ## Code Before:
from google.appengine.ext import db
from django.utils.translation import ugettext
from soc.models import linkable
class Timeline(linkable.Linkable):
"""The Timeline Model, representing the timeline for a Program.
"""
program_start = db.DateTimeProperty(
verbose_name=ugettext('Program Start date'))
program_end = db.DateTimeProperty(
verbose_name=ugettext('Program End date'))
accepted_organization_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Organizations Announced Deadline'))
student_signup_start = db.DateTimeProperty(
verbose_name=ugettext('Student Signup Start date'))
student_signup_end = db.DateTimeProperty(
verbose_name=ugettext('Student Signup End date'))
## Instruction:
Add help text for program_end date.
## Code After:
from google.appengine.ext import db
from django.utils.translation import ugettext
from soc.models import linkable
class Timeline(linkable.Linkable):
"""The Timeline Model, representing the timeline for a Program.
"""
program_start = db.DateTimeProperty(
verbose_name=ugettext('Program Start date'))
program_end = db.DateTimeProperty(
verbose_name=ugettext('Program End date'))
program_end.help_text = ugettext(
'After this date no data (such as profiles and forms) can be changed.')
accepted_organization_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Organizations Announced Deadline'))
student_signup_start = db.DateTimeProperty(
verbose_name=ugettext('Student Signup Start date'))
student_signup_end = db.DateTimeProperty(
verbose_name=ugettext('Student Signup End date'))
| ...
verbose_name=ugettext('Program End date'))
program_end.help_text = ugettext(
'After this date no data (such as profiles and forms) can be changed.')
... |
ac8dbe8f70061906035ea24ae6bae91f0432dca8 | astropy/utils/setup_package.py | astropy/utils/setup_package.py | from distutils.core import Extension
from os.path import dirname, join
def get_extensions():
ROOT = dirname(__file__)
return [
Extension('astropy.utils._compiler',
[join(ROOT, 'src', 'compiler.c')])
]
| from distutils.core import Extension
from os.path import dirname, join, relpath
def get_extensions():
ROOT = dirname(__file__)
return [
Extension('astropy.utils._compiler',
[relpath(join(ROOT, 'src', 'compiler.c'))])
]
 | Make sure to use the relative path for all C extension source files. Otherwise distutils' MSVC compiler generates some potentially long (too long for Windows) pathnames in the build\temp dir for various compiler artifacts. (This was particularly problematic in Jenkins, where having multiple configuration matrix axes can make for long path names.) | Make sure to use the relative path for all C extension source files. Otherwise distutils' MSVC compiler generates some potentially long (too long for Windows) pathnames in the build\temp dir for various compiler artifacts. (This was particularly problematic in Jenkins, where having multiple configuration matrix axes can make for long path names.)
| Python | bsd-3-clause | MSeifert04/astropy,pllim/astropy,funbaker/astropy,stargaser/astropy,lpsinger/astropy,DougBurke/astropy,larrybradley/astropy,AustereCuriosity/astropy,dhomeier/astropy,saimn/astropy,mhvk/astropy,tbabej/astropy,DougBurke/astropy,kelle/astropy,AustereCuriosity/astropy,saimn/astropy,mhvk/astropy,bsipocz/astropy,funbaker/astropy,astropy/astropy,stargaser/astropy,joergdietrich/astropy,StuartLittlefair/astropy,stargaser/astropy,aleksandr-bakanov/astropy,tbabej/astropy,AustereCuriosity/astropy,saimn/astropy,larrybradley/astropy,astropy/astropy,kelle/astropy,MSeifert04/astropy,larrybradley/astropy,StuartLittlefair/astropy,lpsinger/astropy,funbaker/astropy,dhomeier/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,larrybradley/astropy,tbabej/astropy,AustereCuriosity/astropy,tbabej/astropy,pllim/astropy,tbabej/astropy,pllim/astropy,saimn/astropy,DougBurke/astropy,lpsinger/astropy,astropy/astropy,joergdietrich/astropy,lpsinger/astropy,bsipocz/astropy,MSeifert04/astropy,pllim/astropy,joergdietrich/astropy,lpsinger/astropy,kelle/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,AustereCuriosity/astropy,aleksandr-bakanov/astropy,pllim/astropy,dhomeier/astropy,mhvk/astropy,larrybradley/astropy,astropy/astropy,StuartLittlefair/astropy,bsipocz/astropy,MSeifert04/astropy,kelle/astropy,bsipocz/astropy,astropy/astropy,stargaser/astropy,funbaker/astropy,kelle/astropy,dhomeier/astropy,DougBurke/astropy,mhvk/astropy,saimn/astropy,joergdietrich/astropy,StuartLittlefair/astropy,mhvk/astropy | from distutils.core import Extension
- from os.path import dirname, join
+ from os.path import dirname, join, relpath
def get_extensions():
ROOT = dirname(__file__)
return [
Extension('astropy.utils._compiler',
- [join(ROOT, 'src', 'compiler.c')])
+ [relpath(join(ROOT, 'src', 'compiler.c'))])
]
 | Make sure to use the relative path for all C extension source files. Otherwise distutils' MSVC compiler generates some potentially long (too long for Windows) pathnames in the build\temp dir for various compiler artifacts. (This was particularly problematic in Jenkins, where having multiple configuration matrix axes can make for long path names.) | ## Code Before:
from distutils.core import Extension
from os.path import dirname, join
def get_extensions():
ROOT = dirname(__file__)
return [
Extension('astropy.utils._compiler',
[join(ROOT, 'src', 'compiler.c')])
]
## Instruction:
Make sure to use the relative path for all C extension source files. Otherwise distutils' MSVC compiler generates some potentially long (too long for Windows) pathnames in the build\temp dir for various compiler artifacts. (This was particularly problematic in Jenkins, where having multiple configuration matrix axes can make for long path names.)
## Code After:
from distutils.core import Extension
from os.path import dirname, join, relpath
def get_extensions():
ROOT = dirname(__file__)
return [
Extension('astropy.utils._compiler',
[relpath(join(ROOT, 'src', 'compiler.c'))])
]
| ...
from distutils.core import Extension
from os.path import dirname, join, relpath
...
Extension('astropy.utils._compiler',
[relpath(join(ROOT, 'src', 'compiler.c'))])
]
... |
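What relpath buys here, in isolation: distutils mirrors each source path under build/temp, so shorter inputs mean shorter artifact paths. The absolute ROOT value below is illustrative:

    from os.path import join, relpath
    ROOT = '/home/ci/workspace/label=win64/astropy/utils'   # illustrative absolute path
    src = join(ROOT, 'src', 'compiler.c')
    print(src)            # the full absolute path
    print(relpath(src))   # the same file, relative to the current directory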
d84e6aa022ef5e256807738c35e5069a0a1380d7 | app/main/forms/frameworks.py | app/main/forms/frameworks.py | from flask.ext.wtf import Form
from wtforms import BooleanField
from wtforms.validators import DataRequired, Length
from dmutils.forms import StripWhitespaceStringField
class SignerDetailsForm(Form):
signerName = StripWhitespaceStringField('Full name', validators=[
DataRequired(message="You must provide the full name of the person signing on behalf of the company."),
Length(max=255, message="You must provide a name under 256 characters.")
])
signerRole = StripWhitespaceStringField(
'Role at the company',
validators=[
DataRequired(message="You must provide the role of the person signing on behalf of the company."),
Length(max=255, message="You must provide a role under 256 characters.")
],
description='The person signing must have the authority to agree to the framework terms, '
'eg director or company secretary.'
)
class ContractReviewForm(Form):
authorisation = BooleanField(
'Authorisation',
validators=[DataRequired(message="You must confirm you have the authority to return the agreement.")]
)
| from flask.ext.wtf import Form
from wtforms import BooleanField
from wtforms.validators import DataRequired, Length
from dmutils.forms import StripWhitespaceStringField
class SignerDetailsForm(Form):
signerName = StripWhitespaceStringField('Full name', validators=[
DataRequired(message="You must provide the full name of the person signing on behalf of the company."),
Length(max=255, message="You must provide a name under 256 characters.")
])
signerRole = StripWhitespaceStringField(
'Role at the company',
validators=[
DataRequired(message="You must provide the role of the person signing on behalf of the company."),
Length(max=255, message="You must provide a role under 256 characters.")
],
description='The person signing must have the authority to agree to the framework terms, '
'eg director or company secretary.'
)
class ContractReviewForm(Form):
authorisation = BooleanField(
'Authorisation',
validators=[DataRequired(message="You must confirm you have the authority to return the agreement.")]
)
class AcceptAgreementVariationForm(Form):
accept_changes = BooleanField(
'I accept these proposed changes',
validators=[
DataRequired(message="If you agree to the proposed changes then you must check the box before saving.")
]
)
| Add form for accepting contract variation | Add form for accepting contract variation
| Python | mit | alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend | from flask.ext.wtf import Form
from wtforms import BooleanField
from wtforms.validators import DataRequired, Length
from dmutils.forms import StripWhitespaceStringField
class SignerDetailsForm(Form):
signerName = StripWhitespaceStringField('Full name', validators=[
DataRequired(message="You must provide the full name of the person signing on behalf of the company."),
Length(max=255, message="You must provide a name under 256 characters.")
])
signerRole = StripWhitespaceStringField(
'Role at the company',
validators=[
DataRequired(message="You must provide the role of the person signing on behalf of the company."),
Length(max=255, message="You must provide a role under 256 characters.")
],
description='The person signing must have the authority to agree to the framework terms, '
'eg director or company secretary.'
)
class ContractReviewForm(Form):
authorisation = BooleanField(
'Authorisation',
validators=[DataRequired(message="You must confirm you have the authority to return the agreement.")]
)
+
+ class AcceptAgreementVariationForm(Form):
+ accept_changes = BooleanField(
+ 'I accept these proposed changes',
+ validators=[
+ DataRequired(message="If you agree to the proposed changes then you must check the box before saving.")
+ ]
+ )
+ | Add form for accepting contract variation | ## Code Before:
from flask.ext.wtf import Form
from wtforms import BooleanField
from wtforms.validators import DataRequired, Length
from dmutils.forms import StripWhitespaceStringField
class SignerDetailsForm(Form):
signerName = StripWhitespaceStringField('Full name', validators=[
DataRequired(message="You must provide the full name of the person signing on behalf of the company."),
Length(max=255, message="You must provide a name under 256 characters.")
])
signerRole = StripWhitespaceStringField(
'Role at the company',
validators=[
DataRequired(message="You must provide the role of the person signing on behalf of the company."),
Length(max=255, message="You must provide a role under 256 characters.")
],
description='The person signing must have the authority to agree to the framework terms, '
'eg director or company secretary.'
)
class ContractReviewForm(Form):
authorisation = BooleanField(
'Authorisation',
validators=[DataRequired(message="You must confirm you have the authority to return the agreement.")]
)
## Instruction:
Add form for accepting contract variation
## Code After:
from flask.ext.wtf import Form
from wtforms import BooleanField
from wtforms.validators import DataRequired, Length
from dmutils.forms import StripWhitespaceStringField
class SignerDetailsForm(Form):
signerName = StripWhitespaceStringField('Full name', validators=[
DataRequired(message="You must provide the full name of the person signing on behalf of the company."),
Length(max=255, message="You must provide a name under 256 characters.")
])
signerRole = StripWhitespaceStringField(
'Role at the company',
validators=[
DataRequired(message="You must provide the role of the person signing on behalf of the company."),
Length(max=255, message="You must provide a role under 256 characters.")
],
description='The person signing must have the authority to agree to the framework terms, '
'eg director or company secretary.'
)
class ContractReviewForm(Form):
authorisation = BooleanField(
'Authorisation',
validators=[DataRequired(message="You must confirm you have the authority to return the agreement.")]
)
class AcceptAgreementVariationForm(Form):
accept_changes = BooleanField(
'I accept these proposed changes',
validators=[
DataRequired(message="If you agree to the proposed changes then you must check the box before saving.")
]
)
| // ... existing code ...
)
class AcceptAgreementVariationForm(Form):
accept_changes = BooleanField(
'I accept these proposed changes',
validators=[
DataRequired(message="If you agree to the proposed changes then you must check the box before saving.")
]
)
// ... rest of the code ... |
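A hypothetical view-side usage of the new form under Flask-WTF; the handler name is a placeholder, not part of the codebase:

    form = AcceptAgreementVariationForm()
    if form.validate_on_submit():
        record_acceptance()                      # placeholder for the real handler
    else:
        errors = form.accept_changes.errors      # carries the DataRequired message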
8abdce9c60c9d2ead839e0065d35128ec16a82a1 | chatterbot/__main__.py | chatterbot/__main__.py | import sys
if __name__ == '__main__':
import chatterbot
if '--version' in sys.argv:
print(chatterbot.__version__)
if 'list_nltk_data' in sys.argv:
import nltk.data
print('\n'.join(nltk.data.path))
| import sys
if __name__ == '__main__':
import chatterbot
if '--version' in sys.argv:
print(chatterbot.__version__)
if 'list_nltk_data' in sys.argv:
import os
import nltk.data
data_directories = []
# Find each data directory in the NLTK path that has content
for path in nltk.data.path:
if os.path.exists(path):
if os.listdir(path):
data_directories.append(path)
print(os.linesep.join(data_directories))
 | Add command line utility to find NLTK data | Add command line utility to find NLTK data
| Python | bsd-3-clause | gunthercox/ChatterBot,vkosuri/ChatterBot | import sys
if __name__ == '__main__':
import chatterbot
if '--version' in sys.argv:
print(chatterbot.__version__)
if 'list_nltk_data' in sys.argv:
+ import os
import nltk.data
- print('\n'.join(nltk.data.path))
+ data_directories = []
+ # Find each data directory in the NLTK path that has content
+ for path in nltk.data.path:
+ if os.path.exists(path):
+ if os.listdir(path):
+ data_directories.append(path)
+
+ print(os.linesep.join(data_directories))
 | Add command line utility to find NLTK data | ## Code Before:
import sys
if __name__ == '__main__':
import chatterbot
if '--version' in sys.argv:
print(chatterbot.__version__)
if 'list_nltk_data' in sys.argv:
import nltk.data
print('\n'.join(nltk.data.path))
## Instruction:
Add command line utility to find NLTK data
## Code After:
import sys
if __name__ == '__main__':
import chatterbot
if '--version' in sys.argv:
print(chatterbot.__version__)
if 'list_nltk_data' in sys.argv:
import os
import nltk.data
data_directories = []
# Find each data directory in the NLTK path that has content
for path in nltk.data.path:
if os.path.exists(path):
if os.listdir(path):
data_directories.append(path)
print(os.linesep.join(data_directories))
| // ... existing code ...
if 'list_nltk_data' in sys.argv:
import os
import nltk.data
// ... modified code ...
data_directories = []
# Find each data directory in the NLTK path that has content
for path in nltk.data.path:
if os.path.exists(path):
if os.listdir(path):
data_directories.append(path)
print(os.linesep.join(data_directories))
// ... rest of the code ... |
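As a rough illustration, the new subcommand can be exercised like this (the `python -m chatterbot` invocation is an assumption based on the `__main__.py` file path):

import subprocess

# Prints only the NLTK data directories that exist and are non-empty,
# one per line, joined with the platform's line separator.
result = subprocess.run(
    ['python', '-m', 'chatterbot', 'list_nltk_data'],
    capture_output=True, text=True,
)
print(result.stdout)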
b5672d55beb837f21d761f50740b93c5b1e0dc5d | napalm/exceptions.py | napalm/exceptions.py |
class ReplaceConfigException(Exception):
pass
class MergeConfigException(Exception):
pass
class SessionLockedException(Exception):
pass
class CommandTimeoutException(Exception):
pass
class CommandErrorException(Exception):
pass
|
class ConnectionException(Exception):
pass
class ReplaceConfigException(Exception):
pass
class MergeConfigException(Exception):
pass
class SessionLockedException(Exception):
pass
class CommandTimeoutException(Exception):
pass
class CommandErrorException(Exception):
pass
| Raise ConnectionException when device unusable | Raise ConnectionException when device unusable
| Python | apache-2.0 | napalm-automation/napalm-base,napalm-automation/napalm-base,Netflix-Skunkworks/napalm-base,napalm-automation/napalm,Netflix-Skunkworks/napalm-base,spotify/napalm,bewing/napalm-base,spotify/napalm,bewing/napalm-base | +
+ class ConnectionException(Exception):
+ pass
class ReplaceConfigException(Exception):
pass
class MergeConfigException(Exception):
pass
class SessionLockedException(Exception):
pass
class CommandTimeoutException(Exception):
pass
class CommandErrorException(Exception):
pass
| Raise ConnectionException when device unusable | ## Code Before:
class ReplaceConfigException(Exception):
pass
class MergeConfigException(Exception):
pass
class SessionLockedException(Exception):
pass
class CommandTimeoutException(Exception):
pass
class CommandErrorException(Exception):
pass
## Instruction:
Raise ConnectionException when device unusable
## Code After:
class ConnectionException(Exception):
pass
class ReplaceConfigException(Exception):
pass
class MergeConfigException(Exception):
pass
class SessionLockedException(Exception):
pass
class CommandTimeoutException(Exception):
pass
class CommandErrorException(Exception):
pass
| // ... existing code ...
class ConnectionException(Exception):
pass
// ... rest of the code ... |
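A hedged sketch of a caller for the new exception (the device object and its open() method are illustrative; only ConnectionException itself comes from this change):

from napalm.exceptions import ConnectionException

def open_or_report(device):
    try:
        device.open()  # assumed driver method that connects to the device
    except ConnectionException:
        print('device is unusable; skipping')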
5000ed8fa0426a7968a0db4a89d221ef800a2da7 | wordsegmenterTC/__init__.py | wordsegmenterTC/__init__.py | import PyICU
SEPARATER = " "
class Segmenter:
def isThai(self, chr):
cVal = ord(chr)
if (cVal >= 3584 and cVal <= 3711):
return True
return False
def segment(self, text):
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(text)
lastPos = bd.first()
retText = ""
try:
while(True):
currentPos = next(bd)
retText += text[lastPos:currentPos]
if (self.isThai(text[currentPos - 1])):
if (currentPos < len(text)):
if (self.isThai(text[currentPos])):
# Separater
retText += SEPARATER
lastPos = currentPos
except StopIteration:
pass
return retText
| import PyICU
SEPARATER = " "
class Segmenter:
def isThai(self, chr):
cVal = ord(chr)
if (cVal >= 3584 and cVal <= 3711):
return True
return False
def segment(self, text):
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(text)
lastPos = bd.first()
retText = ""
try:
while(True):
currentPos = next(bd)
retText += text[lastPos:currentPos]
try:
if (self.isThai(text[currentPos - 1])):
if (currentPos < len(text)):
if (self.isThai(text[currentPos])):
# Separater
retText += SEPARATER
lastPos = currentPos
except:
pass
except StopIteration:
pass
return retText
 | Fix str index out of range in some cases | Fix str index out of range in some cases
| Python | mit | tchayintr/wordsegmenterTC | import PyICU
SEPARATER = " "
class Segmenter:
def isThai(self, chr):
cVal = ord(chr)
if (cVal >= 3584 and cVal <= 3711):
return True
return False
def segment(self, text):
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(text)
lastPos = bd.first()
retText = ""
try:
while(True):
currentPos = next(bd)
retText += text[lastPos:currentPos]
+ try:
- if (self.isThai(text[currentPos - 1])):
+ if (self.isThai(text[currentPos - 1])):
- if (currentPos < len(text)):
+ if (currentPos < len(text)):
- if (self.isThai(text[currentPos])):
+ if (self.isThai(text[currentPos])):
- # Separater
+ # Separater
- retText += SEPARATER
+ retText += SEPARATER
- lastPos = currentPos
+ lastPos = currentPos
+ except:
+ pass
except StopIteration:
pass
return retText
-
- | Fix str index out of range in some cases | ## Code Before:
import PyICU
SEPARATER = " "
class Segmenter:
def isThai(self, chr):
cVal = ord(chr)
if (cVal >= 3584 and cVal <= 3711):
return True
return False
def segment(self, text):
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(text)
lastPos = bd.first()
retText = ""
try:
while(True):
currentPos = next(bd)
retText += text[lastPos:currentPos]
if (self.isThai(text[currentPos - 1])):
if (currentPos < len(text)):
if (self.isThai(text[currentPos])):
# Separater
retText += SEPARATER
lastPos = currentPos
except StopIteration:
pass
return retText
## Instruction:
Fix str index out of range in some cases
## Code After:
import PyICU
SEPARATER = " "
class Segmenter:
def isThai(self, chr):
cVal = ord(chr)
if (cVal >= 3584 and cVal <= 3711):
return True
return False
def segment(self, text):
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(text)
lastPos = bd.first()
retText = ""
try:
while(True):
currentPos = next(bd)
retText += text[lastPos:currentPos]
try:
if (self.isThai(text[currentPos - 1])):
if (currentPos < len(text)):
if (self.isThai(text[currentPos])):
# Separater
retText += SEPARATER
lastPos = currentPos
except:
pass
except StopIteration:
pass
return retText
| ...
retText += text[lastPos:currentPos]
try:
if (self.isThai(text[currentPos - 1])):
if (currentPos < len(text)):
if (self.isThai(text[currentPos])):
# Separater
retText += SEPARATER
lastPos = currentPos
except:
pass
except StopIteration:
...
... |
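A small usage sketch (the Thai input is a placeholder and PyICU must be installed; the import path follows the package name above):

from wordsegmenterTC import Segmenter

segmenter = Segmenter()
# Before the fix, inputs whose break positions touched the string
# boundary could raise "string index out of range"; the new
# try/except swallows that case and keeps segmenting.
print(segmenter.segment(u'\u0e2a\u0e27\u0e31\u0e2a\u0e14\u0e35'))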
1046157fa2e062f12123e110c82851c2484216be | gallery_plugins/plugin_gfycat.py | gallery_plugins/plugin_gfycat.py | import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read()
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read()
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
| import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read().decode("utf8")
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read().decode("utf8")
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
| Update gfycat plugin for python3 support | Update gfycat plugin for python3 support
| Python | mit | regosen/gallery_get | import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
- respond = urllib.urlopen(link).read()
+ respond = urllib.urlopen(link).read().decode("utf8")
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
- respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read()
+ respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read().decode("utf8")
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
| Update gfycat plugin for python3 support | ## Code Before:
import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read()
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read()
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
## Instruction:
Update gfycat plugin for python3 support
## Code After:
import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read().decode("utf8")
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read().decode("utf8")
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
| ...
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read().decode("utf8")
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
...
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read().decode("utf8")
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
... |
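The decode("utf8") calls matter because urlopen(...).read() returns bytes on Python 3, and mixing a str regex pattern with a bytes haystack raises TypeError there. A minimal reproduction of the issue the change avoids:

import re

respond = b'"userName":"someone",'  # what .read() yields on Python 3
# re.findall(r'\"userName\":\"(.+?)\",', respond)  -> TypeError on Python 3
re.findall(r'\"userName\":\"(.+?)\",', respond.decode('utf8'))  # ['someone']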
c538e1a673e208030db04ab9ad3b97e962f3e2ac | download_summaries.py | download_summaries.py |
from utils.summary_downloader import SummaryDownloader
if __name__ == '__main__':
# setting target dir and time interval of interest
tgt_dir = r"D:\nhl\official_and_json\2016-17"
tgt_dir = r"d:\tmp\test"
date = "2017/05/01"
to_date = "2017/05/01"
downloader = SummaryDownloader(tgt_dir, date, to_date, workers=8)
downloader.run()
|
import os
import argparse
from datetime import datetime
from dateutil.parser import parse
from dateutil.relativedelta import relativedelta
from utils.summary_downloader import SummaryDownloader
if __name__ == '__main__':
# retrieving arguments specified on command line
parser = argparse.ArgumentParser(
description='Download NHL game summary reports.')
parser.add_argument(
'-d', '--tgt_dir', dest='tgt_dir', required=True,
metavar='download target directory',
help="Target directories for downloads")
parser.add_argument(
'-f', '--from', dest='from_date', required=False,
metavar='first date to download summaries for',
help="The first date summaries will be downloaded for")
parser.add_argument(
'-t', '--to', dest='to_date', required=False,
metavar='last date to download summaries for',
help="The last date summaries will be downloaded for")
args = parser.parse_args()
# setting target dir and time interval of interest
tgt_dir = args.tgt_dir
from_date = args.from_date
to_date = args.to_date
# setting first date to download summaries for if not specified
if from_date is None:
# using previously downloaded files in target directory to retrieve
# last date data have already been downloaded before
all_dates = list()
for root, dirs, files in os.walk(tgt_dir):
for file in files:
if file.lower().endswith(".zip") and file.lower()[0].isdigit():
try:
curr_date = parse(os.path.basename(file.split(".")[0]))
all_dates.append(curr_date)
except:
pass
from_date = (sorted(all_dates)[-1] + relativedelta(days=1)).strftime(
"%B %d, %Y")
# setting last date to download summaries for...
if to_date is None:
# ...to same as first date to download summaries for if this one is set
if args.from_date:
to_date = from_date
# ...to date before current one otherwise
else:
to_date = (datetime.now() + relativedelta(days=-1)).strftime(
"%B %d, %Y")
downloader = SummaryDownloader(tgt_dir, from_date, to_date, workers=8)
downloader.run()
| Allow control of download process via command line | Allow control of download process via command line
| Python | mit | leaffan/pynhldb | +
+ import os
+ import argparse
+ from datetime import datetime
+
+ from dateutil.parser import parse
+ from dateutil.relativedelta import relativedelta
from utils.summary_downloader import SummaryDownloader
if __name__ == '__main__':
+ # retrieving arguments specified on command line
+ parser = argparse.ArgumentParser(
+ description='Download NHL game summary reports.')
+ parser.add_argument(
+ '-d', '--tgt_dir', dest='tgt_dir', required=True,
+ metavar='download target directory',
+ help="Target directories for downloads")
+ parser.add_argument(
+ '-f', '--from', dest='from_date', required=False,
+ metavar='first date to download summaries for',
+ help="The first date summaries will be downloaded for")
+ parser.add_argument(
+ '-t', '--to', dest='to_date', required=False,
+ metavar='last date to download summaries for',
+ help="The last date summaries will be downloaded for")
+
+ args = parser.parse_args()
+
# setting target dir and time interval of interest
- tgt_dir = r"D:\nhl\official_and_json\2016-17"
- tgt_dir = r"d:\tmp\test"
+ tgt_dir = args.tgt_dir
+ from_date = args.from_date
+ to_date = args.to_date
- date = "2017/05/01"
- to_date = "2017/05/01"
+ # setting first date to download summaries for if not specified
+ if from_date is None:
+ # using previously downloaded files in target directory to retrieve
+ # last date data have already been downloaded before
+ all_dates = list()
+ for root, dirs, files in os.walk(tgt_dir):
+ for file in files:
+ if file.lower().endswith(".zip") and file.lower()[0].isdigit():
+ try:
+ curr_date = parse(os.path.basename(file.split(".")[0]))
+ all_dates.append(curr_date)
+ except:
+ pass
+ from_date = (sorted(all_dates)[-1] + relativedelta(days=1)).strftime(
+ "%B %d, %Y")
+
+ # setting last date to download summaries for...
+ if to_date is None:
+ # ...to same as first date to download summaries for if this one is set
+ if args.from_date:
+ to_date = from_date
+ # ...to date before current one otherwise
+ else:
+ to_date = (datetime.now() + relativedelta(days=-1)).strftime(
+ "%B %d, %Y")
+
- downloader = SummaryDownloader(tgt_dir, date, to_date, workers=8)
+ downloader = SummaryDownloader(tgt_dir, from_date, to_date, workers=8)
downloader.run()
| Allow control of download process via command line | ## Code Before:
from utils.summary_downloader import SummaryDownloader
if __name__ == '__main__':
# setting target dir and time interval of interest
tgt_dir = r"D:\nhl\official_and_json\2016-17"
tgt_dir = r"d:\tmp\test"
date = "2017/05/01"
to_date = "2017/05/01"
downloader = SummaryDownloader(tgt_dir, date, to_date, workers=8)
downloader.run()
## Instruction:
Allow control of download process via command line
## Code After:
import os
import argparse
from datetime import datetime
from dateutil.parser import parse
from dateutil.relativedelta import relativedelta
from utils.summary_downloader import SummaryDownloader
if __name__ == '__main__':
# retrieving arguments specified on command line
parser = argparse.ArgumentParser(
description='Download NHL game summary reports.')
parser.add_argument(
'-d', '--tgt_dir', dest='tgt_dir', required=True,
metavar='download target directory',
help="Target directories for downloads")
parser.add_argument(
'-f', '--from', dest='from_date', required=False,
metavar='first date to download summaries for',
help="The first date summaries will be downloaded for")
parser.add_argument(
'-t', '--to', dest='to_date', required=False,
metavar='last date to download summaries for',
help="The last date summaries will be downloaded for")
args = parser.parse_args()
# setting target dir and time interval of interest
tgt_dir = args.tgt_dir
from_date = args.from_date
to_date = args.to_date
# setting first date to download summaries for if not specified
if from_date is None:
# using previously downloaded files in target directory to retrieve
# last date data have already been downloaded before
all_dates = list()
for root, dirs, files in os.walk(tgt_dir):
for file in files:
if file.lower().endswith(".zip") and file.lower()[0].isdigit():
try:
curr_date = parse(os.path.basename(file.split(".")[0]))
all_dates.append(curr_date)
except:
pass
from_date = (sorted(all_dates)[-1] + relativedelta(days=1)).strftime(
"%B %d, %Y")
# setting last date to download summaries for...
if to_date is None:
# ...to same as first date to download summaries for if this one is set
if args.from_date:
to_date = from_date
# ...to date before current one otherwise
else:
to_date = (datetime.now() + relativedelta(days=-1)).strftime(
"%B %d, %Y")
downloader = SummaryDownloader(tgt_dir, from_date, to_date, workers=8)
downloader.run()
| ...
import os
import argparse
from datetime import datetime
from dateutil.parser import parse
from dateutil.relativedelta import relativedelta
...
# retrieving arguments specified on command line
parser = argparse.ArgumentParser(
description='Download NHL game summary reports.')
parser.add_argument(
'-d', '--tgt_dir', dest='tgt_dir', required=True,
metavar='download target directory',
help="Target directories for downloads")
parser.add_argument(
'-f', '--from', dest='from_date', required=False,
metavar='first date to download summaries for',
help="The first date summaries will be downloaded for")
parser.add_argument(
'-t', '--to', dest='to_date', required=False,
metavar='last date to download summaries for',
help="The last date summaries will be downloaded for")
args = parser.parse_args()
# setting target dir and time interval of interest
tgt_dir = args.tgt_dir
from_date = args.from_date
to_date = args.to_date
# setting first date to download summaries for if not specified
if from_date is None:
# using previously downloaded files in target directory to retrieve
# last date data have already been downloaded before
all_dates = list()
for root, dirs, files in os.walk(tgt_dir):
for file in files:
if file.lower().endswith(".zip") and file.lower()[0].isdigit():
try:
curr_date = parse(os.path.basename(file.split(".")[0]))
all_dates.append(curr_date)
except:
pass
from_date = (sorted(all_dates)[-1] + relativedelta(days=1)).strftime(
"%B %d, %Y")
# setting last date to download summaries for...
if to_date is None:
# ...to same as first date to download summaries for if this one is set
if args.from_date:
to_date = from_date
# ...to date before current one otherwise
else:
to_date = (datetime.now() + relativedelta(days=-1)).strftime(
"%B %d, %Y")
downloader = SummaryDownloader(tgt_dir, from_date, to_date, workers=8)
downloader.run()
... |
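Hypothetical invocations accepted by the parser above (paths and dates are examples only):

# python download_summaries.py -d /data/nhl/summaries
#     resumes from the day after the newest dated .zip in the target dir
# python download_summaries.py -d /data/nhl/summaries -f '2017/05/01'
#     downloads a single day (to_date defaults to from_date)
# python download_summaries.py -d /data/nhl/summaries -f '2017/05/01' -t '2017/05/07'
#     downloads an explicit range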
01516489dbf9ee78128d653b3ebc46730d466425 | apps/api/serializers.py | apps/api/serializers.py | from rest_framework import serializers
from apps.broadcasts.models import Broadcast, Host, Raid, Series
from apps.games.models import Game, Platform
from apps.subscribers.models import Ticket
class HostSerializer(serializers.ModelSerializer):
class Meta:
fields = ('id', 'timestamp', 'username', 'broadcast')
model = Host
class RaidSerializer(serializers.ModelSerializer):
class Meta:
fields = ('id', 'timestamp', 'username', 'broadcast', 'game')
model = Raid
class SeriesSerializer(serializers.ModelSerializer):
class Meta:
model = Series
class GameSerializer(serializers.ModelSerializer):
class Meta:
model = Game
class PlatformSerializer(serializers.ModelSerializer):
class Meta:
model = Platform
class TicketSerializer(serializers.ModelSerializer):
class Meta:
model = Ticket
class BroadcastSerializer(serializers.ModelSerializer):
hosts = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
raids = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
class Meta:
model = Broadcast
| from rest_framework import serializers
from apps.broadcasts.models import Broadcast, Host, Raid, Series
from apps.games.models import Game, Platform
from apps.subscribers.models import Ticket
class HostSerializer(serializers.ModelSerializer):
class Meta:
fields = ('id', 'timestamp', 'username', 'broadcast')
model = Host
class RaidSerializer(serializers.ModelSerializer):
class Meta:
fields = ('id', 'timestamp', 'username', 'broadcast', 'game')
model = Raid
class SeriesSerializer(serializers.ModelSerializer):
class Meta:
model = Series
class GameSerializer(serializers.ModelSerializer):
appearances = serializers.IntegerField(source='appears_on.count', read_only=True)
class Meta:
model = Game
class PlatformSerializer(serializers.ModelSerializer):
class Meta:
model = Platform
class TicketSerializer(serializers.ModelSerializer):
class Meta:
model = Ticket
class BroadcastSerializer(serializers.ModelSerializer):
hosts = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
raids = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
class Meta:
model = Broadcast
| Add appearance count to the API. | Add appearance count to the API.
| Python | apache-2.0 | bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv | from rest_framework import serializers
from apps.broadcasts.models import Broadcast, Host, Raid, Series
from apps.games.models import Game, Platform
from apps.subscribers.models import Ticket
class HostSerializer(serializers.ModelSerializer):
class Meta:
fields = ('id', 'timestamp', 'username', 'broadcast')
model = Host
class RaidSerializer(serializers.ModelSerializer):
class Meta:
fields = ('id', 'timestamp', 'username', 'broadcast', 'game')
model = Raid
class SeriesSerializer(serializers.ModelSerializer):
class Meta:
model = Series
class GameSerializer(serializers.ModelSerializer):
+ appearances = serializers.IntegerField(source='appears_on.count', read_only=True)
+
class Meta:
model = Game
class PlatformSerializer(serializers.ModelSerializer):
class Meta:
model = Platform
class TicketSerializer(serializers.ModelSerializer):
class Meta:
model = Ticket
class BroadcastSerializer(serializers.ModelSerializer):
hosts = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
raids = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
class Meta:
model = Broadcast
| Add appearance count to the API. | ## Code Before:
from rest_framework import serializers
from apps.broadcasts.models import Broadcast, Host, Raid, Series
from apps.games.models import Game, Platform
from apps.subscribers.models import Ticket
class HostSerializer(serializers.ModelSerializer):
class Meta:
fields = ('id', 'timestamp', 'username', 'broadcast')
model = Host
class RaidSerializer(serializers.ModelSerializer):
class Meta:
fields = ('id', 'timestamp', 'username', 'broadcast', 'game')
model = Raid
class SeriesSerializer(serializers.ModelSerializer):
class Meta:
model = Series
class GameSerializer(serializers.ModelSerializer):
class Meta:
model = Game
class PlatformSerializer(serializers.ModelSerializer):
class Meta:
model = Platform
class TicketSerializer(serializers.ModelSerializer):
class Meta:
model = Ticket
class BroadcastSerializer(serializers.ModelSerializer):
hosts = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
raids = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
class Meta:
model = Broadcast
## Instruction:
Add appearance count to the API.
## Code After:
from rest_framework import serializers
from apps.broadcasts.models import Broadcast, Host, Raid, Series
from apps.games.models import Game, Platform
from apps.subscribers.models import Ticket
class HostSerializer(serializers.ModelSerializer):
class Meta:
fields = ('id', 'timestamp', 'username', 'broadcast')
model = Host
class RaidSerializer(serializers.ModelSerializer):
class Meta:
fields = ('id', 'timestamp', 'username', 'broadcast', 'game')
model = Raid
class SeriesSerializer(serializers.ModelSerializer):
class Meta:
model = Series
class GameSerializer(serializers.ModelSerializer):
appearances = serializers.IntegerField(source='appears_on.count', read_only=True)
class Meta:
model = Game
class PlatformSerializer(serializers.ModelSerializer):
class Meta:
model = Platform
class TicketSerializer(serializers.ModelSerializer):
class Meta:
model = Ticket
class BroadcastSerializer(serializers.ModelSerializer):
hosts = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
raids = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
class Meta:
model = Broadcast
| ...
class GameSerializer(serializers.ModelSerializer):
appearances = serializers.IntegerField(source='appears_on.count', read_only=True)
class Meta:
... |
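With the new read-only field, serializing a game exposes its appearance count (the reverse relation name `appears_on` is taken from the `source` argument above; the model setup is assumed):

from apps.api.serializers import GameSerializer
from apps.games.models import Game

game = Game.objects.get(pk=7)       # hypothetical existing game
data = GameSerializer(game).data
assert data['appearances'] == game.appears_on.count()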
81ca235178a742e0041f2483d1f80d367d77264d | markov.py | markov.py | import random
class Markov:
def __init__(self, source, k=5):
self.source = source
self.k = k
self._init_source()
def _init_source(self):
self.seeds = {}
for i in range(len(self.source) - self.k - 1):
seed = tuple(self.source[i:i+self.k])
if seed not in self.seeds:
self.seeds[seed] = []
self.seeds[seed].append(self.source[i+self.k])
print('Markov dict initialized with {} keys'.format(len(self.seeds.keys())))
def chain(self, length=50, seed=None):
if not seed or seed not in self.seeds:
seed = random.choice(list(self.seeds.keys()))
output = []
while len(output) < length:
if seed not in self.seeds:
seed = random.choice(list(self.seeds.keys()))
next = random.choice(self.seeds[seed])
output.append(next)
seed = tuple(list(seed[1:]) + [next])
return ' '.join(output)
def find_seed(self, start_word):
seeds = list(self.seeds.keys())
seeds = list(filter(lambda s: start_word in s, seeds))
return random.choice(seeds)
| import random
class Markov:
def __init__(self, source, k=5):
self.source = source
self.k = k
self._init_source()
def _init_source(self):
self.seeds = {}
for i in range(len(self.source) - self.k - 1):
seed = tuple(self.source[i:i+self.k])
if seed not in self.seeds:
self.seeds[seed] = []
self.seeds[seed].append(self.source[i+self.k])
print('Markov dict initialized with {} keys'.format(len(self.seeds.keys())))
def chain(self, length=50, seed=None):
if not seed or seed not in self.seeds:
seed = random.choice(list(self.seeds.keys()))
output = []
while len(output) < length:
if seed not in self.seeds:
seed = random.choice(list(self.seeds.keys()))
next = random.choice(self.seeds[seed])
output.append(next)
seed = tuple(list(seed[1:]) + [next])
return ' '.join(output)
def find_seed(self, start_word):
seeds = list(self.seeds.keys())
seeds = list(filter(lambda s: start_word in s, seeds))
if len(seeds) == 0:
return None
return random.choice(seeds)
| Fix find_seed behavior when the word is not present | Fix find_seed behavior when the word is not present
| Python | mit | calzoneman/MarkovBot,calzoneman/MarkovBot | import random
class Markov:
def __init__(self, source, k=5):
self.source = source
self.k = k
self._init_source()
def _init_source(self):
self.seeds = {}
for i in range(len(self.source) - self.k - 1):
seed = tuple(self.source[i:i+self.k])
if seed not in self.seeds:
self.seeds[seed] = []
self.seeds[seed].append(self.source[i+self.k])
print('Markov dict initialized with {} keys'.format(len(self.seeds.keys())))
def chain(self, length=50, seed=None):
if not seed or seed not in self.seeds:
seed = random.choice(list(self.seeds.keys()))
output = []
while len(output) < length:
if seed not in self.seeds:
seed = random.choice(list(self.seeds.keys()))
next = random.choice(self.seeds[seed])
output.append(next)
seed = tuple(list(seed[1:]) + [next])
return ' '.join(output)
def find_seed(self, start_word):
seeds = list(self.seeds.keys())
seeds = list(filter(lambda s: start_word in s, seeds))
+ if len(seeds) == 0:
+ return None
return random.choice(seeds)
| Fix find_seed behavior when the word is not present | ## Code Before:
import random
class Markov:
def __init__(self, source, k=5):
self.source = source
self.k = k
self._init_source()
def _init_source(self):
self.seeds = {}
for i in range(len(self.source) - self.k - 1):
seed = tuple(self.source[i:i+self.k])
if seed not in self.seeds:
self.seeds[seed] = []
self.seeds[seed].append(self.source[i+self.k])
print('Markov dict initialized with {} keys'.format(len(self.seeds.keys())))
def chain(self, length=50, seed=None):
if not seed or seed not in self.seeds:
seed = random.choice(list(self.seeds.keys()))
output = []
while len(output) < length:
if seed not in self.seeds:
seed = random.choice(list(self.seeds.keys()))
next = random.choice(self.seeds[seed])
output.append(next)
seed = tuple(list(seed[1:]) + [next])
return ' '.join(output)
def find_seed(self, start_word):
seeds = list(self.seeds.keys())
seeds = list(filter(lambda s: start_word in s, seeds))
return random.choice(seeds)
## Instruction:
Fix find_seed behavior when the word is not present
## Code After:
import random
class Markov:
def __init__(self, source, k=5):
self.source = source
self.k = k
self._init_source()
def _init_source(self):
self.seeds = {}
for i in range(len(self.source) - self.k - 1):
seed = tuple(self.source[i:i+self.k])
if seed not in self.seeds:
self.seeds[seed] = []
self.seeds[seed].append(self.source[i+self.k])
print('Markov dict initialized with {} keys'.format(len(self.seeds.keys())))
def chain(self, length=50, seed=None):
if not seed or seed not in self.seeds:
seed = random.choice(list(self.seeds.keys()))
output = []
while len(output) < length:
if seed not in self.seeds:
seed = random.choice(list(self.seeds.keys()))
next = random.choice(self.seeds[seed])
output.append(next)
seed = tuple(list(seed[1:]) + [next])
return ' '.join(output)
def find_seed(self, start_word):
seeds = list(self.seeds.keys())
seeds = list(filter(lambda s: start_word in s, seeds))
if len(seeds) == 0:
return None
return random.choice(seeds)
| # ... existing code ...
seeds = list(filter(lambda s: start_word in s, seeds))
if len(seeds) == 0:
return None
return random.choice(seeds)
# ... rest of the code ... |
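A quick sketch of the fixed behaviour (the toy corpus is illustrative, assuming the Markov class is importable from markov.py):

from markov import Markov

m = Markov('the quick brown fox jumps over the lazy dog'.split(), k=2)
print(m.find_seed('zebra'))  # None -- previously random.choice([]) raised IndexError
seed = m.find_seed('fox')
if seed is not None:
    print(m.chain(length=10, seed=seed))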
f7fac123bf72af01272bc27a1dfabb788f611908 | bandit/backends/smtp.py | bandit/backends/smtp.py | from __future__ import unicode_literals
from django.core.mail.backends.smtp import EmailBackend as SMTPBackend
from bandit.backends.base import HijackBackendMixin, LogOnlyBackendMixin
class HijackSMTPBackend(HijackBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages drops them to a single email
address.
"""
pass
class LogOnlySMTPBackend(LogOnlyBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages and logs them, allowing
only messages destined for ADMINS to be sent via SMTP.
"""
pass
| from __future__ import unicode_literals
from django.core.mail.backends.smtp import EmailBackend as SMTPBackend
from bandit.backends.base import HijackBackendMixin, LogOnlyBackendMixin
class HijackSMTPBackend(HijackBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages drops them to a single email
address.
"""
pass
class LogOnlySMTPBackend(LogOnlyBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages and logs them, allowing
only messages destined for ADMINS, BANDIT_EMAIL, SERVER_EMAIL, or
BANDIT_WHITELIST to be sent via SMTP.
"""
pass
| Update LogOnlySMTPBackend docstring. Not only admin emails are allowed, all approved emails are still sent. | Update LogOnlySMTPBackend docstring.
Not only admin emails are allowed, all approved emails are still sent.
| Python | bsd-3-clause | caktus/django-email-bandit,caktus/django-email-bandit | from __future__ import unicode_literals
from django.core.mail.backends.smtp import EmailBackend as SMTPBackend
from bandit.backends.base import HijackBackendMixin, LogOnlyBackendMixin
class HijackSMTPBackend(HijackBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages drops them to a single email
address.
"""
pass
class LogOnlySMTPBackend(LogOnlyBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages and logs them, allowing
- only messages destined for ADMINS to be sent via SMTP.
+ only messages destined for ADMINS, BANDIT_EMAIL, SERVER_EMAIL, or
+ BANDIT_WHITELIST to be sent via SMTP.
"""
pass
| Update LogOnlySMTPBackend docstring. Not only admin emails are allowed, all approved emails are still sent. | ## Code Before:
from __future__ import unicode_literals
from django.core.mail.backends.smtp import EmailBackend as SMTPBackend
from bandit.backends.base import HijackBackendMixin, LogOnlyBackendMixin
class HijackSMTPBackend(HijackBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages drops them to a single email
address.
"""
pass
class LogOnlySMTPBackend(LogOnlyBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages and logs them, allowing
only messages destined for ADMINS to be sent via SMTP.
"""
pass
## Instruction:
Update LogOnlySMTPBackend docstring. Not only admin emails are allowed, all approved emails are still sent.
## Code After:
from __future__ import unicode_literals
from django.core.mail.backends.smtp import EmailBackend as SMTPBackend
from bandit.backends.base import HijackBackendMixin, LogOnlyBackendMixin
class HijackSMTPBackend(HijackBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages drops them to a single email
address.
"""
pass
class LogOnlySMTPBackend(LogOnlyBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages and logs them, allowing
only messages destined for ADMINS, BANDIT_EMAIL, SERVER_EMAIL, or
BANDIT_WHITELIST to be sent via SMTP.
"""
pass
| // ... existing code ...
This backend intercepts outgoing messages and logs them, allowing
only messages destined for ADMINS, BANDIT_EMAIL, SERVER_EMAIL, or
BANDIT_WHITELIST to be sent via SMTP.
"""
// ... rest of the code ... |
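The settings the docstring refers to might be wired up like this (setting names follow the docstring above; the addresses are placeholders):

# settings.py
EMAIL_BACKEND = 'bandit.backends.smtp.LogOnlySMTPBackend'
ADMINS = (('Ops', 'ops@example.com'),)
SERVER_EMAIL = 'server@example.com'
BANDIT_EMAIL = 'bandit@example.com'
BANDIT_WHITELIST = ['qa@example.com']  # extra approved recipients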
1e1c8a80199eacb64783a3fa69673059aa04da90 | boardinghouse/tests/test_template_tag.py | boardinghouse/tests/test_template_tag.py | from django.test import TestCase
from .models import AwareModel, NaiveModel
from ..templatetags.boardinghouse import *
class TestTemplateTags(TestCase):
def test_is_schema_aware_filter(self):
self.assertTrue(is_schema_aware(AwareModel()))
self.assertFalse(is_schema_aware(NaiveModel()))
def test_is_shared_model_filter(self):
self.assertFalse(is_shared_model(AwareModel()))
self.assertTrue(is_shared_model(NaiveModel()))
def test_schema_name_filter(self):
Schema.objects.create(name='Schema Name', schema='foo')
self.assertEquals('Schema Name', schema_name('foo'))
self.assertEquals('no schema', schema_name(None))
self.assertEquals('no schema', schema_name(''))
self.assertEquals('no schema', schema_name(False))
self.assertEquals('no schema', schema_name('foobar'))
self.assertEquals('no schema', schema_name('foo_'))
self.assertEquals('no schema', schema_name('foofoo')) | from django.test import TestCase
from .models import AwareModel, NaiveModel
from ..templatetags.boardinghouse import schema_name, is_schema_aware, is_shared_model
from ..models import Schema
class TestTemplateTags(TestCase):
def test_is_schema_aware_filter(self):
self.assertTrue(is_schema_aware(AwareModel()))
self.assertFalse(is_schema_aware(NaiveModel()))
def test_is_shared_model_filter(self):
self.assertFalse(is_shared_model(AwareModel()))
self.assertTrue(is_shared_model(NaiveModel()))
def test_schema_name_filter(self):
Schema.objects.create(name='Schema Name', schema='foo')
self.assertEquals('Schema Name', schema_name('foo'))
self.assertEquals('no schema', schema_name(None))
self.assertEquals('no schema', schema_name(''))
self.assertEquals('no schema', schema_name(False))
self.assertEquals('no schema', schema_name('foobar'))
self.assertEquals('no schema', schema_name('foo_'))
self.assertEquals('no schema', schema_name('foofoo')) | Fix tests since we changed imports. | Fix tests since we changed imports.
| Python | bsd-3-clause | luzfcb/django-boardinghouse,luzfcb/django-boardinghouse,luzfcb/django-boardinghouse | from django.test import TestCase
from .models import AwareModel, NaiveModel
- from ..templatetags.boardinghouse import *
+ from ..templatetags.boardinghouse import schema_name, is_schema_aware, is_shared_model
+ from ..models import Schema
class TestTemplateTags(TestCase):
def test_is_schema_aware_filter(self):
self.assertTrue(is_schema_aware(AwareModel()))
self.assertFalse(is_schema_aware(NaiveModel()))
def test_is_shared_model_filter(self):
self.assertFalse(is_shared_model(AwareModel()))
self.assertTrue(is_shared_model(NaiveModel()))
def test_schema_name_filter(self):
Schema.objects.create(name='Schema Name', schema='foo')
self.assertEquals('Schema Name', schema_name('foo'))
self.assertEquals('no schema', schema_name(None))
self.assertEquals('no schema', schema_name(''))
self.assertEquals('no schema', schema_name(False))
self.assertEquals('no schema', schema_name('foobar'))
self.assertEquals('no schema', schema_name('foo_'))
self.assertEquals('no schema', schema_name('foofoo')) | Fix tests since we changed imports. | ## Code Before:
from django.test import TestCase
from .models import AwareModel, NaiveModel
from ..templatetags.boardinghouse import *
class TestTemplateTags(TestCase):
def test_is_schema_aware_filter(self):
self.assertTrue(is_schema_aware(AwareModel()))
self.assertFalse(is_schema_aware(NaiveModel()))
def test_is_shared_model_filter(self):
self.assertFalse(is_shared_model(AwareModel()))
self.assertTrue(is_shared_model(NaiveModel()))
def test_schema_name_filter(self):
Schema.objects.create(name='Schema Name', schema='foo')
self.assertEquals('Schema Name', schema_name('foo'))
self.assertEquals('no schema', schema_name(None))
self.assertEquals('no schema', schema_name(''))
self.assertEquals('no schema', schema_name(False))
self.assertEquals('no schema', schema_name('foobar'))
self.assertEquals('no schema', schema_name('foo_'))
self.assertEquals('no schema', schema_name('foofoo'))
## Instruction:
Fix tests since we changed imports.
## Code After:
from django.test import TestCase
from .models import AwareModel, NaiveModel
from ..templatetags.boardinghouse import schema_name, is_schema_aware, is_shared_model
from ..models import Schema
class TestTemplateTags(TestCase):
def test_is_schema_aware_filter(self):
self.assertTrue(is_schema_aware(AwareModel()))
self.assertFalse(is_schema_aware(NaiveModel()))
def test_is_shared_model_filter(self):
self.assertFalse(is_shared_model(AwareModel()))
self.assertTrue(is_shared_model(NaiveModel()))
def test_schema_name_filter(self):
Schema.objects.create(name='Schema Name', schema='foo')
self.assertEquals('Schema Name', schema_name('foo'))
self.assertEquals('no schema', schema_name(None))
self.assertEquals('no schema', schema_name(''))
self.assertEquals('no schema', schema_name(False))
self.assertEquals('no schema', schema_name('foobar'))
self.assertEquals('no schema', schema_name('foo_'))
self.assertEquals('no schema', schema_name('foofoo')) | # ... existing code ...
from .models import AwareModel, NaiveModel
from ..templatetags.boardinghouse import schema_name, is_schema_aware, is_shared_model
from ..models import Schema
# ... rest of the code ... |
caf9795cf0f775442bd0c3e06cd550a6e8d0206b | virtool/labels/db.py | virtool/labels/db.py | async def count_samples(db, label_id):
return await db.samples.count_documents({"labels": {"$in": [label_id]}})
| async def attach_sample_count(db, document, label_id):
document.update({"count": await db.samples.count_documents({"labels": {"$in": [label_id]}})})
| Rewrite function for sample count | Rewrite function for sample count
| Python | mit | virtool/virtool,igboyes/virtool,virtool/virtool,igboyes/virtool | - async def count_samples(db, label_id):
+ async def attach_sample_count(db, document, label_id):
- return await db.samples.count_documents({"labels": {"$in": [label_id]}})
+ document.update({"count": await db.samples.count_documents({"labels": {"$in": [label_id]}})})
| Rewrite function for sample count | ## Code Before:
async def count_samples(db, label_id):
return await db.samples.count_documents({"labels": {"$in": [label_id]}})
## Instruction:
Rewrite function for sample count
## Code After:
async def attach_sample_count(db, document, label_id):
document.update({"count": await db.samples.count_documents({"labels": {"$in": [label_id]}})})
| ...
async def attach_sample_count(db, document, label_id):
document.update({"count": await db.samples.count_documents({"labels": {"$in": [label_id]}})})
... |
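Since the rewritten helper mutates the passed document instead of returning a count, a caller sketch (the document shape and db fixture are assumptions):

from virtool.labels.db import attach_sample_count

async def example(db):
    document = {'id': 'label_1', 'name': 'Research'}
    await attach_sample_count(db, document, 'label_1')
    return document['count']  # samples whose "labels" array contains the id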
802b9c2df754b3acf78e9e1facc1802a901e97a2 | furry/furry.py | furry/furry.py | import discord
from discord.ext import commands
class Furry:
"""A cog that adds weird furry commands or something"""
def __init__(self, bot):
self.bot = bot
@commands.command()
async def owo(self):
"""OwO what's this?"""
await self.bot.say("*Notices " + user.mention + "'s bulge* OwO what's this?")
def setup(bot):
bot.add_cog(Furry(bot))
| import discord
from discord.ext import commands
class Furry:
"""A cog that adds weird furry commands or something"""
def __init__(self, bot):
self.bot = bot
@commands.command()
async def owo(self, user : discord.Member):
"""OwO what's this?"""
await self.bot.say("*Notices " + user.mention + "'s bulge* OwO what's this?")
def setup(bot):
bot.add_cog(Furry(bot))
| Fix the command and make it actually work | Fix the command and make it actually work
Pass discord.Member as user
| Python | apache-2.0 | KazroFox/Kaz-Cogs | import discord
from discord.ext import commands
class Furry:
"""A cog that adds weird furry commands or something"""
def __init__(self, bot):
self.bot = bot
@commands.command()
- async def owo(self):
+ async def owo(self, user : discord.Member):
"""OwO what's this?"""
await self.bot.say("*Notices " + user.mention + "'s bulge* OwO what's this?")
def setup(bot):
bot.add_cog(Furry(bot))
| Fix the command and make it actually work | ## Code Before:
import discord
from discord.ext import commands
class Furry:
"""A cog that adds weird furry commands or something"""
def __init__(self, bot):
self.bot = bot
@commands.command()
async def owo(self):
"""OwO what's this?"""
await self.bot.say("*Notices " + user.mention + "'s bulge* OwO what's this?")
def setup(bot):
bot.add_cog(Furry(bot))
## Instruction:
Fix the command and make it actually work
## Code After:
import discord
from discord.ext import commands
class Furry:
"""A cog that adds weird furry commands or something"""
def __init__(self, bot):
self.bot = bot
@commands.command()
async def owo(self, user : discord.Member):
"""OwO what's this?"""
await self.bot.say("*Notices " + user.mention + "'s bulge* OwO what's this?")
def setup(bot):
bot.add_cog(Furry(bot))
| ...
@commands.command()
async def owo(self, user : discord.Member):
"""OwO what's this?"""
... |
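The annotation matters because discord.py's commands extension converts annotated arguments before calling the coroutine, so a chat invocation like the one below now hands owo() a resolved Member instead of hitting a NameError on the previously undefined `user`:

# In chat: !owo @SomeMember
# (the "!" prefix is an assumption; it depends on how the bot is configured)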
e0c3fe2b1ecb4caf33b9ba3dafabe4eedae97c5e | spiralgalaxygame/tests/test_sentinel.py | spiralgalaxygame/tests/test_sentinel.py | import unittest
from spiralgalaxygame.sentinel import Sentinel, Enum
class SentinelTests (unittest.TestCase):
def setUp(self):
self.s = Sentinel('thingy')
def test_name(self):
self.assertIs(self.s.name, 'thingy')
def test_repr(self):
self.assertEqual(repr(self.s), '<Sentinel thingy>')
def test_identity_by_name(self):
s2 = Sentinel('th' + 'ingy') # `+` to avoid string interning.
self.assertIs(s2, self.s)
class EnumTests (unittest.TestCase):
def setUp(self):
self.e = Enum('red', 'green', 'blue')
def test_iter_and_members_are_sentinels(self):
for member in self.e:
self.assertIsInstance(member, Sentinel)
def test_member_as_attr_and_in_operator(self):
self.assertIn(self.e.green, self.e)
| import unittest
from spiralgalaxygame.sentinel import Sentinel, Enum
class SentinelTests (unittest.TestCase):
def setUp(self):
self.s = Sentinel('thingy')
def test_name(self):
self.assertIs(self.s.name, 'thingy')
def test_repr(self):
self.assertEqual(repr(self.s), '<Sentinel thingy>')
def test_identity_by_name(self):
s2 = Sentinel('th' + 'ingy') # `+` to avoid string interning.
self.assertIs(s2, self.s)
class EnumTests (unittest.TestCase):
def setUp(self):
self.e = Enum('red', 'green', 'blue')
def test_repr(self):
self.assertEqual(repr(self.e), '<Enum blue, green, red>')
def test_iter_and_members_are_sentinels(self):
for member in self.e:
self.assertIsInstance(member, Sentinel)
def test_member_as_attr_and_in_operator(self):
self.assertIn(self.e.green, self.e)
| Add a test for ``Enum.__repr__``; ``spiralgalaxygame.sentinel`` now has full coverage. | Add a test for ``Enum.__repr__``; ``spiralgalaxygame.sentinel`` now has full coverage.
| Python | agpl-3.0 | nejucomo/sgg,nejucomo/sgg,nejucomo/sgg | import unittest
from spiralgalaxygame.sentinel import Sentinel, Enum
class SentinelTests (unittest.TestCase):
def setUp(self):
self.s = Sentinel('thingy')
def test_name(self):
self.assertIs(self.s.name, 'thingy')
def test_repr(self):
self.assertEqual(repr(self.s), '<Sentinel thingy>')
def test_identity_by_name(self):
s2 = Sentinel('th' + 'ingy') # `+` to avoid string interning.
self.assertIs(s2, self.s)
class EnumTests (unittest.TestCase):
def setUp(self):
self.e = Enum('red', 'green', 'blue')
+ def test_repr(self):
+ self.assertEqual(repr(self.e), '<Enum blue, green, red>')
+
def test_iter_and_members_are_sentinels(self):
for member in self.e:
self.assertIsInstance(member, Sentinel)
def test_member_as_attr_and_in_operator(self):
self.assertIn(self.e.green, self.e)
| Add a test for ``Enum.__repr__``; ``spiralgalaxygame.sentinel`` now has full coverage. | ## Code Before:
import unittest
from spiralgalaxygame.sentinel import Sentinel, Enum
class SentinelTests (unittest.TestCase):
def setUp(self):
self.s = Sentinel('thingy')
def test_name(self):
self.assertIs(self.s.name, 'thingy')
def test_repr(self):
self.assertEqual(repr(self.s), '<Sentinel thingy>')
def test_identity_by_name(self):
s2 = Sentinel('th' + 'ingy') # `+` to avoid string interning.
self.assertIs(s2, self.s)
class EnumTests (unittest.TestCase):
def setUp(self):
self.e = Enum('red', 'green', 'blue')
def test_iter_and_members_are_sentinels(self):
for member in self.e:
self.assertIsInstance(member, Sentinel)
def test_member_as_attr_and_in_operator(self):
self.assertIn(self.e.green, self.e)
## Instruction:
Add a test for ``Enum.__repr__``; ``spiralgalaxygame.sentinel`` now has full coverage.
## Code After:
import unittest
from spiralgalaxygame.sentinel import Sentinel, Enum
class SentinelTests (unittest.TestCase):
def setUp(self):
self.s = Sentinel('thingy')
def test_name(self):
self.assertIs(self.s.name, 'thingy')
def test_repr(self):
self.assertEqual(repr(self.s), '<Sentinel thingy>')
def test_identity_by_name(self):
s2 = Sentinel('th' + 'ingy') # `+` to avoid string interning.
self.assertIs(s2, self.s)
class EnumTests (unittest.TestCase):
def setUp(self):
self.e = Enum('red', 'green', 'blue')
def test_repr(self):
self.assertEqual(repr(self.e), '<Enum blue, green, red>')
def test_iter_and_members_are_sentinels(self):
for member in self.e:
self.assertIsInstance(member, Sentinel)
def test_member_as_attr_and_in_operator(self):
self.assertIn(self.e.green, self.e)
| // ... existing code ...
def test_repr(self):
self.assertEqual(repr(self.e), '<Enum blue, green, red>')
def test_iter_and_members_are_sentinels(self):
// ... rest of the code ... |
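The expected string in the new assertion implies that Enum.__repr__ lists members sorted by name rather than in declaration order -- an inference from the test alone, since the Enum implementation is not shown here:

from spiralgalaxygame.sentinel import Enum

e = Enum('red', 'green', 'blue')
assert repr(e) == '<Enum blue, green, red>'  # alphabetical, not declaration order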
4ead2d0b2bc987bcc75a5f94c31553a8024aa8a8 | src/vault.py | src/vault.py | from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
print("Warning: you should be authenticated with a root token to effectively create a new token here")
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
| from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def policies_update():
_warning_root_token()
cmd = "VAULT_ADDR=%s vault policy write %s .vault/%s.hcl" % (vault_addr(), vault_policy(), vault_policy())
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
_warning_root_token()
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
def _warning_root_token():
print("Warning: you should probably be authenticated with a root token for this operation")
| Add task to update policies | Add task to update policies
| Python | mit | elifesciences/builder,elifesciences/builder | from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
+ def policies_update():
+ _warning_root_token()
+ cmd = "VAULT_ADDR=%s vault policy write %s .vault/%s.hcl" % (vault_addr(), vault_policy(), vault_policy())
+ local(cmd)
+
+ @task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
- print("Warning: you should be authenticated with a root token to effectively create a new token here")
+ _warning_root_token()
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
+ def _warning_root_token():
+ print("Warning: you should probably be authenticated with a root token for this operation")
+ | Add task to update policies | ## Code Before:
from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
print("Warning: you should be authenticated with a root token to effectively create a new token here")
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
## Instruction:
Add task to update policies
## Code After:
from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def policies_update():
_warning_root_token()
cmd = "VAULT_ADDR=%s vault policy write %s .vault/%s.hcl" % (vault_addr(), vault_policy(), vault_policy())
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
_warning_root_token()
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
def _warning_root_token():
print("Warning: you should probably be authenticated with a root token for this operation")
| ...
@task
def policies_update():
_warning_root_token()
cmd = "VAULT_ADDR=%s vault policy write %s .vault/%s.hcl" % (vault_addr(), vault_policy(), vault_policy())
local(cmd)
@task
def token_lookup(token):
...
def token_create():
_warning_root_token()
token = utils.get_input('token display name: ')
...
local(cmd)
def _warning_root_token():
print("Warning: you should probably be authenticated with a root token for this operation")
... |
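Hypothetical Fabric invocations for the tasks above (the task namespace depends on how src/vault.py is registered with the fabfile):

# fab vault.login
# fab vault.policies_update        # prints the root-token warning first
# fab vault.token_create           # prompts for a token display name
# fab vault.token_revoke:<token>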
1f4349e20a98e622124c1e5bc121053e4775152f | login/signals.py | login/signals.py | from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import User, Group
from .models import UserProfile, GroupProfile
# Add signal to automatically extend user profile
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
# Add signal to automatically extend group profile
@receiver(post_save, sender=Group)
def create_group_profile(sender, instance, created, **kwargs):
if created:
GroupProfile.objects.create(group=instance)
| from django.db.models.signals import post_save, m2m_changed
from django.dispatch import receiver
from django.contrib.auth.models import User, Group
from login.permissions import cache_clear
from .models import UserProfile, GroupProfile
# Add signal to automatically extend user profile
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
@receiver(m2m_changed, sender=User.groups.through)
def user_groups_changed_handler(sender, instance, action, **kwargs):
if action == 'post_add' or action == 'post_remove':
# invalidate permissions cache
cache_clear()
# Add signal to automatically extend group profile
@receiver(post_save, sender=Group)
def create_group_profile(sender, instance, created, **kwargs):
if created:
GroupProfile.objects.create(group=instance)
| Add signal to invalidate cache when groups change. | Add signal to invalidate cache when groups change.
| Python | bsd-3-clause | EMSTrack/WebServerAndClient,EMSTrack/WebServerAndClient,EMSTrack/WebServerAndClient | - from django.db.models.signals import post_save
+ from django.db.models.signals import post_save, m2m_changed
from django.dispatch import receiver
from django.contrib.auth.models import User, Group
+ from login.permissions import cache_clear
from .models import UserProfile, GroupProfile
# Add signal to automatically extend user profile
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
+ @receiver(m2m_changed, sender=User.groups.through)
+ def user_groups_changed_handler(sender, instance, action, **kwargs):
+ if action == 'post_add' or action == 'post_remove':
+
+ # invalidate permissions cache
+ cache_clear()
+
+
# Add signal to automatically extend group profile
@receiver(post_save, sender=Group)
def create_group_profile(sender, instance, created, **kwargs):
if created:
GroupProfile.objects.create(group=instance)
| Add signal to invalidate cache when groups change. | ## Code Before:
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import User, Group
from .models import UserProfile, GroupProfile
# Add signal to automatically extend user profile
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
# Add signal to automatically extend group profile
@receiver(post_save, sender=Group)
def create_group_profile(sender, instance, created, **kwargs):
if created:
GroupProfile.objects.create(group=instance)
## Instruction:
Add signal to invalidate cache when groups change.
## Code After:
from django.db.models.signals import post_save, m2m_changed
from django.dispatch import receiver
from django.contrib.auth.models import User, Group
from login.permissions import cache_clear
from .models import UserProfile, GroupProfile
# Add signal to automatically extend user profile
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
@receiver(m2m_changed, sender=User.groups.through)
def user_groups_changed_handler(sender, instance, action, **kwargs):
if action == 'post_add' or action == 'post_remove':
# invalidate permissions cache
cache_clear()
# Add signal to automatically extend group profile
@receiver(post_save, sender=Group)
def create_group_profile(sender, instance, created, **kwargs):
if created:
GroupProfile.objects.create(group=instance)
| # ... existing code ...
from django.db.models.signals import post_save, m2m_changed
from django.dispatch import receiver
# ... modified code ...
from login.permissions import cache_clear
from .models import UserProfile, GroupProfile
...
@receiver(m2m_changed, sender=User.groups.through)
def user_groups_changed_handler(sender, instance, action, **kwargs):
if action == 'post_add' or action == 'post_remove':
# invalidate permissions cache
cache_clear()
# Add signal to automatically extend group profile
# ... rest of the code ... |
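A minimal sketch of what the imported cache_clear could look like if permissions are memoized with functools.lru_cache (hypothetical; login/permissions.py is not shown in this record):

from functools import lru_cache

@lru_cache(maxsize=None)
def get_permissions(user_id):
    # Stand-in for the expensive permission lookup being cached.
    return set()

def cache_clear():
    # Drop every memoized entry whenever group membership changes.
    get_permissions.cache_clear()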
96733510eeee4b06c3b509097e7c26fd143d687f | plugins/clue/clue.py | plugins/clue/clue.py | from __future__ import unicode_literals
import re
crontable = []
outputs = []
state = {}
class ClueState:
def __init__(self):
self.count = 0
self.clue = ''
def process_message(data):
channel = data['channel']
if channel not in state.keys():
state[channel] = ClueState()
st = state[channel]
# Count the number of messages we have seen in this channel since
# stony last repeated a clue.
st.count = st.count + 1
if re.search("\n?\s*>", data['text']):
st.clue = data['text']
st.count = 1
else:
if st.count % 10 == 0:
outputs.append([channel, st.clue])
| from __future__ import unicode_literals
import re
crontable = []
outputs = []
state = {}
class ClueState:
def __init__(self):
self.count = 0
self.clue = ''
def process_message(data):
channel = data['channel']
if channel not in state.keys():
state[channel] = ClueState()
st = state[channel]
# Count the number of messages we have seen in this channel since
# stony last repeated a clue.
st.count = st.count + 1
if re.search("^\s*>", data['text'], re.MULTILINE):
st.clue = data['text']
st.count = 1
else:
if st.count % 10 == 0:
outputs.append([channel, st.clue])
| Fix to only match '>' at the beginning of a line | Fix to only match '>' at the beginning of a line
Which was the intention with the '\n' in the pattern before, but I had
made it optional for the common case of the '>' being at the beginning
of the message, which of course had the side effect of allowing the
'>' to be matched anywhere (bleh).
Now, with a MULTILINE-mode regexp and '^' in the pattern, the '>' will
only be treated as significant at the beginning of a line (preceded
by optional whitespace).
| Python | mit | cworth-gh/stony | from __future__ import unicode_literals
import re
crontable = []
outputs = []
state = {}
class ClueState:
def __init__(self):
self.count = 0
self.clue = ''
def process_message(data):
channel = data['channel']
if channel not in state.keys():
state[channel] = ClueState()
st = state[channel]
# Count the number of messages we have seen in this channel since
# stony last repeated a clue.
st.count = st.count + 1
- if re.search("\n?\s*>", data['text']):
+ if re.search("^\s*>", data['text'], re.MULTILINE):
st.clue = data['text']
st.count = 1
else:
if st.count % 10 == 0:
outputs.append([channel, st.clue])
| Fix to only match '>' at the beginning of a line | ## Code Before:
from __future__ import unicode_literals
import re
crontable = []
outputs = []
state = {}
class ClueState:
def __init__(self):
self.count = 0
self.clue = ''
def process_message(data):
channel = data['channel']
if channel not in state.keys():
state[channel] = ClueState()
st = state[channel]
# Count the number of messages we have seen in this channel since
# stony last repeated a clue.
st.count = st.count + 1
if re.search("\n?\s*>", data['text']):
st.clue = data['text']
st.count = 1
else:
if st.count % 10 == 0:
outputs.append([channel, st.clue])
## Instruction:
Fix to only match '>' at the beginning of a line
## Code After:
from __future__ import unicode_literals
import re
crontable = []
outputs = []
state = {}
class ClueState:
def __init__(self):
self.count = 0
self.clue = ''
def process_message(data):
channel = data['channel']
if channel not in state.keys():
state[channel] = ClueState()
st = state[channel]
# Count the number of messages we have seen in this channel since
# stony last repeated a clue.
st.count = st.count + 1
if re.search("^\s*>", data['text'], re.MULTILINE):
st.clue = data['text']
st.count = 1
else:
if st.count % 10 == 0:
outputs.append([channel, st.clue])
| // ... existing code ...
if re.search("^\s*>", data['text'], re.MULTILINE):
st.clue = data['text']
// ... rest of the code ... |
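A standalone check of the behavioral difference the commit message describes (illustrative strings):

import re

quoted = "> the whole message is a clue"
inline = "he said > somewhere in the middle"

assert re.search(r"\n?\s*>", quoted) and re.search(r"\n?\s*>", inline)  # old pattern: matches anywhere
assert re.search(r"^\s*>", quoted, re.MULTILINE)                        # new pattern: line start only
assert not re.search(r"^\s*>", inline, re.MULTILINE)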
f0f3c50a65aae1393928579ca0e48891d1ac8f18 | app/access_control.py | app/access_control.py | from functools import wraps
from flask import flash, redirect, url_for, session
from app import views
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Please login to continue.", "danger")
return redirect(url_for("login"))
return decorated_function | from functools import wraps
from flask import flash, redirect, url_for, session
from app import views
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Please login to continue.", "danger")
return redirect(url_for("login"))
return decorated_function
def for_guests(f):
@wraps(f)
    def decorated_function(*args, **kwargs):
if not 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Invalid Action.", "danger")
return redirect(url_for("dashboard"))
return decorated_function | Create a decorator `for_guest` for access control on pages for guests. | Create a decorator `for_guest` for access control on pages for guests.
| Python | mit | alchermd/flask-todo-app,alchermd/flask-todo-app | from functools import wraps
from flask import flash, redirect, url_for, session
from app import views
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Please login to continue.", "danger")
return redirect(url_for("login"))
return decorated_function
+
+ def for_guests(f):
+ @wraps(f)
+     def decorated_function(*args, **kwargs):
+ if not 'logged_in' in session:
+ return f(*args, **kwargs)
+ else:
+ flash("Invalid Action.", "danger")
+ return redirect(url_for("dashboard"))
+ return decorated_function | Create a decorator `for_guest` for access control on pages for guests. | ## Code Before:
from functools import wraps
from flask import flash, redirect, url_for, session
from app import views
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Please login to continue.", "danger")
return redirect(url_for("login"))
return decorated_function
## Instruction:
Create a decorator `for_guest` for access control on pages for guests.
## Code After:
from functools import wraps
from flask import flash, redirect, url_for, session
from app import views
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Please login to continue.", "danger")
return redirect(url_for("login"))
return decorated_function
def for_guests(f):
@wraps(f)
def decorated_function(*args, **kwrags):
if not 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Invalid Action.", "danger")
return redirect(url_for("dashboard"))
return decorated_function | ...
return decorated_function
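A minimal usage sketch of the two decorators (hypothetical routes; assumes the usual Flask app setup):

from flask import Flask
from app.access_control import login_required, for_guests

app = Flask(__name__)

@app.route('/dashboard')
@login_required
def dashboard():
    return "only reachable with 'logged_in' in the session"

@app.route('/login')
@for_guests
def login():
    return "only reachable without an active session"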
def for_guests(f):
@wraps(f)
def decorated_function(*args, **kwrags):
if not 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Invalid Action.", "danger")
return redirect(url_for("dashboard"))
return decorated_function
... |
25395cbd3536c1bc2ee1a6bc44a34ea7fc5b2a13 | src/priestLogger.py | src/priestLogger.py | import logging
from logging.handlers import TimedRotatingFileHandler
class PriestLogger:
def __init__(self):
logHandler = TimedRotatingFileHandler("C:\\lucas\\PriestPy\\Dropbox\\logs\\HowToPriest",when="midnight",backupCount=365)
logFormatter = logging.Formatter('%(asctime)s - %(message)s')
logHandler.setFormatter( logFormatter )
self.logger = logging.getLogger( 'H2PLogger' )
self.logger.addHandler( logHandler )
self.logger.setLevel( logging.INFO )
def log(self, message):
self.logger.info(message.channel.name + ' - ' + message.author.name+': ' + message.content) | import logging
from logging.handlers import TimedRotatingFileHandler
from time import sleep
class PriestLogger:
def __init__(self):
logHandler = TimedRotatingFileHandler("C:\\lucas\\PriestPy\\Dropbox\\logs\\HowToPriest",when="midnight",backupCount=365)
logFormatter = logging.Formatter('%(asctime)s - %(message)s')
logHandler.setFormatter( logFormatter )
self.logger = logging.getLogger( 'H2PLogger' )
self.logger.addHandler( logHandler )
self.logger.setLevel( logging.INFO )
def log(self, message):
        sent = False
while not sent:
try:
self.logger.info(message.channel.name + ' - ' + message.author.name+': ' + message.content)
                sent = True
except:
sleep(0.2)
| Fix for conflict in opening file | Fix for conflict in opening file
| Python | mit | lgkern/PriestPy | import logging
from logging.handlers import TimedRotatingFileHandler
+ from time import sleep
class PriestLogger:
def __init__(self):
logHandler = TimedRotatingFileHandler("C:\\lucas\\PriestPy\\Dropbox\\logs\\HowToPriest",when="midnight",backupCount=365)
logFormatter = logging.Formatter('%(asctime)s - %(message)s')
logHandler.setFormatter( logFormatter )
self.logger = logging.getLogger( 'H2PLogger' )
self.logger.addHandler( logHandler )
self.logger.setLevel( logging.INFO )
def log(self, message):
+         sent = False
+ while not sent:
+ try:
- self.logger.info(message.channel.name + ' - ' + message.author.name+': ' + message.content)
+ self.logger.info(message.channel.name + ' - ' + message.author.name+': ' + message.content)
+                 sent = True
+ except:
+ sleep(0.2)
+ | Fix for conflict in opening file | ## Code Before:
import logging
from logging.handlers import TimedRotatingFileHandler
class PriestLogger:
def __init__(self):
logHandler = TimedRotatingFileHandler("C:\\lucas\\PriestPy\\Dropbox\\logs\\HowToPriest",when="midnight",backupCount=365)
logFormatter = logging.Formatter('%(asctime)s - %(message)s')
logHandler.setFormatter( logFormatter )
self.logger = logging.getLogger( 'H2PLogger' )
self.logger.addHandler( logHandler )
self.logger.setLevel( logging.INFO )
def log(self, message):
self.logger.info(message.channel.name + ' - ' + message.author.name+': ' + message.content)
## Instruction:
Fix for conflict in opening file
## Code After:
import logging
from logging.handlers import TimedRotatingFileHandler
from time import sleep
class PriestLogger:
def __init__(self):
logHandler = TimedRotatingFileHandler("C:\\lucas\\PriestPy\\Dropbox\\logs\\HowToPriest",when="midnight",backupCount=365)
logFormatter = logging.Formatter('%(asctime)s - %(message)s')
logHandler.setFormatter( logFormatter )
self.logger = logging.getLogger( 'H2PLogger' )
self.logger.addHandler( logHandler )
self.logger.setLevel( logging.INFO )
def log(self, message):
        sent = False
while not sent:
try:
self.logger.info(message.channel.name + ' - ' + message.author.name+': ' + message.content)
                sent = True
except:
sleep(0.2)
| # ... existing code ...
from logging.handlers import TimedRotatingFileHandler
from time import sleep
# ... modified code ...
def log(self, message):
        sent = False
while not sent:
try:
self.logger.info(message.channel.name + ' - ' + message.author.name+': ' + message.content)
                sent = True
except:
sleep(0.2)
# ... rest of the code ... |
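A bounded variant of the retry loop above, sketched with an attempt limit and exponential backoff (hypothetical; the record's version retries forever and swallows every exception):

from time import sleep

def log_with_retry(logger, text, attempts=5, base_delay=0.2):
    for attempt in range(attempts):
        try:
            logger.info(text)
            return True
        except (IOError, OSError):
            # Back off a little longer after each failed attempt.
            sleep(base_delay * (2 ** attempt))
    return False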
a2b418c89e6ad3f85c88b7dfcc2238d62cb2e36e | karanja_me/polls/tests.py | karanja_me/polls/tests.py | from django.test import TestCase
# Create your tests here.
| import datetime
from django.utils import timezone
from django.test import TestCase
from .models import Question
class QuestionMethodTest(TestCase):
def test_was_published_recently_with_future_question(self):
"""
        was_published_recently() should return False for questions whose
        pub_date is in the future
"""
time = timezone.now() + datetime.timedelta(days = 30)
future_question = Question(pub_date = time)
        self.assertEqual(future_question.was_published_recently(), False)
| Test case for Question method added | Test case for Question method added
A test case to avoid future-published questions being read as recently added
| Python | mit | yoda-yoda/django-dive-in,yoda-yoda/django-dive-in,denisKaranja/django-dive-in,denisKaranja/django-dive-in | + import datetime
+
+ from django.utils import timezone
from django.test import TestCase
- # Create your tests here.
+ from .models import Question
+ class QuestionMethodTest(TestCase):
+
+ def test_was_published_recently_with_future_question(self):
+ """
+         was_published_recently() should return False for questions whose
+         pub_date is in the future
+ """
+ time = timezone.now() + datetime.timedelta(days = 30)
+ future_question = Question(pub_date = time)
+         self.assertEqual(future_question.was_published_recently(), False)
+
+ | Test case for Question method added | ## Code Before:
from django.test import TestCase
# Create your tests here.
## Instruction:
Test case for Question method added
## Code After:
import datetime
from django.utils import timezone
from django.test import TestCase
from .models import Question
class QuestionMethodTest(TestCase):
def test_was_published_recently_with_future_question(self):
"""
was_published_recenlty() should return False for questions that the
pub_date is in the future
"""
time = timezone.now() + datetime.timedelta(days = 30)
future_question = Question(pub_date = time)
self.assertEqual(future_question_was_published_recently(), False)
| ...
import datetime
from django.utils import timezone
from django.test import TestCase
...
from .models import Question
class QuestionMethodTest(TestCase):
def test_was_published_recently_with_future_question(self):
"""
was_published_recenlty() should return False for questions that the
pub_date is in the future
"""
time = timezone.now() + datetime.timedelta(days = 30)
future_question = Question(pub_date = time)
self.assertEqual(future_question_was_published_recently(), False)
... |
8b7660193f5a18a2d0addd218f2fd2d77d8f98ac | app/grandchallenge/cases/serializers.py | app/grandchallenge/cases/serializers.py | from rest_framework import serializers
from grandchallenge.cases.models import Image, ImageFile
class ImageFileSerializer(serializers.ModelSerializer):
class Meta:
model = ImageFile
fields = ("pk", "image", "file")
class ImageSerializer(serializers.ModelSerializer):
files = ImageFileSerializer(many=True, read_only=True)
class Meta:
model = Image
fields = (
"pk",
"name",
"study",
"files",
"width",
"height",
"depth",
"color_space",
"modality",
"eye_choice",
"stereoscopic_choice",
"field_of_view",
"shape_without_color",
"shape",
)
| from rest_framework import serializers
from grandchallenge.cases.models import Image, ImageFile
class ImageFileSerializer(serializers.ModelSerializer):
class Meta:
model = ImageFile
fields = ("pk", "image", "file", "image_type")
class ImageSerializer(serializers.ModelSerializer):
files = ImageFileSerializer(many=True, read_only=True)
class Meta:
model = Image
fields = (
"pk",
"name",
"study",
"files",
"width",
"height",
"depth",
"color_space",
"modality",
"eye_choice",
"stereoscopic_choice",
"field_of_view",
"shape_without_color",
"shape",
)
| Return image type of file in api | Return image type of file in api
| Python | apache-2.0 | comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django | from rest_framework import serializers
from grandchallenge.cases.models import Image, ImageFile
class ImageFileSerializer(serializers.ModelSerializer):
class Meta:
model = ImageFile
- fields = ("pk", "image", "file")
+ fields = ("pk", "image", "file", "image_type")
class ImageSerializer(serializers.ModelSerializer):
files = ImageFileSerializer(many=True, read_only=True)
class Meta:
model = Image
fields = (
"pk",
"name",
"study",
"files",
"width",
"height",
"depth",
"color_space",
"modality",
"eye_choice",
"stereoscopic_choice",
"field_of_view",
"shape_without_color",
"shape",
)
| Return image type of file in api | ## Code Before:
from rest_framework import serializers
from grandchallenge.cases.models import Image, ImageFile
class ImageFileSerializer(serializers.ModelSerializer):
class Meta:
model = ImageFile
fields = ("pk", "image", "file")
class ImageSerializer(serializers.ModelSerializer):
files = ImageFileSerializer(many=True, read_only=True)
class Meta:
model = Image
fields = (
"pk",
"name",
"study",
"files",
"width",
"height",
"depth",
"color_space",
"modality",
"eye_choice",
"stereoscopic_choice",
"field_of_view",
"shape_without_color",
"shape",
)
## Instruction:
Return image type of file in api
## Code After:
from rest_framework import serializers
from grandchallenge.cases.models import Image, ImageFile
class ImageFileSerializer(serializers.ModelSerializer):
class Meta:
model = ImageFile
fields = ("pk", "image", "file", "image_type")
class ImageSerializer(serializers.ModelSerializer):
files = ImageFileSerializer(many=True, read_only=True)
class Meta:
model = Image
fields = (
"pk",
"name",
"study",
"files",
"width",
"height",
"depth",
"color_space",
"modality",
"eye_choice",
"stereoscopic_choice",
"field_of_view",
"shape_without_color",
"shape",
)
| # ... existing code ...
model = ImageFile
fields = ("pk", "image", "file", "image_type")
# ... rest of the code ... |
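With the extra field, serializing a file now carries its type (sketch; values illustrative):

# Given some existing ImageFile instance `image_file`:
file_data = ImageFileSerializer(image_file).data
assert "image_type" in file_data
# e.g. {"pk": "...", "image": "...", "file": "/media/images/slice.mha", "image_type": "MHD"}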
a5585f7e437a402b10e4fa9094172a10a8955eac | __init__.py | __init__.py | import sys
import importlib
sys.modules[__package__] = importlib.import_module('.dataset', __package__)
| import os
__path__.append(os.path.dirname(__file__) + '/dataset')
from .dataset import *
| Replace importlib hack with __path__ | Replace importlib hack with __path__
| Python | apache-2.0 | analysiscenter/dataset | - import sys
+ import os
- import importlib
- sys.modules[__package__] = importlib.import_module('.dataset', __package__)
+ __path__.append(os.path.dirname(__file__) + '/dataset')
+ from .dataset import *
| Replace importlib hack with __path__ | ## Code Before:
import sys
import importlib
sys.modules[__package__] = importlib.import_module('.dataset', __package__)
## Instruction:
Replace importlib hack with __path__
## Code After:
import os
__path__.append(os.path.dirname(__file__) + '/dataset')
from .dataset import *
| // ... existing code ...
import os
__path__.append(os.path.dirname(__file__) + '/dataset')
from .dataset import *
// ... rest of the code ... |
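The effect of the new __init__.py, sketched for a hypothetical package `pkg` whose code lives under pkg/dataset/:

# pkg/__init__.py contains the two lines above; pkg/dataset/tools.py exists.
import pkg.tools        # resolved through the appended __path__ entry
from pkg import *       # names re-exported by `from .dataset import *`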
3bf027eaf2c62ec6fcb3192cfddc5a2aa8b73895 | oneflow/settings/chani.py | oneflow/settings/chani.py |
import os
from sparks.django.settings import include_snippets
include_snippets(
os.path.dirname(__file__), (
'00_development',
# Activate this to test 404/500…
#'00_production',
'1flow_io',
'common',
'db_common',
'db_development',
'cache_common',
'cache_development',
'mail_development',
'raven_development',
'common_development',
'rosetta',
'djdt',
),
globals()
)
# Override `1flow_net` for local development
SITE_DOMAIN = 'localhost'
|
import os
from sparks.django.settings import include_snippets
include_snippets(
os.path.dirname(__file__), (
'00_development',
# Activate this to test 404/500…
#'00_production',
'1flow_io',
'common',
'db_common',
'db_development',
'cache_common',
'cache_development',
'mail_development',
'raven_development',
'common_development',
'rosetta',
'djdt',
),
globals()
)
# Override `1flow_net` for local development
SITE_DOMAIN = 'localhost:8000'
EMAIL_HOST = 'gurney'
#EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
#EMAIL_FILE_PATH = '/tmp/1flow.mail'
| Make mail working on my dev machine. | Make mail working on my dev machine. | Python | agpl-3.0 | 1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow |
import os
from sparks.django.settings import include_snippets
include_snippets(
os.path.dirname(__file__), (
'00_development',
# Activate this to test 404/500…
#'00_production',
'1flow_io',
'common',
'db_common',
'db_development',
'cache_common',
'cache_development',
'mail_development',
'raven_development',
'common_development',
'rosetta',
'djdt',
),
globals()
)
# Override `1flow_net` for local development
- SITE_DOMAIN = 'localhost'
+ SITE_DOMAIN = 'localhost:8000'
+ EMAIL_HOST = 'gurney'
+ #EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
+ #EMAIL_FILE_PATH = '/tmp/1flow.mail'
+ | Make mail working on my dev machine. | ## Code Before:
import os
from sparks.django.settings import include_snippets
include_snippets(
os.path.dirname(__file__), (
'00_development',
# Activate this to test 404/500…
#'00_production',
'1flow_io',
'common',
'db_common',
'db_development',
'cache_common',
'cache_development',
'mail_development',
'raven_development',
'common_development',
'rosetta',
'djdt',
),
globals()
)
# Override `1flow_net` for local development
SITE_DOMAIN = 'localhost'
## Instruction:
Make mail working on my dev machine.
## Code After:
import os
from sparks.django.settings import include_snippets
include_snippets(
os.path.dirname(__file__), (
'00_development',
# Activate this to test 404/500…
#'00_production',
'1flow_io',
'common',
'db_common',
'db_development',
'cache_common',
'cache_development',
'mail_development',
'raven_development',
'common_development',
'rosetta',
'djdt',
),
globals()
)
# Override `1flow_net` for local development
SITE_DOMAIN = 'localhost:8000'
EMAIL_HOST = 'gurney'
#EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
#EMAIL_FILE_PATH = '/tmp/1flow.mail'
| // ... existing code ...
# Override `1flow_net` for local development
SITE_DOMAIN = 'localhost:8000'
EMAIL_HOST = 'gurney'
#EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
#EMAIL_FILE_PATH = '/tmp/1flow.mail'
// ... rest of the code ... |
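A quick way to exercise these mail settings from a Django shell (sketch; addresses illustrative):

from django.core.mail import send_mail

# Goes through SMTP on EMAIL_HOST ('gurney'); enabling the commented-out
# filebased backend instead writes each message under /tmp/1flow.mail.
send_mail('test subject', 'test body', 'dev@localhost', ['dev@localhost'])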
c9340c70bd6d974e98244a1c3208c3a061aec9bb | tests/cortests.py | tests/cortests.py |
import unittest
import numpy as np
from corfunc import porod, guinier, fitguinier
class TestStringMethods(unittest.TestCase):
def test_porod(self):
self.assertEqual(porod(1, 1, 0), 1)
def test_guinier(self):
self.assertEqual(guinier(1, 1, 0), 1)
def test_sane_fit(self):
A = np.pi
B = -np.sqrt(2)
x = np.linspace(0, 1, 71)
y = guinier(x, A, B)
g = fitguinier(x, y)[0]
self.assertAlmostEqual(B, g[0])
self.assertAlmostEqual(A, np.exp(g[1]))
if __name__ == '__main__':
unittest.main()
|
import unittest
import numpy as np
from corfunc import porod, guinier, fitguinier, smooth
class TestStringMethods(unittest.TestCase):
def test_porod(self):
self.assertEqual(porod(1, 1, 0), 1)
def test_guinier(self):
self.assertEqual(guinier(1, 1, 0), 1)
def test_sane_fit(self):
A = np.pi
B = -np.sqrt(2)
x = np.linspace(0, 1, 71)
y = guinier(x, A, B)
g = fitguinier(x, y)[0]
self.assertAlmostEqual(B, g[0])
self.assertAlmostEqual(A, np.exp(g[1]))
def test_smooth(self):
f = lambda x: np.sqrt(x)*np.sin(x/10)
g = lambda x: np.log(1+x)
s = smooth(f, g, 25, 75)
x = np.linspace(0, 1, 100)
fg = np.vstack([f(x), g(x)])
small = np.min(fg, axis=0)
large = np.max(fg, axis=0)
x = np.linspace(0, 1, 100)
self.assertTrue(np.all(small <= s(x)))
self.assertTrue(np.all(s(x) <= large))
self.assertEqual(s(0), f(0))
self.assertEqual(s(25), f(25))
self.assertEqual(s(75), g(75))
self.assertEqual(s(100), g(100))
if __name__ == '__main__':
unittest.main()
| Add tests for function smoothing | Add tests for function smoothing
| Python | mit | rprospero/corfunc-py |
import unittest
import numpy as np
- from corfunc import porod, guinier, fitguinier
+ from corfunc import porod, guinier, fitguinier, smooth
class TestStringMethods(unittest.TestCase):
def test_porod(self):
self.assertEqual(porod(1, 1, 0), 1)
def test_guinier(self):
self.assertEqual(guinier(1, 1, 0), 1)
def test_sane_fit(self):
A = np.pi
B = -np.sqrt(2)
x = np.linspace(0, 1, 71)
y = guinier(x, A, B)
g = fitguinier(x, y)[0]
self.assertAlmostEqual(B, g[0])
self.assertAlmostEqual(A, np.exp(g[1]))
+ def test_smooth(self):
+ f = lambda x: np.sqrt(x)*np.sin(x/10)
+ g = lambda x: np.log(1+x)
+ s = smooth(f, g, 25, 75)
+
+ x = np.linspace(0, 1, 100)
+ fg = np.vstack([f(x), g(x)])
+ small = np.min(fg, axis=0)
+ large = np.max(fg, axis=0)
+
+ x = np.linspace(0, 1, 100)
+
+ self.assertTrue(np.all(small <= s(x)))
+ self.assertTrue(np.all(s(x) <= large))
+ self.assertEqual(s(0), f(0))
+ self.assertEqual(s(25), f(25))
+ self.assertEqual(s(75), g(75))
+ self.assertEqual(s(100), g(100))
+
+
if __name__ == '__main__':
unittest.main()
| Add tests for function smoothing | ## Code Before:
import unittest
import numpy as np
from corfunc import porod, guinier, fitguinier
class TestStringMethods(unittest.TestCase):
def test_porod(self):
self.assertEqual(porod(1, 1, 0), 1)
def test_guinier(self):
self.assertEqual(guinier(1, 1, 0), 1)
def test_sane_fit(self):
A = np.pi
B = -np.sqrt(2)
x = np.linspace(0, 1, 71)
y = guinier(x, A, B)
g = fitguinier(x, y)[0]
self.assertAlmostEqual(B, g[0])
self.assertAlmostEqual(A, np.exp(g[1]))
if __name__ == '__main__':
unittest.main()
## Instruction:
Add tests for function smoothing
## Code After:
import unittest
import numpy as np
from corfunc import porod, guinier, fitguinier, smooth
class TestStringMethods(unittest.TestCase):
def test_porod(self):
self.assertEqual(porod(1, 1, 0), 1)
def test_guinier(self):
self.assertEqual(guinier(1, 1, 0), 1)
def test_sane_fit(self):
A = np.pi
B = -np.sqrt(2)
x = np.linspace(0, 1, 71)
y = guinier(x, A, B)
g = fitguinier(x, y)[0]
self.assertAlmostEqual(B, g[0])
self.assertAlmostEqual(A, np.exp(g[1]))
def test_smooth(self):
f = lambda x: np.sqrt(x)*np.sin(x/10)
g = lambda x: np.log(1+x)
s = smooth(f, g, 25, 75)
x = np.linspace(0, 1, 100)
fg = np.vstack([f(x), g(x)])
small = np.min(fg, axis=0)
large = np.max(fg, axis=0)
x = np.linspace(0, 1, 100)
self.assertTrue(np.all(small <= s(x)))
self.assertTrue(np.all(s(x) <= large))
self.assertEqual(s(0), f(0))
self.assertEqual(s(25), f(25))
self.assertEqual(s(75), g(75))
self.assertEqual(s(100), g(100))
if __name__ == '__main__':
unittest.main()
| ...
import numpy as np
from corfunc import porod, guinier, fitguinier, smooth
...
def test_smooth(self):
f = lambda x: np.sqrt(x)*np.sin(x/10)
g = lambda x: np.log(1+x)
s = smooth(f, g, 25, 75)
x = np.linspace(0, 1, 100)
fg = np.vstack([f(x), g(x)])
small = np.min(fg, axis=0)
large = np.max(fg, axis=0)
x = np.linspace(0, 1, 100)
self.assertTrue(np.all(small <= s(x)))
self.assertTrue(np.all(s(x) <= large))
self.assertEqual(s(0), f(0))
self.assertEqual(s(25), f(25))
self.assertEqual(s(75), g(75))
self.assertEqual(s(100), g(100))
if __name__ == '__main__':
... |
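One implementation shape that satisfies the contract these assertions pin down (hypothetical; corfunc's actual smooth() is not shown in this record):

import numpy as np

def smooth(f, g, start, stop):
    # Linear blend from f to g over [start, stop]: returns f(x) below start,
    # g(x) above stop, and a convex combination in between, so the result
    # always lies between the pointwise min and max of f and g.
    def blended(x):
        w = np.clip((np.asarray(x, dtype=float) - start) / (stop - start), 0.0, 1.0)
        return (1.0 - w) * f(x) + w * g(x)
    return blended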
bb7de7e76302fbd3eeeeb740d00c234faadef4ef | tests/test_nonsensefilter.py | tests/test_nonsensefilter.py | from unittest import TestCase
from spicedham.nonsensefilter import NonsenseFilter
class TestNonsenseFilter(TestCase):
# TODO: This test will likely fail spectacularly because of a lack of
# training.
def test_classify(self):
nonsense = NonsenseFilter()
nonsense.filter_match = 1
nonsense.filter_miss = 0
reverse = lambda x: x[::-1]
match_message = map(reverse, ['supposedly', 'nonsense', 'words'])
miss_message = ['Firefox']
self.assertEqual(nonsense.classify(match_message), 1)
self.assertEqual(nonsense.classify(miss_message), 0)
| from tests.test_classifierbase import TestClassifierBase
from spicedham.backend import load_backend
from spicedham.nonsensefilter import NonsenseFilter
class TestNonsenseFilter(TestClassifierBase):
def test_train(self):
backend = load_backend()
nonsense = NonsenseFilter()
alphabet = map(chr, range(97, 123))
reversed_alphabet = reversed(alphabet)
self._training(nonsense, alphabet, reversed_alphabet)
for letter in alphabet:
self.assertEqual(True,
backend.get_key(nonsense.__class__.__name__, letter))
def test_classify(self):
nonsense = NonsenseFilter()
nonsense.filter_match = 1
nonsense.filter_miss = 0
alphabet = map(chr, range(97, 123))
reversed_alphabet = reversed(alphabet)
self._training(nonsense, alphabet, reversed_alphabet)
match_message = ['not', 'in', 'training', 'set']
miss_message = ['a']
self.assertEqual(nonsense.classify(match_message), 1)
self.assertEqual(nonsense.classify(miss_message), 0)
| Add a base class and a test_train function | Add a base class and a test_train function
Overall, fix a very incomplete test.
| Python | mpl-2.0 | mozilla/spicedham,mozilla/spicedham | - from unittest import TestCase
+ from tests.test_classifierbase import TestClassifierBase
+ from spicedham.backend import load_backend
from spicedham.nonsensefilter import NonsenseFilter
- class TestNonsenseFilter(TestCase):
+ class TestNonsenseFilter(TestClassifierBase):
+
+ def test_train(self):
+ backend = load_backend()
+ nonsense = NonsenseFilter()
+ alphabet = map(chr, range(97, 123))
+ reversed_alphabet = reversed(alphabet)
+ self._training(nonsense, alphabet, reversed_alphabet)
+ for letter in alphabet:
+ self.assertEqual(True,
+ backend.get_key(nonsense.__class__.__name__, letter))
- # TODO: This test will likely fail spectacularly because of a lack of
- # training.
def test_classify(self):
nonsense = NonsenseFilter()
nonsense.filter_match = 1
nonsense.filter_miss = 0
- reverse = lambda x: x[::-1]
- match_message = map(reverse, ['supposedly', 'nonsense', 'words'])
+ alphabet = map(chr, range(97, 123))
+ reversed_alphabet = reversed(alphabet)
+ self._training(nonsense, alphabet, reversed_alphabet)
+ match_message = ['not', 'in', 'training', 'set']
- miss_message = ['Firefox']
+ miss_message = ['a']
self.assertEqual(nonsense.classify(match_message), 1)
self.assertEqual(nonsense.classify(miss_message), 0)
- | Add a base class and a test_train function | ## Code Before:
from unittest import TestCase
from spicedham.nonsensefilter import NonsenseFilter
class TestNonsenseFilter(TestCase):
# TODO: This test will likely fail spectacularly because of a lack of
# training.
def test_classify(self):
nonsense = NonsenseFilter()
nonsense.filter_match = 1
nonsense.filter_miss = 0
reverse = lambda x: x[::-1]
match_message = map(reverse, ['supposedly', 'nonsense', 'words'])
miss_message = ['Firefox']
self.assertEqual(nonsense.classify(match_message), 1)
self.assertEqual(nonsense.classify(miss_message), 0)
## Instruction:
Add a base class and a test_train function
## Code After:
from tests.test_classifierbase import TestClassifierBase
from spicedham.backend import load_backend
from spicedham.nonsensefilter import NonsenseFilter
class TestNonsenseFilter(TestClassifierBase):
def test_train(self):
backend = load_backend()
nonsense = NonsenseFilter()
alphabet = map(chr, range(97, 123))
reversed_alphabet = reversed(alphabet)
self._training(nonsense, alphabet, reversed_alphabet)
for letter in alphabet:
self.assertEqual(True,
backend.get_key(nonsense.__class__.__name__, letter))
def test_classify(self):
nonsense = NonsenseFilter()
nonsense.filter_match = 1
nonsense.filter_miss = 0
alphabet = map(chr, range(97, 123))
reversed_alphabet = reversed(alphabet)
self._training(nonsense, alphabet, reversed_alphabet)
match_message = ['not', 'in', 'training', 'set']
miss_message = ['a']
self.assertEqual(nonsense.classify(match_message), 1)
self.assertEqual(nonsense.classify(miss_message), 0)
| # ... existing code ...
from tests.test_classifierbase import TestClassifierBase
from spicedham.backend import load_backend
from spicedham.nonsensefilter import NonsenseFilter
# ... modified code ...
class TestNonsenseFilter(TestClassifierBase):
def test_train(self):
backend = load_backend()
nonsense = NonsenseFilter()
alphabet = map(chr, range(97, 123))
reversed_alphabet = reversed(alphabet)
self._training(nonsense, alphabet, reversed_alphabet)
for letter in alphabet:
self.assertEqual(True,
backend.get_key(nonsense.__class__.__name__, letter))
def test_classify(self):
...
nonsense.filter_miss = 0
alphabet = map(chr, range(97, 123))
reversed_alphabet = reversed(alphabet)
self._training(nonsense, alphabet, reversed_alphabet)
match_message = ['not', 'in', 'training', 'set']
miss_message = ['a']
self.assertEqual(nonsense.classify(match_message), 1)
...
self.assertEqual(nonsense.classify(miss_message), 0)
# ... rest of the code ... |
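A plausible shape for the _training helper these tests inherit (hypothetical; tests/test_classifierbase.py is not shown in this record):

from unittest import TestCase

class TestClassifierBase(TestCase):
    def _training(self, classifier, match_tokens, miss_tokens):
        # Feed paired positive/negative single-token messages to train().
        for match, miss in zip(match_tokens, miss_tokens):
            classifier.train([match], True)
            classifier.train([miss], False)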
017b01a1df6e7095aac78d2c859125bf7107095a | plugins/random/plugin.py | plugins/random/plugin.py | import random
import re
from cardinal.decorators import command
def parse_roll(arg):
# some people might separate with commas
arg = arg.rstrip(',')
if match := re.match(r'^(\d+)?d(\d+)$', arg):
num_dice = match.group(1)
sides = match.group(2)
elif match := re.match(r'^d?(\d+)$', arg):
num_dice = 1
sides = match.group(1)
else:
return []
return [int(sides)] * int(num_dice)
class RandomPlugin:
@command('roll')
def roll(self, cardinal, user, channel, msg):
args = msg.split(' ')
args.pop(0)
dice = []
for arg in args:
dice = dice + parse_roll(arg)
results = []
limit = 10
for sides in dice:
if sides < 2 or sides > 120:
continue
limit -= 1
# Don't allow more than ten dice rolled at a time
if limit < 0:
break
results.append((sides, random.randint(1, sides)))
messages = ', '.join(
[f"d{sides}: {result}" for sides, result in results]
)
cardinal.sendMsg(channel, messages)
entrypoint = RandomPlugin
| import random
import re
from cardinal.decorators import command, help
def parse_roll(arg):
# some people might separate with commas
arg = arg.rstrip(',')
if match := re.match(r'^(\d+)?d(\d+)$', arg):
num_dice = match.group(1)
sides = match.group(2)
elif match := re.match(r'^d?(\d+)$', arg):
num_dice = 1
sides = match.group(1)
else:
return []
return [int(sides)] * int(num_dice)
class RandomPlugin:
@command('roll')
@help("Roll dice")
@help("Syntax: .roll #d# (e.g. .roll 2d6)")
def roll(self, cardinal, user, channel, msg):
args = msg.split(' ')
args.pop(0)
if not args:
return
dice = []
for arg in args:
dice = dice + parse_roll(arg)
results = []
limit = 10
for sides in dice:
if sides < 2 or sides > 120:
continue
limit -= 1
# Don't allow more than ten dice rolled at a time
if limit < 0:
break
results.append((sides, random.randint(1, sides)))
messages = ', '.join(
[f"d{sides}: {result}" for sides, result in results]
)
cardinal.sendMsg(channel, messages)
entrypoint = RandomPlugin
| Add help text for roll command | Add help text for roll command
| Python | mit | JohnMaguire/Cardinal | import random
import re
- from cardinal.decorators import command
+ from cardinal.decorators import command, help
def parse_roll(arg):
# some people might separate with commas
arg = arg.rstrip(',')
if match := re.match(r'^(\d+)?d(\d+)$', arg):
num_dice = match.group(1)
sides = match.group(2)
elif match := re.match(r'^d?(\d+)$', arg):
num_dice = 1
sides = match.group(1)
else:
return []
return [int(sides)] * int(num_dice)
class RandomPlugin:
@command('roll')
+ @help("Roll dice")
+ @help("Syntax: .roll #d# (e.g. .roll 2d6)")
def roll(self, cardinal, user, channel, msg):
args = msg.split(' ')
args.pop(0)
+ if not args:
+ return
dice = []
for arg in args:
dice = dice + parse_roll(arg)
results = []
limit = 10
for sides in dice:
if sides < 2 or sides > 120:
continue
limit -= 1
# Don't allow more than ten dice rolled at a time
if limit < 0:
break
results.append((sides, random.randint(1, sides)))
messages = ', '.join(
[f"d{sides}: {result}" for sides, result in results]
)
cardinal.sendMsg(channel, messages)
entrypoint = RandomPlugin
| Add help text for roll command | ## Code Before:
import random
import re
from cardinal.decorators import command
def parse_roll(arg):
# some people might separate with commas
arg = arg.rstrip(',')
if match := re.match(r'^(\d+)?d(\d+)$', arg):
num_dice = match.group(1)
sides = match.group(2)
elif match := re.match(r'^d?(\d+)$', arg):
num_dice = 1
sides = match.group(1)
else:
return []
return [int(sides)] * int(num_dice)
class RandomPlugin:
@command('roll')
def roll(self, cardinal, user, channel, msg):
args = msg.split(' ')
args.pop(0)
dice = []
for arg in args:
dice = dice + parse_roll(arg)
results = []
limit = 10
for sides in dice:
if sides < 2 or sides > 120:
continue
limit -= 1
# Don't allow more than ten dice rolled at a time
if limit < 0:
break
results.append((sides, random.randint(1, sides)))
messages = ', '.join(
[f"d{sides}: {result}" for sides, result in results]
)
cardinal.sendMsg(channel, messages)
entrypoint = RandomPlugin
## Instruction:
Add help text for roll command
## Code After:
import random
import re
from cardinal.decorators import command, help
def parse_roll(arg):
# some people might separate with commas
arg = arg.rstrip(',')
if match := re.match(r'^(\d+)?d(\d+)$', arg):
num_dice = match.group(1)
sides = match.group(2)
elif match := re.match(r'^d?(\d+)$', arg):
num_dice = 1
sides = match.group(1)
else:
return []
return [int(sides)] * int(num_dice)
class RandomPlugin:
@command('roll')
@help("Roll dice")
@help("Syntax: .roll #d# (e.g. .roll 2d6)")
def roll(self, cardinal, user, channel, msg):
args = msg.split(' ')
args.pop(0)
if not args:
return
dice = []
for arg in args:
dice = dice + parse_roll(arg)
results = []
limit = 10
for sides in dice:
if sides < 2 or sides > 120:
continue
limit -= 1
# Don't allow more than ten dice rolled at a time
if limit < 0:
break
results.append((sides, random.randint(1, sides)))
messages = ', '.join(
[f"d{sides}: {result}" for sides, result in results]
)
cardinal.sendMsg(channel, messages)
entrypoint = RandomPlugin
| # ... existing code ...
from cardinal.decorators import command, help
# ... modified code ...
@command('roll')
@help("Roll dice")
@help("Syntax: .roll #d# (e.g. .roll 2d6)")
def roll(self, cardinal, user, channel, msg):
...
args.pop(0)
if not args:
return
# ... rest of the code ... |
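Standalone checks of the spellings parse_roll accepts (illustrative; assumes parse_roll from the plugin above is in scope):

assert parse_roll("2d6") == [6, 6]
assert parse_roll("8") == [8]           # bare number: one d8
assert parse_roll("3d4,") == [4, 4, 4]  # trailing comma is stripped
assert parse_roll("words") == []
# NB: a bare "d20" matches the first pattern with group(1) == None,
# so int(num_dice) would raise TypeError instead of defaulting to 1.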
5195a9baae1a87632c55adf390ecc5f32d1a44cb | dict_to_file.py | dict_to_file.py |
import json
def storeJSON(dict, file_string):
with open(file_string, 'w') as fp:
json.dump(dict, fp, indent=4)
def storeTEX(dict, file_string):
with open(file_string, 'w') as fp:
fp.write("\\begin{tabular}\n")
fp.write(" \\hline\n")
fp.write(" ")
# First we need to write out the headers
for row in dict:
for column in dict[row]:
fp.write("& %s " % (column))
fp.write("\\\\\n")
break
fp.write(" \\hline\n")
# Now read all rows and output them as well
for row in dict:
fp.write(" %s " % (row))
for column in dict[row]:
fp.write("& %s " % dict[row][column])
fp.write("\\\\\n")
fp.write(" \\hline\n")
fp.write("\\end{tabular}\n")
|
import json
def storeJSON(dict, file_string):
with open(file_string, 'w') as fp:
json.dump(dict, fp, indent=4)
def storeTEX(dict, file_string):
with open(file_string, 'w') as fp:
fp.write("\\begin{tabular}\n")
fp.write(" \\hline\n")
fp.write(" ")
# First we need to write out the headers
for row in dict:
for column in dict[row]:
fp.write("& %s " % (column))
fp.write("\\\\\n")
break
fp.write(" \\hline\n")
# Now read all rows and output them as well
for row in dict:
fp.write(" %s " % (row))
for column in dict[row]:
fp.write("& %s / %s " % (dict[row][column][0], dict[row][column][1]))
fp.write("\\\\\n")
fp.write(" \\hline\n")
fp.write("\\end{tabular}\n")
| Fix latex output for splitted up/down values | Fix latex output for splitted up/down values
| Python | mit | knutzk/parse_latex_table |
import json
def storeJSON(dict, file_string):
with open(file_string, 'w') as fp:
json.dump(dict, fp, indent=4)
def storeTEX(dict, file_string):
with open(file_string, 'w') as fp:
fp.write("\\begin{tabular}\n")
fp.write(" \\hline\n")
fp.write(" ")
# First we need to write out the headers
for row in dict:
for column in dict[row]:
fp.write("& %s " % (column))
fp.write("\\\\\n")
break
fp.write(" \\hline\n")
# Now read all rows and output them as well
for row in dict:
fp.write(" %s " % (row))
for column in dict[row]:
- fp.write("& %s " % dict[row][column])
+ fp.write("& %s / %s " % (dict[row][column][0], dict[row][column][1]))
fp.write("\\\\\n")
fp.write(" \\hline\n")
fp.write("\\end{tabular}\n")
| Fix latex output for splitted up/down values | ## Code Before:
import json
def storeJSON(dict, file_string):
with open(file_string, 'w') as fp:
json.dump(dict, fp, indent=4)
def storeTEX(dict, file_string):
with open(file_string, 'w') as fp:
fp.write("\\begin{tabular}\n")
fp.write(" \\hline\n")
fp.write(" ")
# First we need to write out the headers
for row in dict:
for column in dict[row]:
fp.write("& %s " % (column))
fp.write("\\\\\n")
break
fp.write(" \\hline\n")
# Now read all rows and output them as well
for row in dict:
fp.write(" %s " % (row))
for column in dict[row]:
fp.write("& %s " % dict[row][column])
fp.write("\\\\\n")
fp.write(" \\hline\n")
fp.write("\\end{tabular}\n")
## Instruction:
Fix latex output for splitted up/down values
## Code After:
import json
def storeJSON(dict, file_string):
with open(file_string, 'w') as fp:
json.dump(dict, fp, indent=4)
def storeTEX(dict, file_string):
with open(file_string, 'w') as fp:
fp.write("\\begin{tabular}\n")
fp.write(" \\hline\n")
fp.write(" ")
# First we need to write out the headers
for row in dict:
for column in dict[row]:
fp.write("& %s " % (column))
fp.write("\\\\\n")
break
fp.write(" \\hline\n")
# Now read all rows and output them as well
for row in dict:
fp.write(" %s " % (row))
for column in dict[row]:
fp.write("& %s / %s " % (dict[row][column][0], dict[row][column][1]))
fp.write("\\\\\n")
fp.write(" \\hline\n")
fp.write("\\end{tabular}\n")
| // ... existing code ...
for column in dict[row]:
fp.write("& %s / %s " % (dict[row][column][0], dict[row][column][1]))
fp.write("\\\\\n")
// ... rest of the code ... |
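A small usage sketch of storeTEX after this change, with each cell holding an (up, down) pair (illustrative data):

from dict_to_file import storeTEX

uncertainties = {
    "signal":     {"stat": (0.12, -0.10), "syst": (0.30, -0.25)},
    "background": {"stat": (0.08, -0.09), "syst": (0.15, -0.20)},
}
storeTEX(uncertainties, "uncertainties.tex")
# Each cell is written as "up / down", e.g. "& 0.12 / -0.1 "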
5025bff2ca9a4f31a371ecbd9255b1fb92b9cc4d | kafka_influxdb/encoder/echo_encoder.py | kafka_influxdb/encoder/echo_encoder.py | class Encoder(object):
@staticmethod
def encode(msg):
"""
Don't change the message at all
:param msg:
"""
return msg
| try:
# Test for mypy support (requires Python 3)
from typing import Text
except:
pass
class Encoder(object):
@staticmethod
def encode(msg):
# type: (bytes) -> List[bytes]
"""
Don't change the message at all
:param msg:
"""
return [msg]
| Return a list of messages in echo encoder and add mypy type hints | Return a list of messages in echo encoder and add mypy type hints
| Python | apache-2.0 | mre/kafka-influxdb,mre/kafka-influxdb | + try:
+ # Test for mypy support (requires Python 3)
+ from typing import Text
+ except:
+ pass
+
class Encoder(object):
@staticmethod
def encode(msg):
+ # type: (bytes) -> List[bytes]
"""
Don't change the message at all
:param msg:
"""
- return msg
+ return [msg]
| Return a list of messages in echo encoder and add mypy type hints | ## Code Before:
class Encoder(object):
@staticmethod
def encode(msg):
"""
Don't change the message at all
:param msg:
"""
return msg
## Instruction:
Return a list of messages in echo encoder and add mypy type hints
## Code After:
try:
# Test for mypy support (requires Python 3)
from typing import Text
except:
pass
class Encoder(object):
@staticmethod
def encode(msg):
# type: (bytes) -> List[bytes]
"""
Don't change the message at all
:param msg:
"""
return [msg]
| ...
try:
# Test for mypy support (requires Python 3)
from typing import Text
except:
pass
class Encoder(object):
...
def encode(msg):
# type: (bytes) -> List[bytes]
"""
...
"""
return [msg]
... |
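A quick exercise of the new behavior and the type comment (sketch; assumes mypy is available):

from kafka_influxdb.encoder.echo_encoder import Encoder

assert Encoder.encode(b"cpu value=1") == [b"cpu value=1"]
# `mypy kafka_influxdb/encoder/echo_encoder.py` then validates the
# `# type: (bytes) -> List[bytes]` comment on Python 2 and 3 alike.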
f62278c420429cfe9a3f2a8903f902ae24bdd95d | remoteappmanager/handlers/home_handler.py | remoteappmanager/handlers/home_handler.py | from tornado import gen, web
from remoteappmanager.handlers.base_handler import BaseHandler
class HomeHandler(BaseHandler):
"""Render the user's home page"""
@web.authenticated
@gen.coroutine
def get(self):
images_info = yield self._get_images_info()
self.render('home.html', images_info=images_info)
# private
@gen.coroutine
def _get_images_info(self):
"""Retrieves a dictionary containing the image and the associated
container, if active, as values."""
container_manager = self.application.container_manager
apps = self.application.db.get_apps_for_user(
self.current_user.account)
images_info = []
for mapping_id, app, policy in apps:
image = yield container_manager.image(app.image)
if image is None:
# The user has access to an application that is no longer
# available in docker. We just move on.
continue
containers = yield container_manager.containers_from_mapping_id(
self.current_user.name,
mapping_id)
# We assume that we can only run one container only (although the
# API considers a broader possibility for future extension.
container = None
if len(containers):
container = containers[0]
images_info.append({
"image": image,
"mapping_id": mapping_id,
"container": container
})
return images_info
| from tornado import gen, web
from remoteappmanager.handlers.base_handler import BaseHandler
class HomeHandler(BaseHandler):
"""Render the user's home page"""
@web.authenticated
@gen.coroutine
def get(self):
self.render('home.html')
| Remove dead code now part of the REST API. | Remove dead code now part of the REST API.
| Python | bsd-3-clause | simphony/simphony-remote,simphony/simphony-remote,simphony/simphony-remote,simphony/simphony-remote | from tornado import gen, web
from remoteappmanager.handlers.base_handler import BaseHandler
class HomeHandler(BaseHandler):
"""Render the user's home page"""
@web.authenticated
@gen.coroutine
def get(self):
+ self.render('home.html')
- images_info = yield self._get_images_info()
- self.render('home.html', images_info=images_info)
- # private
-
- @gen.coroutine
- def _get_images_info(self):
- """Retrieves a dictionary containing the image and the associated
- container, if active, as values."""
- container_manager = self.application.container_manager
-
- apps = self.application.db.get_apps_for_user(
- self.current_user.account)
-
- images_info = []
-
- for mapping_id, app, policy in apps:
- image = yield container_manager.image(app.image)
-
- if image is None:
- # The user has access to an application that is no longer
- # available in docker. We just move on.
- continue
-
- containers = yield container_manager.containers_from_mapping_id(
- self.current_user.name,
- mapping_id)
-
- # We assume that we can only run one container only (although the
- # API considers a broader possibility for future extension.
- container = None
- if len(containers):
- container = containers[0]
-
- images_info.append({
- "image": image,
- "mapping_id": mapping_id,
- "container": container
- })
- return images_info
- | Remove dead code now part of the REST API. | ## Code Before:
from tornado import gen, web
from remoteappmanager.handlers.base_handler import BaseHandler
class HomeHandler(BaseHandler):
"""Render the user's home page"""
@web.authenticated
@gen.coroutine
def get(self):
images_info = yield self._get_images_info()
self.render('home.html', images_info=images_info)
# private
@gen.coroutine
def _get_images_info(self):
"""Retrieves a dictionary containing the image and the associated
container, if active, as values."""
container_manager = self.application.container_manager
apps = self.application.db.get_apps_for_user(
self.current_user.account)
images_info = []
for mapping_id, app, policy in apps:
image = yield container_manager.image(app.image)
if image is None:
# The user has access to an application that is no longer
# available in docker. We just move on.
continue
containers = yield container_manager.containers_from_mapping_id(
self.current_user.name,
mapping_id)
# We assume that we can only run one container only (although the
# API considers a broader possibility for future extension.
container = None
if len(containers):
container = containers[0]
images_info.append({
"image": image,
"mapping_id": mapping_id,
"container": container
})
return images_info
## Instruction:
Remove dead code now part of the REST API.
## Code After:
from tornado import gen, web
from remoteappmanager.handlers.base_handler import BaseHandler
class HomeHandler(BaseHandler):
"""Render the user's home page"""
@web.authenticated
@gen.coroutine
def get(self):
self.render('home.html')
| # ... existing code ...
def get(self):
self.render('home.html')
# ... rest of the code ... |
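For reference, a self-contained handler using the same authenticated-coroutine pattern this commit keeps (generic sketch, not part of the project's REST API):

from tornado import gen, web

class PingHandler(web.RequestHandler):
    def get_current_user(self):
        return self.get_secure_cookie("user")

    @web.authenticated
    @gen.coroutine
    def get(self):
        yield gen.sleep(0)  # stand-in for an asynchronous lookup
        self.write({"status": "ok"})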
de310ce3cdd37a372f92559b7ddcf0397b9fb016 | src/convert_dir_to_CLAHE.py | src/convert_dir_to_CLAHE.py |
from ij import IJ
import os
from mpicbg.ij.clahe import Flat
from ij.process import ImageConverter
# http://fiji.sc/wiki/index.php/Enhance_Local_Contrast_(CLAHE)
# http://fiji.sc/cgi-bin/gitweb.cgi?p=mpicbg.git;a=blob;f=mpicbg/ij/clahe/PlugIn.java;h=663153764493547de560c08ee11f2e6b1e7e1a32;hb=HEAD
dir = "/usr/people/tmacrina/Desktop/test/"
blocksize = 50
histogram_bins = 128
maximum_slope = 3
mask = "*None*"
composite = False
mask = None
files = os.listdir(dir)
files.sort()
for file in files:
if file.endswith(".tif")
fn = os.path.join(dir, file)
imp = IJ.openImage(path)
output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
imp = IJ.openImage(fn)
Flat.getFastInstance().run( imp,
blocksize,
histogram_bins,
maximum_slope,
mask,
composite )
ImageConverter(imp).convertToGray8()
IJ.save(imp, output_fn)
|
from ij import IJ
import os
from mpicbg.ij.clahe import Flat
from ij.process import ImageConverter
# http://fiji.sc/wiki/index.php/Enhance_Local_Contrast_(CLAHE)
# http://fiji.sc/cgi-bin/gitweb.cgi?p=mpicbg.git;a=blob;f=mpicbg/ij/clahe/PlugIn.java;h=663153764493547de560c08ee11f2e6b1e7e1a32;hb=HEAD
dir = "/usr/people/tmacrina/seungmount/research/Julimaps/datasets/AIBS_pilot_v1/0_raw/"
blocksize = 63
histogram_bins = 255
maximum_slope = 3
mask = "*None*"
composite = False
mask = None
# files = os.listdir(dir)
# files.sort()
# for file in files:
# if file.endswith(".tif")
fn = os.path.join(dir, 'original.tif')
imp = IJ.openImage(fn)
output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
imp = IJ.openImage(fn)
Flat.getFastInstance().run( imp,
blocksize,
histogram_bins,
maximum_slope,
mask,
composite )
ImageConverter(imp).convertToGray8()
IJ.save(imp, output_fn)
| Adjust FIJI script for applying CLAHE to a directory | Adjust FIJI script for applying CLAHE to a directory
| Python | mit | seung-lab/Julimaps,seung-lab/Julimaps |
from ij import IJ
import os
from mpicbg.ij.clahe import Flat
from ij.process import ImageConverter
# http://fiji.sc/wiki/index.php/Enhance_Local_Contrast_(CLAHE)
# http://fiji.sc/cgi-bin/gitweb.cgi?p=mpicbg.git;a=blob;f=mpicbg/ij/clahe/PlugIn.java;h=663153764493547de560c08ee11f2e6b1e7e1a32;hb=HEAD
- dir = "/usr/people/tmacrina/Desktop/test/"
+ dir = "/usr/people/tmacrina/seungmount/research/Julimaps/datasets/AIBS_pilot_v1/0_raw/"
- blocksize = 50
+ blocksize = 63
- histogram_bins = 128
+ histogram_bins = 255
maximum_slope = 3
mask = "*None*"
composite = False
mask = None
- files = os.listdir(dir)
+ # files = os.listdir(dir)
- files.sort()
+ # files.sort()
- for file in files:
+ # for file in files:
- if file.endswith(".tif")
+ # if file.endswith(".tif")
- fn = os.path.join(dir, file)
- imp = IJ.openImage(path)
+ fn = os.path.join(dir, 'original.tif')
+ imp = IJ.openImage(fn)
- output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
+ output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
- imp = IJ.openImage(fn)
+ imp = IJ.openImage(fn)
-
+
- Flat.getFastInstance().run( imp,
+ Flat.getFastInstance().run( imp,
- blocksize,
+ blocksize,
- histogram_bins,
+ histogram_bins,
- maximum_slope,
+ maximum_slope,
- mask,
+ mask,
- composite )
+ composite )
- ImageConverter(imp).convertToGray8()
+ ImageConverter(imp).convertToGray8()
- IJ.save(imp, output_fn)
+ IJ.save(imp, output_fn)
| Adjust FIJI script for applying CLAHE to a directory | ## Code Before:
from ij import IJ
import os
from mpicbg.ij.clahe import Flat
from ij.process import ImageConverter
# http://fiji.sc/wiki/index.php/Enhance_Local_Contrast_(CLAHE)
# http://fiji.sc/cgi-bin/gitweb.cgi?p=mpicbg.git;a=blob;f=mpicbg/ij/clahe/PlugIn.java;h=663153764493547de560c08ee11f2e6b1e7e1a32;hb=HEAD
dir = "/usr/people/tmacrina/Desktop/test/"
blocksize = 50
histogram_bins = 128
maximum_slope = 3
mask = "*None*"
composite = False
mask = None
files = os.listdir(dir)
files.sort()
for file in files:
if file.endswith(".tif")
fn = os.path.join(dir, file)
imp = IJ.openImage(path)
output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
imp = IJ.openImage(fn)
Flat.getFastInstance().run( imp,
blocksize,
histogram_bins,
maximum_slope,
mask,
composite )
ImageConverter(imp).convertToGray8()
IJ.save(imp, output_fn)
## Instruction:
Adjust FIJI script for applying CLAHE to a directory
## Code After:
from ij import IJ
import os
from mpicbg.ij.clahe import Flat
from ij.process import ImageConverter
# http://fiji.sc/wiki/index.php/Enhance_Local_Contrast_(CLAHE)
# http://fiji.sc/cgi-bin/gitweb.cgi?p=mpicbg.git;a=blob;f=mpicbg/ij/clahe/PlugIn.java;h=663153764493547de560c08ee11f2e6b1e7e1a32;hb=HEAD
dir = "/usr/people/tmacrina/seungmount/research/Julimaps/datasets/AIBS_pilot_v1/0_raw/"
blocksize = 63
histogram_bins = 255
maximum_slope = 3
mask = "*None*"
composite = False
mask = None
# files = os.listdir(dir)
# files.sort()
# for file in files:
# if file.endswith(".tif")
fn = os.path.join(dir, 'original.tif')
imp = IJ.openImage(fn)
output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
imp = IJ.openImage(fn)
Flat.getFastInstance().run( imp,
blocksize,
histogram_bins,
maximum_slope,
mask,
composite )
ImageConverter(imp).convertToGray8()
IJ.save(imp, output_fn)
| ...
dir = "/usr/people/tmacrina/seungmount/research/Julimaps/datasets/AIBS_pilot_v1/0_raw/"
blocksize = 63
histogram_bins = 255
maximum_slope = 3
...
# files = os.listdir(dir)
# files.sort()
# for file in files:
# if file.endswith(".tif")
fn = os.path.join(dir, 'original.tif')
imp = IJ.openImage(fn)
output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
imp = IJ.openImage(fn)
Flat.getFastInstance().run( imp,
blocksize,
histogram_bins,
maximum_slope,
mask,
composite )
ImageConverter(imp).convertToGray8()
IJ.save(imp, output_fn)
... |
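If the directory loop were reinstated, a working version of the commented-out block might look like this (sketch; reuses the record's CLAHE parameters):

for name in sorted(os.listdir(dir)):
    if not name.endswith(".tif") or name.endswith("_CLAHE_8bit.tif"):
        continue
    fn = os.path.join(dir, name)
    imp = IJ.openImage(fn)
    Flat.getFastInstance().run(imp, blocksize, histogram_bins,
                               maximum_slope, mask, composite)
    ImageConverter(imp).convertToGray8()
    IJ.save(imp, os.path.splitext(fn)[0] + "_CLAHE_8bit.tif")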
0ee2b337b61155044a66ae1f6f173492a51c1150 | dipsim/fluorophore.py | dipsim/fluorophore.py | import numpy as np
class Fluorophore:
"""A single fluorophore is specified by its 3D position, (unit) absorption
dipole moment (theta, phi), and (unit) emission dipole moment (theta, phi).
"""
def __init__(self, position=np.array([0, 0, 0]),
mu_abs=np.array([0, 0]),
mu_em=np.array([0, 0])):
self.position = position
self.mu_abs = mu_abs
self.mu_em = mu_em
self.mu_ind = 0
| import numpy as np
class Fluorophore:
"""A fluorophore is specified by its orientation (in theta and phi spherical
    coordinates), its distribution (using a kappa Watson distribution), and a
    constant (c) proportional to the fluorophore's brightness.
"""
def __init__(self, theta=np.pi/2, phi=0, kappa=None, c=1.0):
self.theta = theta
self.phi = phi
self.kappa = kappa
self.c = c
| Modify Fluorophore for more convenient coordinates. | Modify Fluorophore for more convenient coordinates.
| Python | mit | talonchandler/dipsim,talonchandler/dipsim | import numpy as np
class Fluorophore:
- """A single fluorophore is specified by its 3D position, (unit) absorption
- dipole moment (theta, phi), and (unit) emission dipole moment (theta, phi).
+ """A fluorophore is specified by its orientation (in theta and phi spherical
+ coordinates), its distribution (using a kappa Watson distribution), and a
+ constant (c) proportional to the fluorophore's brightness.
"""
+ def __init__(self, theta=np.pi/2, phi=0, kappa=None, c=1.0):
+ self.theta = theta
- def __init__(self, position=np.array([0, 0, 0]),
- mu_abs=np.array([0, 0]),
- mu_em=np.array([0, 0])):
- self.position = position
- self.mu_abs = mu_abs
- self.mu_em = mu_em
- self.mu_ind = 0
+ self.phi = phi
+ self.kappa = kappa
+ self.c = c
| Modify Fluorophore for more convenient coordinates. | ## Code Before:
import numpy as np
class Fluorophore:
"""A single fluorophore is specified by its 3D position, (unit) absorption
dipole moment (theta, phi), and (unit) emission dipole moment (theta, phi).
"""
def __init__(self, position=np.array([0, 0, 0]),
mu_abs=np.array([0, 0]),
mu_em=np.array([0, 0])):
self.position = position
self.mu_abs = mu_abs
self.mu_em = mu_em
self.mu_ind = 0
## Instruction:
Modify Fluorophore for more convenient coordinates.
## Code After:
import numpy as np
class Fluorophore:
"""A fluorophore is specified by its orientation (in theta and phi spherical
coordinates), its distribution (using a kappa Watson distribution), and a
constant (c) proportional to the fluorophore's brightness.
"""
def __init__(self, theta=np.pi/2, phi=0, kappa=None, c=1.0):
self.theta = theta
self.phi = phi
self.kappa = kappa
self.c = c
| ...
class Fluorophore:
"""A fluorophore is specified by its orientation (in theta and phi spherical
coordinates), its distribution (using a kappa Watson distribution), and a
constant (c) proportional to the fluorophore's brightness.
"""
def __init__(self, theta=np.pi/2, phi=0, kappa=None, c=1.0):
self.theta = theta
self.phi = phi
self.kappa = kappa
self.c = c
... |
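A brief construction sketch for the new signature; the parameter values here are illustrative assumptions, not taken from the commit:

fluor = Fluorophore(theta=np.pi / 4, phi=0.5, kappa=10.0, c=1.5)
# kappa=None (the default) can be read as a perfectly aligned, non-dispersed dipole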
ff5eccb59efd09cfdeb64150440de35215e1b77d | gevent_tasks/utils.py | gevent_tasks/utils.py |
import random
import string
ch_choices = string.ascii_letters + string.digits
def gen_uuid(length=4):
# type: (int) -> str
""" Generate a random ID of a given length. """
return ''.join(map(lambda c: random.choice(ch_choices), range(length)))
|
import random
import string
ch_choices = string.ascii_letters + string.digits
def gen_uuid(length=4):
""" Generate a random ID of a given length.
Args:
length (int): length of the returned string.
Returns:
`str` of length ``length``.
Example::
>>> gen_uuid()
aB6z
>>> gen_uuid(10)
aAzZ0123mN
>>> gen_uuid(None)
9
"""
if not length or length < 1:
length = 1
return ''.join(map(lambda c: random.choice(ch_choices), range(length)))
| Fix `gen_uuid` logic and documentation | Fix `gen_uuid` logic and documentation
| Python | mit | blakev/gevent-tasks |
import random
import string
ch_choices = string.ascii_letters + string.digits
def gen_uuid(length=4):
- # type: (int) -> str
- """ Generate a random ID of a given length. """
+ """ Generate a random ID of a given length.
+
+ Args:
+ length (int): length of the returned string.
+
+ Returns:
+ `str` of length ``length``.
+
+ Example::
+
+ >>> gen_uuid()
+ aB6z
+ >>> gen_uuid(10)
+ aAzZ0123mN
+ >>> gen_uuid(None)
+ 9
+ """
+ if not length or length < 1:
+ length = 1
return ''.join(map(lambda c: random.choice(ch_choices), range(length)))
| Fix `gen_uuid` logic and documentation | ## Code Before:
import random
import string
ch_choices = string.ascii_letters + string.digits
def gen_uuid(length=4):
# type: (int) -> str
""" Generate a random ID of a given length. """
return ''.join(map(lambda c: random.choice(ch_choices), range(length)))
## Instruction:
Fix `gen_uuid` logic and documentation
## Code After:
import random
import string
ch_choices = string.ascii_letters + string.digits
def gen_uuid(length=4):
""" Generate a random ID of a given length.
Args:
length (int): length of the returned string.
Returns:
`str` of length ``length``.
Example::
>>> gen_uuid()
aB6z
>>> gen_uuid(10)
aAzZ0123mN
>>> gen_uuid(None)
9
"""
if not length or length < 1:
length = 1
return ''.join(map(lambda c: random.choice(ch_choices), range(length)))
| // ... existing code ...
def gen_uuid(length=4):
""" Generate a random ID of a given length.
Args:
length (int): length of the returned string.
Returns:
`str` of length ``length``.
Example::
>>> gen_uuid()
aB6z
>>> gen_uuid(10)
aAzZ0123mN
>>> gen_uuid(None)
9
"""
if not length or length < 1:
length = 1
return ''.join(map(lambda c: random.choice(ch_choices), range(length)))
// ... rest of the code ... |
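A short sketch exercising the new length guard; the generated characters are illustrative:

token = gen_uuid(8)    # e.g. 'aB3kZ90q' -- eight random alphanumerics
single = gen_uuid(0)   # lengths below 1 are coerced to 1, so one character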
f727a71accdc8a12342fcb684c9ba718eedd8df2 | alexandria/traversal/__init__.py | alexandria/traversal/__init__.py | class Root(object):
"""
The main root object for any traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
pass
def __getitem__(self, key):
next_ctx = None
if key == 'user':
next_ctx = User()
if key == 'domain':
next_ctx = Domains()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class User(object):
__name__ = 'user'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
raise KeyError
class Domains(object):
__name__ = 'domain'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
next_ctx = Domain(key)
next_ctx.__parent__ = self
return next_ctx
class Domain(object):
__name__ = None
__parent__ = None
def __init__(self, key):
pass
def __getitem__(self, key):
raise KeyError
| class Root(object):
"""
The main root object for any traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
pass
def __getitem__(self, key):
next_ctx = None
if key == 'user':
next_ctx = User()
if key == 'domain':
next_ctx = Domains()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class User(object):
__name__ = 'user'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
raise KeyError
class Domains(object):
__name__ = 'domain'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
next_ctx = Domain(key)
next_ctx.__parent__ = self
return next_ctx
class Domain(object):
__name__ = None
__parent__ = None
def __init__(self, key):
self.__name__ = key
def __getitem__(self, key):
raise KeyError
| Set the __name__ on the traversal object | Set the __name__ on the traversal object
| Python | isc | cdunklau/alexandria,bertjwregeer/alexandria,bertjwregeer/alexandria,cdunklau/alexandria,cdunklau/alexandria | class Root(object):
"""
The main root object for any traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
pass
def __getitem__(self, key):
next_ctx = None
if key == 'user':
next_ctx = User()
if key == 'domain':
next_ctx = Domains()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class User(object):
__name__ = 'user'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
raise KeyError
class Domains(object):
__name__ = 'domain'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
next_ctx = Domain(key)
next_ctx.__parent__ = self
return next_ctx
class Domain(object):
__name__ = None
__parent__ = None
def __init__(self, key):
- pass
+ self.__name__ = key
def __getitem__(self, key):
raise KeyError
| Set the __name__ on the traversal object | ## Code Before:
class Root(object):
"""
The main root object for any traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
pass
def __getitem__(self, key):
next_ctx = None
if key == 'user':
next_ctx = User()
if key == 'domain':
next_ctx = Domains()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class User(object):
__name__ = 'user'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
raise KeyError
class Domains(object):
__name__ = 'domain'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
next_ctx = Domain(key)
next_ctx.__parent__ = self
return next_ctx
class Domain(object):
__name__ = None
__parent__ = None
def __init__(self, key):
pass
def __getitem__(self, key):
raise KeyError
## Instruction:
Set the __name__ on the traversal object
## Code After:
class Root(object):
"""
The main root object for any traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
pass
def __getitem__(self, key):
next_ctx = None
if key == 'user':
next_ctx = User()
if key == 'domain':
next_ctx = Domains()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class User(object):
__name__ = 'user'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
raise KeyError
class Domains(object):
__name__ = 'domain'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
next_ctx = Domain(key)
next_ctx.__parent__ = self
return next_ctx
class Domain(object):
__name__ = None
__parent__ = None
def __init__(self, key):
self.__name__ = key
def __getitem__(self, key):
raise KeyError
| # ... existing code ...
def __init__(self, key):
self.__name__ = key
# ... rest of the code ... |
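A traversal sketch showing why the assignment matters, assuming a Pyramid-style setup where resource URLs are derived from the __name__/__parent__ chain ('request' is assumed to exist):

root = Root(request)
domain = root['domain']['example.org']
assert domain.__name__ == 'example.org'  # previously this stayed None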
febf5e96847fd01b82f7b9a8e30a5cdae30120f5 | layers.py | layers.py | import lasagne
import numpy as np
WIDTH_INDEX = 3
HEIGHT_INDEX = 2
LAYER_INDEX = 1
class SpatialPoolingLayer(lasagne.layers.Layer):
# I assume that all bins have a square shape for simplicity
# Maybe later I change this behaviour
def __init__(self, incoming, bin_sizes, **kwargs):
super(SpatialPoolingLayer, self).__init__(incoming, **kwargs)
self.bin_sizes = self.add_param(np.array(bin_sizes), (len(bin_sizes),), name="bin_sizes")
def get_output_shape_for(self, input_shape):
return np.sum(np.power(self.bin_sizes, 2))
def get_output_for(self, input, **kwargs):
layers = []
for bin_size in self.bin_sizes:
win_size = (np.ceil(input.shape[WIDTH_INDEX] / bin_size), np.ceil(input.shape[HEIGHT_INDEX] / bin_size))
stride = (np.floor(input.shape[WIDTH_INDEX] / bin_size), np.floor(input.shape[HEIGHT_INDEX] / bin_size))
layers.append(lasagne.layers.flatten(
lasagne.layers.MaxPool2DLayer(input, pool_size=win_size, stride=stride)
))
return lasagne.layers.concat(layers)
| import lasagne
import numpy as np
from theano import tensor as T
WIDTH_INDEX = 3
HEIGHT_INDEX = 2
LAYER_INDEX = 1
class SpatialPoolingLayer(lasagne.layers.Layer):
# I assume that all bins have a square shape for simplicity
# Maybe later I change this behaviour
def __init__(self, incoming, bin_sizes, **kwargs):
super(SpatialPoolingLayer, self).__init__(incoming, **kwargs)
self.bin_sizes = self.add_param(np.array(bin_sizes), (len(bin_sizes),), name="bin_sizes")
def get_output_shape_for(self, input_shape):
return T.sum(T.power(self.bin_sizes, 2))
def get_output_for(self, input, **kwargs):
layers = []
for bin_size in self.bin_sizes:
win_size = (np.ceil(input.shape[WIDTH_INDEX] / bin_size), np.ceil(input.shape[HEIGHT_INDEX] / bin_size))
stride = (np.floor(input.shape[WIDTH_INDEX] / bin_size), np.floor(input.shape[HEIGHT_INDEX] / bin_size))
layers.append(lasagne.layers.flatten(
lasagne.layers.MaxPool2DLayer(input, pool_size=win_size, stride=stride)
))
return lasagne.layers.concat(layers)
| Fix syntax in spatial layer | Fix syntax in spatial layer
| Python | mit | dimmddr/roadSignsNN | import lasagne
import numpy as np
+ from theano import tensor as T
WIDTH_INDEX = 3
HEIGHT_INDEX = 2
LAYER_INDEX = 1
class SpatialPoolingLayer(lasagne.layers.Layer):
# I assume that all bins has square shape for simplicity
# Maybe later I change this behaviour
def __init__(self, incoming, bin_sizes, **kwargs):
super(SpatialPoolingLayer, self).__init__(incoming, **kwargs)
self.bin_sizes = self.add_param(np.array(bin_sizes), (len(bin_sizes),), name="bin_sizes")
def get_output_shape_for(self, input_shape):
- return np.sum(np.power(self.bin_sizes, 2))
+ return T.sum(T.power(self.bin_sizes, 2))
def get_output_for(self, input, **kwargs):
layers = []
for bin_size in self.bin_sizes:
win_size = (np.ceil(input.shape[WIDTH_INDEX] / bin_size), np.ceil(input.shape[HEIGHT_INDEX] / bin_size))
stride = (np.floor(input.shape[WIDTH_INDEX] / bin_size), np.floor(input.shape[HEIGHT_INDEX] / bin_size))
layers.append(lasagne.layers.flatten(
lasagne.layers.MaxPool2DLayer(input, pool_size=win_size, stride=stride)
))
return lasagne.layers.concat(layers)
| Fix syntax in spatial layer | ## Code Before:
import lasagne
import numpy as np
WIDTH_INDEX = 3
HEIGHT_INDEX = 2
LAYER_INDEX = 1
class SpatialPoolingLayer(lasagne.layers.Layer):
# I assume that all bins have a square shape for simplicity
# Maybe later I change this behaviour
def __init__(self, incoming, bin_sizes, **kwargs):
super(SpatialPoolingLayer, self).__init__(incoming, **kwargs)
self.bin_sizes = self.add_param(np.array(bin_sizes), (len(bin_sizes),), name="bin_sizes")
def get_output_shape_for(self, input_shape):
return np.sum(np.power(self.bin_sizes, 2))
def get_output_for(self, input, **kwargs):
layers = []
for bin_size in self.bin_sizes:
win_size = (np.ceil(input.shape[WIDTH_INDEX] / bin_size), np.ceil(input.shape[HEIGHT_INDEX] / bin_size))
stride = (np.floor(input.shape[WIDTH_INDEX] / bin_size), np.floor(input.shape[HEIGHT_INDEX] / bin_size))
layers.append(lasagne.layers.flatten(
lasagne.layers.MaxPool2DLayer(input, pool_size=win_size, stride=stride)
))
return lasagne.layers.concat(layers)
## Instruction:
Fix syntax in spatial layer
## Code After:
import lasagne
import numpy as np
from theano import tensor as T
WIDTH_INDEX = 3
HEIGHT_INDEX = 2
LAYER_INDEX = 1
class SpatialPoolingLayer(lasagne.layers.Layer):
# I assume that all bins have a square shape for simplicity
# Maybe later I change this behaviour
def __init__(self, incoming, bin_sizes, **kwargs):
super(SpatialPoolingLayer, self).__init__(incoming, **kwargs)
self.bin_sizes = self.add_param(np.array(bin_sizes), (len(bin_sizes),), name="bin_sizes")
def get_output_shape_for(self, input_shape):
return T.sum(T.power(self.bin_sizes, 2))
def get_output_for(self, input, **kwargs):
layers = []
for bin_size in self.bin_sizes:
win_size = (np.ceil(input.shape[WIDTH_INDEX] / bin_size), np.ceil(input.shape[HEIGHT_INDEX] / bin_size))
stride = (np.floor(input.shape[WIDTH_INDEX] / bin_size), np.floor(input.shape[HEIGHT_INDEX] / bin_size))
layers.append(lasagne.layers.flatten(
lasagne.layers.MaxPool2DLayer(input, pool_size=win_size, stride=stride)
))
return lasagne.layers.concat(layers)
| # ... existing code ...
import numpy as np
from theano import tensor as T
# ... modified code ...
def get_output_shape_for(self, input_shape):
return T.sum(T.power(self.bin_sizes, 2))
# ... rest of the code ... |
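A usage sketch, assuming 'incoming' is an ordinary 4D convolutional layer; the flattened output length is the sum of the squared bin sizes:

layer = SpatialPoolingLayer(incoming, bin_sizes=[1, 2, 4])
# flattened spatial bins: 1*1 + 2*2 + 4*4 = 21, matching get_output_shape_for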
7bd3b5a9f1664bd153c934d75f245a605aadee2a | ovp_core/models/cause.py | ovp_core/models/cause.py | from django.db import models
class Cause(models.Model):
name = models.CharField('name', max_length=100)
def __str__(self):
return self.name
class Meta:
app_label = 'ovp_core'
verbose_name = 'cause'
verbose_name_plural = 'causes'
| import vinaigrette
from django.db import models
class Cause(models.Model):
name = models.CharField('name', max_length=100)
def __str__(self):
return self.name
class Meta:
app_label = 'ovp_core'
verbose_name = 'cause'
verbose_name_plural = 'causes'
vinaigrette.register(Cause, ['name'])
| Apply django-vinaigrette to Cause model | Apply django-vinaigrette to Cause model
| Python | agpl-3.0 | OpenVolunteeringPlatform/django-ovp-core,OpenVolunteeringPlatform/django-ovp-core | + import vinaigrette
from django.db import models
class Cause(models.Model):
name = models.CharField('name', max_length=100)
def __str__(self):
return self.name
class Meta:
app_label = 'ovp_core'
verbose_name = 'cause'
verbose_name_plural = 'causes'
+ vinaigrette.register(Cause, ['name'])
+ | Apply django-vinaigrette to Cause model | ## Code Before:
from django.db import models
class Cause(models.Model):
name = models.CharField('name', max_length=100)
def __str__(self):
return self.name
class Meta:
app_label = 'ovp_core'
verbose_name = 'cause'
verbose_name_plural = 'causes'
## Instruction:
Apply django-vinaigrette to Cause model
## Code After:
import vinaigrette
from django.db import models
class Cause(models.Model):
name = models.CharField('name', max_length=100)
def __str__(self):
return self.name
class Meta:
app_label = 'ovp_core'
verbose_name = 'cause'
verbose_name_plural = 'causes'
vinaigrette.register(Cause, ['name'])
| ...
import vinaigrette
from django.db import models
...
verbose_name_plural = 'causes'
vinaigrette.register(Cause, ['name'])
... |
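A sketch of the intended effect, assuming django-vinaigrette's documented behaviour of translating registered field values at access time:

cause = Cause.objects.create(name='Education')
str(cause)  # yields the gettext translation of 'Education' for the active locale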
721015d5a7ea9745094f06dfcea3625c20555992 | inidiff/tests/test_diff.py | inidiff/tests/test_diff.py | import unittest
import inidiff
INI_1 = '''[test]
number=10
'''
INI_2 = '''[test]
number=20
'''
class TestDiff(unittest.TestCase):
"""Test diffs diff things."""
def test_no_differences(self):
self.assertEqual([], inidiff.diff(INI_1, INI_1))
def test_some_differences(self):
self.assertTrue(len(inidiff.diff(INI_1, INI_2)) > 0)
| import unittest
import inidiff
INI_1 = '''[test]
number=10
'''
INI_2 = '''[test]
number=20
'''
class TestDiff(unittest.TestCase):
"""Test diffs diff things."""
def test_no_differences(self):
self.assertEqual([], inidiff.diff(INI_1, INI_1))
def test_some_differences(self):
self.assertTrue(len(inidiff.diff(INI_1, INI_2)) > 0)
def test_number_is_different(self):
diffs = inidiff.diff(INI_1, INI_2)
first, second = diffs[0]
self.assertEqual('number', first[1])
| Check number is the field that is different | Check number is the field that is different
| Python | mit | kragniz/inidiff | import unittest
import inidiff
INI_1 = '''[test]
number=10
'''
INI_2 = '''[test]
number=20
'''
class TestDiff(unittest.TestCase):
"""Test diffs diff things."""
def test_no_differences(self):
self.assertEqual([], inidiff.diff(INI_1, INI_1))
def test_some_differences(self):
self.assertTrue(len(inidiff.diff(INI_1, INI_2)) > 0)
+ def test_number_is_different(self):
+ diffs = inidiff.diff(INI_1, INI_2)
+ first, second = diffs[0]
+ self.assertEqual('number', first[1])
+ | Check number is the field that is different | ## Code Before:
import unittest
import inidiff
INI_1 = '''[test]
number=10
'''
INI_2 = '''[test]
number=20
'''
class TestDiff(unittest.TestCase):
"""Test diffs diff things."""
def test_no_differences(self):
self.assertEqual([], inidiff.diff(INI_1, INI_1))
def test_some_differences(self):
self.assertTrue(len(inidiff.diff(INI_1, INI_2)) > 0)
## Instruction:
Check number is the field that is different
## Code After:
import unittest
import inidiff
INI_1 = '''[test]
number=10
'''
INI_2 = '''[test]
number=20
'''
class TestDiff(unittest.TestCase):
"""Test diffs diff things."""
def test_no_differences(self):
self.assertEqual([], inidiff.diff(INI_1, INI_1))
def test_some_differences(self):
self.assertTrue(len(inidiff.diff(INI_1, INI_2)) > 0)
def test_number_is_different(self):
diffs = inidiff.diff(INI_1, INI_2)
first, second = diffs[0]
self.assertEqual('number', first[1])
| # ... existing code ...
self.assertTrue(len(inidiff.diff(INI_1, INI_2)) > 0)
def test_number_is_different(self):
diffs = inidiff.diff(INI_1, INI_2)
first, second = diffs[0]
self.assertEqual('number', first[1])
# ... rest of the code ... |
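A sketch of the diff shape the new test relies on; the tuple layout is inferred from the assertion, not from inidiff's documentation:

first, second = inidiff.diff(INI_1, INI_2)[0]
first[1]  # 'number' -- index 1 appears to hold the differing option name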
bafdbd28e35d80d28bfb82c23532533cb2915066 | fuel/exceptions.py | fuel/exceptions.py | class AxisLabelsMismatchError(ValueError):
"""Raised when a pair of axis labels tuples do not match."""
class ConfigurationError(Exception):
"""Error raised when a configuration value is requested but not set."""
class MissingInputFiles(Exception):
"""Exception raised by a converter when input files are not found.
Parameters
----------
filenames : list
A list of filenames that were not found.
"""
def __init__(self, message, filenames):
self.filenames = filenames
super(MissingInputFiles, self).__init__(message, filenames)
class NeedURLPrefix(Exception):
"""Raised when a URL is not provided for a file."""
| class AxisLabelsMismatchError(ValueError):
"""Raised when a pair of axis labels tuples do not match."""
class ConfigurationError(Exception):
"""Error raised when a configuration value is requested but not set."""
class MissingInputFiles(Exception):
"""Exception raised by a converter when input files are not found.
Parameters
----------
message : str
The error message to be associated with this exception.
filenames : list
A list of filenames that were not found.
"""
def __init__(self, message, filenames):
self.filenames = filenames
super(MissingInputFiles, self).__init__(message, filenames)
class NeedURLPrefix(Exception):
"""Raised when a URL is not provided for a file."""
| Add docs for MissingInputFiles 'message' arg. | Add docs for MissingInputFiles 'message' arg.
| Python | mit | hantek/fuel,rodrigob/fuel,dmitriy-serdyuk/fuel,codeaudit/fuel,udibr/fuel,mjwillson/fuel,dribnet/fuel,capybaralet/fuel,aalmah/fuel,glewis17/fuel,glewis17/fuel,vdumoulin/fuel,dmitriy-serdyuk/fuel,dwf/fuel,bouthilx/fuel,mila-udem/fuel,chrishokamp/fuel,udibr/fuel,janchorowski/fuel,dwf/fuel,dribnet/fuel,markusnagel/fuel,aalmah/fuel,markusnagel/fuel,orhanf/fuel,capybaralet/fuel,rodrigob/fuel,dhruvparamhans/fuel,dhruvparamhans/fuel,janchorowski/fuel,mila-udem/fuel,bouthilx/fuel,harmdevries89/fuel,hantek/fuel,harmdevries89/fuel,chrishokamp/fuel,codeaudit/fuel,orhanf/fuel,vdumoulin/fuel,mjwillson/fuel | class AxisLabelsMismatchError(ValueError):
"""Raised when a pair of axis labels tuples do not match."""
class ConfigurationError(Exception):
"""Error raised when a configuration value is requested but not set."""
class MissingInputFiles(Exception):
"""Exception raised by a converter when input files are not found.
Parameters
----------
+ message : str
+ The error message to be associated with this exception.
filenames : list
A list of filenames that were not found.
"""
def __init__(self, message, filenames):
self.filenames = filenames
super(MissingInputFiles, self).__init__(message, filenames)
class NeedURLPrefix(Exception):
"""Raised when a URL is not provided for a file."""
| Add docs for MissingInputFiles 'message' arg. | ## Code Before:
class AxisLabelsMismatchError(ValueError):
"""Raised when a pair of axis labels tuples do not match."""
class ConfigurationError(Exception):
"""Error raised when a configuration value is requested but not set."""
class MissingInputFiles(Exception):
"""Exception raised by a converter when input files are not found.
Parameters
----------
filenames : list
A list of filenames that were not found.
"""
def __init__(self, message, filenames):
self.filenames = filenames
super(MissingInputFiles, self).__init__(message, filenames)
class NeedURLPrefix(Exception):
"""Raised when a URL is not provided for a file."""
## Instruction:
Add docs for MissingInputFiles 'message' arg.
## Code After:
class AxisLabelsMismatchError(ValueError):
"""Raised when a pair of axis labels tuples do not match."""
class ConfigurationError(Exception):
"""Error raised when a configuration value is requested but not set."""
class MissingInputFiles(Exception):
"""Exception raised by a converter when input files are not found.
Parameters
----------
message : str
The error message to be associated with this exception.
filenames : list
A list of filenames that were not found.
"""
def __init__(self, message, filenames):
self.filenames = filenames
super(MissingInputFiles, self).__init__(message, filenames)
class NeedURLPrefix(Exception):
"""Raised when a URL is not provided for a file."""
| // ... existing code ...
----------
message : str
The error message to be associated with this exception.
filenames : list
// ... rest of the code ... |
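A minimal sketch of raising the exception with the newly documented arguments; the filenames are illustrative:

missing = ['train.hdf5', 'valid.hdf5']
raise MissingInputFiles('Required input files were not found', missing)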
9e5bb5dd850332cdb410fbc2c9fdf78d08b3e9fb | every_election/apps/organisations/constants.py | every_election/apps/organisations/constants.py | PARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
}
| PARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
'COI': ['COP',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
'COP': 'COI',
}
| Support the Isles of Scilly | Support the Isles of Scilly
| Python | bsd-3-clause | DemocracyClub/EveryElection,DemocracyClub/EveryElection,DemocracyClub/EveryElection | PARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
+ 'COI': ['COP',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
+ 'COP': 'COI',
}
| Support the Isles of Scilly | ## Code Before:
PARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
}
## Instruction:
Support the Isles of Scilly
## Code After:
PARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
'COI': ['COP',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
'COP': 'COI',
}
| ...
'NIA': ['NIE',],
'COI': ['COP',],
}
...
'CPC': 'CED',
'COP': 'COI',
}
... |
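A quick lookup sketch for the new Isles of Scilly entries; the comments reflect the mappings themselves, not external data:

PARENT_TO_CHILD_AREAS['COI']  # ['COP'] -- the child area type added above
CHILD_TO_PARENT_AREAS['COP']  # 'COI'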
03c7f149ac0162a78892593d33b5866a1a9b72df | tests/test_settings.py | tests/test_settings.py | from __future__ import unicode_literals
from django.test import TestCase
from rest_framework.settings import APISettings
class TestSettings(TestCase):
def test_import_error_message_maintained(self):
"""
Make sure import errors are captured and raised sensibly.
"""
settings = APISettings({
'DEFAULT_RENDERER_CLASSES': [
'tests.invalid_module.InvalidClassName'
]
})
with self.assertRaises(ImportError):
settings.DEFAULT_RENDERER_CLASSES
class TestSettingTypes(TestCase):
def test_settings_consistently_coerced_to_list(self):
settings = APISettings({
'DEFAULT_THROTTLE_CLASSES': ('rest_framework.throttling.BaseThrottle',)
})
self.assertTrue(isinstance(settings.DEFAULT_THROTTLE_CLASSES, list))
settings = APISettings({
'DEFAULT_THROTTLE_CLASSES': ()
})
self.assertTrue(isinstance(settings.DEFAULT_THROTTLE_CLASSES, list))
| from __future__ import unicode_literals
from django.test import TestCase
from rest_framework.settings import APISettings
class TestSettings(TestCase):
def test_import_error_message_maintained(self):
"""
Make sure import errors are captured and raised sensibly.
"""
settings = APISettings({
'DEFAULT_RENDERER_CLASSES': [
'tests.invalid_module.InvalidClassName'
]
})
with self.assertRaises(ImportError):
settings.DEFAULT_RENDERER_CLASSES
def test_loud_error_raised_on_removed_setting(self):
"""
Make sure the user is alerted with an error when a removed setting
is set.
"""
with self.assertRaises(AttributeError):
APISettings({
'MAX_PAGINATE_BY': 100
})
class TestSettingTypes(TestCase):
def test_settings_consistently_coerced_to_list(self):
settings = APISettings({
'DEFAULT_THROTTLE_CLASSES': ('rest_framework.throttling.BaseThrottle',)
})
self.assertTrue(isinstance(settings.DEFAULT_THROTTLE_CLASSES, list))
settings = APISettings({
'DEFAULT_THROTTLE_CLASSES': ()
})
self.assertTrue(isinstance(settings.DEFAULT_THROTTLE_CLASSES, list))
| Test case for settings check | Test case for settings check
| Python | bsd-2-clause | davesque/django-rest-framework,dmwyatt/django-rest-framework,jpadilla/django-rest-framework,kgeorgy/django-rest-framework,atombrella/django-rest-framework,davesque/django-rest-framework,pombredanne/django-rest-framework,cyberj/django-rest-framework,ossanna16/django-rest-framework,dmwyatt/django-rest-framework,edx/django-rest-framework,johnraz/django-rest-framework,agconti/django-rest-framework,davesque/django-rest-framework,callorico/django-rest-framework,tomchristie/django-rest-framework,edx/django-rest-framework,uploadcare/django-rest-framework,pombredanne/django-rest-framework,pombredanne/django-rest-framework,sheppard/django-rest-framework,tomchristie/django-rest-framework,kgeorgy/django-rest-framework,callorico/django-rest-framework,linovia/django-rest-framework,edx/django-rest-framework,jpadilla/django-rest-framework,kgeorgy/django-rest-framework,agconti/django-rest-framework,tomchristie/django-rest-framework,jpadilla/django-rest-framework,johnraz/django-rest-framework,ossanna16/django-rest-framework,cyberj/django-rest-framework,rhblind/django-rest-framework,atombrella/django-rest-framework,werthen/django-rest-framework,linovia/django-rest-framework,uploadcare/django-rest-framework,rhblind/django-rest-framework,cyberj/django-rest-framework,werthen/django-rest-framework,ossanna16/django-rest-framework,sheppard/django-rest-framework,dmwyatt/django-rest-framework,linovia/django-rest-framework,rhblind/django-rest-framework,werthen/django-rest-framework,sheppard/django-rest-framework,callorico/django-rest-framework,atombrella/django-rest-framework,johnraz/django-rest-framework,agconti/django-rest-framework,uploadcare/django-rest-framework | from __future__ import unicode_literals
from django.test import TestCase
from rest_framework.settings import APISettings
class TestSettings(TestCase):
def test_import_error_message_maintained(self):
"""
Make sure import errors are captured and raised sensibly.
"""
settings = APISettings({
'DEFAULT_RENDERER_CLASSES': [
'tests.invalid_module.InvalidClassName'
]
})
with self.assertRaises(ImportError):
settings.DEFAULT_RENDERER_CLASSES
+ def test_loud_error_raised_on_removed_setting(self):
+ """
+ Make sure the user is alerted with an error when a removed setting
+ is set.
+ """
+ with self.assertRaises(AttributeError):
+ APISettings({
+ 'MAX_PAGINATE_BY': 100
+ })
+
class TestSettingTypes(TestCase):
def test_settings_consistently_coerced_to_list(self):
settings = APISettings({
'DEFAULT_THROTTLE_CLASSES': ('rest_framework.throttling.BaseThrottle',)
})
self.assertTrue(isinstance(settings.DEFAULT_THROTTLE_CLASSES, list))
settings = APISettings({
'DEFAULT_THROTTLE_CLASSES': ()
})
self.assertTrue(isinstance(settings.DEFAULT_THROTTLE_CLASSES, list))
| Test case for settings check | ## Code Before:
from __future__ import unicode_literals
from django.test import TestCase
from rest_framework.settings import APISettings
class TestSettings(TestCase):
def test_import_error_message_maintained(self):
"""
Make sure import errors are captured and raised sensibly.
"""
settings = APISettings({
'DEFAULT_RENDERER_CLASSES': [
'tests.invalid_module.InvalidClassName'
]
})
with self.assertRaises(ImportError):
settings.DEFAULT_RENDERER_CLASSES
class TestSettingTypes(TestCase):
def test_settings_consistently_coerced_to_list(self):
settings = APISettings({
'DEFAULT_THROTTLE_CLASSES': ('rest_framework.throttling.BaseThrottle',)
})
self.assertTrue(isinstance(settings.DEFAULT_THROTTLE_CLASSES, list))
settings = APISettings({
'DEFAULT_THROTTLE_CLASSES': ()
})
self.assertTrue(isinstance(settings.DEFAULT_THROTTLE_CLASSES, list))
## Instruction:
Test case for settings check
## Code After:
from __future__ import unicode_literals
from django.test import TestCase
from rest_framework.settings import APISettings
class TestSettings(TestCase):
def test_import_error_message_maintained(self):
"""
Make sure import errors are captured and raised sensibly.
"""
settings = APISettings({
'DEFAULT_RENDERER_CLASSES': [
'tests.invalid_module.InvalidClassName'
]
})
with self.assertRaises(ImportError):
settings.DEFAULT_RENDERER_CLASSES
def test_loud_error_raised_on_removed_setting(self):
"""
Make sure the user is alerted with an error when a removed setting
is set.
"""
with self.assertRaises(AttributeError):
APISettings({
'MAX_PAGINATE_BY': 100
})
class TestSettingTypes(TestCase):
def test_settings_consistently_coerced_to_list(self):
settings = APISettings({
'DEFAULT_THROTTLE_CLASSES': ('rest_framework.throttling.BaseThrottle',)
})
self.assertTrue(isinstance(settings.DEFAULT_THROTTLE_CLASSES, list))
settings = APISettings({
'DEFAULT_THROTTLE_CLASSES': ()
})
self.assertTrue(isinstance(settings.DEFAULT_THROTTLE_CLASSES, list))
| // ... existing code ...
def test_loud_error_raised_on_removed_setting(self):
"""
Make sure the user is alerted with an error when a removed setting
is set.
"""
with self.assertRaises(AttributeError):
APISettings({
'MAX_PAGINATE_BY': 100
})
// ... rest of the code ... |
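A hypothetical sketch of the guard this test presumes inside APISettings; the constant name and message are assumptions, not shown in the commit:

REMOVED_SETTINGS = ('MAX_PAGINATE_BY',)

def check_removed(user_settings):
    for setting in REMOVED_SETTINGS:
        if setting in user_settings:
            raise AttributeError("The '%s' setting has been removed." % setting)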
07e825b31912a821d116b2a2b394bd041321cd6d | molly/utils/management/commands/deploy.py | molly/utils/management/commands/deploy.py | import os
from optparse import make_option
from django.core.management import call_command
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--develop',
action='store_true',
dest='develop',
default=False,
help='Create symlinks, rather than copy, existing media, then start the dev server'),
) + (
make_option('--skip-cron',
action='store_true',
dest='skip_cron',
default=False,
help='Skip creating a crontab'),
)
def handle_noargs(self, skip_cron, develop, **options):
call_command('sync_and_migrate')
try:
from molly.wurfl import wurfl_data
except ImportError:
no_wurfl = True
else:
no_wurfl = False
if no_wurfl or not develop:
call_command('update_wurfl')
call_command('collectstatic', interactive=False, link=develop)
call_command('synccompress')
if not skip_cron:
call_command('create_crontab', pipe_to_crontab=(os.name != 'nt'))
if develop:
call_command('runserver')
| import os
from optparse import make_option
from django.core.management import call_command
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--develop',
action='store_true',
dest='develop',
default=False,
help='Create symlinks, rather than copy, existing media, then start the dev server'),
) + (
make_option('--skip-cron',
action='store_true',
dest='skip_cron',
default=False,
help='Skip creating a crontab'),
)
def handle_noargs(self, skip_cron, develop, **options):
call_command('sync_and_migrate')
try:
from molly.wurfl import wurfl_data
except ImportError:
no_wurfl = True
else:
no_wurfl = False
if no_wurfl or not develop:
call_command('update_wurfl')
call_command('generate_markers', lazy=True)
call_command('collectstatic', interactive=False, link=develop)
call_command('synccompress')
if not skip_cron:
call_command('create_crontab', pipe_to_crontab=(os.name != 'nt'))
if develop:
call_command('runserver')
| Deploy should remember to generate markers | Deploy should remember to generate markers
| Python | apache-2.0 | mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject | import os
from optparse import make_option
from django.core.management import call_command
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--develop',
action='store_true',
dest='develop',
default=False,
help='Create symlinks, rather than copy, existing media, then start the dev server'),
) + (
make_option('--skip-cron',
action='store_true',
dest='skip_cron',
default=False,
help='Skip creating a crontab'),
)
def handle_noargs(self, skip_cron, develop, **options):
call_command('sync_and_migrate')
try:
from molly.wurfl import wurfl_data
except ImportError:
no_wurfl = True
else:
no_wurfl = False
if no_wurfl or not develop:
call_command('update_wurfl')
+ call_command('generate_markers', lazy=True)
call_command('collectstatic', interactive=False, link=develop)
call_command('synccompress')
if not skip_cron:
call_command('create_crontab', pipe_to_crontab=(os.name != 'nt'))
if develop:
call_command('runserver')
| Deploy should remember to generate markers | ## Code Before:
import os
from optparse import make_option
from django.core.management import call_command
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--develop',
action='store_true',
dest='develop',
default=False,
help='Create symlinks, rather than copy, existing media, then start the dev server'),
) + (
make_option('--skip-cron',
action='store_true',
dest='skip_cron',
default=False,
help='Skip creating a crontab'),
)
def handle_noargs(self, skip_cron, develop, **options):
call_command('sync_and_migrate')
try:
from molly.wurfl import wurfl_data
except ImportError:
no_wurfl = True
else:
no_wurfl = False
if no_wurfl or not develop:
call_command('update_wurfl')
call_command('collectstatic', interactive=False, link=develop)
call_command('synccompress')
if not skip_cron:
call_command('create_crontab', pipe_to_crontab=(os.name != 'nt'))
if develop:
call_command('runserver')
## Instruction:
Deploy should remember to generate markers
## Code After:
import os
from optparse import make_option
from django.core.management import call_command
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--develop',
action='store_true',
dest='develop',
default=False,
help='Create symlinks, rather than copy, existing media, then start the dev server'),
) + (
make_option('--skip-cron',
action='store_true',
dest='skip_cron',
default=False,
help='Skip creating a crontab'),
)
def handle_noargs(self, skip_cron, develop, **options):
call_command('sync_and_migrate')
try:
from molly.wurfl import wurfl_data
except ImportError:
no_wurfl = True
else:
no_wurfl = False
if no_wurfl or not develop:
call_command('update_wurfl')
call_command('generate_markers', lazy=True)
call_command('collectstatic', interactive=False, link=develop)
call_command('synccompress')
if not skip_cron:
call_command('create_crontab', pipe_to_crontab=(os.name != 'nt'))
if develop:
call_command('runserver')
| # ... existing code ...
call_command('update_wurfl')
call_command('generate_markers', lazy=True)
call_command('collectstatic', interactive=False, link=develop)
# ... rest of the code ... |
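Invocation sketches built from the option list above, assuming a standard manage.py entry point:

# python manage.py deploy --develop     # symlink media and start the dev server
# python manage.py deploy --skip-cron   # deploy without installing a crontab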
a36adf795f370877a472fa4730a3eb31271b8b23 | subversion/bindings/swig/python/tests/run_all.py | subversion/bindings/swig/python/tests/run_all.py | import sys, os
bindir = os.path.dirname(sys.argv[0])
sys.path[0:0] = [ os.getcwd(), "%s/.libs" % os.getcwd(), \
"%s/.." % bindir, "%s/../.libs" % bindir ]
import unittest
import pool
import trac.versioncontrol.tests
# Run all tests
def suite():
"""Run all tests"""
suite = unittest.TestSuite()
suite.addTest(pool.suite())
suite.addTest(trac.versioncontrol.tests.suite());
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| import sys, os
bindir = os.path.dirname(sys.argv[0])
sys.path[0:0] = [ os.getcwd(), "%s/.libs" % os.getcwd(), \
"%s/.." % bindir, "%s/../.libs" % bindir ]
# OSes without RPATH support are going to have to do things here to make
# sure the correct shared libraries are found.
if sys.platform == 'cygwin':
import glob
svndir = os.path.dirname(os.path.dirname(os.path.dirname(os.getcwd())))
libpath = os.getenv("PATH").split(":")
libpath.insert(0, "%s/libsvn_swig_py/.libs" % os.getcwd())
for libdir in glob.glob("%s/libsvn_*" % svndir):
libpath.insert(0, "%s/.libs" % (libdir))
os.putenv("PATH", ":".join(libpath))
import unittest
import pool
import trac.versioncontrol.tests
# Run all tests
def suite():
"""Run all tests"""
suite = unittest.TestSuite()
suite.addTest(pool.suite())
suite.addTest(trac.versioncontrol.tests.suite());
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| Make the Python bindings testsuite be able to find the needed shared libraries on Cygwin. Needed to compensate for Windows' complete lack of library RPATHs. | Make the Python bindings testsuite be able to find the needed shared libraries
on Cygwin. Needed to compensate for Windows' complete lack of library RPATHs.
* subversion/bindings/swig/python/tests/run_all.py: On Cygwin, manipulate $PATH
so that the relevant shared libraries are found.
| Python | apache-2.0 | jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion | import sys, os
bindir = os.path.dirname(sys.argv[0])
sys.path[0:0] = [ os.getcwd(), "%s/.libs" % os.getcwd(), \
"%s/.." % bindir, "%s/../.libs" % bindir ]
+
+ # OSes without RPATH support are going to have to do things here to make
+ # sure the correct shared libraries are found.
+ if sys.platform == 'cygwin':
+ import glob
+ svndir = os.path.dirname(os.path.dirname(os.path.dirname(os.getcwd())))
+ libpath = os.getenv("PATH").split(":")
+ libpath.insert(0, "%s/libsvn_swig_py/.libs" % os.getcwd())
+ for libdir in glob.glob("%s/libsvn_*" % svndir):
+ libpath.insert(0, "%s/.libs" % (libdir))
+ os.putenv("PATH", ":".join(libpath))
+
import unittest
import pool
import trac.versioncontrol.tests
# Run all tests
def suite():
"""Run all tests"""
suite = unittest.TestSuite()
suite.addTest(pool.suite())
suite.addTest(trac.versioncontrol.tests.suite());
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| Make the Python bindings testsuite be able to find the needed shared libraries on Cygwin. Needed to compensate for Windows' complete lack of library RPATHs. | ## Code Before:
import sys, os
bindir = os.path.dirname(sys.argv[0])
sys.path[0:0] = [ os.getcwd(), "%s/.libs" % os.getcwd(), \
"%s/.." % bindir, "%s/../.libs" % bindir ]
import unittest
import pool
import trac.versioncontrol.tests
# Run all tests
def suite():
"""Run all tests"""
suite = unittest.TestSuite()
suite.addTest(pool.suite())
suite.addTest(trac.versioncontrol.tests.suite());
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
## Instruction:
Make the Python bindings testsuite be able to find the needed shared libraries on Cygwin. Needed to compensate for Windows' complete lack of library RPATHs.
## Code After:
import sys, os
bindir = os.path.dirname(sys.argv[0])
sys.path[0:0] = [ os.getcwd(), "%s/.libs" % os.getcwd(), \
"%s/.." % bindir, "%s/../.libs" % bindir ]
# OSes without RPATH support are going to have to do things here to make
# sure the correct shared libraries are found.
if sys.platform == 'cygwin':
import glob
svndir = os.path.dirname(os.path.dirname(os.path.dirname(os.getcwd())))
libpath = os.getenv("PATH").split(":")
libpath.insert(0, "%s/libsvn_swig_py/.libs" % os.getcwd())
for libdir in glob.glob("%s/libsvn_*" % svndir):
libpath.insert(0, "%s/.libs" % (libdir))
os.putenv("PATH", ":".join(libpath))
import unittest
import pool
import trac.versioncontrol.tests
# Run all tests
def suite():
"""Run all tests"""
suite = unittest.TestSuite()
suite.addTest(pool.suite())
suite.addTest(trac.versioncontrol.tests.suite());
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| ...
"%s/.." % bindir, "%s/../.libs" % bindir ]
# OSes without RPATH support are going to have to do things here to make
# sure the correct shared libraries are found.
if sys.platform == 'cygwin':
import glob
svndir = os.path.dirname(os.path.dirname(os.path.dirname(os.getcwd())))
libpath = os.getenv("PATH").split(":")
libpath.insert(0, "%s/libsvn_swig_py/.libs" % os.getcwd())
for libdir in glob.glob("%s/libsvn_*" % svndir):
libpath.insert(0, "%s/.libs" % (libdir))
os.putenv("PATH", ":".join(libpath))
import unittest
... |
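A hedged side note on the same idea: os.putenv does not update os.environ, so assigning through os.environ is the more self-consistent variant:

os.environ["PATH"] = ":".join(libpath)  # visible to this process and to children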
8c1f303d4cc04c95170dea268ab836a23d626064 | thezombies/management/commands/crawl_agency_datasets.py | thezombies/management/commands/crawl_agency_datasets.py | from django.core.management.base import BaseCommand
from thezombies.tasks.main import crawl_agency_datasets
class Command(BaseCommand):
"""Start a task that crawl the datasets from an agency data catalog. This command will exit, but the task will run in the background"""
args = '<agency_id ...>'
def handle(self, *args, **options):
if len(args) > 0:
agency_id = args[0]
if agency_id:
task = crawl_agency_datasets.delay(agency_id)
self.stdout.write(u'Running task with id {0}'.format(task.id))
self.stdout.write(u'This can take many minutes...')
else:
self.stderr.write(u"Didn't get an agency_id!")
| from django.core.management.base import BaseCommand
from thezombies.models import Agency
from thezombies.tasks.main import crawl_agency_datasets
class Command(BaseCommand):
"""Start a task that crawl the datasets from an agency data catalog. This command will exit, but the task will run in the background"""
args = '<agency_id ...>'
def handle(self, *args, **options):
if len(args) > 0:
agency_id = args[0]
if agency_id:
task = crawl_agency_datasets.delay(agency_id)
self.stdout.write(u'Running task with id {0}'.format(task.id))
self.stdout.write(u'This can take many minutes...')
else:
self.stderr.write(u"Didn't get an agency_id!")
else:
self.stdout.write(u'Please provide an agency id:\n')
agency_list = u'\n'.join(['{0:2d}: {1}'.format(a.id, a.name) for a in Agency.objects.all()])
self.stdout.write(agency_list)
self.stdout.write(u'\n')
| Add message for when command is not supplied any arguments. | Add message for when command is not supplied any arguments.
| Python | bsd-3-clause | sunlightlabs/thezombies,sunlightlabs/thezombies,sunlightlabs/thezombies,sunlightlabs/thezombies | from django.core.management.base import BaseCommand
+ from thezombies.models import Agency
from thezombies.tasks.main import crawl_agency_datasets
class Command(BaseCommand):
"""Start a task that crawl the datasets from an agency data catalog. This command will exit, but the task will run in the background"""
args = '<agency_id ...>'
def handle(self, *args, **options):
if len(args) > 0:
agency_id = args[0]
if agency_id:
task = crawl_agency_datasets.delay(agency_id)
self.stdout.write(u'Running task with id {0}'.format(task.id))
self.stdout.write(u'This can take many minutes...')
else:
self.stderr.write(u"Didn't get an agency_id!")
+ else:
+ self.stdout.write(u'Please provide an agency id:\n')
+ agency_list = u'\n'.join(['{0:2d}: {1}'.format(a.id, a.name) for a in Agency.objects.all()])
+ self.stdout.write(agency_list)
+ self.stdout.write(u'\n')
| Add message for when command is not supplied any arguments. | ## Code Before:
from django.core.management.base import BaseCommand
from thezombies.tasks.main import crawl_agency_datasets
class Command(BaseCommand):
"""Start a task that crawl the datasets from an agency data catalog. This command will exit, but the task will run in the background"""
args = '<agency_id ...>'
def handle(self, *args, **options):
if len(args) > 0:
agency_id = args[0]
if agency_id:
task = crawl_agency_datasets.delay(agency_id)
self.stdout.write(u'Running task with id {0}'.format(task.id))
self.stdout.write(u'This can take many minutes...')
else:
self.stderr.write(u"Didn't get an agency_id!")
## Instruction:
Add message for when command is not supplied any arguments.
## Code After:
from django.core.management.base import BaseCommand
from thezombies.models import Agency
from thezombies.tasks.main import crawl_agency_datasets
class Command(BaseCommand):
"""Start a task that crawl the datasets from an agency data catalog. This command will exit, but the task will run in the background"""
args = '<agency_id ...>'
def handle(self, *args, **options):
if len(args) > 0:
agency_id = args[0]
if agency_id:
task = crawl_agency_datasets.delay(agency_id)
self.stdout.write(u'Running task with id {0}'.format(task.id))
self.stdout.write(u'This can take many minutes...')
else:
self.stderr.write(u"Didn't get an agency_id!")
else:
self.stdout.write(u'Please provide an agency id:\n')
agency_list = u'\n'.join(['{0:2d}: {1}'.format(a.id, a.name) for a in Agency.objects.all()])
self.stdout.write(agency_list)
self.stdout.write(u'\n')
| // ... existing code ...
from django.core.management.base import BaseCommand
from thezombies.models import Agency
from thezombies.tasks.main import crawl_agency_datasets
// ... modified code ...
self.stderr.write(u"Didn't get an agency_id!")
else:
self.stdout.write(u'Please provide an agency id:\n')
agency_list = u'\n'.join(['{0:2d}: {1}'.format(a.id, a.name) for a in Agency.objects.all()])
self.stdout.write(agency_list)
self.stdout.write(u'\n')
// ... rest of the code ... |
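Invocation sketches for both branches of the handler, assuming a standard manage.py entry point; the agency id is illustrative:

# python manage.py crawl_agency_datasets 3   # queues the crawl task for agency 3
# python manage.py crawl_agency_datasets     # prints the agency list instead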
48987cb9b5417232280482c681d3e055c1dee9a4 | snap7/bin/snap7-server.py | snap7/bin/snap7-server.py | import time
import logging
import snap7
def mainloop():
server = snap7.server.Server()
size = 100
data = (snap7.types.wordlen_to_ctypes[snap7.types.S7WLByte] * size)()
server.register_area(snap7.types.srvAreaDB, 1, data)
server.start()
while True:
#logger.info("server: %s cpu: %s users: %s" % server.get_status())
while True:
event = server.pick_event()
if event:
logger.info(server.event_text(event))
else:
break
time.sleep(1)
def check_root():
"""
check if uid of this process is root
"""
import os
import platform
if platform.system() == 'Windows':
# We don't need root on Windows to use port 102
return True
if os.getuid() == 0:
return True
root_msg = "it sucks, but you need to run this as root. The snap7 library is" \
" hardcoded run on port 102, which requires root privileges."
if __name__ == '__main__':
logging.basicConfig()
logger = logging.getLogger()
logger.setLevel(logging.INFO)
if not check_root():
logging.error(root_msg)
mainloop()
| import time
import logging
import snap7
def mainloop():
server = snap7.server.Server()
size = 100
data = (snap7.types.wordlen_to_ctypes[snap7.types.S7WLByte] * size)()
server.register_area(snap7.types.srvAreaDB, 1, data)
server.start()
while True:
#logger.info("server: %s cpu: %s users: %s" % server.get_status())
while True:
event = server.pick_event()
if event:
logger.info(server.event_text(event))
else:
break
time.sleep(1)
def check_root():
"""
check if uid of this process is root
"""
import os
import platform
if platform.system() == 'Windows':
# We don't need root on Windows to use port 102
return True
if os.getuid() == 0:
return True
root_msg = "it sucks, but you need to run this as root. The snap7 library is" \
" hardcoded run on port 102, which requires root privileges."
if __name__ == '__main__':
import sys
if len(sys.argv) > 1:
snap7.common.load_library(sys.argv[1])
logging.basicConfig()
logger = logging.getLogger()
logger.setLevel(logging.INFO)
if not check_root():
logging.error(root_msg)
mainloop()
| Add option to start server passing lib path | Add option to start server passing lib path
| Python | mit | SimplyAutomationized/python-snap7,gijzelaerr/python-snap7,ellepdesk/python-snap7,SimplyAutomationized/python-snap7,ellepdesk/python-snap7 | import time
import logging
import snap7
def mainloop():
server = snap7.server.Server()
size = 100
data = (snap7.types.wordlen_to_ctypes[snap7.types.S7WLByte] * size)()
server.register_area(snap7.types.srvAreaDB, 1, data)
server.start()
while True:
#logger.info("server: %s cpu: %s users: %s" % server.get_status())
while True:
event = server.pick_event()
if event:
logger.info(server.event_text(event))
else:
break
time.sleep(1)
def check_root():
"""
check if uid of this process is root
"""
import os
import platform
if platform.system() == 'Windows':
# We don't need root on Windows to use port 102
return True
if os.getuid() == 0:
return True
root_msg = "it sucks, but you need to run this as root. The snap7 library is" \
" hardcoded run on port 102, which requires root privileges."
if __name__ == '__main__':
+ import sys
+ if len(sys.argv) > 1:
+ snap7.common.load_library(sys.argv[1])
logging.basicConfig()
logger = logging.getLogger()
logger.setLevel(logging.INFO)
if not check_root():
logging.error(root_msg)
mainloop()
| Add option to start server passing lib path | ## Code Before:
import time
import logging
import snap7
def mainloop():
server = snap7.server.Server()
size = 100
data = (snap7.types.wordlen_to_ctypes[snap7.types.S7WLByte] * size)()
server.register_area(snap7.types.srvAreaDB, 1, data)
server.start()
while True:
#logger.info("server: %s cpu: %s users: %s" % server.get_status())
while True:
event = server.pick_event()
if event:
logger.info(server.event_text(event))
else:
break
time.sleep(1)
def check_root():
"""
check if uid of this process is root
"""
import os
import platform
if platform.system() == 'Windows':
# We don't need root on Windows to use port 102
return True
if os.getuid() == 0:
return True
root_msg = "it sucks, but you need to run this as root. The snap7 library is" \
" hardcoded run on port 102, which requires root privileges."
if __name__ == '__main__':
logging.basicConfig()
logger = logging.getLogger()
logger.setLevel(logging.INFO)
if not check_root():
logging.error(root_msg)
mainloop()
## Instruction:
Add option to start server passing lib path
## Code After:
import time
import logging
import snap7
def mainloop():
server = snap7.server.Server()
size = 100
data = (snap7.types.wordlen_to_ctypes[snap7.types.S7WLByte] * size)()
server.register_area(snap7.types.srvAreaDB, 1, data)
server.start()
while True:
#logger.info("server: %s cpu: %s users: %s" % server.get_status())
while True:
event = server.pick_event()
if event:
logger.info(server.event_text(event))
else:
break
time.sleep(1)
def check_root():
"""
check if uid of this process is root
"""
import os
import platform
if platform.system() == 'Windows':
# We don't need root on Windows to use port 102
return True
if os.getuid() == 0:
return True
root_msg = "it sucks, but you need to run this as root. The snap7 library is" \
" hardcoded run on port 102, which requires root privileges."
if __name__ == '__main__':
import sys
if len(sys.argv) > 1:
snap7.common.load_library(sys.argv[1])
logging.basicConfig()
logger = logging.getLogger()
logger.setLevel(logging.INFO)
if not check_root():
logging.error(root_msg)
mainloop()
| // ... existing code ...
if __name__ == '__main__':
import sys
if len(sys.argv) > 1:
snap7.common.load_library(sys.argv[1])
logging.basicConfig()
// ... rest of the code ... |
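An invocation sketch for the new optional argument; the library path is an assumption and varies by platform:

# python snap7-server.py                        # use the default library lookup
# python snap7-server.py /usr/lib/libsnap7.so   # load an explicit snap7 library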
cc143597dd7673fb13d8257c4dd7bdafa31c2dd4 | examples/distributed_workers.py | examples/distributed_workers.py | import sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
from sc2.constants import *
class TerranBot(sc2.BotAI):
async def on_step(self, iteration):
await self.distribute_workers()
await self.build_supply()
await self.build_workers()
await self.expand()
async def build_workers(self):
for cc in self.units(UnitTypeId.COMMANDCENTER).ready.noqueue:
if self.can_afford(UnitTypeId.SCV):
await self.do(cc.train(UnitTypeId.SCV))
async def expand(self):
if self.units(UnitTypeId.COMMANDCENTER).amount < 3 and self.can_afford(UnitTypeId.COMMANDCENTER):
await self.expand_now()
async def build_supply(self):
cc = self.units(UnitTypeId.COMMANDCENTER).ready.first
if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
if self.can_afford(UnitTypeId.SUPPLYDEPOT):
await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
run_game(maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, TerranBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False)
| import sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
from sc2.constants import *
class TerranBot(sc2.BotAI):
async def on_step(self, iteration):
await self.distribute_workers()
await self.build_supply()
await self.build_workers()
await self.expand()
async def build_workers(self):
for cc in self.units(UnitTypeId.COMMANDCENTER).ready.noqueue:
if self.can_afford(UnitTypeId.SCV):
await self.do(cc.train(UnitTypeId.SCV))
async def expand(self):
if self.units(UnitTypeId.COMMANDCENTER).amount < 3 and self.can_afford(UnitTypeId.COMMANDCENTER):
await self.expand_now()
async def build_supply(self):
ccs = self.units(UnitTypeId.COMMANDCENTER).ready
if ccs.exists:
cc = ccs.first
if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
if self.can_afford(UnitTypeId.SUPPLYDEPOT):
await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
run_game(maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, TerranBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False)
| Fix command center selection in example | Fix command center selection in example
| Python | mit | Dentosal/python-sc2 | import sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
from sc2.constants import *
class TerranBot(sc2.BotAI):
async def on_step(self, iteration):
await self.distribute_workers()
await self.build_supply()
await self.build_workers()
await self.expand()
async def build_workers(self):
for cc in self.units(UnitTypeId.COMMANDCENTER).ready.noqueue:
if self.can_afford(UnitTypeId.SCV):
await self.do(cc.train(UnitTypeId.SCV))
async def expand(self):
if self.units(UnitTypeId.COMMANDCENTER).amount < 3 and self.can_afford(UnitTypeId.COMMANDCENTER):
await self.expand_now()
async def build_supply(self):
- cc = self.units(UnitTypeId.COMMANDCENTER).ready.first
+ ccs = self.units(UnitTypeId.COMMANDCENTER).ready
+ if ccs.exists:
+ cc = ccs.first
- if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
+ if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
- if self.can_afford(UnitTypeId.SUPPLYDEPOT):
+ if self.can_afford(UnitTypeId.SUPPLYDEPOT):
- await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
+ await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
run_game(maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, TerranBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False)
| Fix command center selection in example | ## Code Before:
import sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
from sc2.constants import *
class TerranBot(sc2.BotAI):
async def on_step(self, iteration):
await self.distribute_workers()
await self.build_supply()
await self.build_workers()
await self.expand()
async def build_workers(self):
for cc in self.units(UnitTypeId.COMMANDCENTER).ready.noqueue:
if self.can_afford(UnitTypeId.SCV):
await self.do(cc.train(UnitTypeId.SCV))
async def expand(self):
if self.units(UnitTypeId.COMMANDCENTER).amount < 3 and self.can_afford(UnitTypeId.COMMANDCENTER):
await self.expand_now()
async def build_supply(self):
cc = self.units(UnitTypeId.COMMANDCENTER).ready.first
if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
if self.can_afford(UnitTypeId.SUPPLYDEPOT):
await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
run_game(maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, TerranBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False)
## Instruction:
Fix command center selection in example
## Code After:
import sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
from sc2.constants import *
class TerranBot(sc2.BotAI):
async def on_step(self, iteration):
await self.distribute_workers()
await self.build_supply()
await self.build_workers()
await self.expand()
async def build_workers(self):
for cc in self.units(UnitTypeId.COMMANDCENTER).ready.noqueue:
if self.can_afford(UnitTypeId.SCV):
await self.do(cc.train(UnitTypeId.SCV))
async def expand(self):
if self.units(UnitTypeId.COMMANDCENTER).amount < 3 and self.can_afford(UnitTypeId.COMMANDCENTER):
await self.expand_now()
async def build_supply(self):
ccs = self.units(UnitTypeId.COMMANDCENTER).ready
if ccs.exists:
cc = ccs.first
if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
if self.can_afford(UnitTypeId.SUPPLYDEPOT):
await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
run_game(maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, TerranBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False)
| ...
async def build_supply(self):
ccs = self.units(UnitTypeId.COMMANDCENTER).ready
if ccs.exists:
cc = ccs.first
if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
if self.can_afford(UnitTypeId.SUPPLYDEPOT):
await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
... |
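The fix above replaces an unconditional `.first` access with an `.exists` guard, because every command center can be destroyed mid-game and `first` on an empty selection raises. A self-contained sketch of the same guard (the `Units` class below is a stand-in, not python-sc2's real container):

```python
class Units(list):
    """Minimal stand-in for python-sc2's unit selection (illustrative only)."""

    @property
    def exists(self):
        return len(self) > 0

    @property
    def first(self):
        return self[0]  # IndexError on an empty selection


def build_supply(command_centers):
    # Check `exists` before touching `first`, as in the fixed build_supply.
    if command_centers.exists:
        return 'build depot near {0}'.format(command_centers.first)
    return None


print(build_supply(Units(['cc1'])))  # build depot near cc1
print(build_supply(Units()))         # None, no crash on an empty selection
```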
d35bd019d99c8ef19012642339ddab1f4a631b8d | fixtureless/tests/test_django_project/test_django_project/test_app/tests/__init__.py | fixtureless/tests/test_django_project/test_django_project/test_app/tests/__init__.py | from test_app.tests.generator import *
from test_app.tests.factory import *
from test_app.tests.utils import *
| from test_app.tests.test_generator import *
from test_app.tests.test_factory import *
from test_app.tests.test_utils import *
| Fix broken imports after file name change | Fix broken imports after file name change
| Python | mit | ricomoss/django-fixtureless | - from test_app.tests.generator import *
+ from test_app.tests.test_generator import *
- from test_app.tests.factory import *
+ from test_app.tests.test_factory import *
- from test_app.tests.utils import *
+ from test_app.tests.test_utils import *
| Fix broken imports after file name change | ## Code Before:
from test_app.tests.generator import *
from test_app.tests.factory import *
from test_app.tests.utils import *
## Instruction:
Fix broken imports after file name change
## Code After:
from test_app.tests.test_generator import *
from test_app.tests.test_factory import *
from test_app.tests.test_utils import *
| // ... existing code ...
from test_app.tests.test_generator import *
from test_app.tests.test_factory import *
from test_app.tests.test_utils import *
// ... rest of the code ... |
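After a rename like this, a prefix-based discovery loop in the package `__init__` keeps the aggregate imports from going stale again. A sketch, assuming a package laid out like `test_app.tests` (the helper name is illustrative):

```python
import importlib
import pkgutil


def import_test_modules(package_name):
    """Import every test_*-prefixed submodule of the given package."""
    package = importlib.import_module(package_name)
    for module_info in pkgutil.iter_modules(package.__path__):
        if module_info.name.startswith('test_'):
            importlib.import_module(
                '{0}.{1}'.format(package_name, module_info.name))


# In test_app/tests/__init__.py this one call replaces the three imports:
# import_test_modules('test_app.tests')
```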
e8ba913722218c86b2b705d8351795a409a514ac | pale/arguments/__init__.py | pale/arguments/__init__.py | from .base import BaseArgument, ListArgument
from .boolean import BooleanArgument
from .number import IntegerArgument
from .scope import ScopeArgument
from .string import StringArgument, StringListArgument
from .url import URLArgument
| from .base import BaseArgument, ListArgument
from .boolean import BooleanArgument
from .number import FloatArgument, IntegerArgument
from .scope import ScopeArgument
from .string import StringArgument, StringListArgument
from .url import URLArgument
| Add FloatArgument to arguments module | Add FloatArgument to arguments module
| Python | mit | Loudr/pale | from .base import BaseArgument, ListArgument
from .boolean import BooleanArgument
- from .number import IntegerArgument
+ from .number import FloatArgument, IntegerArgument
from .scope import ScopeArgument
from .string import StringArgument, StringListArgument
from .url import URLArgument
| Add FloatArgument to arguments module | ## Code Before:
from .base import BaseArgument, ListArgument
from .boolean import BooleanArgument
from .number import IntegerArgument
from .scope import ScopeArgument
from .string import StringArgument, StringListArgument
from .url import URLArgument
## Instruction:
Add FloatArgument to arguments module
## Code After:
from .base import BaseArgument, ListArgument
from .boolean import BooleanArgument
from .number import FloatArgument, IntegerArgument
from .scope import ScopeArgument
from .string import StringArgument, StringListArgument
from .url import URLArgument
| # ... existing code ...
from .boolean import BooleanArgument
from .number import FloatArgument, IntegerArgument
from .scope import ScopeArgument
# ... rest of the code ... |
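The change only widens the package's public import surface, so downstream code can write `from pale.arguments import FloatArgument`. A self-contained sketch of the re-export pattern (the classes are stand-ins, not pale's real implementations):

```python
# number.py (stand-in, not pale's actual module)
class IntegerArgument(object):
    def __init__(self, description):
        self.description = description


class FloatArgument(IntegerArgument):
    """Like IntegerArgument, but would validate floats."""


# A package __init__ then re-exports both, exactly as the record does:
#     from .number import FloatArgument, IntegerArgument
arg = FloatArgument(description='a float parameter')
print(arg.description)
```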
4368567e44c144e85fa9fcdb72f2648c13eb8158 | rest_framework_jsonp/renderers.py | rest_framework_jsonp/renderers.py | from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
class JSONPRenderer(JSONRenderer):
"""
Renderer which serializes to json,
wrapping the json output in a callback function.
"""
media_type = 'application/javascript'
format = 'jsonp'
callback_parameter = 'callback'
default_callback = 'callback'
charset = 'utf-8'
def get_callback(self, renderer_context):
"""
Determine the name of the callback to wrap around the json output.
"""
request = renderer_context.get('request', None)
params = request and request.QUERY_PARAMS or {}
return params.get(self.callback_parameter, self.default_callback)
def render(self, data, accepted_media_type=None, renderer_context=None):
"""
Renders into jsonp, wrapping the json output in a callback function.
Clients may set the callback function name using a query parameter
on the URL, for example: ?callback=exampleCallbackName
"""
renderer_context = renderer_context or {}
callback = self.get_callback(renderer_context)
json = super(JSONPRenderer, self).render(data, accepted_media_type,
renderer_context)
return callback.encode(self.charset) + b'(' + json + b');'
| from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
class JSONPRenderer(JSONRenderer):
"""
Renderer which serializes to json,
wrapping the json output in a callback function.
"""
media_type = 'application/javascript'
format = 'jsonp'
callback_parameter = 'callback'
default_callback = 'callback'
charset = 'utf-8'
def get_callback(self, renderer_context):
"""
Determine the name of the callback to wrap around the json output.
"""
request = renderer_context.get('request', None)
params = request and request.query_params or {}
return params.get(self.callback_parameter, self.default_callback)
def render(self, data, accepted_media_type=None, renderer_context=None):
"""
Renders into jsonp, wrapping the json output in a callback function.
Clients may set the callback function name using a query parameter
on the URL, for example: ?callback=exampleCallbackName
"""
renderer_context = renderer_context or {}
callback = self.get_callback(renderer_context)
json = super(JSONPRenderer, self).render(data, accepted_media_type,
renderer_context)
return callback.encode(self.charset) + b'(' + json + b');'
| Update for compat w/ djangorestframework 3.2 | Update for compat w/ djangorestframework 3.2
Change request.QUERY_PARAMS to request.query_params | Python | isc | baxrob/django-rest-framework-jsonp | from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
class JSONPRenderer(JSONRenderer):
"""
Renderer which serializes to json,
wrapping the json output in a callback function.
"""
media_type = 'application/javascript'
format = 'jsonp'
callback_parameter = 'callback'
default_callback = 'callback'
charset = 'utf-8'
def get_callback(self, renderer_context):
"""
Determine the name of the callback to wrap around the json output.
"""
request = renderer_context.get('request', None)
- params = request and request.QUERY_PARAMS or {}
+ params = request and request.query_params or {}
return params.get(self.callback_parameter, self.default_callback)
def render(self, data, accepted_media_type=None, renderer_context=None):
"""
Renders into jsonp, wrapping the json output in a callback function.
Clients may set the callback function name using a query parameter
on the URL, for example: ?callback=exampleCallbackName
"""
renderer_context = renderer_context or {}
callback = self.get_callback(renderer_context)
json = super(JSONPRenderer, self).render(data, accepted_media_type,
renderer_context)
return callback.encode(self.charset) + b'(' + json + b');'
| Update for compat w/ djangorestframework 3.2 | ## Code Before:
from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
class JSONPRenderer(JSONRenderer):
"""
Renderer which serializes to json,
wrapping the json output in a callback function.
"""
media_type = 'application/javascript'
format = 'jsonp'
callback_parameter = 'callback'
default_callback = 'callback'
charset = 'utf-8'
def get_callback(self, renderer_context):
"""
Determine the name of the callback to wrap around the json output.
"""
request = renderer_context.get('request', None)
params = request and request.QUERY_PARAMS or {}
return params.get(self.callback_parameter, self.default_callback)
def render(self, data, accepted_media_type=None, renderer_context=None):
"""
Renders into jsonp, wrapping the json output in a callback function.
Clients may set the callback function name using a query parameter
on the URL, for example: ?callback=exampleCallbackName
"""
renderer_context = renderer_context or {}
callback = self.get_callback(renderer_context)
json = super(JSONPRenderer, self).render(data, accepted_media_type,
renderer_context)
return callback.encode(self.charset) + b'(' + json + b');'
## Instruction:
Update for compat w/ djangorestframework 3.2
## Code After:
from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
class JSONPRenderer(JSONRenderer):
"""
Renderer which serializes to json,
wrapping the json output in a callback function.
"""
media_type = 'application/javascript'
format = 'jsonp'
callback_parameter = 'callback'
default_callback = 'callback'
charset = 'utf-8'
def get_callback(self, renderer_context):
"""
Determine the name of the callback to wrap around the json output.
"""
request = renderer_context.get('request', None)
params = request and request.query_params or {}
return params.get(self.callback_parameter, self.default_callback)
def render(self, data, accepted_media_type=None, renderer_context=None):
"""
Renders into jsonp, wrapping the json output in a callback function.
Clients may set the callback function name using a query parameter
on the URL, for example: ?callback=exampleCallbackName
"""
renderer_context = renderer_context or {}
callback = self.get_callback(renderer_context)
json = super(JSONPRenderer, self).render(data, accepted_media_type,
renderer_context)
return callback.encode(self.charset) + b'(' + json + b');'
| # ... existing code ...
request = renderer_context.get('request', None)
params = request and request.query_params or {}
return params.get(self.callback_parameter, self.default_callback)
# ... rest of the code ... |
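The record targets DRF 3.2, where `request.QUERY_PARAMS` is gone in favour of `request.query_params`. If a renderer must keep running on both sides of the rename, a `getattr` fallback avoids version checks; a self-contained sketch (the `FakeRequest` is only for demonstration):

```python
class FakeRequest(object):
    query_params = {'callback': 'exampleCallbackName'}


def get_params(request):
    # Prefer the DRF >= 3 attribute, fall back to the pre-3 name.
    if request is None:
        return {}
    params = getattr(request, 'query_params', None)
    if params is None:
        params = getattr(request, 'QUERY_PARAMS', {})
    return params


print(get_params(FakeRequest()))  # {'callback': 'exampleCallbackName'}
print(get_params(None))           # {}
```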
acce959e4885a52ba4a80beaed41a56aac63844e | tests/opwen_email_server/api/test_client_read.py | tests/opwen_email_server/api/test_client_read.py | from contextlib import contextmanager
from os import environ
from unittest import TestCase
class DownloadTests(TestCase):
def test_denies_unknown_client(self):
with self._given_clients('{"client1": "id1"}') as download:
message, status = download('unknown_client')
self.assertEqual(status, 403)
@classmethod
@contextmanager
def _given_clients(cls, clients: str):
environ['LOKOLE_CLIENTS'] = clients
from opwen_email_server.api import client_read
yield client_read.download
del client_read
| from contextlib import contextmanager
from unittest import TestCase
from opwen_email_server.api import client_read
from opwen_email_server.services.auth import EnvironmentAuth
class DownloadTests(TestCase):
def test_denies_unknown_client(self):
with self.given_clients({'client1': 'id1'}):
message, status = client_read.download('unknown_client')
self.assertEqual(status, 403)
@contextmanager
def given_clients(self, clients):
original_clients = client_read.CLIENTS
client_read.CLIENTS = EnvironmentAuth(clients)
yield
client_read.CLIENTS = original_clients
| Remove need to set environment variables in test | Remove need to set environment variables in test
| Python | apache-2.0 | ascoderu/opwen-cloudserver,ascoderu/opwen-cloudserver | from contextlib import contextmanager
- from os import environ
from unittest import TestCase
+
+ from opwen_email_server.api import client_read
+ from opwen_email_server.services.auth import EnvironmentAuth
class DownloadTests(TestCase):
def test_denies_unknown_client(self):
- with self._given_clients('{"client1": "id1"}') as download:
+ with self.given_clients({'client1': 'id1'}):
- message, status = download('unknown_client')
+ message, status = client_read.download('unknown_client')
self.assertEqual(status, 403)
- @classmethod
@contextmanager
- def _given_clients(cls, clients: str):
+ def given_clients(self, clients):
- environ['LOKOLE_CLIENTS'] = clients
- from opwen_email_server.api import client_read
- yield client_read.download
- del client_read
+ original_clients = client_read.CLIENTS
+ client_read.CLIENTS = EnvironmentAuth(clients)
+ yield
+ client_read.CLIENTS = original_clients
| Remove need to set environment variables in test | ## Code Before:
from contextlib import contextmanager
from os import environ
from unittest import TestCase
class DownloadTests(TestCase):
def test_denies_unknown_client(self):
with self._given_clients('{"client1": "id1"}') as download:
message, status = download('unknown_client')
self.assertEqual(status, 403)
@classmethod
@contextmanager
def _given_clients(cls, clients: str):
environ['LOKOLE_CLIENTS'] = clients
from opwen_email_server.api import client_read
yield client_read.download
del client_read
## Instruction:
Remove need to set environment variables in test
## Code After:
from contextlib import contextmanager
from unittest import TestCase
from opwen_email_server.api import client_read
from opwen_email_server.services.auth import EnvironmentAuth
class DownloadTests(TestCase):
def test_denies_unknown_client(self):
with self.given_clients({'client1': 'id1'}):
message, status = client_read.download('unknown_client')
self.assertEqual(status, 403)
@contextmanager
def given_clients(self, clients):
original_clients = client_read.CLIENTS
client_read.CLIENTS = EnvironmentAuth(clients)
yield
client_read.CLIENTS = original_clients
| // ... existing code ...
from contextlib import contextmanager
from unittest import TestCase
from opwen_email_server.api import client_read
from opwen_email_server.services.auth import EnvironmentAuth
// ... modified code ...
def test_denies_unknown_client(self):
with self.given_clients({'client1': 'id1'}):
message, status = client_read.download('unknown_client')
self.assertEqual(status, 403)
...
@contextmanager
def given_clients(self, clients):
original_clients = client_read.CLIENTS
client_read.CLIENTS = EnvironmentAuth(clients)
yield
client_read.CLIENTS = original_clients
// ... rest of the code ... |
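The rewritten fixture swaps a module-level collaborator and restores it afterwards. A self-contained sketch of the same shape; the `try/finally` here is an addition over the record's version and guarantees restoration even when the test body raises:

```python
from contextlib import contextmanager


class _FakeModule(object):
    CLIENTS = {'default': 'id0'}


client_read = _FakeModule()  # stands in for the real module under test


@contextmanager
def given_clients(clients):
    original = client_read.CLIENTS
    client_read.CLIENTS = clients
    try:
        yield
    finally:
        client_read.CLIENTS = original


with given_clients({'client1': 'id1'}):
    assert client_read.CLIENTS == {'client1': 'id1'}
assert client_read.CLIENTS == {'default': 'id0'}
```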
9968e526c00ee221940b30f435ecb866a4a1a608 | tests/core/test_validator.py | tests/core/test_validator.py | import pytest
import asyncio
from rasa.core.validator import Validator
from tests.core.conftest import DEFAULT_DOMAIN_PATH, DEFAULT_STORIES_FILE, DEFAULT_NLU_DATA
from rasa.core.domain import Domain
from rasa.nlu.training_data import load_data, TrainingData
from rasa.core.training.dsl import StoryFileReader
@pytest.fixture
def validator():
domain = Domain.load(DEFAULT_DOMAIN_PATH)
stories = asyncio.run(
StoryFileReader.read_from_folder(DEFAULT_STORIES_FILE, domain)
)
intents = load_data(DEFAULT_NLU_DATA)
return Validator(domain=domain, intents=intents, stories=stories)
def test_validator_creation(validator):
assert isinstance(validator.domain, Domain)
assert isinstance(validator.intents, TrainingData)
assert isinstance(validator.stories, list)
def test_search(validator):
vec = ['a', 'b', 'c', 'd', 'e']
assert validator._search(vector=vec, searched_value='c')
def test_verify_intents(validator):
valid_intents = ['greet', 'goodbye', 'affirm']
validator.verify_intents()
assert validator.valid_intents == valid_intents
def test_verify_utters(validator):
valid_utterances = ['utter_greet', 'utter_goodbye', 'utter_default']
validator.verify_utterances()
assert validator.valid_utterances == valid_utterances
| import pytest
import asyncio
from rasa.core.validator import Validator
from tests.core.conftest import (
DEFAULT_DOMAIN_PATH,
DEFAULT_STORIES_FILE,
DEFAULT_NLU_DATA,
)
from rasa.core.domain import Domain
from rasa.nlu.training_data import load_data, TrainingData
from rasa.core.training.dsl import StoryFileReader
@pytest.fixture
def validator():
domain = Domain.load(DEFAULT_DOMAIN_PATH)
stories = asyncio.run(
StoryFileReader.read_from_folder(DEFAULT_STORIES_FILE, domain)
)
intents = load_data(DEFAULT_NLU_DATA)
return Validator(domain=domain, intents=intents, stories=stories)
def test_validator_creation(validator):
assert isinstance(validator.domain, Domain)
assert isinstance(validator.intents, TrainingData)
assert isinstance(validator.stories, list)
def test_search(validator):
vec = ["a", "b", "c", "d", "e"]
assert validator._search(vector=vec, searched_value="c")
def test_verify_intents(validator):
valid_intents = ["greet", "goodbye", "affirm"]
validator.verify_intents()
assert validator.valid_intents == valid_intents
def test_verify_utters(validator):
valid_utterances = ["utter_greet", "utter_goodbye", "utter_default"]
validator.verify_utterances()
assert validator.valid_utterances == valid_utterances
| Refactor validator tests with black | Refactor validator tests with black
Signed-off-by: Gabriela Barrozo Guedes <[email protected]>
| Python | apache-2.0 | RasaHQ/rasa_nlu,RasaHQ/rasa_nlu,RasaHQ/rasa_nlu | import pytest
import asyncio
from rasa.core.validator import Validator
- from tests.core.conftest import DEFAULT_DOMAIN_PATH, DEFAULT_STORIES_FILE, DEFAULT_NLU_DATA
+ from tests.core.conftest import (
+ DEFAULT_DOMAIN_PATH,
+ DEFAULT_STORIES_FILE,
+ DEFAULT_NLU_DATA,
+ )
from rasa.core.domain import Domain
from rasa.nlu.training_data import load_data, TrainingData
from rasa.core.training.dsl import StoryFileReader
+
@pytest.fixture
def validator():
domain = Domain.load(DEFAULT_DOMAIN_PATH)
stories = asyncio.run(
StoryFileReader.read_from_folder(DEFAULT_STORIES_FILE, domain)
)
intents = load_data(DEFAULT_NLU_DATA)
return Validator(domain=domain, intents=intents, stories=stories)
def test_validator_creation(validator):
assert isinstance(validator.domain, Domain)
assert isinstance(validator.intents, TrainingData)
assert isinstance(validator.stories, list)
def test_search(validator):
- vec = ['a', 'b', 'c', 'd', 'e']
+ vec = ["a", "b", "c", "d", "e"]
- assert validator._search(vector=vec, searched_value='c')
+ assert validator._search(vector=vec, searched_value="c")
def test_verify_intents(validator):
- valid_intents = ['greet', 'goodbye', 'affirm']
+ valid_intents = ["greet", "goodbye", "affirm"]
validator.verify_intents()
assert validator.valid_intents == valid_intents
def test_verify_utters(validator):
- valid_utterances = ['utter_greet', 'utter_goodbye', 'utter_default']
+ valid_utterances = ["utter_greet", "utter_goodbye", "utter_default"]
validator.verify_utterances()
assert validator.valid_utterances == valid_utterances
| Refactor validator tests with black | ## Code Before:
import pytest
import asyncio
from rasa.core.validator import Validator
from tests.core.conftest import DEFAULT_DOMAIN_PATH, DEFAULT_STORIES_FILE, DEFAULT_NLU_DATA
from rasa.core.domain import Domain
from rasa.nlu.training_data import load_data, TrainingData
from rasa.core.training.dsl import StoryFileReader
@pytest.fixture
def validator():
domain = Domain.load(DEFAULT_DOMAIN_PATH)
stories = asyncio.run(
StoryFileReader.read_from_folder(DEFAULT_STORIES_FILE, domain)
)
intents = load_data(DEFAULT_NLU_DATA)
return Validator(domain=domain, intents=intents, stories=stories)
def test_validator_creation(validator):
assert isinstance(validator.domain, Domain)
assert isinstance(validator.intents, TrainingData)
assert isinstance(validator.stories, list)
def test_search(validator):
vec = ['a', 'b', 'c', 'd', 'e']
assert validator._search(vector=vec, searched_value='c')
def test_verify_intents(validator):
valid_intents = ['greet', 'goodbye', 'affirm']
validator.verify_intents()
assert validator.valid_intents == valid_intents
def test_verify_utters(validator):
valid_utterances = ['utter_greet', 'utter_goodbye', 'utter_default']
validator.verify_utterances()
assert validator.valid_utterances == valid_utterances
## Instruction:
Refactor validator tests with black
## Code After:
import pytest
import asyncio
from rasa.core.validator import Validator
from tests.core.conftest import (
DEFAULT_DOMAIN_PATH,
DEFAULT_STORIES_FILE,
DEFAULT_NLU_DATA,
)
from rasa.core.domain import Domain
from rasa.nlu.training_data import load_data, TrainingData
from rasa.core.training.dsl import StoryFileReader
@pytest.fixture
def validator():
domain = Domain.load(DEFAULT_DOMAIN_PATH)
stories = asyncio.run(
StoryFileReader.read_from_folder(DEFAULT_STORIES_FILE, domain)
)
intents = load_data(DEFAULT_NLU_DATA)
return Validator(domain=domain, intents=intents, stories=stories)
def test_validator_creation(validator):
assert isinstance(validator.domain, Domain)
assert isinstance(validator.intents, TrainingData)
assert isinstance(validator.stories, list)
def test_search(validator):
vec = ["a", "b", "c", "d", "e"]
assert validator._search(vector=vec, searched_value="c")
def test_verify_intents(validator):
valid_intents = ["greet", "goodbye", "affirm"]
validator.verify_intents()
assert validator.valid_intents == valid_intents
def test_verify_utters(validator):
valid_utterances = ["utter_greet", "utter_goodbye", "utter_default"]
validator.verify_utterances()
assert validator.valid_utterances == valid_utterances
| ...
from rasa.core.validator import Validator
from tests.core.conftest import (
DEFAULT_DOMAIN_PATH,
DEFAULT_STORIES_FILE,
DEFAULT_NLU_DATA,
)
from rasa.core.domain import Domain
...
from rasa.core.training.dsl import StoryFileReader
...
def test_search(validator):
vec = ["a", "b", "c", "d", "e"]
assert validator._search(vector=vec, searched_value="c")
...
def test_verify_intents(validator):
valid_intents = ["greet", "goodbye", "affirm"]
validator.verify_intents()
...
def test_verify_utters(validator):
valid_utterances = ["utter_greet", "utter_goodbye", "utter_default"]
validator.verify_utterances()
... |
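All of the changes here (double quotes, the wrapped conftest import, the extra blank line) are black's defaults. The same normalization can be reproduced and enforced programmatically; a sketch against black's documented `format_str` API, assuming a recent black release:

```python
import black

source = "vec = ['a', 'b', 'c', 'd', 'e']\n"
formatted = black.format_str(source, mode=black.Mode())
print(formatted)  # vec = ["a", "b", "c", "d", "e"]
```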
648daddbc75ee18201cc441dcf3ec34238e4479d | astropy/coordinates/__init__.py | astropy/coordinates/__init__.py |
from .errors import *
from .angles import *
from .baseframe import *
from .attributes import *
from .distances import *
from .earth import *
from .transformations import *
from .builtin_frames import *
from .name_resolve import *
from .matching import *
from .representation import *
from .sky_coordinate import *
from .funcs import *
from .calculation import *
from .solar_system import *
# This is for backwards-compatibility -- can be removed in v3.0 when the
# deprecation warnings are removed
from .attributes import (TimeFrameAttribute, QuantityFrameAttribute,
CartesianRepresentationFrameAttribute)
__doc__ += builtin_frames._transform_graph_docs + """
.. note::
The ecliptic coordinate systems (added in Astropy v1.1) have not been
extensively tested for accuracy or consistency with other implementations of
ecliptic coordinates. We welcome contributions to add such testing, but in
the meantime, users who depend on consistency with other implementations may
wish to check test inputs against good datasets before using Astropy's
ecliptic coordinates.
"""
|
from .errors import *
from .angles import *
from .baseframe import *
from .attributes import *
from .distances import *
from .earth import *
from .transformations import *
from .builtin_frames import *
from .name_resolve import *
from .matching import *
from .representation import *
from .sky_coordinate import *
from .funcs import *
from .calculation import *
from .solar_system import *
# This is for backwards-compatibility -- can be removed in v3.0 when the
# deprecation warnings are removed
from .attributes import (TimeFrameAttribute, QuantityFrameAttribute,
CartesianRepresentationFrameAttribute)
__doc__ += builtin_frames._transform_graph_docs
| Remove "experimental" state of ecliptic frames | Remove "experimental" state of ecliptic frames
| Python | bsd-3-clause | lpsinger/astropy,saimn/astropy,dhomeier/astropy,saimn/astropy,astropy/astropy,MSeifert04/astropy,larrybradley/astropy,dhomeier/astropy,MSeifert04/astropy,mhvk/astropy,lpsinger/astropy,pllim/astropy,saimn/astropy,astropy/astropy,MSeifert04/astropy,bsipocz/astropy,larrybradley/astropy,bsipocz/astropy,StuartLittlefair/astropy,stargaser/astropy,aleksandr-bakanov/astropy,bsipocz/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,astropy/astropy,larrybradley/astropy,lpsinger/astropy,saimn/astropy,lpsinger/astropy,mhvk/astropy,dhomeier/astropy,bsipocz/astropy,aleksandr-bakanov/astropy,stargaser/astropy,astropy/astropy,StuartLittlefair/astropy,larrybradley/astropy,pllim/astropy,stargaser/astropy,aleksandr-bakanov/astropy,MSeifert04/astropy,larrybradley/astropy,astropy/astropy,mhvk/astropy,dhomeier/astropy,stargaser/astropy,mhvk/astropy,saimn/astropy,mhvk/astropy,dhomeier/astropy,StuartLittlefair/astropy,pllim/astropy,pllim/astropy,StuartLittlefair/astropy,pllim/astropy,lpsinger/astropy |
from .errors import *
from .angles import *
from .baseframe import *
from .attributes import *
from .distances import *
from .earth import *
from .transformations import *
from .builtin_frames import *
from .name_resolve import *
from .matching import *
from .representation import *
from .sky_coordinate import *
from .funcs import *
from .calculation import *
from .solar_system import *
# This is for backwards-compatibility -- can be removed in v3.0 when the
# deprecation warnings are removed
from .attributes import (TimeFrameAttribute, QuantityFrameAttribute,
CartesianRepresentationFrameAttribute)
- __doc__ += builtin_frames._transform_graph_docs + """
+ __doc__ += builtin_frames._transform_graph_docs
- .. note::
-
- The ecliptic coordinate systems (added in Astropy v1.1) have not been
- extensively tested for accuracy or consistency with other implementations of
- ecliptic coordinates. We welcome contributions to add such testing, but in
- the meantime, users who depend on consistency with other implementations may
- wish to check test inputs against good datasets before using Astropy's
- ecliptic coordinates.
-
- """
- | Remove "experimental" state of ecliptic frames | ## Code Before:
from .errors import *
from .angles import *
from .baseframe import *
from .attributes import *
from .distances import *
from .earth import *
from .transformations import *
from .builtin_frames import *
from .name_resolve import *
from .matching import *
from .representation import *
from .sky_coordinate import *
from .funcs import *
from .calculation import *
from .solar_system import *
# This is for backwards-compatibility -- can be removed in v3.0 when the
# deprecation warnings are removed
from .attributes import (TimeFrameAttribute, QuantityFrameAttribute,
CartesianRepresentationFrameAttribute)
__doc__ += builtin_frames._transform_graph_docs + """
.. note::
The ecliptic coordinate systems (added in Astropy v1.1) have not been
extensively tested for accuracy or consistency with other implementations of
ecliptic coordinates. We welcome contributions to add such testing, but in
the meantime, users who depend on consistency with other implementations may
wish to check test inputs against good datasets before using Astropy's
ecliptic coordinates.
"""
## Instruction:
Remove "experimental" state of ecliptic frames
## Code After:
from .errors import *
from .angles import *
from .baseframe import *
from .attributes import *
from .distances import *
from .earth import *
from .transformations import *
from .builtin_frames import *
from .name_resolve import *
from .matching import *
from .representation import *
from .sky_coordinate import *
from .funcs import *
from .calculation import *
from .solar_system import *
# This is for backwards-compatibility -- can be removed in v3.0 when the
# deprecation warnings are removed
from .attributes import (TimeFrameAttribute, QuantityFrameAttribute,
CartesianRepresentationFrameAttribute)
__doc__ += builtin_frames._transform_graph_docs
| # ... existing code ...
__doc__ += builtin_frames._transform_graph_docs
# ... rest of the code ... |
5076055b54d18ea2441abaf604a4ea4dd79353c5 | cybox/test/objects/__init__.py | cybox/test/objects/__init__.py | import cybox.utils
class ObjectTestCase(object):
"""A base class for testing all subclasses of ObjectProperties.
Each subclass of ObjectTestCase should subclass both unittest.TestCase
and ObjectTestCase, and defined two class-level fields:
- klass: the ObjectProperties subclass being tested
- object_type: The name prefix used in the XML Schema bindings for the
object.
"""
def test_type_exists(self):
# Verify that the correct class has been added to the OBJECTS
# dictionary in cybox.utils
print(type(self))
if type(self) == type(ObjectTestCase):
return
t = self.__class__.object_type
c = self.__class__.klass
self.assertEqual(cybox.utils.get_class_for_object_type(t), c)
| import cybox.utils
class ObjectTestCase(object):
"""A base class for testing all subclasses of ObjectProperties.
Each subclass of ObjectTestCase should subclass both unittest.TestCase
and ObjectTestCase, and defined two class-level fields:
- klass: the ObjectProperties subclass being tested
- object_type: The name prefix used in the XML Schema bindings for the
object.
"""
def test_type_exists(self):
# Verify that the correct class has been added to the OBJECT_TYPES_DICT
# dictionary in cybox.utils.nsparser
# Skip this base class
if type(self) == type(ObjectTestCase):
return
t = self.__class__.object_type
expected_class = cybox.utils.get_class_for_object_type(t)
actual_class = self.__class__.klass
self.assertEqual(expected_class, actual_class)
expected_namespace = expected_class._XSI_NS
actual_namespace = cybox.utils.nsparser.OBJECT_TYPES_DICT.get(t).get('namespace_prefix')
self.assertEqual(expected_namespace, actual_namespace)
self.assertEqual(expected_class._XSI_TYPE, t)
| Expand default testing on new object types | Expand default testing on new object types
| Python | bsd-3-clause | CybOXProject/python-cybox | import cybox.utils
class ObjectTestCase(object):
"""A base class for testing all subclasses of ObjectProperties.
Each subclass of ObjectTestCase should subclass both unittest.TestCase
and ObjectTestCase, and defined two class-level fields:
- klass: the ObjectProperties subclass being tested
- object_type: The name prefix used in the XML Schema bindings for the
object.
"""
def test_type_exists(self):
- # Verify that the correct class has been added to the OBJECTS
+ # Verify that the correct class has been added to the OBJECT_TYPES_DICT
- # dictionary in cybox.utils
+ # dictionary in cybox.utils.nsparser
- print(type(self))
+
+ # Skip this base class
if type(self) == type(ObjectTestCase):
return
+
t = self.__class__.object_type
- c = self.__class__.klass
- self.assertEqual(cybox.utils.get_class_for_object_type(t), c)
+ expected_class = cybox.utils.get_class_for_object_type(t)
+ actual_class = self.__class__.klass
+
+ self.assertEqual(expected_class, actual_class)
+
+ expected_namespace = expected_class._XSI_NS
+ actual_namespace = cybox.utils.nsparser.OBJECT_TYPES_DICT.get(t).get('namespace_prefix')
+ self.assertEqual(expected_namespace, actual_namespace)
+
+ self.assertEqual(expected_class._XSI_TYPE, t)
+ | Expand default testing on new object types | ## Code Before:
import cybox.utils
class ObjectTestCase(object):
"""A base class for testing all subclasses of ObjectProperties.
Each subclass of ObjectTestCase should subclass both unittest.TestCase
and ObjectTestCase, and defined two class-level fields:
- klass: the ObjectProperties subclass being tested
- object_type: The name prefix used in the XML Schema bindings for the
object.
"""
def test_type_exists(self):
# Verify that the correct class has been added to the OBJECTS
# dictionary in cybox.utils
print(type(self))
if type(self) == type(ObjectTestCase):
return
t = self.__class__.object_type
c = self.__class__.klass
self.assertEqual(cybox.utils.get_class_for_object_type(t), c)
## Instruction:
Expand default testing on new object types
## Code After:
import cybox.utils
class ObjectTestCase(object):
"""A base class for testing all subclasses of ObjectProperties.
Each subclass of ObjectTestCase should subclass both unittest.TestCase
and ObjectTestCase, and defined two class-level fields:
- klass: the ObjectProperties subclass being tested
- object_type: The name prefix used in the XML Schema bindings for the
object.
"""
def test_type_exists(self):
# Verify that the correct class has been added to the OBJECT_TYPES_DICT
# dictionary in cybox.utils.nsparser
# Skip this base class
if type(self) == type(ObjectTestCase):
return
t = self.__class__.object_type
expected_class = cybox.utils.get_class_for_object_type(t)
actual_class = self.__class__.klass
self.assertEqual(expected_class, actual_class)
expected_namespace = expected_class._XSI_NS
actual_namespace = cybox.utils.nsparser.OBJECT_TYPES_DICT.get(t).get('namespace_prefix')
self.assertEqual(expected_namespace, actual_namespace)
self.assertEqual(expected_class._XSI_TYPE, t)
| # ... existing code ...
def test_type_exists(self):
# Verify that the correct class has been added to the OBJECT_TYPES_DICT
# dictionary in cybox.utils.nsparser
# Skip this base class
if type(self) == type(ObjectTestCase):
# ... modified code ...
return
t = self.__class__.object_type
expected_class = cybox.utils.get_class_for_object_type(t)
actual_class = self.__class__.klass
self.assertEqual(expected_class, actual_class)
expected_namespace = expected_class._XSI_NS
actual_namespace = cybox.utils.nsparser.OBJECT_TYPES_DICT.get(t).get('namespace_prefix')
self.assertEqual(expected_namespace, actual_namespace)
self.assertEqual(expected_class._XSI_TYPE, t)
# ... rest of the code ... |
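The record relies on a mixin base class whose tests only make sense in concrete subclasses. A common, self-contained variant of the idiom keeps the mixin off `unittest.TestCase` and guards on the class attributes (names below are illustrative, not cybox's):

```python
import unittest


class ObjectTestCaseMixin(object):
    klass = None
    object_type = None

    def test_type_exists(self):
        if self.klass is None:  # guard: the mixin itself has no subject
            self.skipTest('abstract base test')
        self.assertEqual(self.klass.__name__, self.object_type)


class Address(object):
    pass


class AddressTests(ObjectTestCaseMixin, unittest.TestCase):
    klass = Address
    object_type = 'Address'


if __name__ == '__main__':
    unittest.main()
```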
56aa0448fb3cd1df1a0fd43abc9a0e37e8ddf55b | trans_sync/management/commands/save_trans.py | trans_sync/management/commands/save_trans.py | from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option(
'--dry-run',
action='store_true',
dest='dry_run',
default=False,
help='Do not actually send signals (and all connected stuff).'
),
)
def handle(self, *args, **options):
if not options['dry_run']:
pass | from __future__ import unicode_literals
import os
from os.path import join, isdir
from optparse import make_option
from django.core.management.base import NoArgsCommand
from django.conf import settings
from modeltranslation.translator import translator
from babel.messages.catalog import Catalog
from babel.messages.pofile import write_po
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option(
'--dry-run',
action='store_true',
dest='dry_run',
default=False,
help='Do not actually save files.'
),
)
def handle(self, *args, **options):
if not options['dry_run']:
pass
locale_path = settings.LOCALE_MODEL_TRANS
if not isdir(locale_path):
os.mkdir(locale_path)
for lang in [l[0] for l in list(settings.LANGUAGES)]:
catalog = Catalog(locale=lang)
for model in translator.get_registered_models():
opts = translator.get_options_for_model(model)
for field in opts.get_field_names():
tr_field = "%s_%s" % (field, lang)
for item in model.objects.all():
msgid = "%s.%s.%s" % (item._meta, item.pk, field)
msgstr = "%s" % getattr(item, tr_field)
catalog.add(id=msgid, string=msgstr)
# write catalog to file
lang_path = os.path.join(locale_path, lang)
if not isdir(lang_path):
os.mkdir(lang_path)
f = open(join(lang_path, "LC_MESSAGES", "modeltranslation.po"), "w")
write_po(f, catalog)
f.close() | Save trans to .po files | Save trans to .po files
| Python | mit | djentlemen/django-modeltranslation-sync | from __future__ import unicode_literals
-
+ import os
+ from os.path import join, isdir
from optparse import make_option
from django.core.management.base import NoArgsCommand
+ from django.conf import settings
+ from modeltranslation.translator import translator
+
+ from babel.messages.catalog import Catalog
+ from babel.messages.pofile import write_po
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option(
'--dry-run',
action='store_true',
dest='dry_run',
default=False,
- help='Do not actually send signals (and all connected stuff).'
+ help='Do not actually save files.'
),
)
def handle(self, *args, **options):
if not options['dry_run']:
pass
+
+ locale_path = settings.LOCALE_MODEL_TRANS
+ if not isdir(locale_path):
+ os.mkdir(locale_path)
+
+ for lang in [l[0] for l in list(settings.LANGUAGES)]:
+
+ catalog = Catalog(locale=lang)
+
+ for model in translator.get_registered_models():
+ opts = translator.get_options_for_model(model)
+
+ for field in opts.get_field_names():
+ tr_field = "%s_%s" % (field, lang)
+ for item in model.objects.all():
+ msgid = "%s.%s.%s" % (item._meta, item.pk, field)
+ msgstr = "%s" % getattr(item, tr_field)
+ catalog.add(id=msgid, string=msgstr)
+
+ # write catalog to file
+ lang_path = os.path.join(locale_path, lang)
+ if not isdir(lang_path):
+ os.mkdir(lang_path)
+ f = open(join(lang_path, "LC_MESSAGES", "modeltranslation.po"), "w")
+ write_po(f, catalog)
+ f.close() | Save trans to .po files | ## Code Before:
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option(
'--dry-run',
action='store_true',
dest='dry_run',
default=False,
help='Do not actually send signals (and all connected stuff).'
),
)
def handle(self, *args, **options):
if not options['dry_run']:
pass
## Instruction:
Save trans to .po files
## Code After:
from __future__ import unicode_literals
import os
from os.path import join, isdir
from optparse import make_option
from django.core.management.base import NoArgsCommand
from django.conf import settings
from modeltranslation.translator import translator
from babel.messages.catalog import Catalog
from babel.messages.pofile import write_po
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option(
'--dry-run',
action='store_true',
dest='dry_run',
default=False,
help='Do not actually save files.'
),
)
def handle(self, *args, **options):
if not options['dry_run']:
pass
locale_path = settings.LOCALE_MODEL_TRANS
if not isdir(locale_path):
os.mkdir(locale_path)
for lang in [l[0] for l in list(settings.LANGUAGES)]:
catalog = Catalog(locale=lang)
for model in translator.get_registered_models():
opts = translator.get_options_for_model(model)
for field in opts.get_field_names():
tr_field = "%s_%s" % (field, lang)
for item in model.objects.all():
msgid = "%s.%s.%s" % (item._meta, item.pk, field)
msgstr = "%s" % getattr(item, tr_field)
catalog.add(id=msgid, string=msgstr)
# write catalog to file
lang_path = os.path.join(locale_path, lang)
if not isdir(lang_path):
os.mkdir(lang_path)
f = open(join(lang_path, "LC_MESSAGES", "modeltranslation.po"), "w")
write_po(f, catalog)
f.close() | // ... existing code ...
from __future__ import unicode_literals
import os
from os.path import join, isdir
from optparse import make_option
// ... modified code ...
from django.core.management.base import NoArgsCommand
from django.conf import settings
from modeltranslation.translator import translator
from babel.messages.catalog import Catalog
from babel.messages.pofile import write_po
...
default=False,
help='Do not actually save files.'
),
...
pass
locale_path = settings.LOCALE_MODEL_TRANS
if not isdir(locale_path):
os.mkdir(locale_path)
for lang in [l[0] for l in list(settings.LANGUAGES)]:
catalog = Catalog(locale=lang)
for model in translator.get_registered_models():
opts = translator.get_options_for_model(model)
for field in opts.get_field_names():
tr_field = "%s_%s" % (field, lang)
for item in model.objects.all():
msgid = "%s.%s.%s" % (item._meta, item.pk, field)
msgstr = "%s" % getattr(item, tr_field)
catalog.add(id=msgid, string=msgstr)
# write catalog to file
lang_path = os.path.join(locale_path, lang)
if not isdir(lang_path):
os.mkdir(lang_path)
f = open(join(lang_path, "LC_MESSAGES", "modeltranslation.po"), "w")
write_po(f, catalog)
f.close()
// ... rest of the code ... |
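The babel half of the command is the part worth isolating: build a `Catalog`, add msgid/msgstr pairs, and serialize it with `write_po`, which writes bytes. A minimal runnable sketch (locale and message ids are placeholders):

```python
from io import BytesIO

from babel.messages.catalog import Catalog
from babel.messages.pofile import write_po

catalog = Catalog(locale='de')
catalog.add(id='app.page.1.title', string='Hallo')

buf = BytesIO()
write_po(buf, catalog)
print(buf.getvalue().decode('utf-8'))
```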
061e0e0702025d99956b7dc606ea0bb4fa5c84ea | flocker/restapi/_logging.py | flocker/restapi/_logging.py |
__all__ = [
"JSON_REQUEST",
"REQUEST",
]
from eliot import Field, ActionType
LOG_SYSTEM = u"api"
METHOD = Field(u"method", lambda method: method,
u"The HTTP method of the request.")
REQUEST_PATH = Field(
u"request_path", lambda path: path,
u"The absolute path of the resource to which the request was issued.")
JSON = Field.forTypes(
u"json", [unicode, bytes, dict, list, None, bool, float],
u"JSON, either request or response depending on context.")
RESPONSE_CODE = Field.forTypes(
u"code", [int],
u"The response code for the request.")
REQUEST = ActionType(
LOG_SYSTEM + u":request",
[REQUEST_PATH, METHOD],
[],
u"A request was received on the public HTTP interface.")
JSON_REQUEST = ActionType(
LOG_SYSTEM + u":json_request",
[JSON],
[RESPONSE_CODE, JSON],
u"A request containing JSON request and response.")
|
__all__ = [
"JSON_REQUEST",
"REQUEST",
]
from eliot import Field, ActionType
LOG_SYSTEM = u"api"
METHOD = Field(u"method", lambda method: method,
u"The HTTP method of the request.")
REQUEST_PATH = Field(
u"request_path", lambda path: path,
u"The absolute path of the resource to which the request was issued.")
JSON = Field.forTypes(
u"json", [unicode, bytes, dict, list, None, bool, float],
u"JSON, either request or response depending on context.")
RESPONSE_CODE = Field.forTypes(
u"code", [int],
u"The response code for the request.")
# It would be nice if RESPONSE_CODE was in REQUEST instead of
# JSON_REQUEST; see FLOC-1586.
REQUEST = ActionType(
LOG_SYSTEM + u":request",
[REQUEST_PATH, METHOD],
[],
u"A request was received on the public HTTP interface.")
JSON_REQUEST = ActionType(
LOG_SYSTEM + u":json_request",
[JSON],
[RESPONSE_CODE, JSON],
u"A request containing JSON request and response bodies.")
| Address review comment: Better documentation. | Address review comment: Better documentation.
| Python | apache-2.0 | Azulinho/flocker,moypray/flocker,wallnerryan/flocker-profiles,mbrukman/flocker,w4ngyi/flocker,adamtheturtle/flocker,runcom/flocker,mbrukman/flocker,LaynePeng/flocker,achanda/flocker,jml/flocker,hackday-profilers/flocker,AndyHuu/flocker,jml/flocker,lukemarsden/flocker,LaynePeng/flocker,achanda/flocker,1d4Nf6/flocker,Azulinho/flocker,hackday-profilers/flocker,moypray/flocker,w4ngyi/flocker,1d4Nf6/flocker,hackday-profilers/flocker,lukemarsden/flocker,AndyHuu/flocker,Azulinho/flocker,1d4Nf6/flocker,mbrukman/flocker,agonzalezro/flocker,LaynePeng/flocker,wallnerryan/flocker-profiles,adamtheturtle/flocker,w4ngyi/flocker,moypray/flocker,lukemarsden/flocker,wallnerryan/flocker-profiles,agonzalezro/flocker,adamtheturtle/flocker,achanda/flocker,runcom/flocker,jml/flocker,agonzalezro/flocker,AndyHuu/flocker,runcom/flocker |
__all__ = [
"JSON_REQUEST",
"REQUEST",
]
from eliot import Field, ActionType
LOG_SYSTEM = u"api"
METHOD = Field(u"method", lambda method: method,
u"The HTTP method of the request.")
REQUEST_PATH = Field(
u"request_path", lambda path: path,
u"The absolute path of the resource to which the request was issued.")
JSON = Field.forTypes(
u"json", [unicode, bytes, dict, list, None, bool, float],
u"JSON, either request or response depending on context.")
RESPONSE_CODE = Field.forTypes(
u"code", [int],
u"The response code for the request.")
+ # It would be nice if RESPONSE_CODE was in REQUEST instead of
+ # JSON_REQUEST; see FLOC-1586.
REQUEST = ActionType(
LOG_SYSTEM + u":request",
[REQUEST_PATH, METHOD],
[],
u"A request was received on the public HTTP interface.")
JSON_REQUEST = ActionType(
LOG_SYSTEM + u":json_request",
[JSON],
[RESPONSE_CODE, JSON],
- u"A request containing JSON request and response.")
+ u"A request containing JSON request and response bodies.")
| Address review comment: Better documentation. | ## Code Before:
__all__ = [
"JSON_REQUEST",
"REQUEST",
]
from eliot import Field, ActionType
LOG_SYSTEM = u"api"
METHOD = Field(u"method", lambda method: method,
u"The HTTP method of the request.")
REQUEST_PATH = Field(
u"request_path", lambda path: path,
u"The absolute path of the resource to which the request was issued.")
JSON = Field.forTypes(
u"json", [unicode, bytes, dict, list, None, bool, float],
u"JSON, either request or response depending on context.")
RESPONSE_CODE = Field.forTypes(
u"code", [int],
u"The response code for the request.")
REQUEST = ActionType(
LOG_SYSTEM + u":request",
[REQUEST_PATH, METHOD],
[],
u"A request was received on the public HTTP interface.")
JSON_REQUEST = ActionType(
LOG_SYSTEM + u":json_request",
[JSON],
[RESPONSE_CODE, JSON],
u"A request containing JSON request and response.")
## Instruction:
Address review comment: Better documentation.
## Code After:
__all__ = [
"JSON_REQUEST",
"REQUEST",
]
from eliot import Field, ActionType
LOG_SYSTEM = u"api"
METHOD = Field(u"method", lambda method: method,
u"The HTTP method of the request.")
REQUEST_PATH = Field(
u"request_path", lambda path: path,
u"The absolute path of the resource to which the request was issued.")
JSON = Field.forTypes(
u"json", [unicode, bytes, dict, list, None, bool, float],
u"JSON, either request or response depending on context.")
RESPONSE_CODE = Field.forTypes(
u"code", [int],
u"The response code for the request.")
# It would be nice if RESPONSE_CODE was in REQUEST instead of
# JSON_REQUEST; see FLOC-1586.
REQUEST = ActionType(
LOG_SYSTEM + u":request",
[REQUEST_PATH, METHOD],
[],
u"A request was received on the public HTTP interface.")
JSON_REQUEST = ActionType(
LOG_SYSTEM + u":json_request",
[JSON],
[RESPONSE_CODE, JSON],
u"A request containing JSON request and response bodies.")
| // ... existing code ...
# It would be nice if RESPONSE_CODE was in REQUEST instead of
# JSON_REQUEST; see FLOC-1586.
REQUEST = ActionType(
// ... modified code ...
[RESPONSE_CODE, JSON],
u"A request containing JSON request and response bodies.")
// ... rest of the code ... |
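Declared this way, the types are used by instantiating the action with its start fields. A small usage sketch against eliot's documented API (recent releases allow calling an `ActionType` without an explicit logger; the destination setup is only for demonstration):

```python
import sys

from eliot import ActionType, Field, to_file

to_file(sys.stdout)

REQUEST_PATH = Field(u'request_path', lambda path: path,
                     u'The absolute path of the request.')
REQUEST = ActionType(u'demo:request', [REQUEST_PATH], [],
                     u'A request was received.')

with REQUEST(request_path=u'/v1/state'):
    pass  # the handler's work is logged inside this action
```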
edc830bcd8fb594406e314b5c93062b0ec347bba | cref/structure/__init__.py | cref/structure/__init__.py | from peptide import PeptideBuilder
import Bio.PDB
def write_pdb(aa_sequence, fragment_angles, gap_length, filepath):
"""
Generate pdb file with results
:param aa_sequence: Amino acid sequence
:param fragment_angles: Backbone torsion angles
:param gap_length: Length of the gap at the sequence start and end
:param filepath: Path to the file to save the pdb
"""
phi, psi = zip(*fragment_angles)
structure = PeptideBuilder.make_structure(aa_sequence, phi, psi)
out = Bio.PDB.PDBIO()
out.set_structure(structure)
out.save(filepath)
def rmsd(source, target):
source = Bio.PDB.parser.get_structure('source', source)
target = Bio.PDB.parser.get_structure('target', target)
superimposer = Bio.PDB.Superimposer()
source_atoms = list(source.get_atoms())
target_atoms = list(target.get_atoms())[:len(source_atoms)]
superimposer.set_atoms(source_atoms, target_atoms)
return superimposer.rms
| from peptide import PeptideBuilder
import Bio.PDB
def write_pdb(aa_sequence, fragment_angles, gap_length, filepath):
"""
Generate pdb file with results
:param aa_sequence: Amino acid sequence
:param fragment_angles: Backbone torsion angles
:param gap_length: Length of the gap at the sequence start and end
:param filepath: Path to the file to save the pdb
"""
phi, psi = zip(*fragment_angles)
structure = PeptideBuilder.make_structure(aa_sequence, phi, psi)
out = Bio.PDB.PDBIO()
out.set_structure(structure)
out.save(filepath)
def rmsd(source, target):
parser = Bio.PDB.PDBParser()
source = parser.get_structure('source', source)
target = parser.get_structure('target', target)
superimposer = Bio.PDB.Superimposer()
source_atoms = list(source.get_atoms())
target_atoms = list(target.get_atoms())[:len(source_atoms)]
superimposer.set_atoms(source_atoms, target_atoms)
return superimposer.rms
| Fix wrong pdb parser invocation | Fix wrong pdb parser invocation
| Python | mit | mchelem/cref2,mchelem/cref2,mchelem/cref2 | from peptide import PeptideBuilder
import Bio.PDB
def write_pdb(aa_sequence, fragment_angles, gap_length, filepath):
"""
Generate pdb file with results
:param aa_sequence: Amino acid sequence
:param fragment_angles: Backbone torsion angles
:param gap_length: Length of the gap at the sequence start and end
:param filepath: Path to the file to save the pdb
"""
phi, psi = zip(*fragment_angles)
structure = PeptideBuilder.make_structure(aa_sequence, phi, psi)
out = Bio.PDB.PDBIO()
out.set_structure(structure)
out.save(filepath)
def rmsd(source, target):
+ parser = Bio.PDB.PDBParser()
- source = Bio.PDB.parser.get_structure('source', source)
+ source = parser.get_structure('source', source)
- target = Bio.PDB.parser.get_structure('target', target)
+ target = parser.get_structure('target', target)
superimposer = Bio.PDB.Superimposer()
source_atoms = list(source.get_atoms())
target_atoms = list(target.get_atoms())[:len(source_atoms)]
superimposer.set_atoms(source_atoms, target_atoms)
return superimposer.rms
| Fix wrong pdb parser invocation | ## Code Before:
from peptide import PeptideBuilder
import Bio.PDB
def write_pdb(aa_sequence, fragment_angles, gap_length, filepath):
"""
Generate pdb file with results
:param aa_sequence: Amino acid sequence
:param fragment_angles: Backbone torsion angles
:param gap_length: Length of the gap at the sequence start and end
:param filepath: Path to the file to save the pdb
"""
phi, psi = zip(*fragment_angles)
structure = PeptideBuilder.make_structure(aa_sequence, phi, psi)
out = Bio.PDB.PDBIO()
out.set_structure(structure)
out.save(filepath)
def rmsd(source, target):
source = Bio.PDB.parser.get_structure('source', source)
target = Bio.PDB.parser.get_structure('target', target)
superimposer = Bio.PDB.Superimposer()
source_atoms = list(source.get_atoms())
target_atoms = list(target.get_atoms())[:len(source_atoms)]
superimposer.set_atoms(source_atoms, target_atoms)
return superimposer.rms
## Instruction:
Fix wrong pdb parser invocation
## Code After:
from peptide import PeptideBuilder
import Bio.PDB
def write_pdb(aa_sequence, fragment_angles, gap_length, filepath):
"""
Generate pdb file with results
:param aa_sequence: Amino acid sequence
:param fragment_angles: Backbone torsion angles
:param gap_length: Length of the gap at the sequence start and end
:param filepath: Path to the file to save the pdb
"""
phi, psi = zip(*fragment_angles)
structure = PeptideBuilder.make_structure(aa_sequence, phi, psi)
out = Bio.PDB.PDBIO()
out.set_structure(structure)
out.save(filepath)
def rmsd(source, target):
parser = Bio.PDB.PDBParser()
source = parser.get_structure('source', source)
target = parser.get_structure('target', target)
superimposer = Bio.PDB.Superimposer()
source_atoms = list(source.get_atoms())
target_atoms = list(target.get_atoms())[:len(source_atoms)]
superimposer.set_atoms(source_atoms, target_atoms)
return superimposer.rms
| # ... existing code ...
def rmsd(source, target):
parser = Bio.PDB.PDBParser()
source = parser.get_structure('source', source)
target = parser.get_structure('target', target)
# ... rest of the code ... |
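The bug was treating `Bio.PDB.parser` as a module-level singleton; Biopython exposes `PDBParser` as a class that must be instantiated. A standalone usage sketch (requires biopython, and `structure.pdb` is a placeholder path):

```python
import Bio.PDB

parser = Bio.PDB.PDBParser(QUIET=True)
structure = parser.get_structure('model', 'structure.pdb')
print(sum(1 for _ in structure.get_atoms()), 'atoms parsed')
```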
4de82c9a0737c079634a87d0ea358fba7840a419 | sesame/test_settings.py | sesame/test_settings.py | from __future__ import unicode_literals
AUTHENTICATION_BACKENDS = [
"django.contrib.auth.backends.ModelBackend",
"sesame.backends.ModelBackend",
]
CACHES = {"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}}
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3"}}
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"sesame",
"sesame.test_app",
]
LOGGING_CONFIG = None
MIDDLEWARE = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
]
ROOT_URLCONF = "sesame.test_urls"
SECRET_KEY = "Anyone who finds an URL will be able to log in. Seriously."
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
TEMPLATES = [{"BACKEND": "django.template.backends.django.DjangoTemplates"}]
| from __future__ import unicode_literals
AUTHENTICATION_BACKENDS = [
"django.contrib.auth.backends.ModelBackend",
"sesame.backends.ModelBackend",
]
CACHES = {"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}}
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3"}}
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"sesame",
"sesame.test_app",
]
LOGGING_CONFIG = None
MIDDLEWARE = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
]
PASSWORD_HASHERS = ["django.contrib.auth.hashers.SHA1PasswordHasher"]
ROOT_URLCONF = "sesame.test_urls"
SECRET_KEY = "Anyone who finds an URL will be able to log in. Seriously."
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
TEMPLATES = [{"BACKEND": "django.template.backends.django.DjangoTemplates"}]
| Use a fast password hasher for tests. | Use a fast password hasher for tests.
Speed is obviously more important than security in tests.
| Python | bsd-3-clause | aaugustin/django-sesame,aaugustin/django-sesame | from __future__ import unicode_literals
AUTHENTICATION_BACKENDS = [
"django.contrib.auth.backends.ModelBackend",
"sesame.backends.ModelBackend",
]
CACHES = {"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}}
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3"}}
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"sesame",
"sesame.test_app",
]
LOGGING_CONFIG = None
MIDDLEWARE = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
]
+ PASSWORD_HASHERS = ["django.contrib.auth.hashers.SHA1PasswordHasher"]
+
ROOT_URLCONF = "sesame.test_urls"
SECRET_KEY = "Anyone who finds an URL will be able to log in. Seriously."
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
TEMPLATES = [{"BACKEND": "django.template.backends.django.DjangoTemplates"}]
| Use a fast password hasher for tests. | ## Code Before:
from __future__ import unicode_literals
AUTHENTICATION_BACKENDS = [
"django.contrib.auth.backends.ModelBackend",
"sesame.backends.ModelBackend",
]
CACHES = {"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}}
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3"}}
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"sesame",
"sesame.test_app",
]
LOGGING_CONFIG = None
MIDDLEWARE = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
]
ROOT_URLCONF = "sesame.test_urls"
SECRET_KEY = "Anyone who finds an URL will be able to log in. Seriously."
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
TEMPLATES = [{"BACKEND": "django.template.backends.django.DjangoTemplates"}]
## Instruction:
Use a fast password hasher for tests.
## Code After:
from __future__ import unicode_literals
AUTHENTICATION_BACKENDS = [
"django.contrib.auth.backends.ModelBackend",
"sesame.backends.ModelBackend",
]
CACHES = {"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}}
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3"}}
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"sesame",
"sesame.test_app",
]
LOGGING_CONFIG = None
MIDDLEWARE = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
]
PASSWORD_HASHERS = ["django.contrib.auth.hashers.SHA1PasswordHasher"]
ROOT_URLCONF = "sesame.test_urls"
SECRET_KEY = "Anyone who finds an URL will be able to log in. Seriously."
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
TEMPLATES = [{"BACKEND": "django.template.backends.django.DjangoTemplates"}]
| # ... existing code ...
PASSWORD_HASHERS = ["django.contrib.auth.hashers.SHA1PasswordHasher"]
ROOT_URLCONF = "sesame.test_urls"
# ... rest of the code ... |
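A note on the pattern in this record: Django uses the first entry in PASSWORD_HASHERS for newly set passwords, so pointing the setting at a single cheap hasher makes every set_password()/check_password() call in the suite fast. The Django documentation's own advice on speeding up tests uses the MD5 hasher for the same trick; a minimal test-only sketch (not part of the commit above):

# test settings sketch: MD5 is cryptographically weak,
# which is exactly why it is fast enough for test fixtures
PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"]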
0aff137a210debd9ea18793a98c043a5151d9524 | src/Compiler/VM/arithmetic_exprs.py | src/Compiler/VM/arithmetic_exprs.py | from Helpers.string import *
def binop_aexp(commands, env, op, left, right):
left.compile_vm(commands, env)
right.compile_vm(commands, env)
if op == '+':
value = assemble(Add)
elif op == '-':
value = assemble(Sub)
elif op == '*':
value = assemble(Mul)
elif op == '/':
value = assemble(Div)
elif op == '%':
value = assemble(Mod)
else:
raise RuntimeError('unknown operator: ' + op)
commands.append(value)
def int_aexp(commands, env, i):
commands.append(assemble(Push, i))
def var_aexp(commands, env, name):
var_type = Environment.get_var_type(env, name)
var_value = Environment.get_var(env, name)
if var_type == 'IntAexp':
commands.append(assemble(Load, var_value))
elif var_type == 'Char':
commands.append(assemble(Load, var_value))
elif var_type == 'String':
String.compile_get(commands, env, var_value)
| from Helpers.string import *
def binop_aexp(commands, env, op, left, right):
left.compile_vm(commands, env)
right.compile_vm(commands, env)
if op == '+':
value = assemble(Add)
elif op == '-':
value = assemble(Sub)
elif op == '*':
value = assemble(Mul)
elif op == '/':
value = assemble(Div)
elif op == '%':
value = assemble(Mod)
else:
raise RuntimeError('unknown operator: ' + op)
commands.append(value)
def int_aexp(commands, env, i):
commands.append(assemble(Push, i))
def var_aexp(commands, env, name):
var_type = Environment.get_var_type(env, name)
var_value = Environment.get_var(env, name)
if var_type == 'String':
String.compile_get(commands, env, var_value)
else:
commands.append(assemble(Load, var_value))
| Fix compiling problem for runtime variables | Fix compiling problem for runtime variables
| Python | mit | PetukhovVictor/compiler,PetukhovVictor/compiler | from Helpers.string import *
def binop_aexp(commands, env, op, left, right):
left.compile_vm(commands, env)
right.compile_vm(commands, env)
if op == '+':
value = assemble(Add)
elif op == '-':
value = assemble(Sub)
elif op == '*':
value = assemble(Mul)
elif op == '/':
value = assemble(Div)
elif op == '%':
value = assemble(Mod)
else:
raise RuntimeError('unknown operator: ' + op)
commands.append(value)
def int_aexp(commands, env, i):
commands.append(assemble(Push, i))
def var_aexp(commands, env, name):
var_type = Environment.get_var_type(env, name)
var_value = Environment.get_var(env, name)
- if var_type == 'IntAexp':
+ if var_type == 'String':
+ String.compile_get(commands, env, var_value)
+ else:
commands.append(assemble(Load, var_value))
- elif var_type == 'Char':
- commands.append(assemble(Load, var_value))
- elif var_type == 'String':
- String.compile_get(commands, env, var_value)
| Fix compiling problem for runtime variables | ## Code Before:
from Helpers.string import *
def binop_aexp(commands, env, op, left, right):
left.compile_vm(commands, env)
right.compile_vm(commands, env)
if op == '+':
value = assemble(Add)
elif op == '-':
value = assemble(Sub)
elif op == '*':
value = assemble(Mul)
elif op == '/':
value = assemble(Div)
elif op == '%':
value = assemble(Mod)
else:
raise RuntimeError('unknown operator: ' + op)
commands.append(value)
def int_aexp(commands, env, i):
commands.append(assemble(Push, i))
def var_aexp(commands, env, name):
var_type = Environment.get_var_type(env, name)
var_value = Environment.get_var(env, name)
if var_type == 'IntAexp':
commands.append(assemble(Load, var_value))
elif var_type == 'Char':
commands.append(assemble(Load, var_value))
elif var_type == 'String':
String.compile_get(commands, env, var_value)
## Instruction:
Fix compiling problem for runtime variables
## Code After:
from Helpers.string import *
def binop_aexp(commands, env, op, left, right):
left.compile_vm(commands, env)
right.compile_vm(commands, env)
if op == '+':
value = assemble(Add)
elif op == '-':
value = assemble(Sub)
elif op == '*':
value = assemble(Mul)
elif op == '/':
value = assemble(Div)
elif op == '%':
value = assemble(Mod)
else:
raise RuntimeError('unknown operator: ' + op)
commands.append(value)
def int_aexp(commands, env, i):
commands.append(assemble(Push, i))
def var_aexp(commands, env, name):
var_type = Environment.get_var_type(env, name)
var_value = Environment.get_var(env, name)
if var_type == 'String':
String.compile_get(commands, env, var_value)
else:
commands.append(assemble(Load, var_value))
| # ... existing code ...
var_value = Environment.get_var(env, name)
if var_type == 'String':
String.compile_get(commands, env, var_value)
else:
commands.append(assemble(Load, var_value))
# ... rest of the code ... |
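What the diff above repairs is easy to miss: in the old if/elif chain, a variable whose recorded type was anything other than IntAexp, Char or String compiled to no load instruction at all, which is presumably what broke runtime-typed variables. Inverting the test makes Load the default for every non-string case. A reduced sketch of the two shapes (emit is a hypothetical stand-in for commands.append):

def var_aexp_old(var_type, emit):
    if var_type == 'IntAexp':
        emit('Load')
    elif var_type == 'Char':
        emit('Load')
    # any other type fell through here silently; nothing was emitted

def var_aexp_new(var_type, emit):
    if var_type == 'String':
        emit('String.compile_get')
    else:
        emit('Load')  # safe default for every non-string type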
8d53a7478a139770d9ffb241ec2985123c403845 | bookmarks/bookmarks/models.py | bookmarks/bookmarks/models.py | from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from django.dispatch import receiver
from django.conf import settings
from taggit.managers import TaggableManager
import requests
class Bookmark(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
date_added = models.DateTimeField(default=timezone.now, blank=True)
tags = TaggableManager(blank=True)
private = models.BooleanField(default=False)
url = models.URLField(max_length=500)
def __unicode__(self):
return "{}: {} [{}]".format(
self.pk,
self.title[:40],
self.date_added
)
@receiver(models.signals.post_save, sender=Bookmark)
def bookmark_pre_save_handler(sender, instance, created, *args, **kwargs):
# Only run for new items, not updates
if created:
if not hasattr(settings, 'SLACK_WEBHOOK_URL'):
return
payload = {
'channel': "#bookmarks-dev",
'username': "Bookmarks",
'text': "{}".format(
"Bookmark added:",
),
'icon_emoji': ":blue_book:",
'attachments': [
{
"fallback": instance.title,
"color": "good",
"title": instance.title,
"title_link": instance.url,
"text": instance.description,
}
]
}
requests.post(settings.SLACK_WEBHOOK_URL, json=payload)
| from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from django.dispatch import receiver
from django.conf import settings
from taggit.managers import TaggableManager
import requests
class Bookmark(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
date_added = models.DateTimeField(default=timezone.now, blank=True)
tags = TaggableManager(blank=True)
private = models.BooleanField(default=False)
url = models.URLField(max_length=500)
def __unicode__(self):
return "{}: {} [{}]".format(
self.pk,
self.title[:40],
self.date_added
)
@receiver(models.signals.post_save, sender=Bookmark)
def bookmark_pre_save_handler(sender, instance, created, *args, **kwargs):
# Only run for new items, not updates
if created:
if not hasattr(settings, 'SLACK_WEBHOOK_URL'):
return
payload = {
'channel': "#bookmarks-dev",
'username': "Bookmarks",
'text': "<{}|{}>\n{}".format(
instance.url,
instance.title,
instance.description,
),
'icon_emoji': ":blue_book:",
'unfurl_links': True
}
requests.post(settings.SLACK_WEBHOOK_URL, json=payload)
| Remove attachment and use slack link unfurling | Remove attachment and use slack link unfurling
| Python | mit | tom-henderson/bookmarks,tom-henderson/bookmarks,tom-henderson/bookmarks | from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from django.dispatch import receiver
from django.conf import settings
from taggit.managers import TaggableManager
import requests
class Bookmark(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
date_added = models.DateTimeField(default=timezone.now, blank=True)
tags = TaggableManager(blank=True)
private = models.BooleanField(default=False)
url = models.URLField(max_length=500)
def __unicode__(self):
return "{}: {} [{}]".format(
self.pk,
self.title[:40],
self.date_added
)
@receiver(models.signals.post_save, sender=Bookmark)
def bookmark_pre_save_handler(sender, instance, created, *args, **kwargs):
# Only run for new items, not updates
if created:
if not hasattr(settings, 'SLACK_WEBHOOK_URL'):
return
payload = {
'channel': "#bookmarks-dev",
'username': "Bookmarks",
- 'text': "{}".format(
+ 'text': "<{}|{}>\n{}".format(
- "Bookmark added:",
+ instance.url,
+ instance.title,
+ instance.description,
),
'icon_emoji': ":blue_book:",
+ 'unfurl_links': True
- 'attachments': [
- {
- "fallback": instance.title,
- "color": "good",
- "title": instance.title,
- "title_link": instance.url,
- "text": instance.description,
- }
- ]
}
requests.post(settings.SLACK_WEBHOOK_URL, json=payload)
| Remove attachment and use slack link unfurling | ## Code Before:
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from django.dispatch import receiver
from django.conf import settings
from taggit.managers import TaggableManager
import requests
class Bookmark(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
date_added = models.DateTimeField(default=timezone.now, blank=True)
tags = TaggableManager(blank=True)
private = models.BooleanField(default=False)
url = models.URLField(max_length=500)
def __unicode__(self):
return "{}: {} [{}]".format(
self.pk,
self.title[:40],
self.date_added
)
@receiver(models.signals.post_save, sender=Bookmark)
def bookmark_pre_save_handler(sender, instance, created, *args, **kwargs):
# Only run for new items, not updates
if created:
if not hasattr(settings, 'SLACK_WEBHOOK_URL'):
return
payload = {
'channel': "#bookmarks-dev",
'username': "Bookmarks",
'text': "{}".format(
"Bookmark added:",
),
'icon_emoji': ":blue_book:",
'attachments': [
{
"fallback": instance.title,
"color": "good",
"title": instance.title,
"title_link": instance.url,
"text": instance.description,
}
]
}
requests.post(settings.SLACK_WEBHOOK_URL, json=payload)
## Instruction:
Remove attachment and use slack link unfurling
## Code After:
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from django.dispatch import receiver
from django.conf import settings
from taggit.managers import TaggableManager
import requests
class Bookmark(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
date_added = models.DateTimeField(default=timezone.now, blank=True)
tags = TaggableManager(blank=True)
private = models.BooleanField(default=False)
url = models.URLField(max_length=500)
def __unicode__(self):
return "{}: {} [{}]".format(
self.pk,
self.title[:40],
self.date_added
)
@receiver(models.signals.post_save, sender=Bookmark)
def bookmark_pre_save_handler(sender, instance, created, *args, **kwargs):
# Only run for new items, not updates
if created:
if not hasattr(settings, 'SLACK_WEBHOOK_URL'):
return
payload = {
'channel': "#bookmarks-dev",
'username': "Bookmarks",
'text': "<{}|{}>\n{}".format(
instance.url,
instance.title,
instance.description,
),
'icon_emoji': ":blue_book:",
'unfurl_links': True
}
requests.post(settings.SLACK_WEBHOOK_URL, json=payload)
| // ... existing code ...
'username': "Bookmarks",
'text': "<{}|{}>\n{}".format(
instance.url,
instance.title,
instance.description,
),
// ... modified code ...
'icon_emoji': ":blue_book:",
'unfurl_links': True
}
// ... rest of the code ... |
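The replacement payload leans on two documented Slack webhook features: mrkdwn links written as <url|label>, and the unfurl_links flag, which asks Slack to expand the first link into a preview card, making the hand-built attachment redundant. A standalone sketch (webhook URL hypothetical):

import requests

payload = {
    "text": "<https://example.com/some-article|Some article>\nWhy it was bookmarked.",
    "unfurl_links": True,  # Slack renders the preview instead of a manual attachment
}
requests.post("https://hooks.slack.com/services/T000/B000/XXXX", json=payload)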
c1e9d369680e779d481aa7db17be9348d56ec29d | test_linked_list.py | test_linked_list.py | from __future__ import unicode_literals
import linked_list
# def func(x):
# return x + 1
# def tdest_answer():
# assert func(3) == 5
# init
a = linked_list.LinkedList()
def test_size():
assert a.size is 0
def test_head():
assert a.head is None
def test_init():
assert type(a) is linked_list.LinkedList
|
from __future__ import unicode_literals
import linked_list
import copy
# init method
a = linked_list.LinkedList()
def test_init_size():
assert a.sizeOfList is 0
assert type(a.sizeOfList) is int
def test_init_head():
assert a.head is None
def test_init_type():
assert type(a) is linked_list.LinkedList
# insert method
b = copy.copy(a) # make a copy every time a change is made
b.insert(5) # so the test can handle different values
def test_insert_size():
assert b.sizeOfList is 1
def test_insert_head():
assert b.head.value is 5
def test_insert_next():
assert b.head.next_node is None
c = copy.copy(b)
c.insert(6)
def test_insert_new_size():
assert c.sizeOfList is 2
def test_insert_new_head():
assert c.head.value is 6
def test_insert_pointer():
assert c.head.next_node.value is 5
# size method
def test_size():
assert c.size() is 2
# search method
def test_search_value_in_list():
assert c.search(5).value is 5
def test_search_value_not_in_list():
assert c.search(7) is None # 7 is not in the list
# remove method
d = copy.copy(c)
d.remove(d.search(6))
def test_remove_value():
assert d.search(6) is None
def test_remove_size():
assert d.size() is 1
# display method
def test_display():
assert d.display() == (5,) # test to make sure they are equivalent
| Add comments to test file | Add comments to test file
Add comments after all tests passed
| Python | mit | jesseklein406/data-structures | +
+
from __future__ import unicode_literals
import linked_list
+ import copy
-
- # def func(x):
- # return x + 1
-
- # def tdest_answer():
- # assert func(3) == 5
- # init
+ # init method
a = linked_list.LinkedList()
- def test_size():
+ def test_init_size():
- assert a.size is 0
+ assert a.sizeOfList is 0
+ assert type(a.sizeOfList) is int
- def test_head():
+ def test_init_head():
assert a.head is None
- def test_init():
+ def test_init_type():
assert type(a) is linked_list.LinkedList
+ # insert method
+ b = copy.copy(a) # make a copy every time a change is made
+ b.insert(5) # so the test can handle different values
+
+
+ def test_insert_size():
+ assert b.sizeOfList is 1
+
+
+ def test_insert_head():
+ assert b.head.value is 5
+
+
+ def test_insert_next():
+ assert b.head.next_node is None
+
+
+ c = copy.copy(b)
+ c.insert(6)
+
+
+ def test_insert_new_size():
+ assert c.sizeOfList is 2
+
+
+ def test_insert_new_head():
+ assert c.head.value is 6
+
+
+ def test_insert_pointer():
+ assert c.head.next_node.value is 5
+
+
+ # size method
+
+ def test_size():
+ assert c.size() is 2
+
+
+ # search method
+
+ def test_search_value_in_list():
+ assert c.search(5).value is 5
+
+
+ def test_search_value_not_in_list():
+ assert c.search(7) is None # 7 is not in the list
+
+
+ # remove method
+
+ d = copy.copy(c)
+ d.remove(d.search(6))
+
+
+ def test_remove_value():
+ assert d.search(6) is None
+
+
+ def test_remove_size():
+ assert d.size() is 1
+
+
+ # display method
+
+ def test_display():
+ assert d.display() == (5,) # test to make sure they are equivalent
+ | Add comments to test file | ## Code Before:
from __future__ import unicode_literals
import linked_list
# def func(x):
# return x + 1
# def tdest_answer():
# assert func(3) == 5
# init
a = linked_list.LinkedList()
def test_size():
assert a.size is 0
def test_head():
assert a.head is None
def test_init():
assert type(a) is linked_list.LinkedList
## Instruction:
Add comments to test file
## Code After:
from __future__ import unicode_literals
import linked_list
import copy
# init method
a = linked_list.LinkedList()
def test_init_size():
assert a.sizeOfList is 0
assert type(a.sizeOfList) is int
def test_init_head():
assert a.head is None
def test_init_type():
assert type(a) is linked_list.LinkedList
# insert method
b = copy.copy(a) # make a copy every time a change is made
b.insert(5) # so the test can handle different values
def test_insert_size():
assert b.sizeOfList is 1
def test_insert_head():
assert b.head.value is 5
def test_insert_next():
assert b.head.next_node is None
c = copy.copy(b)
c.insert(6)
def test_insert_new_size():
assert c.sizeOfList is 2
def test_insert_new_head():
assert c.head.value is 6
def test_insert_pointer():
assert c.head.next_node.value is 5
# size method
def test_size():
assert c.size() is 2
# search method
def test_search_value_in_list():
assert c.search(5).value is 5
def test_search_value_not_in_list():
assert c.search(7) is None # 7 is not in the list
# remove method
d = copy.copy(c)
d.remove(d.search(6))
def test_remove_value():
assert d.search(6) is None
def test_remove_size():
assert d.size() is 1
# display method
def test_display():
assert d.display() == (5,) # test to make sure they are equivalent
| // ... existing code ...
from __future__ import unicode_literals
// ... modified code ...
import linked_list
import copy
...
# init method
...
def test_init_size():
assert a.sizeOfList is 0
assert type(a.sizeOfList) is int
...
def test_init_head():
assert a.head is None
...
def test_init_type():
assert type(a) is linked_list.LinkedList
...
# insert method
b = copy.copy(a) # make a copy every time a change is made
b.insert(5) # so the test can handle different values
def test_insert_size():
assert b.sizeOfList is 1
def test_insert_head():
assert b.head.value is 5
def test_insert_next():
assert b.head.next_node is None
c = copy.copy(b)
c.insert(6)
def test_insert_new_size():
assert c.sizeOfList is 2
def test_insert_new_head():
assert c.head.value is 6
def test_insert_pointer():
assert c.head.next_node.value is 5
# size method
def test_size():
assert c.size() is 2
# search method
def test_search_value_in_list():
assert c.search(5).value is 5
def test_search_value_not_in_list():
assert c.search(7) is None # 7 is not in the list
# remove method
d = copy.copy(c)
d.remove(d.search(6))
def test_remove_value():
assert d.search(6) is None
def test_remove_size():
assert d.size() is 1
# display method
def test_display():
assert d.display() == (5,) # test to make sure they are equivalent
// ... rest of the code ... |
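One detail worth flagging in the test style above: copy.copy() is a shallow copy, so each snapshot gets its own head and sizeOfList attributes but shares the underlying node chain, and the isolation between a, b, c and d relies on insert() only rebinding head. copy.deepcopy() is the variant that survives in-place mutation of shared nodes. A self-contained sketch of the distinction (toy classes, not the module under test):

import copy

class Node:
    def __init__(self, value, next_node=None):
        self.value, self.next_node = value, next_node

class ToyList:
    def __init__(self, head=None):
        self.head = head

original = ToyList(Node(5))
snapshot = copy.copy(original)      # new wrapper object, same node chain
isolated = copy.deepcopy(original)  # new wrapper object and freshly copied nodes

snapshot.head = Node(6, snapshot.head)  # prepending only rebinds snapshot.head
original.head.value = 42                # in-place change to a shared node...
assert snapshot.head.next_node.value == 42  # ...shows through the shallow copy
assert isolated.head.value == 5             # ...but never through the deep copy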
3c1f2c46485aee91dbf4c61b7b096c2cc4b28c06 | kcdc3/apps/pinata/urls.py | kcdc3/apps/pinata/urls.py | from django.conf.urls import patterns, include, url
from models import Page
urlpatterns = patterns('kcdc3.apps.pinata.views',
url(r'^$', 'page_view'),
url(r'^[0-9a-zA-Z_-]+/$', 'page_view'),
)
| from django.conf.urls import patterns, include, url
from models import Page
urlpatterns = patterns('kcdc3.apps.pinata.views',
url(r'^$', 'page_view'),
url(r'^[0-9a-zA-Z_-]+/$', 'page_view'),
url(r'^[0-9a-zA-Z_-]+/[0-9a-zA-Z_-]+/$', 'page_view'),
# Surely there's a better way to handle paths that contain several slashes
)
| Allow three-deep paths in Pinata | Allow three-deep paths in Pinata
| Python | mit | knowledgecommonsdc/kcdc3,knowledgecommonsdc/kcdc3,knowledgecommonsdc/kcdc3,knowledgecommonsdc/kcdc3,knowledgecommonsdc/kcdc3,knowledgecommonsdc/kcdc3 | from django.conf.urls import patterns, include, url
from models import Page
urlpatterns = patterns('kcdc3.apps.pinata.views',
url(r'^$', 'page_view'),
url(r'^[0-9a-zA-Z_-]+/$', 'page_view'),
+ url(r'^[0-9a-zA-Z_-]+/[0-9a-zA-Z_-]+/$', 'page_view'),
+ # Surely there's a better way to handle paths that contain several slashes
)
| Allow three-deep paths in Pinata | ## Code Before:
from django.conf.urls import patterns, include, url
from models import Page
urlpatterns = patterns('kcdc3.apps.pinata.views',
url(r'^$', 'page_view'),
url(r'^[0-9a-zA-Z_-]+/$', 'page_view'),
)
## Instruction:
Allow three-deep paths in Pinata
## Code After:
from django.conf.urls import patterns, include, url
from models import Page
urlpatterns = patterns('kcdc3.apps.pinata.views',
url(r'^$', 'page_view'),
url(r'^[0-9a-zA-Z_-]+/$', 'page_view'),
url(r'^[0-9a-zA-Z_-]+/[0-9a-zA-Z_-]+/$', 'page_view'),
# Surely there's a better way to handle paths that contain several slashes
)
| ...
url(r'^[0-9a-zA-Z_-]+/$', 'page_view'),
url(r'^[0-9a-zA-Z_-]+/[0-9a-zA-Z_-]+/$', 'page_view'),
# Surely there's a better way to handle paths that contain several slashes
... |
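The "Surely there's a better way" comment in this record has a conventional answer: rather than one url() entry per nesting depth, a single pattern can accept any number of slug segments. A hedged sketch in the same Django 1.x idiom as the file (untested against this project):

from django.conf.urls import patterns, url

urlpatterns = patterns('kcdc3.apps.pinata.views',
    url(r'^$', 'page_view'),
    # one or more slug segments, each terminated by a slash
    url(r'^(?:[0-9a-zA-Z_-]+/)+$', 'page_view'),
)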
0704dd1002e7ef546b718abec41a55c256a49cb2 | examples/test_fail.py | examples/test_fail.py |
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_find_army_of_robots_on_xkcd_desert_island(self):
self.open("http://xkcd.com/731/")
self.assert_element("div#ARMY_OF_ROBOTS", timeout=0.7)
|
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_find_army_of_robots_on_xkcd_desert_island(self):
self.open("http://xkcd.com/731/")
print("\n(This test fails on purpose)")
self.assert_element("div#ARMY_OF_ROBOTS", timeout=1)
| Update test that fails on purpose. | Update test that fails on purpose.
| Python | mit | mdmintz/seleniumspot,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/seleniumspot |
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_find_army_of_robots_on_xkcd_desert_island(self):
self.open("http://xkcd.com/731/")
+ print("\n(This test fails on purpose)")
- self.assert_element("div#ARMY_OF_ROBOTS", timeout=0.7)
+ self.assert_element("div#ARMY_OF_ROBOTS", timeout=1)
| Update test that fails on purpose. | ## Code Before:
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_find_army_of_robots_on_xkcd_desert_island(self):
self.open("http://xkcd.com/731/")
self.assert_element("div#ARMY_OF_ROBOTS", timeout=0.7)
## Instruction:
Update test that fails on purpose.
## Code After:
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_find_army_of_robots_on_xkcd_desert_island(self):
self.open("http://xkcd.com/731/")
print("\n(This test fails on purpose)")
self.assert_element("div#ARMY_OF_ROBOTS", timeout=1)
| # ... existing code ...
self.open("http://xkcd.com/731/")
print("\n(This test fails on purpose)")
self.assert_element("div#ARMY_OF_ROBOTS", timeout=1)
# ... rest of the code ... |
53b22654b015d1450fe124bc01a2f1bffba816a2 | test_hpack_integration.py | test_hpack_integration.py | from hyper.http20.hpack import Decoder
from binascii import unhexlify
class TestHPACKDecoderIntegration(object):
def test_can_decode_a_story(self, story):
d = Decoder()
for case in story['cases']:
d.header_table_size = case['header_table_size']
decoded_headers = d.decode(unhexlify(case['wire']))
# The correct headers are a list of dicts, which is annoying.
correct_headers = {(item[0], item[1]) for header in case['headers'] for item in header.items()}
assert correct_headers == decoded_headers
| from hyper.http20.hpack import Decoder
from hyper.http20.huffman import HuffmanDecoder
from hyper.http20.huffman_constants import REQUEST_CODES, REQUEST_CODES_LENGTH
from binascii import unhexlify
class TestHPACKDecoderIntegration(object):
def test_can_decode_a_story(self, story):
d = Decoder()
if story['context'] == 'request':
d.huffman_coder = HuffmanDecoder(REQUEST_CODES, REQUEST_CODES_LENGTH)
for case in story['cases']:
d.header_table_size = case['header_table_size']
decoded_headers = d.decode(unhexlify(case['wire']))
# The correct headers are a list of dicts, which is annoying.
correct_headers = {(item[0], item[1]) for header in case['headers'] for item in header.items()}
assert correct_headers == decoded_headers
| Use the correct decoder for the test. | Use the correct decoder for the test.
| Python | mit | Lukasa/hyper,masaori335/hyper,lawnmowerlatte/hyper,fredthomsen/hyper,irvind/hyper,lawnmowerlatte/hyper,masaori335/hyper,jdecuyper/hyper,irvind/hyper,fredthomsen/hyper,plucury/hyper,plucury/hyper,Lukasa/hyper,jdecuyper/hyper | from hyper.http20.hpack import Decoder
+ from hyper.http20.huffman import HuffmanDecoder
+ from hyper.http20.huffman_constants import REQUEST_CODES, REQUEST_CODES_LENGTH
from binascii import unhexlify
class TestHPACKDecoderIntegration(object):
def test_can_decode_a_story(self, story):
d = Decoder()
+
+ if story['context'] == 'request':
+ d.huffman_coder = HuffmanDecoder(REQUEST_CODES, REQUEST_CODES_LENGTH)
+
for case in story['cases']:
d.header_table_size = case['header_table_size']
decoded_headers = d.decode(unhexlify(case['wire']))
# The correct headers are a list of dicts, which is annoying.
correct_headers = {(item[0], item[1]) for header in case['headers'] for item in header.items()}
assert correct_headers == decoded_headers
| Use the correct decoder for the test. | ## Code Before:
from hyper.http20.hpack import Decoder
from binascii import unhexlify
class TestHPACKDecoderIntegration(object):
def test_can_decode_a_story(self, story):
d = Decoder()
for case in story['cases']:
d.header_table_size = case['header_table_size']
decoded_headers = d.decode(unhexlify(case['wire']))
# The correct headers are a list of dicts, which is annoying.
correct_headers = {(item[0], item[1]) for header in case['headers'] for item in header.items()}
assert correct_headers == decoded_headers
## Instruction:
Use the correct decoder for the test.
## Code After:
from hyper.http20.hpack import Decoder
from hyper.http20.huffman import HuffmanDecoder
from hyper.http20.huffman_constants import REQUEST_CODES, REQUEST_CODES_LENGTH
from binascii import unhexlify
class TestHPACKDecoderIntegration(object):
def test_can_decode_a_story(self, story):
d = Decoder()
if story['context'] == 'request':
d.huffman_coder = HuffmanDecoder(REQUEST_CODES, REQUEST_CODES_LENGTH)
for case in story['cases']:
d.header_table_size = case['header_table_size']
decoded_headers = d.decode(unhexlify(case['wire']))
# The correct headers are a list of dicts, which is annoying.
correct_headers = {(item[0], item[1]) for header in case['headers'] for item in header.items()}
assert correct_headers == decoded_headers
| ...
from hyper.http20.hpack import Decoder
from hyper.http20.huffman import HuffmanDecoder
from hyper.http20.huffman_constants import REQUEST_CODES, REQUEST_CODES_LENGTH
from binascii import unhexlify
...
d = Decoder()
if story['context'] == 'request':
d.huffman_coder = HuffmanDecoder(REQUEST_CODES, REQUEST_CODES_LENGTH)
for case in story['cases']:
... |
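Context for this fix: the draft-era HPACK that hyper implemented here defined separate Huffman tables for requests and responses, so the decoder must be matched to the story's context. Assuming the constants module also exposes the response tables (as drafts of that vintage did; an assumption, not verified here), the symmetric selection would look like:

from hyper.http20.huffman import HuffmanDecoder
from hyper.http20.huffman_constants import (
    REQUEST_CODES, REQUEST_CODES_LENGTH,
    RESPONSE_CODES, RESPONSE_CODES_LENGTH,  # assumed to exist alongside the request tables
)

def coder_for(context):
    # pick the Huffman table matching the direction of the exchange
    if context == 'request':
        return HuffmanDecoder(REQUEST_CODES, REQUEST_CODES_LENGTH)
    return HuffmanDecoder(RESPONSE_CODES, RESPONSE_CODES_LENGTH)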
49af73f2903580d55093e8e001585010fb3a3c46 | locarise_drf_oauth2_support/users/factories.py | locarise_drf_oauth2_support/users/factories.py | from datetime import timedelta
from django.utils import timezone
from locarise_drf_oauth2_support.users.models import User
try:
import factory
class UserF(factory.DjangoModelFactory):
first_name = factory.Sequence(lambda n: "first_name%s" % n)
last_name = factory.Sequence(lambda n: "last_name%s" % n)
email = factory.Sequence(lambda n: "email%[email protected]" % n)
is_staff = False
is_active = True
is_superuser = False
last_login = timezone.now() - timedelta(days=2)
password = factory.PostGenerationMethodCall('set_password', 'pass')
class Meta:
model = User
except ImportError: # pragma: no cover
pass
| from datetime import timedelta
from django.utils import timezone
from locarise_drf_oauth2_support.users.models import User
try:
import factory
class UserF(factory.DjangoModelFactory):
first_name = factory.Sequence(lambda n: "first_name%s" % n)
last_name = factory.Sequence(lambda n: "last_name%s" % n)
email = factory.Sequence(lambda n: "email%[email protected]" % n)
is_staff = False
is_active = True
is_superuser = False
last_login = timezone.now() - timedelta(days=2)
password = factory.PostGenerationMethodCall('set_password', 'pass')
organization_set = [{
"uid": "6tbgzDKyZYLCMzDarN7ga8",
"name": "Organization Demo",
"role": "organization-manager",
"is_active": True
}]
class Meta:
model = User
except ImportError: # pragma: no cover
pass
| Add organization_set field in UserF | Add organization_set field in UserF
| Python | mit | locarise/locarise-drf-oauth2-support,locarise/locarise-drf-oauth2-support | from datetime import timedelta
from django.utils import timezone
from locarise_drf_oauth2_support.users.models import User
try:
import factory
class UserF(factory.DjangoModelFactory):
first_name = factory.Sequence(lambda n: "first_name%s" % n)
last_name = factory.Sequence(lambda n: "last_name%s" % n)
email = factory.Sequence(lambda n: "email%[email protected]" % n)
is_staff = False
is_active = True
is_superuser = False
last_login = timezone.now() - timedelta(days=2)
password = factory.PostGenerationMethodCall('set_password', 'pass')
+ organization_set = [{
+ "uid": "6tbgzDKyZYLCMzDarN7ga8",
+ "name": "Organization Demo",
+ "role": "organization-manager",
+ "is_active": True
+ }]
class Meta:
model = User
except ImportError: # pragma: no cover
pass
| Add organization_set field in UserF | ## Code Before:
from datetime import timedelta
from django.utils import timezone
from locarise_drf_oauth2_support.users.models import User
try:
import factory
class UserF(factory.DjangoModelFactory):
first_name = factory.Sequence(lambda n: "first_name%s" % n)
last_name = factory.Sequence(lambda n: "last_name%s" % n)
email = factory.Sequence(lambda n: "email%[email protected]" % n)
is_staff = False
is_active = True
is_superuser = False
last_login = timezone.now() - timedelta(days=2)
password = factory.PostGenerationMethodCall('set_password', 'pass')
class Meta:
model = User
except ImportError: # pragma: no cover
pass
## Instruction:
Add organization_set field in UserF
## Code After:
from datetime import timedelta
from django.utils import timezone
from locarise_drf_oauth2_support.users.models import User
try:
import factory
class UserF(factory.DjangoModelFactory):
first_name = factory.Sequence(lambda n: "first_name%s" % n)
last_name = factory.Sequence(lambda n: "last_name%s" % n)
email = factory.Sequence(lambda n: "email%[email protected]" % n)
is_staff = False
is_active = True
is_superuser = False
last_login = timezone.now() - timedelta(days=2)
password = factory.PostGenerationMethodCall('set_password', 'pass')
organization_set = [{
"uid": "6tbgzDKyZYLCMzDarN7ga8",
"name": "Organization Demo",
"role": "organization-manager",
"is_active": True
}]
class Meta:
model = User
except ImportError: # pragma: no cover
pass
| ...
password = factory.PostGenerationMethodCall('set_password', 'pass')
organization_set = [{
"uid": "6tbgzDKyZYLCMzDarN7ga8",
"name": "Organization Demo",
"role": "organization-manager",
"is_active": True
}]
... |
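A caveat about declaring organization_set as a plain list literal on the factory class: like any mutable class attribute it is the same list (and same nested dict) for every generated user, so a test mutating one user's organizations mutates them all. factory_boy's lazy declarations build a fresh value per instance; a drop-in sketch for the attribute above:

import factory

# fresh list and dict per generated instance instead of one shared literal
organization_set = factory.LazyFunction(lambda: [{
    "uid": "6tbgzDKyZYLCMzDarN7ga8",
    "name": "Organization Demo",
    "role": "organization-manager",
    "is_active": True,
}])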
2b1cd9a58aa51ef53996dc1897a7a0e50f29d7ca | isitopenaccess/plugins/bmc.py | isitopenaccess/plugins/bmc.py | import requests
from copy import deepcopy
from datetime import datetime
from isitopenaccess.plugins import string_matcher
def page_license(record):
"""
To respond to the provider identifier: http://www.biomedcentral.com
This should determine the licence conditions of the BMC article and populate
the record['bibjson']['license'] (note the US spelling) field.
"""
# licensing statements to look for on this publisher's pages
# take the form of {statement: meaning}
# where meaning['type'] identifies the license (see licenses.py)
# and meaning['version'] identifies the license version (if available)
lic_statements = [
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (<a href='http://creativecommons.org/licenses/by/2.0'>http://creativecommons.org/licenses/by/2.0</a>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False}
}
]
string_matcher.simple_extract(lic_statements, record)
| import requests
from copy import deepcopy
from datetime import datetime
from isitopenaccess.plugins import string_matcher
def page_license(record):
"""
To respond to the provider identifier: http://www.biomedcentral.com
This should determine the licence conditions of the BMC article and populate
the record['bibjson']['license'] (note the US spelling) field.
"""
# licensing statements to look for on this publisher's pages
# take the form of {statement: meaning}
# where meaning['type'] identifies the license (see licenses.py)
# and meaning['version'] identifies the license version (if available)
lic_statements = [
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (<a href='http://creativecommons.org/licenses/by/2.0'>http://creativecommons.org/licenses/by/2.0</a>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False,
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by/2.0'}
}
]
string_matcher.simple_extract(lic_statements, record)
| ADD MISSING FILE TO PREV COMMIT "modify BMC plugin: overwrite URL for CC-BY license. We have a MORE specific URL (from the license statement on the BMC pages) than the Open Definition one" | ADD MISSING FILE TO PREV COMMIT "modify BMC plugin: overwrite URL for CC-BY license. We have a MORE specific URL (from the license statement on the BMC pages) than the Open Definition one"
| Python | bsd-3-clause | CottageLabs/OpenArticleGauge,CottageLabs/OpenArticleGauge,CottageLabs/OpenArticleGauge | import requests
from copy import deepcopy
from datetime import datetime
from isitopenaccess.plugins import string_matcher
def page_license(record):
"""
To respond to the provider identifier: http://www.biomedcentral.com
This should determine the licence conditions of the BMC article and populate
the record['bibjson']['license'] (note the US spelling) field.
"""
# licensing statements to look for on this publisher's pages
# take the form of {statement: meaning}
# where meaning['type'] identifies the license (see licenses.py)
# and meaning['version'] identifies the license version (if available)
lic_statements = [
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (<a href='http://creativecommons.org/licenses/by/2.0'>http://creativecommons.org/licenses/by/2.0</a>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.":
- {'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False}
+ {'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False,
+ # also declare some properties which override info about this license in the licenses list (see licenses module)
+ 'url': 'http://creativecommons.org/licenses/by/2.0'}
}
]
string_matcher.simple_extract(lic_statements, record)
| ADD MISSING FILE TO PREV COMMIT "modify BMC plugin: overwrite URL for CC-BY license. We have a MORE specific URL (from the license statement on the BMC pages) than the Open Definition one" | ## Code Before:
import requests
from copy import deepcopy
from datetime import datetime
from isitopenaccess.plugins import string_matcher
def page_license(record):
"""
To respond to the provider identifier: http://www.biomedcentral.com
This should determine the licence conditions of the BMC article and populate
the record['bibjson']['license'] (note the US spelling) field.
"""
# licensing statements to look for on this publisher's pages
# take the form of {statement: meaning}
# where meaning['type'] identifies the license (see licenses.py)
# and meaning['version'] identifies the license version (if available)
lic_statements = [
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (<a href='http://creativecommons.org/licenses/by/2.0'>http://creativecommons.org/licenses/by/2.0</a>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False}
}
]
string_matcher.simple_extract(lic_statements, record)
## Instruction:
ADD MISSING FILE TO PREV COMMIT "modify BMC plugin: overwrite URL for CC-BY license. We have a MORE specific URL (from the license statement on the BMC pages) than the Open Definition one"
## Code After:
import requests
from copy import deepcopy
from datetime import datetime
from isitopenaccess.plugins import string_matcher
def page_license(record):
"""
To respond to the provider identifier: http://www.biomedcentral.com
This should determine the licence conditions of the BMC article and populate
the record['bibjson']['license'] (note the US spelling) field.
"""
# licensing statements to look for on this publisher's pages
# take the form of {statement: meaning}
# where meaning['type'] identifies the license (see licenses.py)
# and meaning['version'] identifies the license version (if available)
lic_statements = [
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (<a href='http://creativecommons.org/licenses/by/2.0'>http://creativecommons.org/licenses/by/2.0</a>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False,
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by/2.0'}
}
]
string_matcher.simple_extract(lic_statements, record)
| // ... existing code ...
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (<a href='http://creativecommons.org/licenses/by/2.0'>http://creativecommons.org/licenses/by/2.0</a>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False,
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by/2.0'}
}
// ... rest of the code ... |
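The mechanism the commit message describes: each matched statement carries a meaning dict, and any keys it defines (here 'url') override the generic entry for that licence in the central licenses registry. The merge step inside something like simple_extract plausibly reduces to a dict update; this is a sketch only, since the real implementation is not shown in this record:

def resolve_license(meaning, licenses_registry):
    base = dict(licenses_registry.get(meaning['type'], {}))
    base.update(meaning)  # statement-specific keys beat the registry defaults
    return base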
6d450dccc7e89e4e90fd1f0f27cdf2aa67166859 | conanfile.py | conanfile.py | from conans import ConanFile, CMake
class SocketwConan(ConanFile):
name = "SocketW"
version = "3.10.36"
license = "GNU Lesser General Public License v2.1"
url = "https://github.com/RigsOfRods/socketw/issues"
description = "SocketW is a library which provides cross-platform socket abstraction"
settings = "os", "compiler", "build_type", "arch"
#options = {"shared": [True, False]}
#default_options = "shared=False"
generators = "cmake"
exports_sources = "src/*", "CMakeLists.txt", "LICENSE", "README"
def requirements(self):
self.requires.add('OpenSSL/1.0.2@conan/stable')
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake.install()
def package_info(self):
self.cpp_info.libs = ["SocketW"]
| from conans import ConanFile, CMake
class SocketwConan(ConanFile):
name = "SocketW"
version = "3.10.36"
license = "GNU Lesser General Public License v2.1"
url = "https://github.com/RigsOfRods/socketw/issues"
description = "SocketW is a library which provides cross-platform socket abstraction"
settings = "os", "compiler", "build_type", "arch"
#options = {"shared": [True, False]}
#default_options = "shared=False"
generators = "cmake"
exports_sources = "src/*", "CMakeLists.txt", "LICENSE", "README"
def requirements(self):
self.requires.add('OpenSSL/1.0.2@conan/stable')
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake.install()
def package_info(self):
self.cpp_info.libs = tools.collect_libs(self)
| Use collect_libs for finding libs | Use collect_libs for finding libs | Python | lgpl-2.1 | Hiradur/mysocketw,Hiradur/mysocketw | from conans import ConanFile, CMake
class SocketwConan(ConanFile):
name = "SocketW"
version = "3.10.36"
license = "GNU Lesser General Public License v2.1"
url = "https://github.com/RigsOfRods/socketw/issues"
description = "SocketW is a library which provides cross-platform socket abstraction"
settings = "os", "compiler", "build_type", "arch"
#options = {"shared": [True, False]}
#default_options = "shared=False"
generators = "cmake"
exports_sources = "src/*", "CMakeLists.txt", "LICENSE", "README"
def requirements(self):
self.requires.add('OpenSSL/1.0.2@conan/stable')
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake.install()
def package_info(self):
- self.cpp_info.libs = ["SocketW"]
+ self.cpp_info.libs = tools.collect_libs(self)
| Use collect_libs for finding libs | ## Code Before:
from conans import ConanFile, CMake
class SocketwConan(ConanFile):
name = "SocketW"
version = "3.10.36"
license = "GNU Lesser General Public License v2.1"
url = "https://github.com/RigsOfRods/socketw/issues"
description = "SocketW is a library which provides cross-platform socket abstraction"
settings = "os", "compiler", "build_type", "arch"
#options = {"shared": [True, False]}
#default_options = "shared=False"
generators = "cmake"
exports_sources = "src/*", "CMakeLists.txt", "LICENSE", "README"
def requirements(self):
self.requires.add('OpenSSL/1.0.2@conan/stable')
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake.install()
def package_info(self):
self.cpp_info.libs = ["SocketW"]
## Instruction:
Use collect_libs for finding libs
## Code After:
from conans import ConanFile, CMake
class SocketwConan(ConanFile):
name = "SocketW"
version = "3.10.36"
license = "GNU Lesser General Public License v2.1"
url = "https://github.com/RigsOfRods/socketw/issues"
description = "SocketW is a library which provides cross-platform socket abstraction"
settings = "os", "compiler", "build_type", "arch"
#options = {"shared": [True, False]}
#default_options = "shared=False"
generators = "cmake"
exports_sources = "src/*", "CMakeLists.txt", "LICENSE", "README"
def requirements(self):
self.requires.add('OpenSSL/1.0.2@conan/stable')
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake.install()
def package_info(self):
self.cpp_info.libs = tools.collect_libs(self)
| // ... existing code ...
def package_info(self):
self.cpp_info.libs = tools.collect_libs(self)
// ... rest of the code ... |
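One thing to note when reusing this record's pattern: collect_libs() lives in the conans.tools module of the Conan 1.x API, while the import line in the snippet still reads from conans import ConanFile, CMake, so tools is referenced without being imported. A self-contained sketch with the import included:

from conans import ConanFile, CMake, tools

class SocketwConan(ConanFile):
    name = "SocketW"
    settings = "os", "compiler", "build_type", "arch"

    def package_info(self):
        # pick up every library found in the package folder,
        # whatever platform-specific name the build produced
        self.cpp_info.libs = tools.collect_libs(self)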
553731a0ea12a8303076dc3d83bfbba91e6bc3e8 | scripts/merge_duplicate_users.py | scripts/merge_duplicate_users.py | from django.db.models.functions import Lower
from django.db.models import Count
from bluebottle.members.models import Member
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.activities.models import Activity, Contributor
from bluebottle.initiatives.models import Initiative
for client in Client.objects.all():
with LocalTenant(client):
duplicate = Member.objects.annotate(
lower=Lower('email')
).values('lower').annotate(count=Count('lower')).filter(count__gt=1)
for result in duplicate:
first = Member.objects.filter(email__iexact=result['lower']).order_by('date_joined').first()
for duplicate in Member.objects.filter(
email__iexact=result['lower']
).exclude(pk=first.pk).order_by('date_joined'):
for activity in Activity.objects.filter(owner=duplicate):
activity.owner = first
activity.execute_triggers(send_messages=False)
activity.save()
for contributor in Contributor.objects.filter(user=duplicate):
contributor.user = first
contributor.execute_triggers(send_messages=False)
contributor.save()
for initiative in Initiative.objects.filter(owner=duplicate):
initiative.owner = first
initiative.execute_triggers(send_messages=False)
initiative.save()
duplicate.anonymize()
| from django.db.models.functions import Lower
from django.db.models import Count
from bluebottle.members.models import Member
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.activities.models import Activity, Contributor
from bluebottle.initiatives.models import Initiative
for client in Client.objects.all():
with LocalTenant(client):
duplicate = Member.objects.annotate(
lower=Lower('email')
).values('lower').annotate(count=Count('lower')).filter(count__gt=1)
for result in duplicate:
first = Member.objects.filter(email__iexact=result['lower']).order_by('date_joined').first()
for duplicate in Member.objects.filter(
email__iexact=result['lower']
).exclude(pk=first.pk).order_by('date_joined'):
for activity in Activity.objects.filter(owner=duplicate):
activity.owner = first
activity.execute_triggers(send_messages=False)
activity.save()
for contributor in Contributor.objects.filter(user=duplicate):
contributor.user = first
contributor.execute_triggers(send_messages=False)
contributor.save()
for initiative in Initiative.objects.filter(owner=duplicate):
initiative.owner = first
initiative.execute_triggers(send_messages=False)
initiative.save()
duplicate.anonymize()
duplicate.email = 'merged-{}@example.com'.format(first.pk)
duplicate.save()
| Make sure we remember to which the user was merged | Make sure we remember to which the user was merged
| Python | bsd-3-clause | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | from django.db.models.functions import Lower
from django.db.models import Count
from bluebottle.members.models import Member
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.activities.models import Activity, Contributor
from bluebottle.initiatives.models import Initiative
for client in Client.objects.all():
with LocalTenant(client):
duplicate = Member.objects.annotate(
lower=Lower('email')
).values('lower').annotate(count=Count('lower')).filter(count__gt=1)
for result in duplicate:
first = Member.objects.filter(email__iexact=result['lower']).order_by('date_joined').first()
for duplicate in Member.objects.filter(
email__iexact=result['lower']
).exclude(pk=first.pk).order_by('date_joined'):
for activity in Activity.objects.filter(owner=duplicate):
activity.owner = first
activity.execute_triggers(send_messages=False)
activity.save()
for contributor in Contributor.objects.filter(user=duplicate):
contributor.user = first
contributor.execute_triggers(send_messages=False)
contributor.save()
for initiative in Initiative.objects.filter(owner=duplicate):
initiative.owner = first
initiative.execute_triggers(send_messages=False)
initiative.save()
duplicate.anonymize()
+ duplicate.email = 'merged-{}@example.com'.format(first.pk)
+ duplicate.save()
| Make sure we remember to which the user was merged | ## Code Before:
from django.db.models.functions import Lower
from django.db.models import Count
from bluebottle.members.models import Member
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.activities.models import Activity, Contributor
from bluebottle.initiatives.models import Initiative
for client in Client.objects.all():
with LocalTenant(client):
duplicate = Member.objects.annotate(
lower=Lower('email')
).values('lower').annotate(count=Count('lower')).filter(count__gt=1)
for result in duplicate:
first = Member.objects.filter(email__iexact=result['lower']).order_by('date_joined').first()
for duplicate in Member.objects.filter(
email__iexact=result['lower']
).exclude(pk=first.pk).order_by('date_joined'):
for activity in Activity.objects.filter(owner=duplicate):
activity.owner = first
activity.execute_triggers(send_messages=False)
activity.save()
for contributor in Contributor.objects.filter(user=duplicate):
contributor.user = first
contributor.execute_triggers(send_messages=False)
contributor.save()
for initiative in Initiative.objects.filter(owner=duplicate):
initiative.owner = first
initiative.execute_triggers(send_messages=False)
initiative.save()
duplicate.anonymize()
## Instruction:
Make sure we remember to which the user was merged
## Code After:
from django.db.models.functions import Lower
from django.db.models import Count
from bluebottle.members.models import Member
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.activities.models import Activity, Contributor
from bluebottle.initiatives.models import Initiative
for client in Client.objects.all():
with LocalTenant(client):
duplicate = Member.objects.annotate(
lower=Lower('email')
).values('lower').annotate(count=Count('lower')).filter(count__gt=1)
for result in duplicate:
first = Member.objects.filter(email__iexact=result['lower']).order_by('date_joined').first()
for duplicate in Member.objects.filter(
email__iexact=result['lower']
).exclude(pk=first.pk).order_by('date_joined'):
for activity in Activity.objects.filter(owner=duplicate):
activity.owner = first
activity.execute_triggers(send_messages=False)
activity.save()
for contributor in Contributor.objects.filter(user=duplicate):
contributor.user = first
contributor.execute_triggers(send_messages=False)
contributor.save()
for initiative in Initiative.objects.filter(owner=duplicate):
initiative.owner = first
initiative.execute_triggers(send_messages=False)
initiative.save()
duplicate.anonymize()
duplicate.email = 'merged-{}@example.com'.format(first.pk)
duplicate.save()
| # ... existing code ...
duplicate.anonymize()
duplicate.email = 'merged-{}@example.com'.format(first.pk)
duplicate.save()
# ... rest of the code ... |
3224ea27a23e1c254bb93a110be1bd481585cb99 | mosecom_air/api/models.py | mosecom_air/api/models.py |
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
|
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
class Meta:
index_together = [
['station', 'substance', 'unit', 'performed']
]
| Add index for Measurement model | Add index for Measurement model
| Python | mit | elsid/mosecom-air,elsid/mosecom-air,elsid/mosecom-air |
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
+ class Meta:
+ index_together = [
+ ['station', 'substance', 'unit', 'performed']
+ ]
+ | Add index for Measurement model | ## Code Before:
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
## Instruction:
Add index for Measurement model
## Code After:
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
class Meta:
index_together = [
['station', 'substance', 'unit', 'performed']
]
| // ... existing code ...
written = models.DateTimeField(auto_now=True)
class Meta:
index_together = [
['station', 'substance', 'unit', 'performed']
]
// ... rest of the code ... |
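Meta.index_together, as used above, creates one composite index over the four columns, which serves equality filters on the leading columns combined with a range on performed. Django 1.11 introduced Meta.indexes with models.Index, which expresses the same thing and also lets the index be named; a sketch of the equivalent declaration (index name hypothetical):

from django.db import models

class Measurement(models.Model):
    # field definitions as in the record above ...
    class Meta:
        indexes = [
            models.Index(
                fields=['station', 'substance', 'unit', 'performed'],
                name='measurement_lookup_idx',  # hypothetical name
            ),
        ]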
c37500894b309a691009b87b1305935ee57648cb | tests/test_test.py | tests/test_test.py | import pytest
from web_test_base import *
"""
A class to test new features without running all of the tests.
Usage:
py.test tests/test_test.py -rsx
"""
class TestTest(WebTestBase):
urls_to_get = [
"http://aidtransparency.net/"
]
text_to_find = [
("information", '//*[@id="home-strapline"]/h1')
]
def test_locate_text(self, loaded_request, text_to_find):
"""
Tests that each page contains the specified text at the required location.
"""
result = self._get_text_from_xpath(loaded_request, text_to_find[1])
assert self._substring_in_list(text_to_find[0], result)
| import pytest
from web_test_base import *
"""
A class to test new features without running all of the tests.
Usage:
py.test tests/test_test.py -rsx
"""
class TestTest(WebTestBase):
urls_to_get = [
"http://iatistandard.org/"
, "http://iatistandard.org/202/namespaces-extensions/"
]
text_to_find = [
("technical publishing framework", '//*[@id="home-strapline"]/h1')
]
def test_locate_text(self, loaded_request, text_to_find):
"""
Tests that each page contains the specified text at the required location.
"""
result = self._get_text_from_xpath(loaded_request, text_to_find[1])
assert self._substring_in_list(text_to_find[0], result)
| Add test text finding that fails | Add test text finding that fails
This indicates that a different method of specifying how and where
to find text within a document is required.
| Python | mit | IATI/IATI-Website-Tests | import pytest
from web_test_base import *
"""
A class to test new features without running all of the tests.
Usage:
py.test tests/test_test.py -rsx
"""
class TestTest(WebTestBase):
urls_to_get = [
- "http://aidtransparency.net/"
+ "http://iatistandard.org/"
+ , "http://iatistandard.org/202/namespaces-extensions/"
]
text_to_find = [
- ("information", '//*[@id="home-strapline"]/h1')
+ ("technical publishing framework", '//*[@id="home-strapline"]/h1')
]
def test_locate_text(self, loaded_request, text_to_find):
"""
Tests that each page contains the specified text at the required location.
"""
result = self._get_text_from_xpath(loaded_request, text_to_find[1])
assert self._substring_in_list(text_to_find[0], result)
| Add test text finding that fails | ## Code Before:
import pytest
from web_test_base import *
"""
A class to test new features without running all of the tests.
Usage:
py.test tests/test_test.py -rsx
"""
class TestTest(WebTestBase):
urls_to_get = [
"http://aidtransparency.net/"
]
text_to_find = [
("information", '//*[@id="home-strapline"]/h1')
]
def test_locate_text(self, loaded_request, text_to_find):
"""
Tests that each page contains the specified text at the required location.
"""
result = self._get_text_from_xpath(loaded_request, text_to_find[1])
assert self._substring_in_list(text_to_find[0], result)
## Instruction:
Add test text finding that fails
## Code After:
import pytest
from web_test_base import *
"""
A class to test new features without running all of the tests.
Usage:
py.test tests/test_test.py -rsx
"""
class TestTest(WebTestBase):
urls_to_get = [
"http://iatistandard.org/"
, "http://iatistandard.org/202/namespaces-extensions/"
]
text_to_find = [
("technical publishing framework", '//*[@id="home-strapline"]/h1')
]
def test_locate_text(self, loaded_request, text_to_find):
"""
Tests that each page contains the specified text at the required location.
"""
result = self._get_text_from_xpath(loaded_request, text_to_find[1])
assert self._substring_in_list(text_to_find[0], result)
| # ... existing code ...
urls_to_get = [
"http://iatistandard.org/"
, "http://iatistandard.org/202/namespaces-extensions/"
]
# ... modified code ...
text_to_find = [
("technical publishing framework", '//*[@id="home-strapline"]/h1')
]
# ... rest of the code ... |
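The closing remark in this record's commit message, that a different way of specifying how and where to find text is required, points at the structural flaw: text_to_find pairs every (text, xpath) tuple with every URL in urls_to_get, so a strapline that exists on only one page fails against the other. One hypothetical shape that binds each expectation to its page:

from collections import namedtuple

TextCheck = namedtuple('TextCheck', ['url', 'text', 'xpath'])  # hypothetical helper

text_checks = [
    TextCheck(
        url="http://iatistandard.org/",
        text="technical publishing framework",
        xpath='//*[@id="home-strapline"]/h1',
    ),
]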