commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
3.18k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43k
| ndiff
stringlengths 52
3.32k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| fuzzy_diff
stringlengths 16
3.18k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d9226d778a831d6d9f9f8d7645869245d0757754 | tests/integration/test_cli.py | tests/integration/test_cli.py | import os
import subprocess
import pytest
from chalice.utils import OSUtils
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp')
@pytest.fixture
def local_app(tmpdir):
temp_dir_path = str(tmpdir)
OSUtils().copytree(PROJECT_DIR, temp_dir_path)
old_dir = os.getcwd()
try:
os.chdir(temp_dir_path)
yield temp_dir_path
finally:
os.chdir(old_dir)
def test_stack_trace_printed_on_error(local_app):
app_file = os.path.join(local_app, 'app.py')
with open(app_file, 'w') as f:
f.write(
'from chalice import Chalice\n'
'app = Chalice(app_name="test")\n'
'foobarbaz\n'
)
p = subprocess.Popen(['chalice', 'local'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr = p.communicate()[1].decode('ascii')
rc = p.returncode
assert rc == 2
assert 'Traceback' in stderr
assert 'foobarbaz' in stderr
| import os
import subprocess
import pytest
from chalice.utils import OSUtils
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp')
@pytest.fixture
def local_app(tmpdir):
temp_dir_path = str(tmpdir)
OSUtils().copytree(PROJECT_DIR, temp_dir_path)
old_dir = os.getcwd()
try:
os.chdir(temp_dir_path)
yield temp_dir_path
finally:
os.chdir(old_dir)
def test_stack_trace_printed_on_error(local_app):
app_file = os.path.join(local_app, 'app.py')
with open(app_file, 'w') as f:
f.write(
'from chalice import Chalice\n'
'app = Chalice(app_name="test")\n'
'foobarbaz\n'
)
p = subprocess.Popen(['chalice', 'local', '--no-autoreload'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr = p.communicate()[1].decode('ascii')
rc = p.returncode
assert rc == 2
assert 'Traceback' in stderr
assert 'foobarbaz' in stderr
| Disable autoreload in integration tests | Disable autoreload in integration tests
| Python | apache-2.0 | awslabs/chalice | import os
import subprocess
import pytest
from chalice.utils import OSUtils
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp')
@pytest.fixture
def local_app(tmpdir):
temp_dir_path = str(tmpdir)
OSUtils().copytree(PROJECT_DIR, temp_dir_path)
old_dir = os.getcwd()
try:
os.chdir(temp_dir_path)
yield temp_dir_path
finally:
os.chdir(old_dir)
def test_stack_trace_printed_on_error(local_app):
app_file = os.path.join(local_app, 'app.py')
with open(app_file, 'w') as f:
f.write(
'from chalice import Chalice\n'
'app = Chalice(app_name="test")\n'
'foobarbaz\n'
)
- p = subprocess.Popen(['chalice', 'local'],
+ p = subprocess.Popen(['chalice', 'local', '--no-autoreload'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr = p.communicate()[1].decode('ascii')
rc = p.returncode
assert rc == 2
assert 'Traceback' in stderr
assert 'foobarbaz' in stderr
| Disable autoreload in integration tests | ## Code Before:
import os
import subprocess
import pytest
from chalice.utils import OSUtils
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp')
@pytest.fixture
def local_app(tmpdir):
temp_dir_path = str(tmpdir)
OSUtils().copytree(PROJECT_DIR, temp_dir_path)
old_dir = os.getcwd()
try:
os.chdir(temp_dir_path)
yield temp_dir_path
finally:
os.chdir(old_dir)
def test_stack_trace_printed_on_error(local_app):
app_file = os.path.join(local_app, 'app.py')
with open(app_file, 'w') as f:
f.write(
'from chalice import Chalice\n'
'app = Chalice(app_name="test")\n'
'foobarbaz\n'
)
p = subprocess.Popen(['chalice', 'local'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr = p.communicate()[1].decode('ascii')
rc = p.returncode
assert rc == 2
assert 'Traceback' in stderr
assert 'foobarbaz' in stderr
## Instruction:
Disable autoreload in integration tests
## Code After:
import os
import subprocess
import pytest
from chalice.utils import OSUtils
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp')
@pytest.fixture
def local_app(tmpdir):
temp_dir_path = str(tmpdir)
OSUtils().copytree(PROJECT_DIR, temp_dir_path)
old_dir = os.getcwd()
try:
os.chdir(temp_dir_path)
yield temp_dir_path
finally:
os.chdir(old_dir)
def test_stack_trace_printed_on_error(local_app):
app_file = os.path.join(local_app, 'app.py')
with open(app_file, 'w') as f:
f.write(
'from chalice import Chalice\n'
'app = Chalice(app_name="test")\n'
'foobarbaz\n'
)
p = subprocess.Popen(['chalice', 'local', '--no-autoreload'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr = p.communicate()[1].decode('ascii')
rc = p.returncode
assert rc == 2
assert 'Traceback' in stderr
assert 'foobarbaz' in stderr
| ...
)
p = subprocess.Popen(['chalice', 'local', '--no-autoreload'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
... |
c6917a2f439b99078e67310230f1d0cfa0de8a7b | tests/builder_tests.py | tests/builder_tests.py | import ujson
import unittest
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import NullPool
from interrogate import Builder
class InterrogateTestCase(unittest.TestCase):
def valid_builder_args(self):
model = self.model
type_constraints = {
'string': [
'name',
'email'
],
'numeric': [
'age',
'height'
],
'nullable': [
'email',
'height'
]
}
query_constraints = {
'breadth': None,
'depth': 32,
'elements': 64
}
return [model, type_constraints, query_constraints]
def make_builder(self, model=None, type_constraints=None, query_constraints=None):
dm, dt, dq = self.valid_builder_args()
return Builder(
model or dm,
type_constraints or dt,
query_constraints or dq
)
def setUp(self):
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String)
email = Column(String)
age = Column(Integer)
height = Column(Integer)
engine = create_engine("sqlite://", poolclass=NullPool)
Base.metadata.create_all(engine)
self.model = User
self.session = sessionmaker(bind=engine)()
def tearDown(self):
self.session.close()
| import ujson
import unittest
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from interrogate import Builder
class InterrogateTestCase(unittest.TestCase):
def valid_builder_args(self):
model = self.model
query_constraints = {
'breadth': None,
'depth': 32,
'elements': 64
}
return [model, query_constraints]
def make_builder(self, model=None, query_constraints=None):
dm, dt, dq = self.valid_builder_args()
return Builder(
model or dm,
query_constraints or dq
)
def setUp(self):
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String)
email = Column(String)
age = Column(Integer)
height = Column(Integer)
engine = create_engine("sqlite://", echo=True)
Base.metadata.create_all(engine)
self.model = User
self.session = sessionmaker(bind=engine)()
def tearDown(self):
self.session.close()
def add_user(self, **kwargs):
user = self.model(**kwargs)
self.session.add(user)
self.session.commit()
| Add test helper for creating users | Add test helper for creating users
| Python | mit | numberoverzero/jsonquery | import ujson
import unittest
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
- from sqlalchemy.pool import NullPool
from interrogate import Builder
class InterrogateTestCase(unittest.TestCase):
def valid_builder_args(self):
model = self.model
- type_constraints = {
- 'string': [
- 'name',
- 'email'
- ],
- 'numeric': [
- 'age',
- 'height'
- ],
- 'nullable': [
- 'email',
- 'height'
- ]
- }
query_constraints = {
'breadth': None,
'depth': 32,
'elements': 64
}
- return [model, type_constraints, query_constraints]
+ return [model, query_constraints]
- def make_builder(self, model=None, type_constraints=None, query_constraints=None):
+ def make_builder(self, model=None, query_constraints=None):
dm, dt, dq = self.valid_builder_args()
return Builder(
model or dm,
- type_constraints or dt,
query_constraints or dq
)
def setUp(self):
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String)
email = Column(String)
age = Column(Integer)
height = Column(Integer)
- engine = create_engine("sqlite://", poolclass=NullPool)
+ engine = create_engine("sqlite://", echo=True)
Base.metadata.create_all(engine)
self.model = User
self.session = sessionmaker(bind=engine)()
def tearDown(self):
self.session.close()
+ def add_user(self, **kwargs):
+ user = self.model(**kwargs)
+ self.session.add(user)
+ self.session.commit()
+ | Add test helper for creating users | ## Code Before:
import ujson
import unittest
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import NullPool
from interrogate import Builder
class InterrogateTestCase(unittest.TestCase):
def valid_builder_args(self):
model = self.model
type_constraints = {
'string': [
'name',
'email'
],
'numeric': [
'age',
'height'
],
'nullable': [
'email',
'height'
]
}
query_constraints = {
'breadth': None,
'depth': 32,
'elements': 64
}
return [model, type_constraints, query_constraints]
def make_builder(self, model=None, type_constraints=None, query_constraints=None):
dm, dt, dq = self.valid_builder_args()
return Builder(
model or dm,
type_constraints or dt,
query_constraints or dq
)
def setUp(self):
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String)
email = Column(String)
age = Column(Integer)
height = Column(Integer)
engine = create_engine("sqlite://", poolclass=NullPool)
Base.metadata.create_all(engine)
self.model = User
self.session = sessionmaker(bind=engine)()
def tearDown(self):
self.session.close()
## Instruction:
Add test helper for creating users
## Code After:
import ujson
import unittest
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from interrogate import Builder
class InterrogateTestCase(unittest.TestCase):
def valid_builder_args(self):
model = self.model
query_constraints = {
'breadth': None,
'depth': 32,
'elements': 64
}
return [model, query_constraints]
def make_builder(self, model=None, query_constraints=None):
dm, dt, dq = self.valid_builder_args()
return Builder(
model or dm,
query_constraints or dq
)
def setUp(self):
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String)
email = Column(String)
age = Column(Integer)
height = Column(Integer)
engine = create_engine("sqlite://", echo=True)
Base.metadata.create_all(engine)
self.model = User
self.session = sessionmaker(bind=engine)()
def tearDown(self):
self.session.close()
def add_user(self, **kwargs):
user = self.model(**kwargs)
self.session.add(user)
self.session.commit()
| # ... existing code ...
from sqlalchemy.ext.declarative import declarative_base
from interrogate import Builder
# ... modified code ...
model = self.model
query_constraints = {
...
}
return [model, query_constraints]
def make_builder(self, model=None, query_constraints=None):
dm, dt, dq = self.valid_builder_args()
...
model or dm,
query_constraints or dq
...
height = Column(Integer)
engine = create_engine("sqlite://", echo=True)
Base.metadata.create_all(engine)
...
self.session.close()
def add_user(self, **kwargs):
user = self.model(**kwargs)
self.session.add(user)
self.session.commit()
# ... rest of the code ... |
62705d28c826a213a42de504c041d56d72bd64df | examples/sparkfun_redbot/sparkfun_experiments/Exp2_DriveForward.py | examples/sparkfun_redbot/sparkfun_experiments/Exp2_DriveForward.py |
from pymata_aio.pymata3 import PyMata3
from RedBot import RedBotMotors
# This line "includes" the RedBot library into your sketch.
# Provides special objects, methods, and functions for the RedBot.
board = PyMata3()
motors = RedBotMotors(board)
# Instantiate the motor control object. This only needs to be done once.
def setup():
motors.drive(255) # Turn on Left and right motors at full speed forward.
board.sleep(2.0) # Waits for 2 seconds
motors.stop() # Stops both motors
def loop():
# Nothing here. We'll get to this in the next experiment.
pass
if __name__ == "__main__":
setup()
while True:
loop()
|
from pymata_aio.pymata3 import PyMata3
from RedBot import RedBotMotors
# This line "includes" the RedBot library into your sketch.
# Provides special objects, methods, and functions for the RedBot.
board = PyMata3()
motors = RedBotMotors(board)
# Instantiate the motor control object. This only needs to be done once.
def setup():
print("Left and right motors at full speed forward")
motors.drive(255) # Turn on Left and right motors at full speed forward.
board.sleep(2.0) # Waits for 2 seconds
print("Stop both motors")
motors.stop() # Stops both motors
def loop():
# Nothing here. We'll get to this in the next experiment.
pass
if __name__ == "__main__":
setup()
while True:
loop()
| Add a log to Exp2 | Add a log to Exp2
| Python | agpl-3.0 | MrYsLab/pymata-aio |
from pymata_aio.pymata3 import PyMata3
from RedBot import RedBotMotors
# This line "includes" the RedBot library into your sketch.
# Provides special objects, methods, and functions for the RedBot.
board = PyMata3()
motors = RedBotMotors(board)
# Instantiate the motor control object. This only needs to be done once.
def setup():
+ print("Left and right motors at full speed forward")
motors.drive(255) # Turn on Left and right motors at full speed forward.
board.sleep(2.0) # Waits for 2 seconds
+ print("Stop both motors")
motors.stop() # Stops both motors
def loop():
# Nothing here. We'll get to this in the next experiment.
pass
if __name__ == "__main__":
setup()
while True:
loop()
| Add a log to Exp2 | ## Code Before:
from pymata_aio.pymata3 import PyMata3
from RedBot import RedBotMotors
# This line "includes" the RedBot library into your sketch.
# Provides special objects, methods, and functions for the RedBot.
board = PyMata3()
motors = RedBotMotors(board)
# Instantiate the motor control object. This only needs to be done once.
def setup():
motors.drive(255) # Turn on Left and right motors at full speed forward.
board.sleep(2.0) # Waits for 2 seconds
motors.stop() # Stops both motors
def loop():
# Nothing here. We'll get to this in the next experiment.
pass
if __name__ == "__main__":
setup()
while True:
loop()
## Instruction:
Add a log to Exp2
## Code After:
from pymata_aio.pymata3 import PyMata3
from RedBot import RedBotMotors
# This line "includes" the RedBot library into your sketch.
# Provides special objects, methods, and functions for the RedBot.
board = PyMata3()
motors = RedBotMotors(board)
# Instantiate the motor control object. This only needs to be done once.
def setup():
print("Left and right motors at full speed forward")
motors.drive(255) # Turn on Left and right motors at full speed forward.
board.sleep(2.0) # Waits for 2 seconds
print("Stop both motors")
motors.stop() # Stops both motors
def loop():
# Nothing here. We'll get to this in the next experiment.
pass
if __name__ == "__main__":
setup()
while True:
loop()
| # ... existing code ...
def setup():
print("Left and right motors at full speed forward")
motors.drive(255) # Turn on Left and right motors at full speed forward.
# ... modified code ...
board.sleep(2.0) # Waits for 2 seconds
print("Stop both motors")
motors.stop() # Stops both motors
# ... rest of the code ... |
b45c0cc0e9f2964ad442115f7a83292fb83611ec | test/vim_autopep8.py | test/vim_autopep8.py |
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
if vim.eval('&syntax') == 'python':
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
|
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
def main():
if vim.eval('&syntax') != 'python':
return
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
if __name__ == '__main__':
main()
| Put code in main function | Put code in main function
| Python | mit | SG345/autopep8,MeteorAdminz/autopep8,Vauxoo/autopep8,hhatto/autopep8,Vauxoo/autopep8,MeteorAdminz/autopep8,SG345/autopep8,vauxoo-dev/autopep8,vauxoo-dev/autopep8,hhatto/autopep8 |
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
+ def main():
- if vim.eval('&syntax') == 'python':
+ if vim.eval('&syntax') != 'python':
+ return
+
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
+
+ if __name__ == '__main__':
+ main()
+ | Put code in main function | ## Code Before:
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
if vim.eval('&syntax') == 'python':
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
## Instruction:
Put code in main function
## Code After:
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
def main():
if vim.eval('&syntax') != 'python':
return
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
if __name__ == '__main__':
main()
| ...
def main():
if vim.eval('&syntax') != 'python':
return
source = '\n'.join(decode(line)
...
for line in formatted.splitlines()]
if __name__ == '__main__':
main()
... |
fa67de4900be765a5ea4194b1a786cd237934a33 | displacy_service_tests/test_server.py | displacy_service_tests/test_server.py | import falcon.testing
import json
from displacy_service.server import APP
class TestAPI(falcon.testing.TestCase):
def __init__(self):
self.api = APP
def test_deps():
test_api = TestAPI()
result = test_api.simulate_post(path='/dep',
body='''{"text": "This is a test.", "model": "en",
"collapse_punctuation": false,
"collapse_phrases": false}''')
result = json.loads(result.text)
words = [w['text'] for w in result['words']]
assert words == ["This", "is", "a", "test", "."]
def test_ents():
test_api = TestAPI()
result = test_api.simulate_post(path='/ent',
body='''{"text": "What a great company Google is.",
"model": "en"}''')
ents = json.loads(result.text)
assert ents == [
{"start": 21, "end": 27, "type": "ORG", "text": "Google"}]
def test_sents():
test_api = TestAPI()
sentences = test_api.simulate_post(
path='/sent', body='''{"text": "This a test that should split into sentences! This is the second. Is this the third?", "model": "en"}''')
assert sentences == ['This a test that should split into sentences!', 'This is the second.', 'Is this the third?']
| import falcon.testing
import json
from displacy_service.server import APP
class TestAPI(falcon.testing.TestCase):
def __init__(self):
self.api = APP
def test_deps():
test_api = TestAPI()
result = test_api.simulate_post(
path='/dep',
body='''{"text": "This is a test.", "model": "en",
"collapse_punctuation": false,
"collapse_phrases": false}'''
)
result = json.loads(result.text)
words = [w['text'] for w in result['words']]
assert words == ["This", "is", "a", "test", "."]
def test_ents():
test_api = TestAPI()
result = test_api.simulate_post(
path='/ent',
body='''{"text": "What a great company Google is.",
"model": "en"}''')
ents = json.loads(result.text)
assert ents == [
{"start": 21, "end": 27, "type": "ORG", "text": "Google"}]
def test_sents():
test_api = TestAPI()
sentences = test_api.simulate_post(
path='/sent',
body='''{"text": "This a test that should split into sentences!
This is the second. Is this the third?", "model": "en"}'''
)
assert sentences == ['This a test that should split into sentences!',
'This is the second.', 'Is this the third?']
| Make test file PEP8 compliant. | Make test file PEP8 compliant.
| Python | mit | jgontrum/spacy-api-docker,jgontrum/spacy-api-docker,jgontrum/spacy-api-docker,jgontrum/spacy-api-docker | import falcon.testing
import json
from displacy_service.server import APP
class TestAPI(falcon.testing.TestCase):
def __init__(self):
self.api = APP
def test_deps():
test_api = TestAPI()
- result = test_api.simulate_post(path='/dep',
+ result = test_api.simulate_post(
+ path='/dep',
- body='''{"text": "This is a test.", "model": "en",
+ body='''{"text": "This is a test.", "model": "en",
- "collapse_punctuation": false,
+ "collapse_punctuation": false,
- "collapse_phrases": false}''')
+ "collapse_phrases": false}'''
+ )
result = json.loads(result.text)
words = [w['text'] for w in result['words']]
assert words == ["This", "is", "a", "test", "."]
def test_ents():
test_api = TestAPI()
- result = test_api.simulate_post(path='/ent',
+ result = test_api.simulate_post(
+ path='/ent',
- body='''{"text": "What a great company Google is.",
+ body='''{"text": "What a great company Google is.",
"model": "en"}''')
ents = json.loads(result.text)
assert ents == [
{"start": 21, "end": 27, "type": "ORG", "text": "Google"}]
def test_sents():
test_api = TestAPI()
sentences = test_api.simulate_post(
- path='/sent', body='''{"text": "This a test that should split into sentences! This is the second. Is this the third?", "model": "en"}''')
- assert sentences == ['This a test that should split into sentences!', 'This is the second.', 'Is this the third?']
+ path='/sent',
+ body='''{"text": "This a test that should split into sentences!
+ This is the second. Is this the third?", "model": "en"}'''
+ )
+ assert sentences == ['This a test that should split into sentences!',
+ 'This is the second.', 'Is this the third?']
+ | Make test file PEP8 compliant. | ## Code Before:
import falcon.testing
import json
from displacy_service.server import APP
class TestAPI(falcon.testing.TestCase):
def __init__(self):
self.api = APP
def test_deps():
test_api = TestAPI()
result = test_api.simulate_post(path='/dep',
body='''{"text": "This is a test.", "model": "en",
"collapse_punctuation": false,
"collapse_phrases": false}''')
result = json.loads(result.text)
words = [w['text'] for w in result['words']]
assert words == ["This", "is", "a", "test", "."]
def test_ents():
test_api = TestAPI()
result = test_api.simulate_post(path='/ent',
body='''{"text": "What a great company Google is.",
"model": "en"}''')
ents = json.loads(result.text)
assert ents == [
{"start": 21, "end": 27, "type": "ORG", "text": "Google"}]
def test_sents():
test_api = TestAPI()
sentences = test_api.simulate_post(
path='/sent', body='''{"text": "This a test that should split into sentences! This is the second. Is this the third?", "model": "en"}''')
assert sentences == ['This a test that should split into sentences!', 'This is the second.', 'Is this the third?']
## Instruction:
Make test file PEP8 compliant.
## Code After:
import falcon.testing
import json
from displacy_service.server import APP
class TestAPI(falcon.testing.TestCase):
def __init__(self):
self.api = APP
def test_deps():
test_api = TestAPI()
result = test_api.simulate_post(
path='/dep',
body='''{"text": "This is a test.", "model": "en",
"collapse_punctuation": false,
"collapse_phrases": false}'''
)
result = json.loads(result.text)
words = [w['text'] for w in result['words']]
assert words == ["This", "is", "a", "test", "."]
def test_ents():
test_api = TestAPI()
result = test_api.simulate_post(
path='/ent',
body='''{"text": "What a great company Google is.",
"model": "en"}''')
ents = json.loads(result.text)
assert ents == [
{"start": 21, "end": 27, "type": "ORG", "text": "Google"}]
def test_sents():
test_api = TestAPI()
sentences = test_api.simulate_post(
path='/sent',
body='''{"text": "This a test that should split into sentences!
This is the second. Is this the third?", "model": "en"}'''
)
assert sentences == ['This a test that should split into sentences!',
'This is the second.', 'Is this the third?']
| # ... existing code ...
test_api = TestAPI()
result = test_api.simulate_post(
path='/dep',
body='''{"text": "This is a test.", "model": "en",
"collapse_punctuation": false,
"collapse_phrases": false}'''
)
result = json.loads(result.text)
# ... modified code ...
test_api = TestAPI()
result = test_api.simulate_post(
path='/ent',
body='''{"text": "What a great company Google is.",
"model": "en"}''')
...
sentences = test_api.simulate_post(
path='/sent',
body='''{"text": "This a test that should split into sentences!
This is the second. Is this the third?", "model": "en"}'''
)
assert sentences == ['This a test that should split into sentences!',
'This is the second.', 'Is this the third?']
# ... rest of the code ... |
ab72360da83e3b8d95030394f35a442943f53233 | domains/integrator_chains/fmrb_sci_examples/scripts/lqr.py | domains/integrator_chains/fmrb_sci_examples/scripts/lqr.py | from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
from control import lqr
import numpy as np
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
A = np.array([[0., 1, 0], [0,0,1], [0,0,0]])
B = np.array([[0.],[0],[1]])
Q = np.diag([1.,1,1])
R = np.diag([1.])
K, S, E = lqr(A,B,Q,R)
self.K = K
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
| from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
| Remove some unused code that unnecessarily introduced depends | Remove some unused code that unnecessarily introduced depends
This should be regarded as an update to 9aa5b02e12a2287642a285541c43790a10d6444f
that removes unnecessary dependencies for the lqr.py example.
In the example provided by 9aa5b02e12a2287642a285541c43790a10d6444f
Python code was introduced that led to dependencies on NumPy and
the Python Control System Library (control), yet the state-feedback
gains were hard-coded. We will re-introduce these dependencies in the
next changeset, but having this checkpoint without them seems useful.
| Python | bsd-3-clause | fmrchallenge/fmrbenchmark,fmrchallenge/fmrbenchmark,fmrchallenge/fmrbenchmark | from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
- from control import lqr
- import numpy as np
-
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
- A = np.array([[0., 1, 0], [0,0,1], [0,0,0]])
- B = np.array([[0.],[0],[1]])
- Q = np.diag([1.,1,1])
- R = np.diag([1.])
- K, S, E = lqr(A,B,Q,R)
- self.K = K
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
| Remove some unused code that unnecessarily introduced depends | ## Code Before:
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
from control import lqr
import numpy as np
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
A = np.array([[0., 1, 0], [0,0,1], [0,0,0]])
B = np.array([[0.],[0],[1]])
Q = np.diag([1.,1,1])
R = np.diag([1.])
K, S, E = lqr(A,B,Q,R)
self.K = K
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
## Instruction:
Remove some unused code that unnecessarily introduced depends
## Code After:
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
| ...
...
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
... |
3d86b4473f66a9311a94b1def4c40189eae23990 | lancet/git.py | lancet/git.py | import sys
import click
from slugify import slugify
class SlugBranchGetter(object):
def __init__(self, base_branch='master'):
self.base_branch = base_branch
def __call__(self, repo, issue):
discriminator = 'features/{}'.format(issue.key)
slug = slugify(issue.fields.summary[:30])
full_name = '{}_{}'.format(discriminator, slug)
branches = [b for b in repo.listall_branches()
if b.startswith(discriminator)]
if len(branches) > 1:
click.secho('Multiple matching branches found!',
fg='red', bold=True)
click.echo()
click.echo('The prefix {} matched the following branches:'
.format(discriminator))
click.echo()
for b in branches:
click.echo(' {} {}'.format(click.style('*', fg='red'), b))
click.echo()
click.echo('Please remove all but one in order to continue.')
sys.exit(1)
elif branches:
branch = repo.lookup_branch(branches[0])
if branch.branch_name != full_name:
branch.rename(full_name)
branch = repo.lookup_branch(full_name)
else:
base = repo.lookup_branch(self.base_branch)
if not base:
click.secho('Base branch not found: "{}", aborting.'
.format(self.base_branch), fg='red', bold=True)
sys.exit(1)
branch = repo.create_branch(full_name, base.get_object())
return branch
| import sys
import click
from slugify import slugify
class SlugBranchGetter(object):
prefix = 'feature/'
def __init__(self, base_branch='master'):
self.base_branch = base_branch
def __call__(self, repo, issue):
discriminator = '{}{}'.format(self.prefix, issue.key)
slug = slugify(issue.fields.summary[:30])
full_name = '{}_{}'.format(discriminator, slug)
branches = [b for b in repo.listall_branches()
if b.startswith(discriminator)]
if len(branches) > 1:
click.secho('Multiple matching branches found!',
fg='red', bold=True)
click.echo()
click.echo('The prefix {} matched the following branches:'
.format(discriminator))
click.echo()
for b in branches:
click.echo(' {} {}'.format(click.style('*', fg='red'), b))
click.echo()
click.echo('Please remove all but one in order to continue.')
sys.exit(1)
elif branches:
branch = repo.lookup_branch(branches[0])
if branch.branch_name != full_name:
branch.rename(full_name)
branch = repo.lookup_branch(full_name)
else:
base = repo.lookup_branch(self.base_branch)
if not base:
click.secho('Base branch not found: "{}", aborting.'
.format(self.base_branch), fg='red', bold=True)
sys.exit(1)
branch = repo.create_branch(full_name, base.get_object())
return branch
| Change the prefix from features/ to feature/. | Change the prefix from features/ to feature/.
| Python | mit | GaretJax/lancet,GaretJax/lancet | import sys
import click
from slugify import slugify
class SlugBranchGetter(object):
+ prefix = 'feature/'
+
def __init__(self, base_branch='master'):
self.base_branch = base_branch
def __call__(self, repo, issue):
- discriminator = 'features/{}'.format(issue.key)
+ discriminator = '{}{}'.format(self.prefix, issue.key)
slug = slugify(issue.fields.summary[:30])
full_name = '{}_{}'.format(discriminator, slug)
branches = [b for b in repo.listall_branches()
if b.startswith(discriminator)]
if len(branches) > 1:
click.secho('Multiple matching branches found!',
fg='red', bold=True)
click.echo()
click.echo('The prefix {} matched the following branches:'
.format(discriminator))
click.echo()
for b in branches:
click.echo(' {} {}'.format(click.style('*', fg='red'), b))
click.echo()
click.echo('Please remove all but one in order to continue.')
sys.exit(1)
elif branches:
branch = repo.lookup_branch(branches[0])
if branch.branch_name != full_name:
branch.rename(full_name)
branch = repo.lookup_branch(full_name)
else:
base = repo.lookup_branch(self.base_branch)
if not base:
click.secho('Base branch not found: "{}", aborting.'
.format(self.base_branch), fg='red', bold=True)
sys.exit(1)
branch = repo.create_branch(full_name, base.get_object())
return branch
| Change the prefix from features/ to feature/. | ## Code Before:
import sys
import click
from slugify import slugify
class SlugBranchGetter(object):
def __init__(self, base_branch='master'):
self.base_branch = base_branch
def __call__(self, repo, issue):
discriminator = 'features/{}'.format(issue.key)
slug = slugify(issue.fields.summary[:30])
full_name = '{}_{}'.format(discriminator, slug)
branches = [b for b in repo.listall_branches()
if b.startswith(discriminator)]
if len(branches) > 1:
click.secho('Multiple matching branches found!',
fg='red', bold=True)
click.echo()
click.echo('The prefix {} matched the following branches:'
.format(discriminator))
click.echo()
for b in branches:
click.echo(' {} {}'.format(click.style('*', fg='red'), b))
click.echo()
click.echo('Please remove all but one in order to continue.')
sys.exit(1)
elif branches:
branch = repo.lookup_branch(branches[0])
if branch.branch_name != full_name:
branch.rename(full_name)
branch = repo.lookup_branch(full_name)
else:
base = repo.lookup_branch(self.base_branch)
if not base:
click.secho('Base branch not found: "{}", aborting.'
.format(self.base_branch), fg='red', bold=True)
sys.exit(1)
branch = repo.create_branch(full_name, base.get_object())
return branch
## Instruction:
Change the prefix from features/ to feature/.
## Code After:
import sys
import click
from slugify import slugify
class SlugBranchGetter(object):
prefix = 'feature/'
def __init__(self, base_branch='master'):
self.base_branch = base_branch
def __call__(self, repo, issue):
discriminator = '{}{}'.format(self.prefix, issue.key)
slug = slugify(issue.fields.summary[:30])
full_name = '{}_{}'.format(discriminator, slug)
branches = [b for b in repo.listall_branches()
if b.startswith(discriminator)]
if len(branches) > 1:
click.secho('Multiple matching branches found!',
fg='red', bold=True)
click.echo()
click.echo('The prefix {} matched the following branches:'
.format(discriminator))
click.echo()
for b in branches:
click.echo(' {} {}'.format(click.style('*', fg='red'), b))
click.echo()
click.echo('Please remove all but one in order to continue.')
sys.exit(1)
elif branches:
branch = repo.lookup_branch(branches[0])
if branch.branch_name != full_name:
branch.rename(full_name)
branch = repo.lookup_branch(full_name)
else:
base = repo.lookup_branch(self.base_branch)
if not base:
click.secho('Base branch not found: "{}", aborting.'
.format(self.base_branch), fg='red', bold=True)
sys.exit(1)
branch = repo.create_branch(full_name, base.get_object())
return branch
| ...
class SlugBranchGetter(object):
prefix = 'feature/'
def __init__(self, base_branch='master'):
...
def __call__(self, repo, issue):
discriminator = '{}{}'.format(self.prefix, issue.key)
slug = slugify(issue.fields.summary[:30])
... |
566ae40b7f546e3773933217506f917845c8b468 | virtool/subtractions/db.py | virtool/subtractions/db.py | import virtool.utils
PROJECTION = [
"_id",
"file",
"ready",
"job"
]
async def get_linked_samples(db, subtraction_id):
cursor = db.samples.find({"subtraction.id": subtraction_id}, ["name"])
return [virtool.utils.base_processor(d) async for d in cursor]
| import virtool.utils
PROJECTION = [
"_id",
"count",
"file",
"ready",
"job",
"nickname",
"user"
]
async def get_linked_samples(db, subtraction_id):
cursor = db.samples.find({"subtraction.id": subtraction_id}, ["name"])
return [virtool.utils.base_processor(d) async for d in cursor]
| Return more fields in subtraction find API response | Return more fields in subtraction find API response
| Python | mit | igboyes/virtool,virtool/virtool,igboyes/virtool,virtool/virtool | import virtool.utils
PROJECTION = [
"_id",
+ "count",
"file",
"ready",
- "job"
+ "job",
+ "nickname",
+ "user"
]
async def get_linked_samples(db, subtraction_id):
cursor = db.samples.find({"subtraction.id": subtraction_id}, ["name"])
return [virtool.utils.base_processor(d) async for d in cursor]
| Return more fields in subtraction find API response | ## Code Before:
import virtool.utils
PROJECTION = [
"_id",
"file",
"ready",
"job"
]
async def get_linked_samples(db, subtraction_id):
cursor = db.samples.find({"subtraction.id": subtraction_id}, ["name"])
return [virtool.utils.base_processor(d) async for d in cursor]
## Instruction:
Return more fields in subtraction find API response
## Code After:
import virtool.utils
PROJECTION = [
"_id",
"count",
"file",
"ready",
"job",
"nickname",
"user"
]
async def get_linked_samples(db, subtraction_id):
cursor = db.samples.find({"subtraction.id": subtraction_id}, ["name"])
return [virtool.utils.base_processor(d) async for d in cursor]
| // ... existing code ...
"_id",
"count",
"file",
// ... modified code ...
"ready",
"job",
"nickname",
"user"
]
// ... rest of the code ... |
f769360dbb6da83fc8bf9c244c04b3d2f7c49ffa | lab/runnerctl.py | lab/runnerctl.py | import pytest
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
| import pytest
import string
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
@pytest.fixture(scope='class')
def testname(request):
"""Pytest test node name with all unfriendly characters transformed
into underscores. The lifetime is class scoped since this name is
often used to provision remote sw profiles which live for the entirety
of a test suite.
"""
return request.node.name.translate(
string.maketrans('\[', '__')).strip(']')
| Move some fixtures into better places | Move some fixtures into better places
Move datadir into the sipsecmon plugin and testname into
lab.runnerctl.
| Python | mpl-2.0 | sangoma/pytestlab | import pytest
+ import string
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
+
+ @pytest.fixture(scope='class')
+ def testname(request):
+ """Pytest test node name with all unfriendly characters transformed
+ into underscores. The lifetime is class scoped since this name is
+ often used to provision remote sw profiles which live for the entirety
+ of a test suite.
+ """
+ return request.node.name.translate(
+ string.maketrans('\[', '__')).strip(']')
+ | Move some fixtures into better places | ## Code Before:
import pytest
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
## Instruction:
Move some fixtures into better places
## Code After:
import pytest
import string
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
@pytest.fixture(scope='class')
def testname(request):
"""Pytest test node name with all unfriendly characters transformed
into underscores. The lifetime is class scoped since this name is
often used to provision remote sw profiles which live for the entirety
of a test suite.
"""
return request.node.name.translate(
string.maketrans('\[', '__')).strip(']')
| ...
import pytest
import string
...
item.session.shouldstop = pytest.halt.msg
@pytest.fixture(scope='class')
def testname(request):
"""Pytest test node name with all unfriendly characters transformed
into underscores. The lifetime is class scoped since this name is
often used to provision remote sw profiles which live for the entirety
of a test suite.
"""
return request.node.name.translate(
string.maketrans('\[', '__')).strip(']')
... |
759e22f8d629f76d7fca0d0567603c9ae6835fa6 | api_v3/serializers/profile.py | api_v3/serializers/profile.py | from django.conf import settings
from rest_framework import fields
from rest_framework_json_api import serializers
from api_v3.models import Profile, Ticket
class ProfileSerializer(serializers.ModelSerializer):
tickets_count = fields.SerializerMethodField()
class Meta:
model = Profile
read_only_fields = (
'id',
'email',
'first_name',
'last_name',
'is_staff',
'is_superuser',
'locale'
)
fields = (
'id',
'email',
'first_name',
'last_name',
'is_staff',
'is_superuser',
'bio',
'locale',
'tickets_count'
)
def get_tickets_count(self, obj):
if obj.is_superuser:
return Ticket.objects.count()
else:
return Ticket.filter_by_user(obj).count()
def to_representation(self, obj):
request = self.context.get('request', None)
data = super(ProfileSerializer, self).to_representation(obj)
if request and request.user and request.user.is_superuser:
return data
# For regular users, make sure others email is not displayed
if request and request.user != obj:
data.pop('email')
return data
# Adds extra application related metas.
def get_root_meta(self, resource, many):
if not self.context.get('add_misc', None):
return {}
return {
'member_centers': settings.MEMBER_CENTERS,
'expense_scopes': settings.EXPENSE_SCOPES
}
| from django.conf import settings
from rest_framework import fields
from rest_framework_json_api import serializers
from api_v3.models import Profile, Ticket
class ProfileSerializer(serializers.ModelSerializer):
tickets_count = fields.SerializerMethodField()
class Meta:
model = Profile
read_only_fields = (
'id',
'email',
'first_name',
'last_name',
'is_staff',
'is_superuser',
'locale'
)
fields = (
'id',
'email',
'first_name',
'last_name',
'is_staff',
'is_superuser',
'bio',
'locale',
'tickets_count'
)
def get_tickets_count(self, obj):
if obj.is_superuser:
return Ticket.objects.count()
else:
return Ticket.filter_by_user(obj).count()
def to_representation(self, obj):
request = self.context.get('request', None)
data = super(ProfileSerializer, self).to_representation(obj)
if request and request.user and request.user.is_superuser:
return data
# For regular users, make sure others email is not displayed
if request and request.user != obj:
data.pop('email')
return data
# Adds extra application related metas.
def get_root_meta(self, resource, many):
if not self.context.get('add_misc', None):
return {}
return {
'member_centers': sorted(settings.MEMBER_CENTERS),
'expense_scopes': sorted(settings.EXPENSE_SCOPES)
}
| Return sorted member centers and expense scopes. | Return sorted member centers and expense scopes.
| Python | mit | occrp/id-backend | from django.conf import settings
from rest_framework import fields
from rest_framework_json_api import serializers
from api_v3.models import Profile, Ticket
class ProfileSerializer(serializers.ModelSerializer):
tickets_count = fields.SerializerMethodField()
class Meta:
model = Profile
read_only_fields = (
'id',
'email',
'first_name',
'last_name',
'is_staff',
'is_superuser',
'locale'
)
fields = (
'id',
'email',
'first_name',
'last_name',
'is_staff',
'is_superuser',
'bio',
'locale',
'tickets_count'
)
def get_tickets_count(self, obj):
if obj.is_superuser:
return Ticket.objects.count()
else:
return Ticket.filter_by_user(obj).count()
def to_representation(self, obj):
request = self.context.get('request', None)
data = super(ProfileSerializer, self).to_representation(obj)
if request and request.user and request.user.is_superuser:
return data
# For regular users, make sure others email is not displayed
if request and request.user != obj:
data.pop('email')
return data
# Adds extra application related metas.
def get_root_meta(self, resource, many):
if not self.context.get('add_misc', None):
return {}
return {
- 'member_centers': settings.MEMBER_CENTERS,
+ 'member_centers': sorted(settings.MEMBER_CENTERS),
- 'expense_scopes': settings.EXPENSE_SCOPES
+ 'expense_scopes': sorted(settings.EXPENSE_SCOPES)
}
| Return sorted member centers and expense scopes. | ## Code Before:
from django.conf import settings
from rest_framework import fields
from rest_framework_json_api import serializers
from api_v3.models import Profile, Ticket
class ProfileSerializer(serializers.ModelSerializer):
tickets_count = fields.SerializerMethodField()
class Meta:
model = Profile
read_only_fields = (
'id',
'email',
'first_name',
'last_name',
'is_staff',
'is_superuser',
'locale'
)
fields = (
'id',
'email',
'first_name',
'last_name',
'is_staff',
'is_superuser',
'bio',
'locale',
'tickets_count'
)
def get_tickets_count(self, obj):
if obj.is_superuser:
return Ticket.objects.count()
else:
return Ticket.filter_by_user(obj).count()
def to_representation(self, obj):
request = self.context.get('request', None)
data = super(ProfileSerializer, self).to_representation(obj)
if request and request.user and request.user.is_superuser:
return data
# For regular users, make sure others email is not displayed
if request and request.user != obj:
data.pop('email')
return data
# Adds extra application related metas.
def get_root_meta(self, resource, many):
if not self.context.get('add_misc', None):
return {}
return {
'member_centers': settings.MEMBER_CENTERS,
'expense_scopes': settings.EXPENSE_SCOPES
}
## Instruction:
Return sorted member centers and expense scopes.
## Code After:
from django.conf import settings
from rest_framework import fields
from rest_framework_json_api import serializers
from api_v3.models import Profile, Ticket
class ProfileSerializer(serializers.ModelSerializer):
tickets_count = fields.SerializerMethodField()
class Meta:
model = Profile
read_only_fields = (
'id',
'email',
'first_name',
'last_name',
'is_staff',
'is_superuser',
'locale'
)
fields = (
'id',
'email',
'first_name',
'last_name',
'is_staff',
'is_superuser',
'bio',
'locale',
'tickets_count'
)
def get_tickets_count(self, obj):
if obj.is_superuser:
return Ticket.objects.count()
else:
return Ticket.filter_by_user(obj).count()
def to_representation(self, obj):
request = self.context.get('request', None)
data = super(ProfileSerializer, self).to_representation(obj)
if request and request.user and request.user.is_superuser:
return data
# For regular users, make sure others email is not displayed
if request and request.user != obj:
data.pop('email')
return data
# Adds extra application related metas.
def get_root_meta(self, resource, many):
if not self.context.get('add_misc', None):
return {}
return {
'member_centers': sorted(settings.MEMBER_CENTERS),
'expense_scopes': sorted(settings.EXPENSE_SCOPES)
}
| # ... existing code ...
return {
'member_centers': sorted(settings.MEMBER_CENTERS),
'expense_scopes': sorted(settings.EXPENSE_SCOPES)
}
# ... rest of the code ... |
23fa1c55ec9fcbc595260be1039a4b8481cb4f13 | api/comments/views.py | api/comments/views.py | from rest_framework import generics
from api.comments.serializers import CommentSerializer, CommentDetailSerializer
from website.project.model import Comment
from api.base.utils import get_object_or_error
class CommentMixin(object):
"""Mixin with convenience methods for retrieving the current comment based on the
current URL. By default, fetches the comment based on the comment_id kwarg.
"""
serializer_class = CommentSerializer
comment_lookup_url_kwarg = 'comment_id'
def get_comment(self, check_permissions=True):
comment = get_object_or_error(Comment, self.kwargs[self.comment_lookup_url_kwarg])
if check_permissions:
# May raise a permission denied
self.check_object_permissions(self.request, comment)
return comment
class CommentDetail(generics.RetrieveUpdateAPIView, CommentMixin):
"""Details about a specific comment.
"""
# permission classes
# required scopes
serializer_class = CommentDetailSerializer
# overrides RetrieveAPIView
def get_object(self):
return self.get_comment()
| from modularodm import Q
from modularodm.exceptions import NoResultsFound
from rest_framework import generics
from rest_framework.exceptions import NotFound
from api.comments.serializers import CommentSerializer, CommentDetailSerializer
from website.project.model import Comment
class CommentMixin(object):
"""Mixin with convenience methods for retrieving the current comment based on the
current URL. By default, fetches the comment based on the comment_id kwarg.
"""
serializer_class = CommentSerializer
comment_lookup_url_kwarg = 'comment_id'
def get_comment(self, check_permissions=True):
pk = self.kwargs[self.comment_lookup_url_kwarg]
query = Q('_id', 'eq', pk)
try:
comment = Comment.find_one(query)
except NoResultsFound:
raise NotFound
if check_permissions:
# May raise a permission denied
self.check_object_permissions(self.request, comment)
return comment
class CommentDetail(generics.RetrieveUpdateAPIView, CommentMixin):
"""Details about a specific comment.
"""
# permission classes
# required scopes
serializer_class = CommentDetailSerializer
# overrides RetrieveAPIView
def get_object(self):
return self.get_comment()
| Return deleted comments instead of throwing error | Return deleted comments instead of throwing error
| Python | apache-2.0 | kch8qx/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,kch8qx/osf.io,ZobairAlijan/osf.io,acshi/osf.io,DanielSBrown/osf.io,GageGaskins/osf.io,icereval/osf.io,wearpants/osf.io,mfraezz/osf.io,acshi/osf.io,jnayak1/osf.io,crcresearch/osf.io,danielneis/osf.io,brandonPurvis/osf.io,samanehsan/osf.io,samanehsan/osf.io,SSJohns/osf.io,samanehsan/osf.io,hmoco/osf.io,aaxelb/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,baylee-d/osf.io,rdhyee/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,brandonPurvis/osf.io,ticklemepierce/osf.io,leb2dg/osf.io,brandonPurvis/osf.io,TomBaxter/osf.io,kch8qx/osf.io,monikagrabowska/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,cwisecarver/osf.io,KAsante95/osf.io,felliott/osf.io,erinspace/osf.io,Nesiehr/osf.io,alexschiller/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,abought/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,samanehsan/osf.io,HalcyonChimera/osf.io,jnayak1/osf.io,emetsger/osf.io,rdhyee/osf.io,cslzchen/osf.io,ZobairAlijan/osf.io,abought/osf.io,alexschiller/osf.io,sloria/osf.io,doublebits/osf.io,KAsante95/osf.io,brianjgeiger/osf.io,jnayak1/osf.io,samchrisinger/osf.io,RomanZWang/osf.io,mattclark/osf.io,mluo613/osf.io,GageGaskins/osf.io,DanielSBrown/osf.io,leb2dg/osf.io,binoculars/osf.io,Nesiehr/osf.io,hmoco/osf.io,TomHeatwole/osf.io,doublebits/osf.io,caneruguz/osf.io,KAsante95/osf.io,mluo613/osf.io,kwierman/osf.io,crcresearch/osf.io,abought/osf.io,caseyrollins/osf.io,saradbowman/osf.io,saradbowman/osf.io,chrisseto/osf.io,danielneis/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,caneruguz/osf.io,mluo613/osf.io,mluo613/osf.io,amyshi188/osf.io,adlius/osf.io,RomanZWang/osf.io,DanielSBrown/osf.io,monikagrabowska/osf.io,TomBaxter/osf.io,pattisdr/osf.io,Ghalko/osf.io,HalcyonChimera/osf.io,zamattiac/osf.io,mfraezz/osf.io,Nesiehr/osf.io,SSJohns/osf.io,caneruguz/osf.io,hmoco/osf.io,emetsger/osf.io,brianjgeiger/osf.io,zachjanicki/osf.io,abought/osf.io,ticklemepierce/osf.io,brandonPurvis/osf.io,aaxelb/osf.i
o,kch8qx/osf.io,monikagrabowska/osf.io,asanfilippo7/osf.io,ticklemepierce/osf.io,caseyrygt/osf.io,hmoco/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,caseyrollins/osf.io,wearpants/osf.io,leb2dg/osf.io,mluke93/osf.io,amyshi188/osf.io,zamattiac/osf.io,mfraezz/osf.io,mattclark/osf.io,RomanZWang/osf.io,wearpants/osf.io,samchrisinger/osf.io,TomHeatwole/osf.io,icereval/osf.io,amyshi188/osf.io,mattclark/osf.io,chennan47/osf.io,billyhunt/osf.io,wearpants/osf.io,alexschiller/osf.io,cslzchen/osf.io,KAsante95/osf.io,leb2dg/osf.io,billyhunt/osf.io,felliott/osf.io,erinspace/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,ZobairAlijan/osf.io,chennan47/osf.io,aaxelb/osf.io,ZobairAlijan/osf.io,cwisecarver/osf.io,acshi/osf.io,binoculars/osf.io,kch8qx/osf.io,danielneis/osf.io,acshi/osf.io,erinspace/osf.io,alexschiller/osf.io,cslzchen/osf.io,adlius/osf.io,billyhunt/osf.io,laurenrevere/osf.io,GageGaskins/osf.io,mfraezz/osf.io,samchrisinger/osf.io,laurenrevere/osf.io,crcresearch/osf.io,kwierman/osf.io,pattisdr/osf.io,asanfilippo7/osf.io,sloria/osf.io,samchrisinger/osf.io,doublebits/osf.io,asanfilippo7/osf.io,SSJohns/osf.io,mluo613/osf.io,brandonPurvis/osf.io,mluke93/osf.io,mluke93/osf.io,doublebits/osf.io,emetsger/osf.io,Ghalko/osf.io,caseyrygt/osf.io,chrisseto/osf.io,chennan47/osf.io,caseyrygt/osf.io,doublebits/osf.io,Ghalko/osf.io,zamattiac/osf.io,sloria/osf.io,Johnetordoff/osf.io,KAsante95/osf.io,pattisdr/osf.io,amyshi188/osf.io,asanfilippo7/osf.io,laurenrevere/osf.io,alexschiller/osf.io,HalcyonChimera/osf.io,chrisseto/osf.io,RomanZWang/osf.io,TomHeatwole/osf.io,GageGaskins/osf.io,GageGaskins/osf.io,Nesiehr/osf.io,RomanZWang/osf.io,billyhunt/osf.io,felliott/osf.io,rdhyee/osf.io,Ghalko/osf.io,ticklemepierce/osf.io,caseyrygt/osf.io,CenterForOpenScience/osf.io,DanielSBrown/osf.io,cwisecarver/osf.io,mluke93/osf.io,adlius/osf.io,danielneis/osf.io,billyhunt/osf.io,caneruguz/osf.io,kwierman/osf.io,zachjanicki/osf.io,chrisseto/osf.io,icereval/osf.io,felliott/osf.io,kwierman/osf.io,zach
janicki/osf.io,TomHeatwole/osf.io,zamattiac/osf.io,binoculars/osf.io,acshi/osf.io,emetsger/osf.io,rdhyee/osf.io,adlius/osf.io,TomBaxter/osf.io,jnayak1/osf.io | + from modularodm import Q
+ from modularodm.exceptions import NoResultsFound
from rest_framework import generics
+ from rest_framework.exceptions import NotFound
from api.comments.serializers import CommentSerializer, CommentDetailSerializer
from website.project.model import Comment
- from api.base.utils import get_object_or_error
class CommentMixin(object):
"""Mixin with convenience methods for retrieving the current comment based on the
current URL. By default, fetches the comment based on the comment_id kwarg.
"""
serializer_class = CommentSerializer
comment_lookup_url_kwarg = 'comment_id'
def get_comment(self, check_permissions=True):
- comment = get_object_or_error(Comment, self.kwargs[self.comment_lookup_url_kwarg])
+ pk = self.kwargs[self.comment_lookup_url_kwarg]
+ query = Q('_id', 'eq', pk)
+ try:
+ comment = Comment.find_one(query)
+ except NoResultsFound:
+ raise NotFound
if check_permissions:
# May raise a permission denied
self.check_object_permissions(self.request, comment)
return comment
class CommentDetail(generics.RetrieveUpdateAPIView, CommentMixin):
"""Details about a specific comment.
"""
# permission classes
# required scopes
serializer_class = CommentDetailSerializer
# overrides RetrieveAPIView
def get_object(self):
return self.get_comment()
| Return deleted comments instead of throwing error | ## Code Before:
from rest_framework import generics
from api.comments.serializers import CommentSerializer, CommentDetailSerializer
from website.project.model import Comment
from api.base.utils import get_object_or_error
class CommentMixin(object):
"""Mixin with convenience methods for retrieving the current comment based on the
current URL. By default, fetches the comment based on the comment_id kwarg.
"""
serializer_class = CommentSerializer
comment_lookup_url_kwarg = 'comment_id'
def get_comment(self, check_permissions=True):
comment = get_object_or_error(Comment, self.kwargs[self.comment_lookup_url_kwarg])
if check_permissions:
# May raise a permission denied
self.check_object_permissions(self.request, comment)
return comment
class CommentDetail(generics.RetrieveUpdateAPIView, CommentMixin):
"""Details about a specific comment.
"""
# permission classes
# required scopes
serializer_class = CommentDetailSerializer
# overrides RetrieveAPIView
def get_object(self):
return self.get_comment()
## Instruction:
Return deleted comments instead of throwing error
## Code After:
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from rest_framework import generics
from rest_framework.exceptions import NotFound
from api.comments.serializers import CommentSerializer, CommentDetailSerializer
from website.project.model import Comment
class CommentMixin(object):
"""Mixin with convenience methods for retrieving the current comment based on the
current URL. By default, fetches the comment based on the comment_id kwarg.
"""
serializer_class = CommentSerializer
comment_lookup_url_kwarg = 'comment_id'
def get_comment(self, check_permissions=True):
pk = self.kwargs[self.comment_lookup_url_kwarg]
query = Q('_id', 'eq', pk)
try:
comment = Comment.find_one(query)
except NoResultsFound:
raise NotFound
if check_permissions:
# May raise a permission denied
self.check_object_permissions(self.request, comment)
return comment
class CommentDetail(generics.RetrieveUpdateAPIView, CommentMixin):
"""Details about a specific comment.
"""
# permission classes
# required scopes
serializer_class = CommentDetailSerializer
# overrides RetrieveAPIView
def get_object(self):
return self.get_comment()
| ...
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from rest_framework import generics
from rest_framework.exceptions import NotFound
from api.comments.serializers import CommentSerializer, CommentDetailSerializer
...
from website.project.model import Comment
...
def get_comment(self, check_permissions=True):
pk = self.kwargs[self.comment_lookup_url_kwarg]
query = Q('_id', 'eq', pk)
try:
comment = Comment.find_one(query)
except NoResultsFound:
raise NotFound
... |
8b34daa2a61422c79fef2afd9137bf2f1c2a5b12 | project3/webscale/synthesizer/admin.py | project3/webscale/synthesizer/admin.py | from django.contrib import admin
from .models import ApplicationTable,OldUser,Snippit,SnippitData,HolesTable,GoogleAuth,Comment
# Register your models here.
admin.site.register(ApplicationTable)
admin.site.register(OldUser)
admin.site.register(Snippit)
admin.site.register(SnippitData)
admin.site.register(HolesTable)
admin.site.register(GoogleAuth)
admin.site.register(Comment)
| from django.contrib import admin
from .models import ApplicationTable,Snippit,SnippitData,HolesTable,GoogleAuth,Comment
# Register your models here.
admin.site.register(ApplicationTable)
admin.site.register(Snippit)
admin.site.register(SnippitData)
admin.site.register(HolesTable)
admin.site.register(GoogleAuth)
admin.site.register(Comment)
| Delete calls to Old User model | Delete calls to Old User model
| Python | mit | 326-WEBSCALE/webscale-synthesizer,326-WEBSCALE/webscale-synthesizer,326-WEBSCALE/webscale-synthesizer,326-WEBSCALE/webscale-synthesizer,326-WEBSCALE/webscale-synthesizer | from django.contrib import admin
- from .models import ApplicationTable,OldUser,Snippit,SnippitData,HolesTable,GoogleAuth,Comment
+ from .models import ApplicationTable,Snippit,SnippitData,HolesTable,GoogleAuth,Comment
# Register your models here.
admin.site.register(ApplicationTable)
- admin.site.register(OldUser)
admin.site.register(Snippit)
admin.site.register(SnippitData)
admin.site.register(HolesTable)
admin.site.register(GoogleAuth)
admin.site.register(Comment)
| Delete calls to Old User model | ## Code Before:
from django.contrib import admin
from .models import ApplicationTable,OldUser,Snippit,SnippitData,HolesTable,GoogleAuth,Comment
# Register your models here.
admin.site.register(ApplicationTable)
admin.site.register(OldUser)
admin.site.register(Snippit)
admin.site.register(SnippitData)
admin.site.register(HolesTable)
admin.site.register(GoogleAuth)
admin.site.register(Comment)
## Instruction:
Delete calls to Old User model
## Code After:
from django.contrib import admin
from .models import ApplicationTable,Snippit,SnippitData,HolesTable,GoogleAuth,Comment
# Register your models here.
admin.site.register(ApplicationTable)
admin.site.register(Snippit)
admin.site.register(SnippitData)
admin.site.register(HolesTable)
admin.site.register(GoogleAuth)
admin.site.register(Comment)
| # ... existing code ...
from django.contrib import admin
from .models import ApplicationTable,Snippit,SnippitData,HolesTable,GoogleAuth,Comment
# Register your models here.
# ... modified code ...
admin.site.register(ApplicationTable)
admin.site.register(Snippit)
# ... rest of the code ... |
7f44c6a114f95c25b533c9b69988798ba3919d68 | wger/email/forms.py | wger/email/forms.py |
from django.utils.translation import (
pgettext,
ugettext_lazy as _
)
from django.forms import (
Form,
CharField,
Textarea
)
class EmailListForm(Form):
'''
Small form to send emails
'''
subject = CharField(label=pgettext('Subject', 'As in "email subject"'))
body = CharField(widget=Textarea, label=pgettext('Content', 'As in "content of an email"'))
|
from django.utils.translation import (
pgettext,
ugettext_lazy as _
)
from django.forms import (
Form,
CharField,
Textarea
)
class EmailListForm(Form):
'''
Small form to send emails
'''
subject = CharField(label=pgettext('As in "email subject"', 'Subject'))
body = CharField(widget=Textarea, label=pgettext('As in "content of an email"', 'Content'))
| Use correct order of arguments of pgettext | Use correct order of arguments of pgettext
| Python | agpl-3.0 | rolandgeider/wger,rolandgeider/wger,wger-project/wger,DeveloperMal/wger,DeveloperMal/wger,wger-project/wger,rolandgeider/wger,kjagoo/wger_stark,petervanderdoes/wger,rolandgeider/wger,petervanderdoes/wger,wger-project/wger,wger-project/wger,petervanderdoes/wger,DeveloperMal/wger,kjagoo/wger_stark,kjagoo/wger_stark,petervanderdoes/wger,kjagoo/wger_stark,DeveloperMal/wger |
from django.utils.translation import (
pgettext,
ugettext_lazy as _
)
from django.forms import (
Form,
CharField,
Textarea
)
class EmailListForm(Form):
'''
Small form to send emails
'''
- subject = CharField(label=pgettext('Subject', 'As in "email subject"'))
+ subject = CharField(label=pgettext('As in "email subject"', 'Subject'))
- body = CharField(widget=Textarea, label=pgettext('Content', 'As in "content of an email"'))
+ body = CharField(widget=Textarea, label=pgettext('As in "content of an email"', 'Content'))
| Use correct order of arguments of pgettext | ## Code Before:
from django.utils.translation import (
pgettext,
ugettext_lazy as _
)
from django.forms import (
Form,
CharField,
Textarea
)
class EmailListForm(Form):
'''
Small form to send emails
'''
subject = CharField(label=pgettext('Subject', 'As in "email subject"'))
body = CharField(widget=Textarea, label=pgettext('Content', 'As in "content of an email"'))
## Instruction:
Use correct order of arguments of pgettext
## Code After:
from django.utils.translation import (
pgettext,
ugettext_lazy as _
)
from django.forms import (
Form,
CharField,
Textarea
)
class EmailListForm(Form):
'''
Small form to send emails
'''
subject = CharField(label=pgettext('As in "email subject"', 'Subject'))
body = CharField(widget=Textarea, label=pgettext('As in "content of an email"', 'Content'))
| // ... existing code ...
subject = CharField(label=pgettext('As in "email subject"', 'Subject'))
body = CharField(widget=Textarea, label=pgettext('As in "content of an email"', 'Content'))
// ... rest of the code ... |
0f427ed334f8a58e888872d60419709cfd6f41c3 | var/spack/repos/builtin/packages/nccmp/package.py | var/spack/repos/builtin/packages/nccmp/package.py | from spack import *
import os
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
os.environ['FC'] = os.environ['F90']
del os.environ['F90']
try:
os.environ['FCFLAGS'] = os.environ['F90FLAGS']
del os.environ['F90FLAGS']
except KeyError: # There are no flags
pass
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
| from spack import *
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
env.pop('F90', None)
env.pop('F90FLAGS', None)
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
| Tweak nccmp to be more spack-compatible. | Tweak nccmp to be more spack-compatible.
- Spack doesn't set F90, but it confuses the nccmp build. Just remove
it from the environment.
- TODO: should build environment unset this variable?
| Python | lgpl-2.1 | skosukhin/spack,matthiasdiener/spack,EmreAtes/spack,iulian787/spack,mfherbst/spack,matthiasdiener/spack,iulian787/spack,tmerrick1/spack,TheTimmy/spack,iulian787/spack,EmreAtes/spack,TheTimmy/spack,krafczyk/spack,LLNL/spack,lgarren/spack,TheTimmy/spack,iulian787/spack,iulian787/spack,tmerrick1/spack,lgarren/spack,matthiasdiener/spack,tmerrick1/spack,skosukhin/spack,EmreAtes/spack,lgarren/spack,mfherbst/spack,tmerrick1/spack,lgarren/spack,lgarren/spack,matthiasdiener/spack,krafczyk/spack,skosukhin/spack,matthiasdiener/spack,EmreAtes/spack,krafczyk/spack,mfherbst/spack,TheTimmy/spack,TheTimmy/spack,LLNL/spack,LLNL/spack,skosukhin/spack,mfherbst/spack,skosukhin/spack,LLNL/spack,krafczyk/spack,tmerrick1/spack,EmreAtes/spack,mfherbst/spack,LLNL/spack,krafczyk/spack | from spack import *
- import os
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
+ env.pop('F90', None)
+ env.pop('F90FLAGS', None)
- os.environ['FC'] = os.environ['F90']
- del os.environ['F90']
- try:
- os.environ['FCFLAGS'] = os.environ['F90FLAGS']
- del os.environ['F90FLAGS']
- except KeyError: # There are no flags
- pass
configure('--prefix=%s' % prefix)
-
make()
make("check")
make("install")
| Tweak nccmp to be more spack-compatible. | ## Code Before:
from spack import *
import os
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
os.environ['FC'] = os.environ['F90']
del os.environ['F90']
try:
os.environ['FCFLAGS'] = os.environ['F90FLAGS']
del os.environ['F90FLAGS']
except KeyError: # There are no flags
pass
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
## Instruction:
Tweak nccmp to be more spack-compatible.
## Code After:
from spack import *
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
env.pop('F90', None)
env.pop('F90FLAGS', None)
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
| # ... existing code ...
from spack import *
# ... modified code ...
# again.
env.pop('F90', None)
env.pop('F90FLAGS', None)
...
configure('--prefix=%s' % prefix)
make()
# ... rest of the code ... |
d12be22b5427a1433dd2ff7b1d2f97951d2b9c0f | pycon/migrations/0002_remove_old_google_openid_auths.py | pycon/migrations/0002_remove_old_google_openid_auths.py | from __future__ import unicode_literals
"""
Google OpenID auth has been turned off, so any associations that
users had to their Google accounts via Google OpenID are now useless.
Just remove them.
"""
from django.db import migrations
def no_op(apps, schema_editor):
pass
def remove_old_google_openid_auths(apps, schema_editor):
UserSocialAuth = apps.get_model('social_auth', 'UserSocialAuth')
db_alias = schema_editor.connection.alias
UserSocialAuth.objects.using(db_alias).filter(provider='google').delete()
class Migration(migrations.Migration):
dependencies = [
('pycon', '0001_initial'),
('social_auth', '0001_initial'),
]
operations = [
migrations.RunPython(remove_old_google_openid_auths, no_op),
]
| from __future__ import unicode_literals
"""
Google OpenID auth has been turned off, so any associations that
users had to their Google accounts via Google OpenID are now useless.
Just remove them.
"""
from django.db import migrations
def no_op(apps, schema_editor):
pass
def remove_old_google_openid_auths(apps, schema_editor):
UserSocialAuth = apps.get_model('social_auth', 'UserSocialAuth')
db_alias = schema_editor.connection.alias
UserSocialAuth.objects.using(db_alias).filter(provider='google').delete()
class Migration(migrations.Migration):
dependencies = [
('pycon', '0001_initial'),
]
operations = [
migrations.RunPython(remove_old_google_openid_auths, no_op),
]
| Undo premature fix for dependency | Undo premature fix for dependency
| Python | bsd-3-clause | Diwahars/pycon,PyCon/pycon,njl/pycon,njl/pycon,njl/pycon,PyCon/pycon,Diwahars/pycon,Diwahars/pycon,PyCon/pycon,PyCon/pycon,Diwahars/pycon,njl/pycon | from __future__ import unicode_literals
"""
Google OpenID auth has been turned off, so any associations that
users had to their Google accounts via Google OpenID are now useless.
Just remove them.
"""
from django.db import migrations
def no_op(apps, schema_editor):
pass
def remove_old_google_openid_auths(apps, schema_editor):
UserSocialAuth = apps.get_model('social_auth', 'UserSocialAuth')
db_alias = schema_editor.connection.alias
UserSocialAuth.objects.using(db_alias).filter(provider='google').delete()
class Migration(migrations.Migration):
dependencies = [
('pycon', '0001_initial'),
- ('social_auth', '0001_initial'),
]
operations = [
migrations.RunPython(remove_old_google_openid_auths, no_op),
]
| Undo premature fix for dependency | ## Code Before:
from __future__ import unicode_literals
"""
Google OpenID auth has been turned off, so any associations that
users had to their Google accounts via Google OpenID are now useless.
Just remove them.
"""
from django.db import migrations
def no_op(apps, schema_editor):
pass
def remove_old_google_openid_auths(apps, schema_editor):
UserSocialAuth = apps.get_model('social_auth', 'UserSocialAuth')
db_alias = schema_editor.connection.alias
UserSocialAuth.objects.using(db_alias).filter(provider='google').delete()
class Migration(migrations.Migration):
dependencies = [
('pycon', '0001_initial'),
('social_auth', '0001_initial'),
]
operations = [
migrations.RunPython(remove_old_google_openid_auths, no_op),
]
## Instruction:
Undo premature fix for dependency
## Code After:
from __future__ import unicode_literals
"""
Google OpenID auth has been turned off, so any associations that
users had to their Google accounts via Google OpenID are now useless.
Just remove them.
"""
from django.db import migrations
def no_op(apps, schema_editor):
pass
def remove_old_google_openid_auths(apps, schema_editor):
UserSocialAuth = apps.get_model('social_auth', 'UserSocialAuth')
db_alias = schema_editor.connection.alias
UserSocialAuth.objects.using(db_alias).filter(provider='google').delete()
class Migration(migrations.Migration):
dependencies = [
('pycon', '0001_initial'),
]
operations = [
migrations.RunPython(remove_old_google_openid_auths, no_op),
]
| // ... existing code ...
('pycon', '0001_initial'),
]
// ... rest of the code ... |
09e4dd8736d6e829b779dd14b882e0e1d7f5abb9 | tester/register/prepare_test.py | tester/register/prepare_test.py |
import sys
import os
import argparse
def write_csv(filename, nb_users):
with open(filename, "w") as csv_file:
csv_file.write("SEQUENTIAL\n")
for x in xrange(nb_users):
line = "{uname};localhost;[authentication username={uname} password={uname}];\n".format(uname=str(1000+x))
csv_file.write(line)
def write_sql(filename, nb_users):
with open(filename, "w") as sql_file:
header = """DROP DATABASE tests;
CREATE DATABASE tests;
USE tests;
CREATE TABLE accounts (user VARCHAR(20),password VARCHAR(20));"""
sql_file.write(header)
for x in xrange(nb_users):
line = """INSERT into accounts (user, password) VALUES ("{uname}", "{uname}");\n""".format(uname=str(1000+x))
sql_file.write(line)
def main(argv=None):
if argv == None:
argv = sys.argv
argparser = argparse.ArgumentParser(description="Prepare load tests for Flexisip.")
argparser.add_argument('-N', '--users', help="How many different users should be registering to flexisip", dest="users", default=5000)
args, additional_args = argparser.parse_known_args()
write_csv("users.csv", args.users)
write_sql("users.sql", args.users)
if __name__ == '__main__':
main()
|
import sys
import os
import argparse
def write_csv(filename, nb_users):
with open(filename, "w") as csv_file:
csv_file.write("SEQUENTIAL\n")
for x in xrange(nb_users):
line = "{uname};localhost;[authentication username={uname} password={uname}];\n".format(uname=str(1000+x))
csv_file.write(line)
def write_sql(filename, nb_users):
with open(filename, "w") as sql_file:
header = """DROP DATABASE IF EXISTS tests;
CREATE DATABASE tests;
USE tests;
CREATE TABLE accounts (user VARCHAR(20),password VARCHAR(20));"""
sql_file.write(header)
for x in xrange(nb_users):
line = """INSERT into accounts (user, password) VALUES ("{uname}", "{uname}");\n""".format(uname=str(1000+x))
sql_file.write(line)
def main(argv=None):
if argv == None:
argv = sys.argv
argparser = argparse.ArgumentParser(description="Prepare load tests for Flexisip.")
argparser.add_argument('-N', '--users', help="How many different users should be registering to flexisip", dest="users", default=5000)
args, additional_args = argparser.parse_known_args()
write_csv("users.csv", args.users)
write_sql("users.sql", args.users)
if __name__ == '__main__':
main()
| DROP DATABASE IF EXISTS in tests. | DROP DATABASE IF EXISTS in tests. | Python | agpl-3.0 | BelledonneCommunications/flexisip,BelledonneCommunications/flexisip,BelledonneCommunications/flexisip,BelledonneCommunications/flexisip |
import sys
import os
import argparse
def write_csv(filename, nb_users):
with open(filename, "w") as csv_file:
csv_file.write("SEQUENTIAL\n")
for x in xrange(nb_users):
line = "{uname};localhost;[authentication username={uname} password={uname}];\n".format(uname=str(1000+x))
csv_file.write(line)
def write_sql(filename, nb_users):
with open(filename, "w") as sql_file:
- header = """DROP DATABASE tests;
+ header = """DROP DATABASE IF EXISTS tests;
CREATE DATABASE tests;
USE tests;
CREATE TABLE accounts (user VARCHAR(20),password VARCHAR(20));"""
sql_file.write(header)
for x in xrange(nb_users):
line = """INSERT into accounts (user, password) VALUES ("{uname}", "{uname}");\n""".format(uname=str(1000+x))
sql_file.write(line)
def main(argv=None):
if argv == None:
argv = sys.argv
argparser = argparse.ArgumentParser(description="Prepare load tests for Flexisip.")
argparser.add_argument('-N', '--users', help="How many different users should be registering to flexisip", dest="users", default=5000)
args, additional_args = argparser.parse_known_args()
write_csv("users.csv", args.users)
write_sql("users.sql", args.users)
if __name__ == '__main__':
main()
| DROP DATABASE IF EXISTS in tests. | ## Code Before:
import sys
import os
import argparse
def write_csv(filename, nb_users):
with open(filename, "w") as csv_file:
csv_file.write("SEQUENTIAL\n")
for x in xrange(nb_users):
line = "{uname};localhost;[authentication username={uname} password={uname}];\n".format(uname=str(1000+x))
csv_file.write(line)
def write_sql(filename, nb_users):
with open(filename, "w") as sql_file:
header = """DROP DATABASE tests;
CREATE DATABASE tests;
USE tests;
CREATE TABLE accounts (user VARCHAR(20),password VARCHAR(20));"""
sql_file.write(header)
for x in xrange(nb_users):
line = """INSERT into accounts (user, password) VALUES ("{uname}", "{uname}");\n""".format(uname=str(1000+x))
sql_file.write(line)
def main(argv=None):
if argv == None:
argv = sys.argv
argparser = argparse.ArgumentParser(description="Prepare load tests for Flexisip.")
argparser.add_argument('-N', '--users', help="How many different users should be registering to flexisip", dest="users", default=5000)
args, additional_args = argparser.parse_known_args()
write_csv("users.csv", args.users)
write_sql("users.sql", args.users)
if __name__ == '__main__':
main()
## Instruction:
DROP DATABASE IF EXISTS in tests.
## Code After:
import sys
import os
import argparse
def write_csv(filename, nb_users):
with open(filename, "w") as csv_file:
csv_file.write("SEQUENTIAL\n")
for x in xrange(nb_users):
line = "{uname};localhost;[authentication username={uname} password={uname}];\n".format(uname=str(1000+x))
csv_file.write(line)
def write_sql(filename, nb_users):
with open(filename, "w") as sql_file:
header = """DROP DATABASE IF EXISTS tests;
CREATE DATABASE tests;
USE tests;
CREATE TABLE accounts (user VARCHAR(20),password VARCHAR(20));"""
sql_file.write(header)
for x in xrange(nb_users):
line = """INSERT into accounts (user, password) VALUES ("{uname}", "{uname}");\n""".format(uname=str(1000+x))
sql_file.write(line)
def main(argv=None):
if argv == None:
argv = sys.argv
argparser = argparse.ArgumentParser(description="Prepare load tests for Flexisip.")
argparser.add_argument('-N', '--users', help="How many different users should be registering to flexisip", dest="users", default=5000)
args, additional_args = argparser.parse_known_args()
write_csv("users.csv", args.users)
write_sql("users.sql", args.users)
if __name__ == '__main__':
main()
| ...
with open(filename, "w") as sql_file:
header = """DROP DATABASE IF EXISTS tests;
CREATE DATABASE tests;
... |
b9bd647cfd8def947838cb35c266b3b9ac855201 | test_apriori.py | test_apriori.py | import unittest
from itertools import chain
from apriori import subsets
class AprioriTest(unittest.TestCase):
def test_subsets_should_return_empty_subsets_if_input_empty_set(self):
result = tuple(subsets(set([])))
self.assertEqual(result, ())
def test_subsets_should_return_non_empty_subsets(self):
result = tuple(subsets(set(['beer', 'rice'])))
self.assertEqual(result[0], ('beer',))
self.assertEqual(result[1], ('rice',))
self.assertEqual(result[2], ('beer', 'rice',))
if __name__ == '__main__':
unittest.main()
| from collections import defaultdict
from itertools import chain
import unittest
from apriori import (
subsets,
returnItemsWithMinSupport,
)
class AprioriTest(unittest.TestCase):
def test_subsets_should_return_empty_subsets_if_input_empty_set(self):
result = tuple(subsets(frozenset([])))
self.assertEqual(result, ())
def test_subsets_should_return_non_empty_subsets(self):
result = tuple(subsets(frozenset(['beer', 'rice'])))
self.assertEqual(result[0], ('beer',))
self.assertEqual(result[1], ('rice',))
self.assertEqual(result[2], ('beer', 'rice',))
def test_return_items_with_min_support(self):
itemSet = set([
frozenset(['apple']),
frozenset(['beer']),
frozenset(['chicken']),
frozenset(['mango']),
frozenset(['milk']),
frozenset(['rice'])
])
transactionList = [
frozenset(['beer', 'rice', 'apple', 'chicken']),
frozenset(['beer', 'rice', 'apple']),
frozenset(['beer', 'apple']),
frozenset(['mango', 'apple']),
frozenset(['beer', 'rice', 'milk', 'chicken']),
frozenset(['beer', 'rice', 'milk']),
frozenset(['beer', 'milk']),
frozenset(['mango', 'milk'])
]
minSupport = 0.5
freqSet = defaultdict(int)
result = returnItemsWithMinSupport(
itemSet,
transactionList,
minSupport,
freqSet
)
expected = set([
frozenset(['milk']),
frozenset(['apple']),
frozenset(['beer']),
frozenset(['rice'])
])
self.assertEqual(result, expected)
expected = defaultdict(
int,
{
frozenset(['apple']): 4,
frozenset(['beer']): 6,
frozenset(['chicken']): 2,
frozenset(['mango']): 2,
frozenset(['milk']): 4,
frozenset(['rice']): 4
}
)
self.assertEqual(freqSet, expected)
if __name__ == '__main__':
unittest.main()
| Test returning items with minimum support | Test returning items with minimum support
| Python | mit | asaini/Apriori,gst-group/apriori_demo | + from collections import defaultdict
+ from itertools import chain
import unittest
- from itertools import chain
- from apriori import subsets
+ from apriori import (
+ subsets,
+ returnItemsWithMinSupport,
+ )
class AprioriTest(unittest.TestCase):
def test_subsets_should_return_empty_subsets_if_input_empty_set(self):
- result = tuple(subsets(set([])))
+ result = tuple(subsets(frozenset([])))
self.assertEqual(result, ())
def test_subsets_should_return_non_empty_subsets(self):
- result = tuple(subsets(set(['beer', 'rice'])))
+ result = tuple(subsets(frozenset(['beer', 'rice'])))
self.assertEqual(result[0], ('beer',))
self.assertEqual(result[1], ('rice',))
self.assertEqual(result[2], ('beer', 'rice',))
+ def test_return_items_with_min_support(self):
+ itemSet = set([
+ frozenset(['apple']),
+ frozenset(['beer']),
+ frozenset(['chicken']),
+ frozenset(['mango']),
+ frozenset(['milk']),
+ frozenset(['rice'])
+ ])
+ transactionList = [
+ frozenset(['beer', 'rice', 'apple', 'chicken']),
+ frozenset(['beer', 'rice', 'apple']),
+ frozenset(['beer', 'apple']),
+ frozenset(['mango', 'apple']),
+ frozenset(['beer', 'rice', 'milk', 'chicken']),
+ frozenset(['beer', 'rice', 'milk']),
+ frozenset(['beer', 'milk']),
+ frozenset(['mango', 'milk'])
+ ]
+ minSupport = 0.5
+ freqSet = defaultdict(int)
+
+ result = returnItemsWithMinSupport(
+ itemSet,
+ transactionList,
+ minSupport,
+ freqSet
+ )
+
+ expected = set([
+ frozenset(['milk']),
+ frozenset(['apple']),
+ frozenset(['beer']),
+ frozenset(['rice'])
+ ])
+ self.assertEqual(result, expected)
+
+ expected = defaultdict(
+ int,
+ {
+ frozenset(['apple']): 4,
+ frozenset(['beer']): 6,
+ frozenset(['chicken']): 2,
+ frozenset(['mango']): 2,
+ frozenset(['milk']): 4,
+ frozenset(['rice']): 4
+ }
+ )
+ self.assertEqual(freqSet, expected)
+
if __name__ == '__main__':
unittest.main()
| Test returning items with minimum support | ## Code Before:
import unittest
from itertools import chain
from apriori import subsets
class AprioriTest(unittest.TestCase):
def test_subsets_should_return_empty_subsets_if_input_empty_set(self):
result = tuple(subsets(set([])))
self.assertEqual(result, ())
def test_subsets_should_return_non_empty_subsets(self):
result = tuple(subsets(set(['beer', 'rice'])))
self.assertEqual(result[0], ('beer',))
self.assertEqual(result[1], ('rice',))
self.assertEqual(result[2], ('beer', 'rice',))
if __name__ == '__main__':
unittest.main()
## Instruction:
Test returning items with minimum support
## Code After:
from collections import defaultdict
from itertools import chain
import unittest
from apriori import (
subsets,
returnItemsWithMinSupport,
)
class AprioriTest(unittest.TestCase):
def test_subsets_should_return_empty_subsets_if_input_empty_set(self):
result = tuple(subsets(frozenset([])))
self.assertEqual(result, ())
def test_subsets_should_return_non_empty_subsets(self):
result = tuple(subsets(frozenset(['beer', 'rice'])))
self.assertEqual(result[0], ('beer',))
self.assertEqual(result[1], ('rice',))
self.assertEqual(result[2], ('beer', 'rice',))
def test_return_items_with_min_support(self):
itemSet = set([
frozenset(['apple']),
frozenset(['beer']),
frozenset(['chicken']),
frozenset(['mango']),
frozenset(['milk']),
frozenset(['rice'])
])
transactionList = [
frozenset(['beer', 'rice', 'apple', 'chicken']),
frozenset(['beer', 'rice', 'apple']),
frozenset(['beer', 'apple']),
frozenset(['mango', 'apple']),
frozenset(['beer', 'rice', 'milk', 'chicken']),
frozenset(['beer', 'rice', 'milk']),
frozenset(['beer', 'milk']),
frozenset(['mango', 'milk'])
]
minSupport = 0.5
freqSet = defaultdict(int)
result = returnItemsWithMinSupport(
itemSet,
transactionList,
minSupport,
freqSet
)
expected = set([
frozenset(['milk']),
frozenset(['apple']),
frozenset(['beer']),
frozenset(['rice'])
])
self.assertEqual(result, expected)
expected = defaultdict(
int,
{
frozenset(['apple']): 4,
frozenset(['beer']): 6,
frozenset(['chicken']): 2,
frozenset(['mango']): 2,
frozenset(['milk']): 4,
frozenset(['rice']): 4
}
)
self.assertEqual(freqSet, expected)
if __name__ == '__main__':
unittest.main()
| ...
from collections import defaultdict
from itertools import chain
import unittest
from apriori import (
subsets,
returnItemsWithMinSupport,
)
...
def test_subsets_should_return_empty_subsets_if_input_empty_set(self):
result = tuple(subsets(frozenset([])))
...
def test_subsets_should_return_non_empty_subsets(self):
result = tuple(subsets(frozenset(['beer', 'rice'])))
...
def test_return_items_with_min_support(self):
itemSet = set([
frozenset(['apple']),
frozenset(['beer']),
frozenset(['chicken']),
frozenset(['mango']),
frozenset(['milk']),
frozenset(['rice'])
])
transactionList = [
frozenset(['beer', 'rice', 'apple', 'chicken']),
frozenset(['beer', 'rice', 'apple']),
frozenset(['beer', 'apple']),
frozenset(['mango', 'apple']),
frozenset(['beer', 'rice', 'milk', 'chicken']),
frozenset(['beer', 'rice', 'milk']),
frozenset(['beer', 'milk']),
frozenset(['mango', 'milk'])
]
minSupport = 0.5
freqSet = defaultdict(int)
result = returnItemsWithMinSupport(
itemSet,
transactionList,
minSupport,
freqSet
)
expected = set([
frozenset(['milk']),
frozenset(['apple']),
frozenset(['beer']),
frozenset(['rice'])
])
self.assertEqual(result, expected)
expected = defaultdict(
int,
{
frozenset(['apple']): 4,
frozenset(['beer']): 6,
frozenset(['chicken']): 2,
frozenset(['mango']): 2,
frozenset(['milk']): 4,
frozenset(['rice']): 4
}
)
self.assertEqual(freqSet, expected)
... |
55e506489e93bad1d000acd747a272103e789a59 | rml/element.py | rml/element.py | ''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
def __init__(self, element_type, length, **kwargs):
self.element_type = element_type
self.length = length
self.families = set()
# Getting the pv value
self.pv = kwargs.get('pv', None)
self._field = {}
def add_to_family(self, family):
self.families.add(family)
def get_pv(self, field, handle='readback'):
"""
Get pv value for the given field.
Currently only supports readback handle
"""
if not field in self._field:
raise ConfigException("Field {0} doesn't exist.".format(field))
elif handle == 'readback':
print 'abc'
return caget(self.pv)
else:
raise ValueError("Unknown handle {0}".format(handle))
def set_pv(self, field, pv_name):
self.pv = pv_name
self._field[field] = pv_name
def get_type(self):
return self.element_type
def get_length(self):
return self.length
def get_families(self):
return self.families
| ''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
def __init__(self, element_type, length, **kwargs):
self.element_type = element_type
self.length = length
self.families = set()
# For storing the pv. Dictionary where keys are fields and
# values are pv names
self.pv = dict()
def add_to_family(self, family):
self.families.add(family)
def get_pv(self, field, handle='readback'):
"""
Get pv value for the given field.
Currently only supports readback handle
"""
if not field in self.pv:
raise ConfigException("Field {0} doesn't exist.".format(field))
elif handle == 'readback':
return caget(self.pv[field])
else:
raise ValueError("Unknown handle {0}".format(handle))
def set_pv(self, field, pv_name):
self.pv[field] = pv_name
def get_type(self):
return self.element_type
def get_length(self):
return self.length
def get_families(self):
return self.families
| Add support for y field of a pv | Add support for y field of a pv
| Python | apache-2.0 | willrogers/pml,razvanvasile/RML,willrogers/pml | ''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
def __init__(self, element_type, length, **kwargs):
self.element_type = element_type
self.length = length
self.families = set()
- # Getting the pv value
- self.pv = kwargs.get('pv', None)
- self._field = {}
+ # For storing the pv. Dictionary where keys are fields and
+ # values are pv names
+ self.pv = dict()
def add_to_family(self, family):
self.families.add(family)
def get_pv(self, field, handle='readback'):
"""
Get pv value for the given field.
Currently only supports readback handle
"""
- if not field in self._field:
+ if not field in self.pv:
raise ConfigException("Field {0} doesn't exist.".format(field))
elif handle == 'readback':
- print 'abc'
- return caget(self.pv)
+ return caget(self.pv[field])
else:
raise ValueError("Unknown handle {0}".format(handle))
def set_pv(self, field, pv_name):
- self.pv = pv_name
- self._field[field] = pv_name
+ self.pv[field] = pv_name
def get_type(self):
return self.element_type
def get_length(self):
return self.length
def get_families(self):
return self.families
| Add support for y field of a pv | ## Code Before:
''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
def __init__(self, element_type, length, **kwargs):
self.element_type = element_type
self.length = length
self.families = set()
# Getting the pv value
self.pv = kwargs.get('pv', None)
self._field = {}
def add_to_family(self, family):
self.families.add(family)
def get_pv(self, field, handle='readback'):
"""
Get pv value for the given field.
Currently only supports readback handle
"""
if not field in self._field:
raise ConfigException("Field {0} doesn't exist.".format(field))
elif handle == 'readback':
print 'abc'
return caget(self.pv)
else:
raise ValueError("Unknown handle {0}".format(handle))
def set_pv(self, field, pv_name):
self.pv = pv_name
self._field[field] = pv_name
def get_type(self):
return self.element_type
def get_length(self):
return self.length
def get_families(self):
return self.families
## Instruction:
Add support for y field of a pv
## Code After:
''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
def __init__(self, element_type, length, **kwargs):
self.element_type = element_type
self.length = length
self.families = set()
# For storing the pv. Dictionary where keys are fields and
# values are pv names
self.pv = dict()
def add_to_family(self, family):
self.families.add(family)
def get_pv(self, field, handle='readback'):
"""
Get pv value for the given field.
Currently only supports readback handle
"""
if not field in self.pv:
raise ConfigException("Field {0} doesn't exist.".format(field))
elif handle == 'readback':
return caget(self.pv[field])
else:
raise ValueError("Unknown handle {0}".format(handle))
def set_pv(self, field, pv_name):
self.pv[field] = pv_name
def get_type(self):
return self.element_type
def get_length(self):
return self.length
def get_families(self):
return self.families
| // ... existing code ...
# For storing the pv. Dictionary where keys are fields and
# values are pv names
self.pv = dict()
// ... modified code ...
if not field in self.pv:
raise ConfigException("Field {0} doesn't exist.".format(field))
...
elif handle == 'readback':
return caget(self.pv[field])
else:
...
def set_pv(self, field, pv_name):
self.pv[field] = pv_name
// ... rest of the code ... |
a5cd2110283ba699f36548c42b83aa86e6b50aab | configuration.py | configuration.py | from trytond.model import fields, ModelSingleton, ModelSQL, ModelView
__all__ = ['EndiciaConfiguration']
class EndiciaConfiguration(ModelSingleton, ModelSQL, ModelView):
"""
Configuration settings for Endicia.
"""
__name__ = 'endicia.configuration'
account_id = fields.Integer('Account Id')
requester_id = fields.Char('Requester Id')
passphrase = fields.Char('Passphrase')
is_test = fields.Boolean('Is Test')
@classmethod
def __setup__(cls):
super(EndiciaConfiguration, cls).__setup__()
cls._error_messages.update({
'endicia_credentials_required':
'Endicia settings on endicia configuration are incomplete.',
})
def get_endicia_credentials(self):
"""Validate if endicia credentials are complete.
"""
if not all([
self.account_id,
self.requester_id,
self.passphrase
]):
self.raise_user_error('endicia_credentials_required')
return self
| from trytond import backend
from trytond.model import fields, ModelSingleton, ModelSQL, ModelView
from trytond.transaction import Transaction
__all__ = ['EndiciaConfiguration']
class EndiciaConfiguration(ModelSingleton, ModelSQL, ModelView):
"""
Configuration settings for Endicia.
"""
__name__ = 'endicia.configuration'
account_id = fields.Char('Account Id')
requester_id = fields.Char('Requester Id')
passphrase = fields.Char('Passphrase')
is_test = fields.Boolean('Is Test')
@classmethod
def __setup__(cls):
super(EndiciaConfiguration, cls).__setup__()
cls._error_messages.update({
'endicia_credentials_required':
'Endicia settings on endicia configuration are incomplete.',
})
@classmethod
def __register__(cls, module_name):
TableHandler = backend.get('TableHandler')
cursor = Transaction().cursor
# Migration from 3.4.0.6 : Migrate account_id field to string
if backend.name() == 'postgresql':
cursor.execute(
'SELECT pg_typeof("account_id") '
'FROM endicia_configuration '
'LIMIT 1',
)
# Check if account_id is integer field
is_integer = cursor.fetchone()[0] == 'integer'
if is_integer:
# Migrate integer field to string
table = TableHandler(cursor, cls, module_name)
table.alter_type('account_id', 'varchar')
super(EndiciaConfiguration, cls).__register__(module_name)
def get_endicia_credentials(self):
"""Validate if endicia credentials are complete.
"""
if not all([
self.account_id,
self.requester_id,
self.passphrase
]):
self.raise_user_error('endicia_credentials_required')
return self
| Migrate account_id from integer field to char field | Migrate account_id from integer field to char field
| Python | bsd-3-clause | priyankarani/trytond-shipping-endicia,fulfilio/trytond-shipping-endicia,prakashpp/trytond-shipping-endicia | + from trytond import backend
from trytond.model import fields, ModelSingleton, ModelSQL, ModelView
+ from trytond.transaction import Transaction
__all__ = ['EndiciaConfiguration']
class EndiciaConfiguration(ModelSingleton, ModelSQL, ModelView):
"""
Configuration settings for Endicia.
"""
__name__ = 'endicia.configuration'
- account_id = fields.Integer('Account Id')
+ account_id = fields.Char('Account Id')
requester_id = fields.Char('Requester Id')
passphrase = fields.Char('Passphrase')
is_test = fields.Boolean('Is Test')
@classmethod
def __setup__(cls):
super(EndiciaConfiguration, cls).__setup__()
cls._error_messages.update({
'endicia_credentials_required':
'Endicia settings on endicia configuration are incomplete.',
})
+
+ @classmethod
+ def __register__(cls, module_name):
+ TableHandler = backend.get('TableHandler')
+ cursor = Transaction().cursor
+
+ # Migration from 3.4.0.6 : Migrate account_id field to string
+ if backend.name() == 'postgresql':
+ cursor.execute(
+ 'SELECT pg_typeof("account_id") '
+ 'FROM endicia_configuration '
+ 'LIMIT 1',
+ )
+
+ # Check if account_id is integer field
+ is_integer = cursor.fetchone()[0] == 'integer'
+
+ if is_integer:
+ # Migrate integer field to string
+ table = TableHandler(cursor, cls, module_name)
+ table.alter_type('account_id', 'varchar')
+
+ super(EndiciaConfiguration, cls).__register__(module_name)
def get_endicia_credentials(self):
"""Validate if endicia credentials are complete.
"""
if not all([
self.account_id,
self.requester_id,
self.passphrase
]):
self.raise_user_error('endicia_credentials_required')
return self
| Migrate account_id from integer field to char field | ## Code Before:
from trytond.model import fields, ModelSingleton, ModelSQL, ModelView
__all__ = ['EndiciaConfiguration']
class EndiciaConfiguration(ModelSingleton, ModelSQL, ModelView):
"""
Configuration settings for Endicia.
"""
__name__ = 'endicia.configuration'
account_id = fields.Integer('Account Id')
requester_id = fields.Char('Requester Id')
passphrase = fields.Char('Passphrase')
is_test = fields.Boolean('Is Test')
@classmethod
def __setup__(cls):
super(EndiciaConfiguration, cls).__setup__()
cls._error_messages.update({
'endicia_credentials_required':
'Endicia settings on endicia configuration are incomplete.',
})
def get_endicia_credentials(self):
"""Validate if endicia credentials are complete.
"""
if not all([
self.account_id,
self.requester_id,
self.passphrase
]):
self.raise_user_error('endicia_credentials_required')
return self
## Instruction:
Migrate account_id from integer field to char field
## Code After:
from trytond import backend
from trytond.model import fields, ModelSingleton, ModelSQL, ModelView
from trytond.transaction import Transaction
__all__ = ['EndiciaConfiguration']
class EndiciaConfiguration(ModelSingleton, ModelSQL, ModelView):
"""
Configuration settings for Endicia.
"""
__name__ = 'endicia.configuration'
account_id = fields.Char('Account Id')
requester_id = fields.Char('Requester Id')
passphrase = fields.Char('Passphrase')
is_test = fields.Boolean('Is Test')
@classmethod
def __setup__(cls):
super(EndiciaConfiguration, cls).__setup__()
cls._error_messages.update({
'endicia_credentials_required':
'Endicia settings on endicia configuration are incomplete.',
})
@classmethod
def __register__(cls, module_name):
TableHandler = backend.get('TableHandler')
cursor = Transaction().cursor
# Migration from 3.4.0.6 : Migrate account_id field to string
if backend.name() == 'postgresql':
cursor.execute(
'SELECT pg_typeof("account_id") '
'FROM endicia_configuration '
'LIMIT 1',
)
# Check if account_id is integer field
is_integer = cursor.fetchone()[0] == 'integer'
if is_integer:
# Migrate integer field to string
table = TableHandler(cursor, cls, module_name)
table.alter_type('account_id', 'varchar')
super(EndiciaConfiguration, cls).__register__(module_name)
def get_endicia_credentials(self):
"""Validate if endicia credentials are complete.
"""
if not all([
self.account_id,
self.requester_id,
self.passphrase
]):
self.raise_user_error('endicia_credentials_required')
return self
| # ... existing code ...
from trytond import backend
from trytond.model import fields, ModelSingleton, ModelSQL, ModelView
from trytond.transaction import Transaction
# ... modified code ...
account_id = fields.Char('Account Id')
requester_id = fields.Char('Requester Id')
...
@classmethod
def __register__(cls, module_name):
TableHandler = backend.get('TableHandler')
cursor = Transaction().cursor
# Migration from 3.4.0.6 : Migrate account_id field to string
if backend.name() == 'postgresql':
cursor.execute(
'SELECT pg_typeof("account_id") '
'FROM endicia_configuration '
'LIMIT 1',
)
# Check if account_id is integer field
is_integer = cursor.fetchone()[0] == 'integer'
if is_integer:
# Migrate integer field to string
table = TableHandler(cursor, cls, module_name)
table.alter_type('account_id', 'varchar')
super(EndiciaConfiguration, cls).__register__(module_name)
def get_endicia_credentials(self):
# ... rest of the code ... |
bb4c1375082d68a78e194d3d1d3399eadc0d1b12 | dlstats/errors.py | dlstats/errors.py |
class DlstatsException(Exception):
def __init__(self, *args, **kwargs):
self.provider_name = kwargs.pop("provider_name", None)
self.dataset_code = kwargs.pop("dataset_code", None)
super().__init__(*args, **kwargs)
class RejectFrequency(DlstatsException):
def __init__(self, *args, **kwargs):
self.frequency = kwargs.pop("frequency", None)
super().__init__(*args, **kwargs)
class RejectEmptySeries(DlstatsException):
pass
class RejectUpdatedDataset(DlstatsException):
"""Reject if dataset is updated
"""
class RejectUpdatedSeries(DlstatsException):
"""Reject if series is updated
"""
def __init__(self, *args, **kwargs):
self.key = kwargs.pop("key", None)
super().__init__(*args, **kwargs)
class MaxErrors(DlstatsException):
pass
|
class DlstatsException(Exception):
def __init__(self, *args, **kwargs):
self.provider_name = kwargs.pop("provider_name", None)
self.dataset_code = kwargs.pop("dataset_code", None)
self.comments = kwargs.pop("comments", None)
super().__init__(*args, **kwargs)
class RejectFrequency(DlstatsException):
def __init__(self, *args, **kwargs):
self.frequency = kwargs.pop("frequency", None)
super().__init__(*args, **kwargs)
class InterruptProcessSeriesData(DlstatsException):
pass
class RejectEmptySeries(DlstatsException):
pass
class RejectUpdatedDataset(DlstatsException):
"""Reject if dataset is updated
"""
class RejectUpdatedSeries(DlstatsException):
"""Reject if series is updated
"""
def __init__(self, *args, **kwargs):
self.key = kwargs.pop("key", None)
super().__init__(*args, **kwargs)
class MaxErrors(DlstatsException):
pass
| Add exception for interrupt data process | Add exception for interrupt data process
| Python | agpl-3.0 | Widukind/dlstats,Widukind/dlstats |
class DlstatsException(Exception):
def __init__(self, *args, **kwargs):
self.provider_name = kwargs.pop("provider_name", None)
self.dataset_code = kwargs.pop("dataset_code", None)
+ self.comments = kwargs.pop("comments", None)
super().__init__(*args, **kwargs)
class RejectFrequency(DlstatsException):
def __init__(self, *args, **kwargs):
self.frequency = kwargs.pop("frequency", None)
super().__init__(*args, **kwargs)
-
+
+ class InterruptProcessSeriesData(DlstatsException):
+ pass
+
class RejectEmptySeries(DlstatsException):
pass
class RejectUpdatedDataset(DlstatsException):
"""Reject if dataset is updated
"""
-
+
class RejectUpdatedSeries(DlstatsException):
"""Reject if series is updated
"""
def __init__(self, *args, **kwargs):
self.key = kwargs.pop("key", None)
super().__init__(*args, **kwargs)
class MaxErrors(DlstatsException):
pass
| Add exception for interrupt data process | ## Code Before:
class DlstatsException(Exception):
def __init__(self, *args, **kwargs):
self.provider_name = kwargs.pop("provider_name", None)
self.dataset_code = kwargs.pop("dataset_code", None)
super().__init__(*args, **kwargs)
class RejectFrequency(DlstatsException):
def __init__(self, *args, **kwargs):
self.frequency = kwargs.pop("frequency", None)
super().__init__(*args, **kwargs)
class RejectEmptySeries(DlstatsException):
pass
class RejectUpdatedDataset(DlstatsException):
"""Reject if dataset is updated
"""
class RejectUpdatedSeries(DlstatsException):
"""Reject if series is updated
"""
def __init__(self, *args, **kwargs):
self.key = kwargs.pop("key", None)
super().__init__(*args, **kwargs)
class MaxErrors(DlstatsException):
pass
## Instruction:
Add exception for interrupt data process
## Code After:
class DlstatsException(Exception):
def __init__(self, *args, **kwargs):
self.provider_name = kwargs.pop("provider_name", None)
self.dataset_code = kwargs.pop("dataset_code", None)
self.comments = kwargs.pop("comments", None)
super().__init__(*args, **kwargs)
class RejectFrequency(DlstatsException):
def __init__(self, *args, **kwargs):
self.frequency = kwargs.pop("frequency", None)
super().__init__(*args, **kwargs)
class InterruptProcessSeriesData(DlstatsException):
pass
class RejectEmptySeries(DlstatsException):
pass
class RejectUpdatedDataset(DlstatsException):
"""Reject if dataset is updated
"""
class RejectUpdatedSeries(DlstatsException):
"""Reject if series is updated
"""
def __init__(self, *args, **kwargs):
self.key = kwargs.pop("key", None)
super().__init__(*args, **kwargs)
class MaxErrors(DlstatsException):
pass
| # ... existing code ...
self.dataset_code = kwargs.pop("dataset_code", None)
self.comments = kwargs.pop("comments", None)
super().__init__(*args, **kwargs)
# ... modified code ...
super().__init__(*args, **kwargs)
class InterruptProcessSeriesData(DlstatsException):
pass
class RejectEmptySeries(DlstatsException):
...
"""
class RejectUpdatedSeries(DlstatsException):
# ... rest of the code ... |
74b2883c3371304e8f5ea95b0454fb006d85ba3d | mapentity/urls.py | mapentity/urls.py | from django.conf import settings
from django.conf.urls import patterns, url
from . import app_settings
from .views import (map_screenshot, convert, history_delete,
serve_secure_media, JSSettings)
_MEDIA_URL = settings.MEDIA_URL.replace(app_settings['ROOT_URL'], '')[1:]
urlpatterns = patterns(
'',
url(r'^%s(?P<path>.*?)$' % _MEDIA_URL, serve_secure_media),
url(r'^map_screenshot/$', map_screenshot, name='map_screenshot'),
url(r'^convert/$', convert, name='convert'),
url(r'^history/delete/$', history_delete, name='history_delete'),
# See default value in app_settings.JS_SETTINGS.
# Will be overriden, most probably.
url(r'^api/settings.json$', JSSettings.as_view(), name='js_settings'),
)
| from django.conf import settings
from django.conf.urls import patterns, url
from . import app_settings
from .views import (map_screenshot, convert, history_delete,
serve_secure_media, JSSettings)
_MEDIA_URL = settings.MEDIA_URL.replace(app_settings['ROOT_URL'], '')
if _MEDIA_URL.startswith('/'):
_MEDIA_URL = _MEDIA_URL[1:]
if _MEDIA_URL.endswith('/'):
_MEDIA_URL = _MEDIA_URL[:-1]
urlpatterns = patterns(
'',
url(r'^%s(?P<path>.*?)$' % _MEDIA_URL, serve_secure_media),
url(r'^map_screenshot/$', map_screenshot, name='map_screenshot'),
url(r'^convert/$', convert, name='convert'),
url(r'^history/delete/$', history_delete, name='history_delete'),
# See default value in app_settings.JS_SETTINGS.
# Will be overriden, most probably.
url(r'^api/settings.json$', JSSettings.as_view(), name='js_settings'),
)
| Remove leading and trailing slash of MEDIA_URL | Remove leading and trailing slash of MEDIA_URL
Conflicts:
mapentity/static/mapentity/Leaflet.label
| Python | bsd-3-clause | Anaethelion/django-mapentity,Anaethelion/django-mapentity,makinacorpus/django-mapentity,makinacorpus/django-mapentity,Anaethelion/django-mapentity,makinacorpus/django-mapentity | from django.conf import settings
from django.conf.urls import patterns, url
from . import app_settings
from .views import (map_screenshot, convert, history_delete,
serve_secure_media, JSSettings)
- _MEDIA_URL = settings.MEDIA_URL.replace(app_settings['ROOT_URL'], '')[1:]
+ _MEDIA_URL = settings.MEDIA_URL.replace(app_settings['ROOT_URL'], '')
+ if _MEDIA_URL.startswith('/'):
+ _MEDIA_URL = _MEDIA_URL[1:]
+ if _MEDIA_URL.endswith('/'):
+ _MEDIA_URL = _MEDIA_URL[:-1]
urlpatterns = patterns(
'',
url(r'^%s(?P<path>.*?)$' % _MEDIA_URL, serve_secure_media),
url(r'^map_screenshot/$', map_screenshot, name='map_screenshot'),
url(r'^convert/$', convert, name='convert'),
url(r'^history/delete/$', history_delete, name='history_delete'),
# See default value in app_settings.JS_SETTINGS.
# Will be overriden, most probably.
url(r'^api/settings.json$', JSSettings.as_view(), name='js_settings'),
)
| Remove leading and trailing slash of MEDIA_URL | ## Code Before:
from django.conf import settings
from django.conf.urls import patterns, url
from . import app_settings
from .views import (map_screenshot, convert, history_delete,
serve_secure_media, JSSettings)
_MEDIA_URL = settings.MEDIA_URL.replace(app_settings['ROOT_URL'], '')[1:]
urlpatterns = patterns(
'',
url(r'^%s(?P<path>.*?)$' % _MEDIA_URL, serve_secure_media),
url(r'^map_screenshot/$', map_screenshot, name='map_screenshot'),
url(r'^convert/$', convert, name='convert'),
url(r'^history/delete/$', history_delete, name='history_delete'),
# See default value in app_settings.JS_SETTINGS.
# Will be overriden, most probably.
url(r'^api/settings.json$', JSSettings.as_view(), name='js_settings'),
)
## Instruction:
Remove leading and trailing slash of MEDIA_URL
## Code After:
from django.conf import settings
from django.conf.urls import patterns, url
from . import app_settings
from .views import (map_screenshot, convert, history_delete,
serve_secure_media, JSSettings)
_MEDIA_URL = settings.MEDIA_URL.replace(app_settings['ROOT_URL'], '')
if _MEDIA_URL.startswith('/'):
_MEDIA_URL = _MEDIA_URL[1:]
if _MEDIA_URL.endswith('/'):
_MEDIA_URL = _MEDIA_URL[:-1]
urlpatterns = patterns(
'',
url(r'^%s(?P<path>.*?)$' % _MEDIA_URL, serve_secure_media),
url(r'^map_screenshot/$', map_screenshot, name='map_screenshot'),
url(r'^convert/$', convert, name='convert'),
url(r'^history/delete/$', history_delete, name='history_delete'),
# See default value in app_settings.JS_SETTINGS.
# Will be overriden, most probably.
url(r'^api/settings.json$', JSSettings.as_view(), name='js_settings'),
)
| // ... existing code ...
_MEDIA_URL = settings.MEDIA_URL.replace(app_settings['ROOT_URL'], '')
if _MEDIA_URL.startswith('/'):
_MEDIA_URL = _MEDIA_URL[1:]
if _MEDIA_URL.endswith('/'):
_MEDIA_URL = _MEDIA_URL[:-1]
// ... rest of the code ... |
6f5e4ff4f8e4002566a9ac18bcb22778be9409bd | electro/api.py | electro/api.py |
from electro.errors import ResourceDuplicatedDefinedError
class API(object):
def __init__(self, app=None, decorators=None,
catch_all_404s=None):
self.app = app
self.endpoints = set()
self.decorators = decorators or []
self.catch_all_404s = catch_all_404s
def add_resource(self, resource, url, **kw):
endpoint = kw.pop('endpoint', None) or resource.__name__.lower()
self.endpoints.add(endpoint)
if endpoint in self.app.view_functions:
previous_view_class = self.app.view_functions[endpoint].__dict__['view_class']
if previous_view_class != resource:
raise ResourceDuplicatedDefinedError(endpoint)
resource.endpoint = endpoint
resource_func = resource.as_view(endpoint)
for decorator in self.decorators:
resource_func = decorator(resource_func)
self.app.add_url_rule(url, view_func=resource_func, **kw)
|
from electro.errors import ResourceDuplicatedDefinedError
class API(object):
def __init__(self, app=None, decorators=None,
catch_all_404s=None):
self.app = app
self.endpoints = set()
self.decorators = decorators or []
self.catch_all_404s = catch_all_404s
def add_resource(self, resource, url, **kw):
endpoint = kw.pop('endpoint', None) or resource.__name__.lower()
self.endpoints.add(endpoint)
if endpoint in self.app.view_functions:
previous_view_class = self.app.view_functions[endpoint].__dict__['view_class']
if previous_view_class != resource:
raise ResourceDuplicatedDefinedError(endpoint)
resource.endpoint = endpoint
resource_func = resource.as_view(endpoint)
for decorator in self.decorators:
resource_func = decorator(resource_func)
self.app.add_url_rule(url, view_func=resource_func, endpoint=endpoint, **kw)
| Add endpoint for flask app. | Add endpoint for flask app.
| Python | mit | soasme/electro |
from electro.errors import ResourceDuplicatedDefinedError
class API(object):
def __init__(self, app=None, decorators=None,
catch_all_404s=None):
self.app = app
self.endpoints = set()
self.decorators = decorators or []
self.catch_all_404s = catch_all_404s
def add_resource(self, resource, url, **kw):
endpoint = kw.pop('endpoint', None) or resource.__name__.lower()
self.endpoints.add(endpoint)
if endpoint in self.app.view_functions:
previous_view_class = self.app.view_functions[endpoint].__dict__['view_class']
if previous_view_class != resource:
raise ResourceDuplicatedDefinedError(endpoint)
resource.endpoint = endpoint
resource_func = resource.as_view(endpoint)
for decorator in self.decorators:
resource_func = decorator(resource_func)
- self.app.add_url_rule(url, view_func=resource_func, **kw)
+ self.app.add_url_rule(url, view_func=resource_func, endpoint=endpoint, **kw)
| Add endpoint for flask app. | ## Code Before:
from electro.errors import ResourceDuplicatedDefinedError
class API(object):
def __init__(self, app=None, decorators=None,
catch_all_404s=None):
self.app = app
self.endpoints = set()
self.decorators = decorators or []
self.catch_all_404s = catch_all_404s
def add_resource(self, resource, url, **kw):
endpoint = kw.pop('endpoint', None) or resource.__name__.lower()
self.endpoints.add(endpoint)
if endpoint in self.app.view_functions:
previous_view_class = self.app.view_functions[endpoint].__dict__['view_class']
if previous_view_class != resource:
raise ResourceDuplicatedDefinedError(endpoint)
resource.endpoint = endpoint
resource_func = resource.as_view(endpoint)
for decorator in self.decorators:
resource_func = decorator(resource_func)
self.app.add_url_rule(url, view_func=resource_func, **kw)
## Instruction:
Add endpoint for flask app.
## Code After:
from electro.errors import ResourceDuplicatedDefinedError
class API(object):
def __init__(self, app=None, decorators=None,
catch_all_404s=None):
self.app = app
self.endpoints = set()
self.decorators = decorators or []
self.catch_all_404s = catch_all_404s
def add_resource(self, resource, url, **kw):
endpoint = kw.pop('endpoint', None) or resource.__name__.lower()
self.endpoints.add(endpoint)
if endpoint in self.app.view_functions:
previous_view_class = self.app.view_functions[endpoint].__dict__['view_class']
if previous_view_class != resource:
raise ResourceDuplicatedDefinedError(endpoint)
resource.endpoint = endpoint
resource_func = resource.as_view(endpoint)
for decorator in self.decorators:
resource_func = decorator(resource_func)
self.app.add_url_rule(url, view_func=resource_func, endpoint=endpoint, **kw)
| # ... existing code ...
resource_func = decorator(resource_func)
self.app.add_url_rule(url, view_func=resource_func, endpoint=endpoint, **kw)
# ... rest of the code ... |
fba24207cc48aee53e023992be67ced518dc3e9d | utils.py | utils.py | import os
import boto
import boto.s3
from boto.s3.key import Key
import requests
import uuid
# http://stackoverflow.com/a/42493144
def upload_url_to_s3(image_url):
image_res = requests.get(image_url, stream=True)
image = image_res.raw
image_data = image.read()
fname = '{}.jpg'.format(str(uuid.uuid4()))
conn = boto.connect_s3(os.environ['AWS_ACCESS_KEY_ID_DIVERSEUI'],
os.environ['AWS_SECRET_KEY_DIVERSEUI'])
bucket = conn.get_bucket('diverse-ui')
k = Key(bucket, 'faces/{}'.format(fname))
k.set_contents_from_string(image_data)
k.make_public()
return 'https://d3iw72m71ie81c.cloudfront.net/{}'.format(fname)
| import os
import boto
import boto.s3
from boto.s3.key import Key
import requests
import uuid
# http://stackoverflow.com/a/42493144
def upload_url_to_s3(image_url):
image_res = requests.get(image_url, stream=True)
image = image_res.raw
image_data = image.read()
fname = str(uuid.uuid4())
conn = boto.connect_s3(os.environ['AWS_ACCESS_KEY_ID_DIVERSEUI'],
os.environ['AWS_SECRET_KEY_DIVERSEUI'])
bucket = conn.get_bucket('diverse-ui')
k = Key(bucket, 'faces/{}'.format(fname))
k.set_contents_from_string(image_data)
k.make_public()
return 'https://d3iw72m71ie81c.cloudfront.net/{}'.format(fname)
| Use uuid as file name | Use uuid as file name
| Python | mit | reneepadgham/diverseui,reneepadgham/diverseui,reneepadgham/diverseui | import os
import boto
import boto.s3
from boto.s3.key import Key
import requests
import uuid
# http://stackoverflow.com/a/42493144
def upload_url_to_s3(image_url):
image_res = requests.get(image_url, stream=True)
image = image_res.raw
image_data = image.read()
- fname = '{}.jpg'.format(str(uuid.uuid4()))
+ fname = str(uuid.uuid4())
conn = boto.connect_s3(os.environ['AWS_ACCESS_KEY_ID_DIVERSEUI'],
os.environ['AWS_SECRET_KEY_DIVERSEUI'])
bucket = conn.get_bucket('diverse-ui')
k = Key(bucket, 'faces/{}'.format(fname))
k.set_contents_from_string(image_data)
k.make_public()
return 'https://d3iw72m71ie81c.cloudfront.net/{}'.format(fname)
| Use uuid as file name | ## Code Before:
import os
import boto
import boto.s3
from boto.s3.key import Key
import requests
import uuid
# http://stackoverflow.com/a/42493144
def upload_url_to_s3(image_url):
image_res = requests.get(image_url, stream=True)
image = image_res.raw
image_data = image.read()
fname = '{}.jpg'.format(str(uuid.uuid4()))
conn = boto.connect_s3(os.environ['AWS_ACCESS_KEY_ID_DIVERSEUI'],
os.environ['AWS_SECRET_KEY_DIVERSEUI'])
bucket = conn.get_bucket('diverse-ui')
k = Key(bucket, 'faces/{}'.format(fname))
k.set_contents_from_string(image_data)
k.make_public()
return 'https://d3iw72m71ie81c.cloudfront.net/{}'.format(fname)
## Instruction:
Use uuid as file name
## Code After:
import os
import boto
import boto.s3
from boto.s3.key import Key
import requests
import uuid
# http://stackoverflow.com/a/42493144
def upload_url_to_s3(image_url):
image_res = requests.get(image_url, stream=True)
image = image_res.raw
image_data = image.read()
fname = str(uuid.uuid4())
conn = boto.connect_s3(os.environ['AWS_ACCESS_KEY_ID_DIVERSEUI'],
os.environ['AWS_SECRET_KEY_DIVERSEUI'])
bucket = conn.get_bucket('diverse-ui')
k = Key(bucket, 'faces/{}'.format(fname))
k.set_contents_from_string(image_data)
k.make_public()
return 'https://d3iw72m71ie81c.cloudfront.net/{}'.format(fname)
| // ... existing code ...
fname = str(uuid.uuid4())
// ... rest of the code ... |
d2c368995e33b375404e3c01f79fdc5a14a48282 | polyaxon/libs/repos/utils.py | polyaxon/libs/repos/utils.py | from django.core.exceptions import ObjectDoesNotExist
from db.models.repos import CodeReference
def get_project_code_reference(project, commit=None):
if not project.has_code:
return None
repo = project.repo
if commit:
try:
return CodeReference.objects.get(repo=repo, commit=commit)
except ObjectDoesNotExist:
return None
# If no commit is provided we get the last commit, and save new ref if not found
last_commit = repo.last_commit
if not last_commit:
return None
code_reference, _ = CodeReference.objects.get_or_create(repo=repo, commit=last_commit[0])
return code_reference
def get_code_reference(instance, commit):
return get_project_code_reference(instance.project, commit=commit)
def assign_code_reference(instance, commit=None):
if instance.code_reference is not None:
return
if not commit and instance.specification and instance.specification.build:
commit = instance.specification.build.commit
code_reference = get_code_reference(instance=instance, commit=commit)
if code_reference:
instance.code_reference = code_reference
return instance
| from django.core.exceptions import ObjectDoesNotExist
from db.models.repos import CodeReference
def get_code_reference(instance, commit=None, external_repo=None):
project = instance.project
repo = project.repo if project.has_code else external_repo
if not repo:
return None
if commit:
try:
return CodeReference.objects.get(repo=repo, commit=commit)
except ObjectDoesNotExist:
return None
# If no commit is provided we get the last commit, and save new ref if not found
last_commit = repo.last_commit
if not last_commit:
return None
code_reference, _ = CodeReference.objects.get_or_create(repo=repo, commit=last_commit[0])
return code_reference
def assign_code_reference(instance, commit=None):
if instance.code_reference is not None:
return
build = instance.specification.build if instance.specification else None
if not commit and build:
commit = build.commit
external_repo = build.git if build and build.git else None
code_reference = get_code_reference(instance=instance,
commit=commit,
external_repo=external_repo)
if code_reference:
instance.code_reference = code_reference
return instance
| Extend code references with external repos | Extend code references with external repos
| Python | apache-2.0 | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | from django.core.exceptions import ObjectDoesNotExist
from db.models.repos import CodeReference
- def get_project_code_reference(project, commit=None):
- if not project.has_code:
+ def get_code_reference(instance, commit=None, external_repo=None):
+ project = instance.project
+
+ repo = project.repo if project.has_code else external_repo
+
+ if not repo:
return None
-
- repo = project.repo
if commit:
try:
return CodeReference.objects.get(repo=repo, commit=commit)
except ObjectDoesNotExist:
return None
# If no commit is provided we get the last commit, and save new ref if not found
last_commit = repo.last_commit
if not last_commit:
return None
code_reference, _ = CodeReference.objects.get_or_create(repo=repo, commit=last_commit[0])
return code_reference
- def get_code_reference(instance, commit):
- return get_project_code_reference(instance.project, commit=commit)
-
-
def assign_code_reference(instance, commit=None):
if instance.code_reference is not None:
return
- if not commit and instance.specification and instance.specification.build:
- commit = instance.specification.build.commit
+ build = instance.specification.build if instance.specification else None
+ if not commit and build:
+ commit = build.commit
+ external_repo = build.git if build and build.git else None
- code_reference = get_code_reference(instance=instance, commit=commit)
+ code_reference = get_code_reference(instance=instance,
+ commit=commit,
+ external_repo=external_repo)
if code_reference:
instance.code_reference = code_reference
return instance
| Extend code references with external repos | ## Code Before:
from django.core.exceptions import ObjectDoesNotExist
from db.models.repos import CodeReference
def get_project_code_reference(project, commit=None):
if not project.has_code:
return None
repo = project.repo
if commit:
try:
return CodeReference.objects.get(repo=repo, commit=commit)
except ObjectDoesNotExist:
return None
# If no commit is provided we get the last commit, and save new ref if not found
last_commit = repo.last_commit
if not last_commit:
return None
code_reference, _ = CodeReference.objects.get_or_create(repo=repo, commit=last_commit[0])
return code_reference
def get_code_reference(instance, commit):
return get_project_code_reference(instance.project, commit=commit)
def assign_code_reference(instance, commit=None):
if instance.code_reference is not None:
return
if not commit and instance.specification and instance.specification.build:
commit = instance.specification.build.commit
code_reference = get_code_reference(instance=instance, commit=commit)
if code_reference:
instance.code_reference = code_reference
return instance
## Instruction:
Extend code references with external repos
## Code After:
from django.core.exceptions import ObjectDoesNotExist
from db.models.repos import CodeReference
def get_code_reference(instance, commit=None, external_repo=None):
project = instance.project
repo = project.repo if project.has_code else external_repo
if not repo:
return None
if commit:
try:
return CodeReference.objects.get(repo=repo, commit=commit)
except ObjectDoesNotExist:
return None
# If no commit is provided we get the last commit, and save new ref if not found
last_commit = repo.last_commit
if not last_commit:
return None
code_reference, _ = CodeReference.objects.get_or_create(repo=repo, commit=last_commit[0])
return code_reference
def assign_code_reference(instance, commit=None):
if instance.code_reference is not None:
return
build = instance.specification.build if instance.specification else None
if not commit and build:
commit = build.commit
external_repo = build.git if build and build.git else None
code_reference = get_code_reference(instance=instance,
commit=commit,
external_repo=external_repo)
if code_reference:
instance.code_reference = code_reference
return instance
| // ... existing code ...
def get_code_reference(instance, commit=None, external_repo=None):
project = instance.project
repo = project.repo if project.has_code else external_repo
if not repo:
return None
// ... modified code ...
def assign_code_reference(instance, commit=None):
...
return
build = instance.specification.build if instance.specification else None
if not commit and build:
commit = build.commit
external_repo = build.git if build and build.git else None
code_reference = get_code_reference(instance=instance,
commit=commit,
external_repo=external_repo)
if code_reference:
// ... rest of the code ... |
733f116125e7c061cf9f0e11e5b1008ee5272131 | test/conftest.py | test/conftest.py | import pytest
import os
import json
import sys
if sys.version_info[0] == 2:
from codecs import open
# This pair of generator expressions are pretty lame, but building lists is a
# bad idea as I plan to have a substantial number of tests here.
story_directories = (
os.path.join('test/test_fixtures', d) for d in os.listdir('test/test_fixtures')
)
story_files = (
os.path.join(storydir, name) for storydir in story_directories
for name in os.listdir(storydir)
if 'raw-data' not in storydir
)
raw_story_files = (
os.path.join('test/test_fixtures/raw-data', name)
for name in os.listdir('test/test_fixtures/raw-data')
)
@pytest.fixture(scope='class', params=story_files)
def story(request):
"""
Provides a detailed HPACK story to test with.
"""
with open(request.param, 'r', encoding='utf-8') as f:
return json.load(f)
@pytest.fixture(scope='class', params=raw_story_files)
def raw_story(request):
"""
Provides a detailed HPACK story to test the encoder with.
"""
with open(request.param, 'r', encoding='utf-8') as f:
return json.load(f)
| import pytest
import os
import json
import sys
from hypothesis.strategies import text
if sys.version_info[0] == 2:
from codecs import open
# We need to grab one text example from hypothesis to prime its cache.
text().example()
# This pair of generator expressions are pretty lame, but building lists is a
# bad idea as I plan to have a substantial number of tests here.
story_directories = (
os.path.join('test/test_fixtures', d) for d in os.listdir('test/test_fixtures')
)
story_files = (
os.path.join(storydir, name) for storydir in story_directories
for name in os.listdir(storydir)
if 'raw-data' not in storydir
)
raw_story_files = (
os.path.join('test/test_fixtures/raw-data', name)
for name in os.listdir('test/test_fixtures/raw-data')
)
@pytest.fixture(scope='class', params=story_files)
def story(request):
"""
Provides a detailed HPACK story to test with.
"""
with open(request.param, 'r', encoding='utf-8') as f:
return json.load(f)
@pytest.fixture(scope='class', params=raw_story_files)
def raw_story(request):
"""
Provides a detailed HPACK story to test the encoder with.
"""
with open(request.param, 'r', encoding='utf-8') as f:
return json.load(f)
| Fix some test breakages with Hypothesis. | Fix some test breakages with Hypothesis.
| Python | mit | python-hyper/hpack,python-hyper/hpack | import pytest
import os
import json
import sys
+ from hypothesis.strategies import text
+
if sys.version_info[0] == 2:
from codecs import open
+
+ # We need to grab one text example from hypothesis to prime its cache.
+ text().example()
# This pair of generator expressions are pretty lame, but building lists is a
# bad idea as I plan to have a substantial number of tests here.
story_directories = (
os.path.join('test/test_fixtures', d) for d in os.listdir('test/test_fixtures')
)
story_files = (
os.path.join(storydir, name) for storydir in story_directories
for name in os.listdir(storydir)
if 'raw-data' not in storydir
)
raw_story_files = (
os.path.join('test/test_fixtures/raw-data', name)
for name in os.listdir('test/test_fixtures/raw-data')
)
@pytest.fixture(scope='class', params=story_files)
def story(request):
"""
Provides a detailed HPACK story to test with.
"""
with open(request.param, 'r', encoding='utf-8') as f:
return json.load(f)
@pytest.fixture(scope='class', params=raw_story_files)
def raw_story(request):
"""
Provides a detailed HPACK story to test the encoder with.
"""
with open(request.param, 'r', encoding='utf-8') as f:
return json.load(f)
| Fix some test breakages with Hypothesis. | ## Code Before:
import pytest
import os
import json
import sys
if sys.version_info[0] == 2:
from codecs import open
# This pair of generator expressions are pretty lame, but building lists is a
# bad idea as I plan to have a substantial number of tests here.
story_directories = (
os.path.join('test/test_fixtures', d) for d in os.listdir('test/test_fixtures')
)
story_files = (
os.path.join(storydir, name) for storydir in story_directories
for name in os.listdir(storydir)
if 'raw-data' not in storydir
)
raw_story_files = (
os.path.join('test/test_fixtures/raw-data', name)
for name in os.listdir('test/test_fixtures/raw-data')
)
@pytest.fixture(scope='class', params=story_files)
def story(request):
"""
Provides a detailed HPACK story to test with.
"""
with open(request.param, 'r', encoding='utf-8') as f:
return json.load(f)
@pytest.fixture(scope='class', params=raw_story_files)
def raw_story(request):
"""
Provides a detailed HPACK story to test the encoder with.
"""
with open(request.param, 'r', encoding='utf-8') as f:
return json.load(f)
## Instruction:
Fix some test breakages with Hypothesis.
## Code After:
import pytest
import os
import json
import sys
from hypothesis.strategies import text
if sys.version_info[0] == 2:
from codecs import open
# We need to grab one text example from hypothesis to prime its cache.
text().example()
# This pair of generator expressions are pretty lame, but building lists is a
# bad idea as I plan to have a substantial number of tests here.
story_directories = (
os.path.join('test/test_fixtures', d) for d in os.listdir('test/test_fixtures')
)
story_files = (
os.path.join(storydir, name) for storydir in story_directories
for name in os.listdir(storydir)
if 'raw-data' not in storydir
)
raw_story_files = (
os.path.join('test/test_fixtures/raw-data', name)
for name in os.listdir('test/test_fixtures/raw-data')
)
@pytest.fixture(scope='class', params=story_files)
def story(request):
"""
Provides a detailed HPACK story to test with.
"""
with open(request.param, 'r', encoding='utf-8') as f:
return json.load(f)
@pytest.fixture(scope='class', params=raw_story_files)
def raw_story(request):
"""
Provides a detailed HPACK story to test the encoder with.
"""
with open(request.param, 'r', encoding='utf-8') as f:
return json.load(f)
| // ... existing code ...
from hypothesis.strategies import text
if sys.version_info[0] == 2:
// ... modified code ...
from codecs import open
# We need to grab one text example from hypothesis to prime its cache.
text().example()
// ... rest of the code ... |
ff489b1541f896025a0c630be6abe2d23843ec36 | examples/05_alternative_language.py | examples/05_alternative_language.py |
from pyhmsa.datafile import DataFile
from pyhmsa.type.language import langstr
datafile = DataFile()
author = langstr('Fyodor Dostoyevsky', {'ru': u'Фёдор Миха́йлович Достое́вский'})
datafile.header.author = author
print(datafile.header.author.alternatives['ru']) # Returns ... |
from pyhmsa.datafile import DataFile
from pyhmsa.type.language import langstr
datafile = DataFile()
author = langstr('Wilhelm Conrad Roentgen', {'de': u'Wilhelm Conrad Röntgen'})
datafile.header.author = author
print(datafile.header.author.alternatives['de']) # Returns ... | Replace name in alternative language to prevent compilation problems with LaTeX | Replace name in alternative language to prevent compilation problems
with LaTeX | Python | mit | pyhmsa/pyhmsa |
from pyhmsa.datafile import DataFile
from pyhmsa.type.language import langstr
datafile = DataFile()
- author = langstr('Fyodor Dostoyevsky', {'ru': u'Фёдор Миха́йлович Достое́вский'})
+ author = langstr('Wilhelm Conrad Roentgen', {'de': u'Wilhelm Conrad Röntgen'})
datafile.header.author = author
- print(datafile.header.author.alternatives['ru']) # Returns ...
+ print(datafile.header.author.alternatives['de']) # Returns ... | Replace name in alternative language to prevent compilation problems with LaTeX | ## Code Before:
from pyhmsa.datafile import DataFile
from pyhmsa.type.language import langstr
datafile = DataFile()
author = langstr('Fyodor Dostoyevsky', {'ru': u'Фёдор Миха́йлович Достое́вский'})
datafile.header.author = author
print(datafile.header.author.alternatives['ru']) # Returns ...
## Instruction:
Replace name in alternative language to prevent compilation problems with LaTeX
## Code After:
from pyhmsa.datafile import DataFile
from pyhmsa.type.language import langstr
datafile = DataFile()
author = langstr('Wilhelm Conrad Roentgen', {'de': u'Wilhelm Conrad Röntgen'})
datafile.header.author = author
print(datafile.header.author.alternatives['de']) # Returns ... | // ... existing code ...
author = langstr('Wilhelm Conrad Roentgen', {'de': u'Wilhelm Conrad Röntgen'})
datafile.header.author = author
// ... modified code ...
print(datafile.header.author.alternatives['de']) # Returns ...
// ... rest of the code ... |
e07e436b461015365b2cbbdb96daa8bfc3ae31a4 | {{cookiecutter.repo_name}}/config/urls.py | {{cookiecutter.repo_name}}/config/urls.py | from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
# Comment the next two lines to disable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('', # noqa
url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name="home"),
url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name="about"),
# Django Admin (Comment the next line to disable the admin)
url(r'^admin/', include(admin.site.urls)),
# User management
url(r'^users/', include("{{ cookiecutter.repo_name }}.users.urls", namespace="users")),
url(r'^accounts/', include('allauth.urls')),
# Your stuff: custom urls includes go here
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, patterns, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
urlpatterns = patterns('', # noqa
url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name="home"),
url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name="about"),
# Django Admin
url(r'^admin/', include(admin.site.urls)),
# User management
url(r'^users/', include("{{ cookiecutter.repo_name }}.users.urls", namespace="users")),
url(r'^accounts/', include('allauth.urls')),
# Your stuff: custom urls includes go here
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| Remove admin.autodiscover() call, it's called automatically in 1.7+ | Remove admin.autodiscover() call, it's called automatically in 1.7+
| Python | bsd-3-clause | interaktiviti/cookiecutter-django,asyncee/cookiecutter-django,kappataumu/cookiecutter-django,Sushantgakhar/cookiecutter-django,hairychris/cookiecutter-django,Sushantgakhar/cookiecutter-django,thornomad/cookiecutter-django,stepmr/cookiecutter-django,javipalanca/cookiecutter-django,wldcordeiro/cookiecutter-django-essentials,pydanny/cookiecutter-django,janusnic/cookiecutter-django,b-kolodziej/cookiecutter-django,rtorr/cookiecutter-django,crdoconnor/cookiecutter-django,kaidokert/cookiecutter-django,ryankanno/cookiecutter-django,trungdong/cookiecutter-django,ingenioustechie/cookiecutter-django-openshift,luzfcb/cookiecutter-django,drxos/cookiecutter-django-dokku,thornomad/cookiecutter-django,thisjustin/cookiecutter-django,Nene-Padi/cookiecutter-django,thisjustin/cookiecutter-django,ad-m/cookiecutter-django,ingenioustechie/cookiecutter-django-openshift,aleprovencio/cookiecutter-django,chrisfranzen/cookiecutter-django,webyneter/cookiecutter-django,stepanovsh/project_template,hackultura/django-project-template,calculuscowboy/cookiecutter-django,IanLee1521/cookiecutter-django,Parbhat/cookiecutter-django-foundation,primoz-k/cookiecutter-django,gappsexperts/cookiecutter-django,yunti/cookiecutter-django,Sushantgakhar/cookiecutter-django,siauPatrick/cookiecutter-django,nunchaks/cookiecutter-django,andresgz/cookiecutter-django,gengue/django-new-marana,asyncee/cookiecutter-django,webspired/cookiecutter-django,ryankanno/cookiecutter-django,gappsexperts/cookiecutter-django,audreyr/cookiecutter-django,martinblech/cookiecutter-django,hackebrot/cookiecutter-django,bopo/cookiecutter-django,javipalanca/cookiecutter-django,javipalanca/cookiecutter-django,stepmr/cookiecutter-django,primoz-k/cookiecutter-django,janusnic/cookiecutter-django,ryankanno/cookiecutter-django,siauPatrick/cookiecutter-django,schacki/cookiecutter-django,kappataumu/cookiecutter-django,ad-m/cookiecutter-django,aeikenberry/cookiecutter-django-rest-babel,stepanovsh/project_template,trungdong/coo
kiecutter-django,jondelmil/cookiecutter-django,the3ballsoft/django-new-marana,asyncee/cookiecutter-django,bopo/cookiecutter-django,Nene-Padi/cookiecutter-django,gappsexperts/cookiecutter-django,yehoshuk/cookiecutter-django,yehoshuk/cookiecutter-django,ujjwalwahi/cookiecutter-django,andela-ijubril/cookiecutter-django,IanLee1521/cookiecutter-django,andela-ijubril/cookiecutter-django,chrisfranzen/cookiecutter-django,mjhea0/cookiecutter-django,topwebmaster/cookiecutter-django,mistalaba/cookiecutter-django,janusnic/cookiecutter-django,HellerCommaA/cookiecutter-django,thornomad/cookiecutter-django,ddiazpinto/cookiecutter-django,pydanny/cookiecutter-django,kappataumu/cookiecutter-django,calculuscowboy/cookiecutter-django,andresgz/cookiecutter-django,martinblech/cookiecutter-django,chrisfranzen/cookiecutter-django,kaidokert/cookiecutter-django,ovidner/cookiecutter-django,webspired/cookiecutter-django,mjhea0/cookiecutter-django,b-kolodziej/cookiecutter-django,ddiazpinto/cookiecutter-django,ingenioustechie/cookiecutter-django-openshift,HellerCommaA/cookiecutter-django,rtorr/cookiecutter-django,ovidner/cookiecutter-django,topwebmaster/cookiecutter-django,wy123123/cookiecutter-django,IanLee1521/cookiecutter-django,kappataumu/cookiecutter-django,wldcordeiro/cookiecutter-django-essentials,Nene-Padi/cookiecutter-django,interaktiviti/cookiecutter-django,hackebrot/cookiecutter-django,wy123123/cookiecutter-django,audreyr/cookiecutter-django,stepmr/cookiecutter-django,thisjustin/cookiecutter-django,bopo/cookiecutter-django,HellerCommaA/cookiecutter-django,Sushantgakhar/cookiecutter-django,HandyCodeJob/hcj-django-temp,bogdal/cookiecutter-django,HellerCommaA/cookiecutter-django,webyneter/cookiecutter-django,hairychris/cookiecutter-django,rtorr/cookiecutter-django,audreyr/cookiecutter-django,interaktiviti/cookiecutter-django,the3ballsoft/django-new-marana,ad-m/cookiecutter-django,stepanovsh/project_template,luzfcb/cookiecutter-django,hairychris/cookiecutter-django,yehoshuk/cookiecutter-d
jango,stepanovsh/project_template,rtorr/cookiecutter-django,mistalaba/cookiecutter-django,HandyCodeJob/hcj-django-temp,audreyr/cookiecutter-django,andela-ijubril/cookiecutter-django,nunchaks/cookiecutter-django,yunti/cookiecutter-django,trungdong/cookiecutter-django,wldcordeiro/cookiecutter-django-essentials,drxos/cookiecutter-django-dokku,HandyCodeJob/hcj-django-temp,wy123123/cookiecutter-django,asyncee/cookiecutter-django,ujjwalwahi/cookiecutter-django,drxos/cookiecutter-django-dokku,javipalanca/cookiecutter-django,schacki/cookiecutter-django,andresgz/cookiecutter-django,bogdal/cookiecutter-django,janusnic/cookiecutter-django,hackultura/django-project-template,IanLee1521/cookiecutter-django,bogdal/cookiecutter-django,kaidokert/cookiecutter-django,thisjustin/cookiecutter-django,wy123123/cookiecutter-django,martinblech/cookiecutter-django,gengue/django-new-marana,ingenioustechie/cookiecutter-django-openshift,chrisfranzen/cookiecutter-django,aeikenberry/cookiecutter-django-rest-babel,gappsexperts/cookiecutter-django,ujjwalwahi/cookiecutter-django,topwebmaster/cookiecutter-django,hairychris/cookiecutter-django,ddiazpinto/cookiecutter-django,schacki/cookiecutter-django,martinblech/cookiecutter-django,webyneter/cookiecutter-django,jondelmil/cookiecutter-django,webspired/cookiecutter-django,hackultura/django-project-template,primoz-k/cookiecutter-django,aleprovencio/cookiecutter-django,Parbhat/cookiecutter-django-foundation,topwebmaster/cookiecutter-django,pydanny/cookiecutter-django,aeikenberry/cookiecutter-django-rest-babel,the3ballsoft/django-new-marana,nunchaks/cookiecutter-django,andresgz/cookiecutter-django,javipalanca/cookiecutter-django,ad-m/cookiecutter-django,crdoconnor/cookiecutter-django,ovidner/cookiecutter-django,crdoconnor/cookiecutter-django,ddiazpinto/cookiecutter-django,siauPatrick/cookiecutter-django,kaidokert/cookiecutter-django,Nene-Padi/cookiecutter-django,luzfcb/cookiecutter-django,hackebrot/cookiecutter-django,yunti/cookiecutter-django,HandyCodeJo
b/hcj-django-temp,ryankanno/cookiecutter-django,mjhea0/cookiecutter-django,aleprovencio/cookiecutter-django,thornomad/cookiecutter-django,pydanny/cookiecutter-django,stepanovsh/project_template,b-kolodziej/cookiecutter-django,mistalaba/cookiecutter-django,nunchaks/cookiecutter-django,ovidner/cookiecutter-django,jondelmil/cookiecutter-django,calculuscowboy/cookiecutter-django,gengue/django-new-marana,jondelmil/cookiecutter-django,webyneter/cookiecutter-django,aleprovencio/cookiecutter-django,mjhea0/cookiecutter-django,yunti/cookiecutter-django,bopo/cookiecutter-django,drxos/cookiecutter-django-dokku,crdoconnor/cookiecutter-django,mistalaba/cookiecutter-django,b-kolodziej/cookiecutter-django,interaktiviti/cookiecutter-django,webspired/cookiecutter-django,stepmr/cookiecutter-django,trungdong/cookiecutter-django,ujjwalwahi/cookiecutter-django,andela-ijubril/cookiecutter-django,hackebrot/cookiecutter-django,hackultura/django-project-template,aeikenberry/cookiecutter-django-rest-babel,Parbhat/cookiecutter-django-foundation,calculuscowboy/cookiecutter-django,Parbhat/cookiecutter-django-foundation,schacki/cookiecutter-django,luzfcb/cookiecutter-django,bogdal/cookiecutter-django,siauPatrick/cookiecutter-django,primoz-k/cookiecutter-django,yehoshuk/cookiecutter-django | from __future__ import unicode_literals
from django.conf import settings
- from django.conf.urls import patterns, include, url
+ from django.conf.urls import include, patterns, url
from django.conf.urls.static import static
+ from django.contrib import admin
from django.views.generic import TemplateView
-
- # Comment the next two lines to disable the admin:
- from django.contrib import admin
- admin.autodiscover()
urlpatterns = patterns('', # noqa
url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name="home"),
url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name="about"),
- # Django Admin (Comment the next line to disable the admin)
+ # Django Admin
url(r'^admin/', include(admin.site.urls)),
# User management
url(r'^users/', include("{{ cookiecutter.repo_name }}.users.urls", namespace="users")),
url(r'^accounts/', include('allauth.urls')),
# Your stuff: custom urls includes go here
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| Remove admin.autodiscover() call, it's called automatically in 1.7+ | ## Code Before:
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
# Comment the next two lines to disable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('', # noqa
url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name="home"),
url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name="about"),
# Django Admin (Comment the next line to disable the admin)
url(r'^admin/', include(admin.site.urls)),
# User management
url(r'^users/', include("{{ cookiecutter.repo_name }}.users.urls", namespace="users")),
url(r'^accounts/', include('allauth.urls')),
# Your stuff: custom urls includes go here
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
## Instruction:
Remove admin.autodiscover() call, it's called automatically in 1.7+
## Code After:
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, patterns, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
urlpatterns = patterns('', # noqa
url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name="home"),
url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name="about"),
# Django Admin
url(r'^admin/', include(admin.site.urls)),
# User management
url(r'^users/', include("{{ cookiecutter.repo_name }}.users.urls", namespace="users")),
url(r'^accounts/', include('allauth.urls')),
# Your stuff: custom urls includes go here
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| ...
from django.conf import settings
from django.conf.urls import include, patterns, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
...
# Django Admin
url(r'^admin/', include(admin.site.urls)),
... |
818de1d8ef32ef853d37e753cc0dc701d76d04ea | app/apis/search_api.py | app/apis/search_api.py | from flask import Blueprint, jsonify, request
from importlib import import_module
import re
blueprint = Blueprint('search_api', __name__, url_prefix='/search')
@blueprint.route('/<string:model>')
def api(model):
global Model
class_name = model.title() + 'Search'
model_name = model + '_search'
Model = getattr(import_module('app.models.' + model_name), class_name)
query_string = request.args.get('query')
query_string = re.sub('[^0-9a-zA-Z ]+', '*', query_string).lower()
if not query_string:
return 'Query is missing', 400
query = Model.query.filter(Model.search.contains(query_string))
return jsonify(data=[q.serialize() for q in query.all()])
|
import sys
from flask import Blueprint, jsonify, request
from importlib import import_module
from unicodedata import normalize
reload(sys)
sys.setdefaultencoding('utf8')
def remove_accents(txt):
return normalize('NFKD', txt.decode('utf-8')).encode('ASCII','ignore')
blueprint = Blueprint('search_api', __name__, url_prefix='/search')
@blueprint.route('/<string:model>')
def api(model):
global Model
class_name = model.title() + 'Search'
model_name = model + '_search'
Model = getattr(import_module('app.models.' + model_name), class_name)
query_string = request.args.get('query')
query_string = remove_accents(query_string).lower()
if not query_string:
return 'Query is missing', 400
query = Model.query
for word in query_string.split(' '):
query = query.filter(Model.search.contains(word))
return jsonify(data=[q.serialize() for q in query.all()])
| Add support to search for word in search api | Add support to search for word in search api
| Python | mit | daniel1409/dataviva-api,DataViva/dataviva-api | +
+ import sys
from flask import Blueprint, jsonify, request
from importlib import import_module
- import re
+ from unicodedata import normalize
+ reload(sys)
+ sys.setdefaultencoding('utf8')
+
+ def remove_accents(txt):
+ return normalize('NFKD', txt.decode('utf-8')).encode('ASCII','ignore')
blueprint = Blueprint('search_api', __name__, url_prefix='/search')
@blueprint.route('/<string:model>')
def api(model):
global Model
class_name = model.title() + 'Search'
model_name = model + '_search'
Model = getattr(import_module('app.models.' + model_name), class_name)
query_string = request.args.get('query')
- query_string = re.sub('[^0-9a-zA-Z ]+', '*', query_string).lower()
+ query_string = remove_accents(query_string).lower()
if not query_string:
return 'Query is missing', 400
+ query = Model.query
+
+ for word in query_string.split(' '):
- query = Model.query.filter(Model.search.contains(query_string))
+ query = query.filter(Model.search.contains(word))
return jsonify(data=[q.serialize() for q in query.all()])
| Add support to search for word in search api | ## Code Before:
from flask import Blueprint, jsonify, request
from importlib import import_module
import re
blueprint = Blueprint('search_api', __name__, url_prefix='/search')
@blueprint.route('/<string:model>')
def api(model):
global Model
class_name = model.title() + 'Search'
model_name = model + '_search'
Model = getattr(import_module('app.models.' + model_name), class_name)
query_string = request.args.get('query')
query_string = re.sub('[^0-9a-zA-Z ]+', '*', query_string).lower()
if not query_string:
return 'Query is missing', 400
query = Model.query.filter(Model.search.contains(query_string))
return jsonify(data=[q.serialize() for q in query.all()])
## Instruction:
Add support to search for word in search api
## Code After:
import sys
from flask import Blueprint, jsonify, request
from importlib import import_module
from unicodedata import normalize
reload(sys)
sys.setdefaultencoding('utf8')
def remove_accents(txt):
return normalize('NFKD', txt.decode('utf-8')).encode('ASCII','ignore')
blueprint = Blueprint('search_api', __name__, url_prefix='/search')
@blueprint.route('/<string:model>')
def api(model):
global Model
class_name = model.title() + 'Search'
model_name = model + '_search'
Model = getattr(import_module('app.models.' + model_name), class_name)
query_string = request.args.get('query')
query_string = remove_accents(query_string).lower()
if not query_string:
return 'Query is missing', 400
query = Model.query
for word in query_string.split(' '):
query = query.filter(Model.search.contains(word))
return jsonify(data=[q.serialize() for q in query.all()])
| // ... existing code ...
import sys
from flask import Blueprint, jsonify, request
// ... modified code ...
from importlib import import_module
from unicodedata import normalize
reload(sys)
sys.setdefaultencoding('utf8')
def remove_accents(txt):
return normalize('NFKD', txt.decode('utf-8')).encode('ASCII','ignore')
...
query_string = request.args.get('query')
query_string = remove_accents(query_string).lower()
...
query = Model.query
for word in query_string.split(' '):
query = query.filter(Model.search.contains(word))
// ... rest of the code ... |
7a324d85ef76604c919c2c7e2f38fbda17b3d01c | docs/examples/led_travis.py | docs/examples/led_travis.py | from travispy import TravisPy
from gpiozero import LED
from gpiozero.tools import negated
from time import sleep
from signal import pause
def build_passed(repo='RPi-Distro/python-gpiozero', delay=3600):
t = TravisPy()
r = t.repo(repo)
while True:
yield r.last_build_state == 'passed'
sleep(delay) # Sleep an hour before hitting travis again
red = LED(12)
green = LED(16)
red.source = negated(green.values)
green.source = build_passed()
pause()
| from travispy import TravisPy
from gpiozero import LED
from gpiozero.tools import negated
from time import sleep
from signal import pause
def build_passed(repo):
t = TravisPy()
r = t.repo(repo)
while True:
yield r.last_build_state == 'passed'
red = LED(12)
green = LED(16)
green.source = build_passed('RPi-Distro/python-gpiozero')
green.source_delay = 60 * 5 # check every 5 minutes
red.source = negated(green.values)
pause()
| Use source_delay instead of sleep, and tidy up a bit | Use source_delay instead of sleep, and tidy up a bit | Python | bsd-3-clause | RPi-Distro/python-gpiozero,waveform80/gpio-zero,MrHarcombe/python-gpiozero | from travispy import TravisPy
from gpiozero import LED
from gpiozero.tools import negated
from time import sleep
from signal import pause
- def build_passed(repo='RPi-Distro/python-gpiozero', delay=3600):
+ def build_passed(repo):
t = TravisPy()
r = t.repo(repo)
while True:
yield r.last_build_state == 'passed'
- sleep(delay) # Sleep an hour before hitting travis again
red = LED(12)
green = LED(16)
+ green.source = build_passed('RPi-Distro/python-gpiozero')
+ green.source_delay = 60 * 5 # check every 5 minutes
red.source = negated(green.values)
- green.source = build_passed()
+
pause()
| Use source_delay instead of sleep, and tidy up a bit | ## Code Before:
from travispy import TravisPy
from gpiozero import LED
from gpiozero.tools import negated
from time import sleep
from signal import pause
def build_passed(repo='RPi-Distro/python-gpiozero', delay=3600):
t = TravisPy()
r = t.repo(repo)
while True:
yield r.last_build_state == 'passed'
sleep(delay) # Sleep an hour before hitting travis again
red = LED(12)
green = LED(16)
red.source = negated(green.values)
green.source = build_passed()
pause()
## Instruction:
Use source_delay instead of sleep, and tidy up a bit
## Code After:
from travispy import TravisPy
from gpiozero import LED
from gpiozero.tools import negated
from time import sleep
from signal import pause
def build_passed(repo):
t = TravisPy()
r = t.repo(repo)
while True:
yield r.last_build_state == 'passed'
red = LED(12)
green = LED(16)
green.source = build_passed('RPi-Distro/python-gpiozero')
green.source_delay = 60 * 5 # check every 5 minutes
red.source = negated(green.values)
pause()
| # ... existing code ...
def build_passed(repo):
t = TravisPy()
# ... modified code ...
yield r.last_build_state == 'passed'
...
green.source = build_passed('RPi-Distro/python-gpiozero')
green.source_delay = 60 * 5 # check every 5 minutes
red.source = negated(green.values)
pause()
# ... rest of the code ... |
6ef289403b4d88bc5e1a70568133924de54c2b9f | pyang/plugins/bbf.py | pyang/plugins/bbf.py |
import optparse
from pyang import plugin
from pyang.plugins import lint
def pyang_plugin_init():
    """Entry point called by pyang to register the BBF linter plugin."""
    bbf_plugin = BBFPlugin()
    plugin.register_plugin(bbf_plugin)
class BBFPlugin(lint.LintPlugin):
    """Lint plugin that validates YANG modules against Broadband Forum rules."""

    def __init__(self):
        """Configure the generic lint checks with BBF-specific prefixes."""
        lint.LintPlugin.__init__(self)
        # BBF modules live under this namespace and use this filename prefix.
        self.namespace_prefixes = ['urn:bbf:yang:']
        self.modulename_prefixes = ['bbf']

    def add_opts(self, optparser):
        """Add the --bbf command-line flag that enables these checks."""
        bbf_option = optparse.make_option(
            "--bbf",
            dest="bbf",
            action="store_true",
            help="Validate the module(s) according to BBF rules.")
        optparser.add_options([bbf_option])

    def setup_ctx(self, ctx):
        """Enable the BBF lint checks on ctx when --bbf was given."""
        if not ctx.opts.bbf:
            return
        self._setup_ctx(ctx)
|
import optparse
from pyang import plugin
from pyang.plugins import lint
def pyang_plugin_init():
plugin.register_plugin(BBFPlugin())
class BBFPlugin(lint.LintPlugin):
def __init__(self):
lint.LintPlugin.__init__(self)
self.namespace_prefixes = ['urn:bbf:yang:']
self.modulename_prefixes = ['bbf']
self.ensure_hyphenated_names = True
def add_opts(self, optparser):
optlist = [
optparse.make_option("--bbf",
dest="bbf",
action="store_true",
help="Validate the module(s) according to " \
"BBF rules."),
]
optparser.add_options(optlist)
def setup_ctx(self, ctx):
if not ctx.opts.bbf:
return
self._setup_ctx(ctx)
if ctx.max_line_len is None:
ctx.max_line_len = 70
| Set the parent class 'ensure_hyphenated_names' and set 'ctx.max_line_len' to 70 | Set the parent class 'ensure_hyphenated_names' and set 'ctx.max_line_len' to 70
This is to match the settings that BBF uses when validating its modules. The max_line_len setting won't override an explicit --max-line-len from the command line.
| Python | isc | mbj4668/pyang,mbj4668/pyang |
import optparse
from pyang import plugin
from pyang.plugins import lint
def pyang_plugin_init():
plugin.register_plugin(BBFPlugin())
class BBFPlugin(lint.LintPlugin):
def __init__(self):
lint.LintPlugin.__init__(self)
self.namespace_prefixes = ['urn:bbf:yang:']
self.modulename_prefixes = ['bbf']
+ self.ensure_hyphenated_names = True
def add_opts(self, optparser):
optlist = [
optparse.make_option("--bbf",
dest="bbf",
action="store_true",
help="Validate the module(s) according to " \
"BBF rules."),
]
optparser.add_options(optlist)
def setup_ctx(self, ctx):
if not ctx.opts.bbf:
return
self._setup_ctx(ctx)
+ if ctx.max_line_len is None:
+ ctx.max_line_len = 70
| Set the parent class 'ensure_hyphenated_names' and set 'ctx.max_line_len' to 70 | ## Code Before:
import optparse
from pyang import plugin
from pyang.plugins import lint
def pyang_plugin_init():
plugin.register_plugin(BBFPlugin())
class BBFPlugin(lint.LintPlugin):
def __init__(self):
lint.LintPlugin.__init__(self)
self.namespace_prefixes = ['urn:bbf:yang:']
self.modulename_prefixes = ['bbf']
def add_opts(self, optparser):
optlist = [
optparse.make_option("--bbf",
dest="bbf",
action="store_true",
help="Validate the module(s) according to " \
"BBF rules."),
]
optparser.add_options(optlist)
def setup_ctx(self, ctx):
if not ctx.opts.bbf:
return
self._setup_ctx(ctx)
## Instruction:
Set the parent class 'ensure_hyphenated_names' and set 'ctx.max_line_len' to 70
## Code After:
import optparse
from pyang import plugin
from pyang.plugins import lint
def pyang_plugin_init():
plugin.register_plugin(BBFPlugin())
class BBFPlugin(lint.LintPlugin):
def __init__(self):
lint.LintPlugin.__init__(self)
self.namespace_prefixes = ['urn:bbf:yang:']
self.modulename_prefixes = ['bbf']
self.ensure_hyphenated_names = True
def add_opts(self, optparser):
optlist = [
optparse.make_option("--bbf",
dest="bbf",
action="store_true",
help="Validate the module(s) according to " \
"BBF rules."),
]
optparser.add_options(optlist)
def setup_ctx(self, ctx):
if not ctx.opts.bbf:
return
self._setup_ctx(ctx)
if ctx.max_line_len is None:
ctx.max_line_len = 70
| ...
self.modulename_prefixes = ['bbf']
self.ensure_hyphenated_names = True
...
self._setup_ctx(ctx)
if ctx.max_line_len is None:
ctx.max_line_len = 70
... |
7d8c724abc4b5a692bd046313774921bc288f7a4 | src/unittest/python/daemonize_tests.py | src/unittest/python/daemonize_tests.py | from __future__ import print_function, absolute_import, division
from unittest2 import TestCase
from mock import patch
from succubus import Daemon
class TestDaemonize(TestCase):
@patch('succubus.daemonize.sys')
def test_must_pop_sys_argv_before_loading_config(self, mock_sys):
"""The sys.argv.pop() must happen before load_configuration()
This way, load_configuration() has a chance to parse the command
line arguments, which may contain something like a --config=xyz
parameter that affects config loading.
"""
class MyDaemon(Daemon):
def load_configuration(self):
if self.param1 != 'start':
raise Exception("param1 not yet set")
mock_sys.argv = ['foo', 'start', '--config=xyz']
a = MyDaemon(pid_file='foo.pid')
| from __future__ import print_function, absolute_import, division
from unittest2 import TestCase
from mock import patch
from succubus import Daemon
class TestDaemonize(TestCase):
@patch('succubus.daemonize.sys')
def test_must_pop_sys_argv_before_loading_config(self, mock_sys):
"""The sys.argv.pop() must happen before load_configuration()
This way, load_configuration() has a chance to parse the command
line arguments, which may contain something like a --config=xyz
parameter that affects config loading.
"""
class MyDaemon(Daemon):
def load_configuration(self):
if self.param1 != 'start':
raise Exception("param1 not yet set")
mock_sys.argv = ['foo', 'start', '--config=xyz']
a = MyDaemon(pid_file='foo.pid')
@patch("succubus.daemonize.os.setgid")
def test_set_gid_translates_group_name(self, mock_setgid):
daemon = Daemon(pid_file="foo")
daemon.group = "root"
daemon.set_gid()
mock_setgid.assert_called_with(0)
@patch("succubus.daemonize.os.setuid")
def test_set_uid_translates_user_name(self, mock_setuid):
daemon = Daemon(pid_file="foo")
daemon.user = "root"
daemon.set_uid()
mock_setuid.assert_called_with(0)
| Test that set_(g|u)id actually changes the id | Test that set_(g|u)id actually changes the id
| Python | apache-2.0 | ImmobilienScout24/succubus | from __future__ import print_function, absolute_import, division
from unittest2 import TestCase
from mock import patch
from succubus import Daemon
class TestDaemonize(TestCase):
@patch('succubus.daemonize.sys')
def test_must_pop_sys_argv_before_loading_config(self, mock_sys):
"""The sys.argv.pop() must happen before load_configuration()
This way, load_configuration() has a chance to parse the command
line arguments, which may contain something like a --config=xyz
parameter that affects config loading.
"""
class MyDaemon(Daemon):
def load_configuration(self):
if self.param1 != 'start':
raise Exception("param1 not yet set")
mock_sys.argv = ['foo', 'start', '--config=xyz']
a = MyDaemon(pid_file='foo.pid')
+ @patch("succubus.daemonize.os.setgid")
+ def test_set_gid_translates_group_name(self, mock_setgid):
+ daemon = Daemon(pid_file="foo")
+ daemon.group = "root"
+
+ daemon.set_gid()
+
+ mock_setgid.assert_called_with(0)
+
+ @patch("succubus.daemonize.os.setuid")
+ def test_set_uid_translates_user_name(self, mock_setuid):
+ daemon = Daemon(pid_file="foo")
+ daemon.user = "root"
+
+ daemon.set_uid()
+
+ mock_setuid.assert_called_with(0)
+ | Test that set_(g|u)id actually changes the id | ## Code Before:
from __future__ import print_function, absolute_import, division
from unittest2 import TestCase
from mock import patch
from succubus import Daemon
class TestDaemonize(TestCase):
@patch('succubus.daemonize.sys')
def test_must_pop_sys_argv_before_loading_config(self, mock_sys):
"""The sys.argv.pop() must happen before load_configuration()
This way, load_configuration() has a chance to parse the command
line arguments, which may contain something like a --config=xyz
parameter that affects config loading.
"""
class MyDaemon(Daemon):
def load_configuration(self):
if self.param1 != 'start':
raise Exception("param1 not yet set")
mock_sys.argv = ['foo', 'start', '--config=xyz']
a = MyDaemon(pid_file='foo.pid')
## Instruction:
Test that set_(g|u)id actually changes the id
## Code After:
from __future__ import print_function, absolute_import, division
from unittest2 import TestCase
from mock import patch
from succubus import Daemon
class TestDaemonize(TestCase):
@patch('succubus.daemonize.sys')
def test_must_pop_sys_argv_before_loading_config(self, mock_sys):
"""The sys.argv.pop() must happen before load_configuration()
This way, load_configuration() has a chance to parse the command
line arguments, which may contain something like a --config=xyz
parameter that affects config loading.
"""
class MyDaemon(Daemon):
def load_configuration(self):
if self.param1 != 'start':
raise Exception("param1 not yet set")
mock_sys.argv = ['foo', 'start', '--config=xyz']
a = MyDaemon(pid_file='foo.pid')
@patch("succubus.daemonize.os.setgid")
def test_set_gid_translates_group_name(self, mock_setgid):
daemon = Daemon(pid_file="foo")
daemon.group = "root"
daemon.set_gid()
mock_setgid.assert_called_with(0)
@patch("succubus.daemonize.os.setuid")
def test_set_uid_translates_user_name(self, mock_setuid):
daemon = Daemon(pid_file="foo")
daemon.user = "root"
daemon.set_uid()
mock_setuid.assert_called_with(0)
| ...
a = MyDaemon(pid_file='foo.pid')
@patch("succubus.daemonize.os.setgid")
def test_set_gid_translates_group_name(self, mock_setgid):
daemon = Daemon(pid_file="foo")
daemon.group = "root"
daemon.set_gid()
mock_setgid.assert_called_with(0)
@patch("succubus.daemonize.os.setuid")
def test_set_uid_translates_user_name(self, mock_setuid):
daemon = Daemon(pid_file="foo")
daemon.user = "root"
daemon.set_uid()
mock_setuid.assert_called_with(0)
... |
9fdea42df37c722aefb5e8fb7c04c45c06c20f17 | tests/test_client_users.py | tests/test_client_users.py | import pydle
from .fixtures import with_client
from .mocks import Mock
@with_client()
def test_user_creation(server, client):
client._create_user('WiZ')
assert 'WiZ' in client.users
assert client.users['WiZ']['nickname'] == 'WiZ'
@with_client()
def test_user_renaming(server, client):
client._create_user('WiZ')
client._rename_user('WiZ', 'jilles')
assert 'WiZ' not in client.users
assert 'jilles' in client.users
assert client.users['jilles']['nickname'] == 'jilles'
@with_client()
def test_user_deletion(server, client):
client._create_user('WiZ')
client._destroy_user('WiZ')
assert 'WiZ' not in client.users
| import pydle
from .fixtures import with_client
@with_client()
def test_client_same_nick(server, client):
assert client.is_same_nick('WiZ', 'WiZ')
assert not client.is_same_nick('WiZ', 'jilles')
assert not client.is_same_nick('WiZ', 'wiz')
@with_client()
def test_user_creation(server, client):
client._create_user('WiZ')
assert 'WiZ' in client.users
assert client.users['WiZ']['nickname'] == 'WiZ'
@with_client()
def test_user_invalid_creation(server, client):
client._create_user('irc.fbi.gov')
assert 'irc.fbi.gov' not in client.users
@with_client()
def test_user_renaming(server, client):
client._create_user('WiZ')
client._rename_user('WiZ', 'jilles')
assert 'WiZ' not in client.users
assert 'jilles' in client.users
assert client.users['jilles']['nickname'] == 'jilles'
@with_client()
def test_user_renaming_creation(server, client):
client._rename_user('null', 'WiZ')
assert 'WiZ' in client.users
assert 'null' not in client.users
@with_client()
def test_user_deletion(server, client):
client._create_user('WiZ')
client._destroy_user('WiZ')
assert 'WiZ' not in client.users
@with_client()
def test_user_synchronization(server, client):
client._create_user('WiZ')
client._sync_user('WiZ', { 'hostname': 'og.irc.developer' })
assert client.users['WiZ']['hostname'] == 'og.irc.developer'
@with_client()
def test_user_synchronization_creation(server, client):
client._sync_user('WiZ', {})
assert 'WiZ' in client.users
@with_client()
def test_user_invalid_synchronization(server, client):
client._sync_user('irc.fbi.gov', {})
assert 'irc.fbi.gov' not in client.users
| Extend client:users tests to renaming and synchronization. | tests: Extend client:users tests to renaming and synchronization.
| Python | bsd-3-clause | Shizmob/pydle | import pydle
from .fixtures import with_client
- from .mocks import Mock
+
+ @with_client()
+ def test_client_same_nick(server, client):
+ assert client.is_same_nick('WiZ', 'WiZ')
+ assert not client.is_same_nick('WiZ', 'jilles')
+ assert not client.is_same_nick('WiZ', 'wiz')
@with_client()
def test_user_creation(server, client):
client._create_user('WiZ')
assert 'WiZ' in client.users
assert client.users['WiZ']['nickname'] == 'WiZ'
+
+ @with_client()
+ def test_user_invalid_creation(server, client):
+ client._create_user('irc.fbi.gov')
+ assert 'irc.fbi.gov' not in client.users
@with_client()
def test_user_renaming(server, client):
client._create_user('WiZ')
client._rename_user('WiZ', 'jilles')
assert 'WiZ' not in client.users
assert 'jilles' in client.users
assert client.users['jilles']['nickname'] == 'jilles'
@with_client()
+ def test_user_renaming_creation(server, client):
+ client._rename_user('null', 'WiZ')
+
+ assert 'WiZ' in client.users
+ assert 'null' not in client.users
+
+ @with_client()
def test_user_deletion(server, client):
client._create_user('WiZ')
client._destroy_user('WiZ')
assert 'WiZ' not in client.users
+ @with_client()
+ def test_user_synchronization(server, client):
+ client._create_user('WiZ')
+ client._sync_user('WiZ', { 'hostname': 'og.irc.developer' })
+
+ assert client.users['WiZ']['hostname'] == 'og.irc.developer'
+
+ @with_client()
+ def test_user_synchronization_creation(server, client):
+ client._sync_user('WiZ', {})
+ assert 'WiZ' in client.users
+
+ @with_client()
+ def test_user_invalid_synchronization(server, client):
+ client._sync_user('irc.fbi.gov', {})
+ assert 'irc.fbi.gov' not in client.users
+ | Extend client:users tests to renaming and synchronization. | ## Code Before:
import pydle
from .fixtures import with_client
from .mocks import Mock
@with_client()
def test_user_creation(server, client):
client._create_user('WiZ')
assert 'WiZ' in client.users
assert client.users['WiZ']['nickname'] == 'WiZ'
@with_client()
def test_user_renaming(server, client):
client._create_user('WiZ')
client._rename_user('WiZ', 'jilles')
assert 'WiZ' not in client.users
assert 'jilles' in client.users
assert client.users['jilles']['nickname'] == 'jilles'
@with_client()
def test_user_deletion(server, client):
client._create_user('WiZ')
client._destroy_user('WiZ')
assert 'WiZ' not in client.users
## Instruction:
Extend client:users tests to renaming and synchronization.
## Code After:
import pydle
from .fixtures import with_client
@with_client()
def test_client_same_nick(server, client):
assert client.is_same_nick('WiZ', 'WiZ')
assert not client.is_same_nick('WiZ', 'jilles')
assert not client.is_same_nick('WiZ', 'wiz')
@with_client()
def test_user_creation(server, client):
client._create_user('WiZ')
assert 'WiZ' in client.users
assert client.users['WiZ']['nickname'] == 'WiZ'
@with_client()
def test_user_invalid_creation(server, client):
client._create_user('irc.fbi.gov')
assert 'irc.fbi.gov' not in client.users
@with_client()
def test_user_renaming(server, client):
client._create_user('WiZ')
client._rename_user('WiZ', 'jilles')
assert 'WiZ' not in client.users
assert 'jilles' in client.users
assert client.users['jilles']['nickname'] == 'jilles'
@with_client()
def test_user_renaming_creation(server, client):
client._rename_user('null', 'WiZ')
assert 'WiZ' in client.users
assert 'null' not in client.users
@with_client()
def test_user_deletion(server, client):
client._create_user('WiZ')
client._destroy_user('WiZ')
assert 'WiZ' not in client.users
@with_client()
def test_user_synchronization(server, client):
client._create_user('WiZ')
client._sync_user('WiZ', { 'hostname': 'og.irc.developer' })
assert client.users['WiZ']['hostname'] == 'og.irc.developer'
@with_client()
def test_user_synchronization_creation(server, client):
client._sync_user('WiZ', {})
assert 'WiZ' in client.users
@with_client()
def test_user_invalid_synchronization(server, client):
client._sync_user('irc.fbi.gov', {})
assert 'irc.fbi.gov' not in client.users
| ...
from .fixtures import with_client
@with_client()
def test_client_same_nick(server, client):
assert client.is_same_nick('WiZ', 'WiZ')
assert not client.is_same_nick('WiZ', 'jilles')
assert not client.is_same_nick('WiZ', 'wiz')
...
assert client.users['WiZ']['nickname'] == 'WiZ'
@with_client()
def test_user_invalid_creation(server, client):
client._create_user('irc.fbi.gov')
assert 'irc.fbi.gov' not in client.users
...
@with_client()
def test_user_renaming_creation(server, client):
client._rename_user('null', 'WiZ')
assert 'WiZ' in client.users
assert 'null' not in client.users
@with_client()
def test_user_deletion(server, client):
...
assert 'WiZ' not in client.users
@with_client()
def test_user_synchronization(server, client):
client._create_user('WiZ')
client._sync_user('WiZ', { 'hostname': 'og.irc.developer' })
assert client.users['WiZ']['hostname'] == 'og.irc.developer'
@with_client()
def test_user_synchronization_creation(server, client):
client._sync_user('WiZ', {})
assert 'WiZ' in client.users
@with_client()
def test_user_invalid_synchronization(server, client):
client._sync_user('irc.fbi.gov', {})
assert 'irc.fbi.gov' not in client.users
... |
96d2e3d47cf193046f68fef859244fd31be2ffa9 | utils.py | utils.py | import vx
def _expose(f=None, name=None):
if name is None:
name = f.__name__.lstrip('_')
if getattr(vx, name, None) is not None:
raise AttributeError("Cannot expose duplicate name: '{}'".format(name))
if f is None:
def g(f):
setattr(vx, name, f)
return f
return g
setattr(vx, name, f)
return f
vx.expose = _expose
@vx.expose
def _repeat(c, times=4):
for _ in range(times):
c()
| import vx
from functools import partial
def _expose(f=None, name=None):
if f is None:
return partial(_expose, name=name)
if name is None:
name = f.__name__.lstrip('_')
if getattr(vx, name, None) is not None:
raise AttributeError("Cannot expose duplicate name: '{}'".format(name))
setattr(vx, name, f)
return f
vx.expose = _expose
@vx.expose
def _repeat(c, times=4):
for _ in range(times):
c()
| Fix vx.expose to not crash when None,None is passed | Fix vx.expose to not crash when None,None is passed
Also reformat it using functools.partial
| Python | mit | philipdexter/vx,philipdexter/vx | import vx
+ from functools import partial
+
def _expose(f=None, name=None):
+ if f is None:
+ return partial(_expose, name=name)
if name is None:
name = f.__name__.lstrip('_')
if getattr(vx, name, None) is not None:
raise AttributeError("Cannot expose duplicate name: '{}'".format(name))
- if f is None:
- def g(f):
- setattr(vx, name, f)
- return f
- return g
setattr(vx, name, f)
return f
vx.expose = _expose
@vx.expose
def _repeat(c, times=4):
for _ in range(times):
c()
| Fix vx.expose to not crash when None,None is passed | ## Code Before:
import vx
def _expose(f=None, name=None):
if name is None:
name = f.__name__.lstrip('_')
if getattr(vx, name, None) is not None:
raise AttributeError("Cannot expose duplicate name: '{}'".format(name))
if f is None:
def g(f):
setattr(vx, name, f)
return f
return g
setattr(vx, name, f)
return f
vx.expose = _expose
@vx.expose
def _repeat(c, times=4):
for _ in range(times):
c()
## Instruction:
Fix vx.expose to not crash when None,None is passed
## Code After:
import vx
from functools import partial
def _expose(f=None, name=None):
if f is None:
return partial(_expose, name=name)
if name is None:
name = f.__name__.lstrip('_')
if getattr(vx, name, None) is not None:
raise AttributeError("Cannot expose duplicate name: '{}'".format(name))
setattr(vx, name, f)
return f
vx.expose = _expose
@vx.expose
def _repeat(c, times=4):
for _ in range(times):
c()
| // ... existing code ...
from functools import partial
def _expose(f=None, name=None):
if f is None:
return partial(_expose, name=name)
if name is None:
// ... modified code ...
raise AttributeError("Cannot expose duplicate name: '{}'".format(name))
setattr(vx, name, f)
// ... rest of the code ... |
e46e512fad9bc92c1725711e2800e44bb699d281 | deploy/mirrors/greasyfork.py | deploy/mirrors/greasyfork.py | from mechanize import Browser
def exec_(config, summary, script):
USERNAME = config['USERNAME']
PASSWORD = config['PASSWORD']
SCRIPT_ID = config['SCRIPT_ID']
LOGIN_URL = 'https://greasyfork.org/users/sign_in'
EDIT_URL = 'https://greasyfork.org/scripts/{0}/versions/new'.format(SCRIPT_ID)
b = Browser()
# login
b.open(LOGIN_URL)
b.select_form(nr=1)
b['user[email]'] = USERNAME
b['user[password]'] = PASSWORD
b.submit()
# edit source
b.open(EDIT_URL)
b.select_form(nr=1)
b['script_version[additional_info]'] = summary.encode('utf-8')
b.submit(name='commit')
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
| from mechanize import Browser
def exec_(config, summary, script):
USERNAME = config['USERNAME']
PASSWORD = config['PASSWORD']
SCRIPT_ID = config['SCRIPT_ID']
LOGIN_URL = 'https://greasyfork.org/users/sign_in'
EDIT_URL = 'https://greasyfork.org/scripts/{0}/versions/new'.format(SCRIPT_ID)
b = Browser()
# login
b.open(LOGIN_URL)
b.select_form(nr=2)
b['user[email]'] = USERNAME
b['user[password]'] = PASSWORD
b.submit()
# edit source
b.open(EDIT_URL)
b.select_form(nr=2)
b['script_version[additional_info]'] = summary.encode('utf-8')
b['script_version[code]'] = script.encode('utf-8')
b.submit(name='commit')
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
| Fix Greasy Fork deploy script. | Fix Greasy Fork deploy script.
| Python | bsd-2-clause | MNBuyskih/adsbypasser,tablesmit/adsbypasser,xor10/adsbypasser,kehugter/adsbypasser,tosunkaya/adsbypasser,xor10/adsbypasser,MNBuyskih/adsbypasser,tablesmit/adsbypasser,kehugter/adsbypasser,kehugter/adsbypasser,tablesmit/adsbypasser,xor10/adsbypasser,tosunkaya/adsbypasser,MNBuyskih/adsbypasser,tosunkaya/adsbypasser | from mechanize import Browser
def exec_(config, summary, script):
USERNAME = config['USERNAME']
PASSWORD = config['PASSWORD']
SCRIPT_ID = config['SCRIPT_ID']
LOGIN_URL = 'https://greasyfork.org/users/sign_in'
EDIT_URL = 'https://greasyfork.org/scripts/{0}/versions/new'.format(SCRIPT_ID)
b = Browser()
# login
b.open(LOGIN_URL)
- b.select_form(nr=1)
+ b.select_form(nr=2)
b['user[email]'] = USERNAME
b['user[password]'] = PASSWORD
b.submit()
# edit source
b.open(EDIT_URL)
- b.select_form(nr=1)
+ b.select_form(nr=2)
b['script_version[additional_info]'] = summary.encode('utf-8')
+ b['script_version[code]'] = script.encode('utf-8')
b.submit(name='commit')
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
| Fix Greasy Fork deploy script. | ## Code Before:
from mechanize import Browser
def exec_(config, summary, script):
USERNAME = config['USERNAME']
PASSWORD = config['PASSWORD']
SCRIPT_ID = config['SCRIPT_ID']
LOGIN_URL = 'https://greasyfork.org/users/sign_in'
EDIT_URL = 'https://greasyfork.org/scripts/{0}/versions/new'.format(SCRIPT_ID)
b = Browser()
# login
b.open(LOGIN_URL)
b.select_form(nr=1)
b['user[email]'] = USERNAME
b['user[password]'] = PASSWORD
b.submit()
# edit source
b.open(EDIT_URL)
b.select_form(nr=1)
b['script_version[additional_info]'] = summary.encode('utf-8')
b.submit(name='commit')
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
## Instruction:
Fix Greasy Fork deploy script.
## Code After:
from mechanize import Browser
def exec_(config, summary, script):
USERNAME = config['USERNAME']
PASSWORD = config['PASSWORD']
SCRIPT_ID = config['SCRIPT_ID']
LOGIN_URL = 'https://greasyfork.org/users/sign_in'
EDIT_URL = 'https://greasyfork.org/scripts/{0}/versions/new'.format(SCRIPT_ID)
b = Browser()
# login
b.open(LOGIN_URL)
b.select_form(nr=2)
b['user[email]'] = USERNAME
b['user[password]'] = PASSWORD
b.submit()
# edit source
b.open(EDIT_URL)
b.select_form(nr=2)
b['script_version[additional_info]'] = summary.encode('utf-8')
b['script_version[code]'] = script.encode('utf-8')
b.submit(name='commit')
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
| // ... existing code ...
b.open(LOGIN_URL)
b.select_form(nr=2)
b['user[email]'] = USERNAME
// ... modified code ...
b.open(EDIT_URL)
b.select_form(nr=2)
b['script_version[additional_info]'] = summary.encode('utf-8')
b['script_version[code]'] = script.encode('utf-8')
b.submit(name='commit')
// ... rest of the code ... |
a248ac96a04cccc31f881496e45db3212ad46118 | core/components/security/factor.py | core/components/security/factor.py |
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
|
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data['tokenResponse'], [facet])
except AttributeError:
return user, False
return user, True
| Fix server error when login with u2f | Fix server error when login with u2f
| Python | mit | chiaki64/Windless,chiaki64/Windless |
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
- complete_authentication(challenge, data, [facet])
+ complete_authentication(challenge, data['tokenResponse'], [facet])
except AttributeError:
return user, False
return user, True
| Fix server error when login with u2f | ## Code Before:
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
## Instruction:
Fix server error when login with u2f
## Code After:
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data['tokenResponse'], [facet])
except AttributeError:
return user, False
return user, True
| # ... existing code ...
try:
complete_authentication(challenge, data['tokenResponse'], [facet])
except AttributeError:
# ... rest of the code ... |
b44345efada2a89423c89ec88a24f1dbe97ef562 | viewer.py | viewer.py |
if __name__ == '__main__':
import wx
from controller import ViewerController
app = wx.App(0)
ViewerController()
app.MainLoop()
|
if __name__ == '__main__':
import sys
try:
import wx
except ImportError:
print("""\
You need to install WXPython to use the viewer
http://wxpython.org/download.php
""")
sys.exit()
from controller import ViewerController
app = wx.App(0)
ViewerController()
app.MainLoop()
| Add simple test for whether WX is installed. Display download link if not. | Add simple test for whether WX is installed. Display download link if not.
| Python | agpl-3.0 | nccgroup/lapith |
if __name__ == '__main__':
+ import sys
+ try:
- import wx
+ import wx
+ except ImportError:
+ print("""\
+ You need to install WXPython to use the viewer
+
+ http://wxpython.org/download.php
+ """)
+ sys.exit()
from controller import ViewerController
app = wx.App(0)
ViewerController()
app.MainLoop()
| Add simple test for whether WX is installed. Display download link if not. | ## Code Before:
if __name__ == '__main__':
import wx
from controller import ViewerController
app = wx.App(0)
ViewerController()
app.MainLoop()
## Instruction:
Add simple test for whether WX is installed. Display download link if not.
## Code After:
if __name__ == '__main__':
import sys
try:
import wx
except ImportError:
print("""\
You need to install WXPython to use the viewer
http://wxpython.org/download.php
""")
sys.exit()
from controller import ViewerController
app = wx.App(0)
ViewerController()
app.MainLoop()
| # ... existing code ...
if __name__ == '__main__':
import sys
try:
import wx
except ImportError:
print("""\
You need to install WXPython to use the viewer
http://wxpython.org/download.php
""")
sys.exit()
from controller import ViewerController
# ... rest of the code ... |
82e0987375ff99e0d94068c1ec6078d3920249f2 | nc/data/__init__.py | nc/data/__init__.py | DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_04_13T13.38.34.887.zip" # noqa
| DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_06_22T09.52.20.780.zip" # noqa
| Add this fix from the master branch | Add this fix from the master branch
(It was in the import_nc management command file, but newer
code places it here.)
| Python | mit | OpenDataPolicingNC/Traffic-Stops,OpenDataPolicingNC/Traffic-Stops,OpenDataPolicingNC/Traffic-Stops,OpenDataPolicingNC/Traffic-Stops | - DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_04_13T13.38.34.887.zip" # noqa
+ DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_06_22T09.52.20.780.zip" # noqa
| Add this fix from the master branch | ## Code Before:
DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_04_13T13.38.34.887.zip" # noqa
## Instruction:
Add this fix from the master branch
## Code After:
DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_06_22T09.52.20.780.zip" # noqa
| // ... existing code ...
DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_06_22T09.52.20.780.zip" # noqa
// ... rest of the code ... |
40e9375f6b35b4a05ad311822705b7a7efe46b56 | site_scons/get_libs.py | site_scons/get_libs.py | import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/x86_64-linux-gnu',
'/usr/local/lib'] + list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
| import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/i386-linux-gnu',
'/usr/lib/x86_64-linux-gnu', '/usr/local/lib'] +
list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
| Add Linux 32-bit search path for Boost libraries | Add Linux 32-bit search path for Boost libraries
| Python | bsd-3-clause | wheeler-microfluidics/dmf-control-board-firmware,wheeler-microfluidics/dmf-control-board-firmware,wheeler-microfluidics/dmf-control-board-firmware,wheeler-microfluidics/dmf-control-board-firmware | import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
- lib_paths = (['/usr/lib', '/usr/lib/x86_64-linux-gnu',
+ lib_paths = (['/usr/lib', '/usr/lib/i386-linux-gnu',
+ '/usr/lib/x86_64-linux-gnu', '/usr/local/lib'] +
- '/usr/local/lib'] + list(lib_paths))
+ list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
| Add Linux 32-bit search path for Boost libraries | ## Code Before:
import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/x86_64-linux-gnu',
'/usr/local/lib'] + list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
## Instruction:
Add Linux 32-bit search path for Boost libraries
## Code After:
import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/i386-linux-gnu',
'/usr/lib/x86_64-linux-gnu', '/usr/local/lib'] +
list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
| # ... existing code ...
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/i386-linux-gnu',
'/usr/lib/x86_64-linux-gnu', '/usr/local/lib'] +
list(lib_paths))
return lib_paths
# ... rest of the code ... |
76600b63940da9322673ce6cd436129a7d65f10d | scripts/ec2/terminate_all.py | scripts/ec2/terminate_all.py |
import boto3
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
|
import boto3
import os
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
| Add import statement for os | Add import statement for os | Python | bsd-2-clause | manpen/thrill,manpen/thrill,manpen/thrill,manpen/thrill,manpen/thrill |
import boto3
+ import os
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
| Add import statement for os | ## Code Before:
import boto3
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
## Instruction:
Add import statement for os
## Code After:
import boto3
import os
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
| ...
import boto3
import os
from subprocess import call
... |
39d0c335759781de8cf1644cdf499588441b434d | tviserrys/urls.py | tviserrys/urls.py | from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.login),
url(r'^logout/$', auth_views.logout),
url(r'^password_change/$', auth_views.password_change),
url(r'^password_change/done/$', auth_views.password_change_done),
url(r'^password_reset/$', auth_views.password_reset),
url(r'^password_reset/done/$', auth_views.password_reset_done),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
url(r'^reset/done/$', auth_views.password_reset_complete),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
| from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^tviit/', include('tviit.urls', namespace='tviit')),
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.login),
url(r'^logout/$', auth_views.logout),
url(r'^password_change/$', auth_views.password_change),
url(r'^password_change/done/$', auth_views.password_change_done),
url(r'^password_reset/$', auth_views.password_reset),
url(r'^password_reset/done/$', auth_views.password_reset_done),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
url(r'^reset/done/$', auth_views.password_reset_complete),
url(r'^profile/', include('user_profile.urls', namespace='profile')),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
| Add Tviit and profile url patterns | Add Tviit and profile url patterns
| Python | mit | DeWaster/Tviserrys,DeWaster/Tviserrys | from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
+ url(r'^tviit/', include('tviit.urls', namespace='tviit')),
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.login),
url(r'^logout/$', auth_views.logout),
url(r'^password_change/$', auth_views.password_change),
url(r'^password_change/done/$', auth_views.password_change_done),
url(r'^password_reset/$', auth_views.password_reset),
url(r'^password_reset/done/$', auth_views.password_reset_done),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
url(r'^reset/done/$', auth_views.password_reset_complete),
+ url(r'^profile/', include('user_profile.urls', namespace='profile')),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
| Add Tviit and profile url patterns | ## Code Before:
from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.login),
url(r'^logout/$', auth_views.logout),
url(r'^password_change/$', auth_views.password_change),
url(r'^password_change/done/$', auth_views.password_change_done),
url(r'^password_reset/$', auth_views.password_reset),
url(r'^password_reset/done/$', auth_views.password_reset_done),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
url(r'^reset/done/$', auth_views.password_reset_complete),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
## Instruction:
Add Tviit and profile url patterns
## Code After:
from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^tviit/', include('tviit.urls', namespace='tviit')),
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.login),
url(r'^logout/$', auth_views.logout),
url(r'^password_change/$', auth_views.password_change),
url(r'^password_change/done/$', auth_views.password_change_done),
url(r'^password_reset/$', auth_views.password_reset),
url(r'^password_reset/done/$', auth_views.password_reset_done),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
url(r'^reset/done/$', auth_views.password_reset_complete),
url(r'^profile/', include('user_profile.urls', namespace='profile')),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
| # ... existing code ...
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^tviit/', include('tviit.urls', namespace='tviit')),
url(r'^admin/', admin.site.urls),
# ... modified code ...
url(r'^reset/done/$', auth_views.password_reset_complete),
url(r'^profile/', include('user_profile.urls', namespace='profile')),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
# ... rest of the code ... |
78ec1cffde6443016bae2c8aefdb67ab26bfab10 | __init__.py | __init__.py | from . import OctoPrintOutputDevicePlugin
from . import DiscoverOctoPrintAction
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"type": "extension",
"plugin": {
"name": "Wifi connection",
"author": "Ultimaker",
"description": catalog.i18nc("Wifi connection", "Wifi connection"),
"api": 3
}
}
def register(app):
return {
"output_device": OctoPrintOutputDevicePlugin.OctoPrintOutputDevicePlugin(),
"machine_action": DiscoverOctoPrintAction.DiscoverOctoPrintAction()
} | from . import OctoPrintOutputDevicePlugin
from . import DiscoverOctoPrintAction
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"type": "extension",
"plugin": {
"name": "OctoPrint connection",
"author": "fieldOfView",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Allows sending prints to OctoPrint and monitoring the progress"),
"api": 3
}
}
def register(app):
return {
"output_device": OctoPrintOutputDevicePlugin.OctoPrintOutputDevicePlugin(),
"machine_action": DiscoverOctoPrintAction.DiscoverOctoPrintAction()
} | Update plugin information (name, description, version, author) | Update plugin information (name, description, version, author)
| Python | agpl-3.0 | fieldOfView/OctoPrintPlugin | from . import OctoPrintOutputDevicePlugin
from . import DiscoverOctoPrintAction
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"type": "extension",
"plugin": {
- "name": "Wifi connection",
+ "name": "OctoPrint connection",
- "author": "Ultimaker",
+ "author": "fieldOfView",
- "description": catalog.i18nc("Wifi connection", "Wifi connection"),
+ "version": "1.0",
+ "description": catalog.i18nc("@info:whatsthis", "Allows sending prints to OctoPrint and monitoring the progress"),
"api": 3
}
}
def register(app):
return {
"output_device": OctoPrintOutputDevicePlugin.OctoPrintOutputDevicePlugin(),
"machine_action": DiscoverOctoPrintAction.DiscoverOctoPrintAction()
} | Update plugin information (name, description, version, author) | ## Code Before:
from . import OctoPrintOutputDevicePlugin
from . import DiscoverOctoPrintAction
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"type": "extension",
"plugin": {
"name": "Wifi connection",
"author": "Ultimaker",
"description": catalog.i18nc("Wifi connection", "Wifi connection"),
"api": 3
}
}
def register(app):
return {
"output_device": OctoPrintOutputDevicePlugin.OctoPrintOutputDevicePlugin(),
"machine_action": DiscoverOctoPrintAction.DiscoverOctoPrintAction()
}
## Instruction:
Update plugin information (name, description, version, author)
## Code After:
from . import OctoPrintOutputDevicePlugin
from . import DiscoverOctoPrintAction
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"type": "extension",
"plugin": {
"name": "OctoPrint connection",
"author": "fieldOfView",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Allows sending prints to OctoPrint and monitoring the progress"),
"api": 3
}
}
def register(app):
return {
"output_device": OctoPrintOutputDevicePlugin.OctoPrintOutputDevicePlugin(),
"machine_action": DiscoverOctoPrintAction.DiscoverOctoPrintAction()
} | # ... existing code ...
"plugin": {
"name": "OctoPrint connection",
"author": "fieldOfView",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Allows sending prints to OctoPrint and monitoring the progress"),
"api": 3
# ... rest of the code ... |
68b52fedf5b22891a4fc9cf121417ced38d0ea00 | rolepermissions/utils.py | rolepermissions/utils.py | from __future__ import unicode_literals
import re
import collections
def user_is_authenticated(user):
if isinstance(user.is_authenticated, collections.Callable):
authenticated = user.is_authenticated()
else:
authenticated = user.is_authenticated
return authenticated
def camelToSnake(s):
"""
https://gist.github.com/jaytaylor/3660565
Is it ironic that this function is written in camel case, yet it
converts to snake case? hmm..
"""
_underscorer1 = re.compile(r'(.)([A-Z][a-z]+)')
_underscorer2 = re.compile('([a-z0-9])([A-Z])')
subbed = _underscorer1.sub(r'\1_\2', s)
return _underscorer2.sub(r'\1_\2', subbed).lower()
def snake_to_title(s):
return ' '.join(x.capitalize() for x in s.split('_'))
def camel_or_snake_to_title(s):
return snake_to_title(camelToSnake(s))
| from __future__ import unicode_literals
import re
try:
from collections.abc import Callable
except ImportError:
from collections import Callable
def user_is_authenticated(user):
if isinstance(user.is_authenticated, Callable):
authenticated = user.is_authenticated()
else:
authenticated = user.is_authenticated
return authenticated
def camelToSnake(s):
"""
https://gist.github.com/jaytaylor/3660565
Is it ironic that this function is written in camel case, yet it
converts to snake case? hmm..
"""
_underscorer1 = re.compile(r'(.)([A-Z][a-z]+)')
_underscorer2 = re.compile('([a-z0-9])([A-Z])')
subbed = _underscorer1.sub(r'\1_\2', s)
return _underscorer2.sub(r'\1_\2', subbed).lower()
def snake_to_title(s):
return ' '.join(x.capitalize() for x in s.split('_'))
def camel_or_snake_to_title(s):
return snake_to_title(camelToSnake(s))
| Fix import of Callable for Python 3.9 | Fix import of Callable for Python 3.9
Python 3.3 moved Callable to collections.abc and Python 3.9 removes Callable from collections module | Python | mit | vintasoftware/django-role-permissions | from __future__ import unicode_literals
import re
- import collections
+ try:
+ from collections.abc import Callable
+ except ImportError:
+ from collections import Callable
def user_is_authenticated(user):
- if isinstance(user.is_authenticated, collections.Callable):
+ if isinstance(user.is_authenticated, Callable):
authenticated = user.is_authenticated()
else:
authenticated = user.is_authenticated
return authenticated
def camelToSnake(s):
"""
https://gist.github.com/jaytaylor/3660565
Is it ironic that this function is written in camel case, yet it
converts to snake case? hmm..
"""
_underscorer1 = re.compile(r'(.)([A-Z][a-z]+)')
_underscorer2 = re.compile('([a-z0-9])([A-Z])')
subbed = _underscorer1.sub(r'\1_\2', s)
return _underscorer2.sub(r'\1_\2', subbed).lower()
def snake_to_title(s):
return ' '.join(x.capitalize() for x in s.split('_'))
def camel_or_snake_to_title(s):
return snake_to_title(camelToSnake(s))
| Fix import of Callable for Python 3.9 | ## Code Before:
from __future__ import unicode_literals
import re
import collections
def user_is_authenticated(user):
if isinstance(user.is_authenticated, collections.Callable):
authenticated = user.is_authenticated()
else:
authenticated = user.is_authenticated
return authenticated
def camelToSnake(s):
"""
https://gist.github.com/jaytaylor/3660565
Is it ironic that this function is written in camel case, yet it
converts to snake case? hmm..
"""
_underscorer1 = re.compile(r'(.)([A-Z][a-z]+)')
_underscorer2 = re.compile('([a-z0-9])([A-Z])')
subbed = _underscorer1.sub(r'\1_\2', s)
return _underscorer2.sub(r'\1_\2', subbed).lower()
def snake_to_title(s):
return ' '.join(x.capitalize() for x in s.split('_'))
def camel_or_snake_to_title(s):
return snake_to_title(camelToSnake(s))
## Instruction:
Fix import of Callable for Python 3.9
## Code After:
from __future__ import unicode_literals
import re
try:
from collections.abc import Callable
except ImportError:
from collections import Callable
def user_is_authenticated(user):
if isinstance(user.is_authenticated, Callable):
authenticated = user.is_authenticated()
else:
authenticated = user.is_authenticated
return authenticated
def camelToSnake(s):
"""
https://gist.github.com/jaytaylor/3660565
Is it ironic that this function is written in camel case, yet it
converts to snake case? hmm..
"""
_underscorer1 = re.compile(r'(.)([A-Z][a-z]+)')
_underscorer2 = re.compile('([a-z0-9])([A-Z])')
subbed = _underscorer1.sub(r'\1_\2', s)
return _underscorer2.sub(r'\1_\2', subbed).lower()
def snake_to_title(s):
return ' '.join(x.capitalize() for x in s.split('_'))
def camel_or_snake_to_title(s):
return snake_to_title(camelToSnake(s))
| ...
import re
try:
from collections.abc import Callable
except ImportError:
from collections import Callable
...
def user_is_authenticated(user):
if isinstance(user.is_authenticated, Callable):
authenticated = user.is_authenticated()
... |
1560bf7112652cbbc06d6d58031cd268d293ee13 | atmo/users/factories.py | atmo/users/factories.py | import factory
from django.contrib.auth.models import User, Group
class GroupFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: "Group #%s" % n)
class Meta:
model = Group
class UserFactory(factory.django.DjangoModelFactory):
username = factory.Sequence(lambda n: 'user%s' % n)
first_name = factory.Sequence(lambda n: "user %03d" % n)
email = '[email protected]'
password = factory.PostGenerationMethodCall('set_password', 'password')
class Meta:
model = User
@factory.post_generation
def groups(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
# A list of groups were passed in, use them
for group in extracted:
self.groups.add(group)
| import factory
from django.contrib.auth.models import User, Group
from django.contrib.auth.hashers import make_password
class GroupFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: "Group #%s" % n)
class Meta:
model = Group
class UserFactory(factory.django.DjangoModelFactory):
username = factory.Sequence(lambda n: 'user%s' % n)
first_name = factory.Sequence(lambda n: "user %03d" % n)
email = '[email protected]'
class Meta:
model = User
@factory.post_generation
def password(self, create, extracted, **kwargs):
if not create:
return
return make_password('password')
@factory.post_generation
def groups(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
# A list of groups were passed in, use them
for group in extracted:
self.groups.add(group)
| Fix incompatibility with recent factory_boy postgeneration. | Fix incompatibility with recent factory_boy postgeneration.
| Python | mpl-2.0 | mozilla/telemetry-analysis-service,mozilla/telemetry-analysis-service,mozilla/telemetry-analysis-service,mozilla/telemetry-analysis-service | import factory
from django.contrib.auth.models import User, Group
+ from django.contrib.auth.hashers import make_password
class GroupFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: "Group #%s" % n)
class Meta:
model = Group
class UserFactory(factory.django.DjangoModelFactory):
username = factory.Sequence(lambda n: 'user%s' % n)
first_name = factory.Sequence(lambda n: "user %03d" % n)
email = '[email protected]'
- password = factory.PostGenerationMethodCall('set_password', 'password')
class Meta:
model = User
+
+ @factory.post_generation
+ def password(self, create, extracted, **kwargs):
+ if not create:
+ return
+ return make_password('password')
@factory.post_generation
def groups(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
# A list of groups were passed in, use them
for group in extracted:
self.groups.add(group)
| Fix incompatibility with recent factory_boy postgeneration. | ## Code Before:
import factory
from django.contrib.auth.models import User, Group
class GroupFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: "Group #%s" % n)
class Meta:
model = Group
class UserFactory(factory.django.DjangoModelFactory):
username = factory.Sequence(lambda n: 'user%s' % n)
first_name = factory.Sequence(lambda n: "user %03d" % n)
email = '[email protected]'
password = factory.PostGenerationMethodCall('set_password', 'password')
class Meta:
model = User
@factory.post_generation
def groups(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
# A list of groups were passed in, use them
for group in extracted:
self.groups.add(group)
## Instruction:
Fix incompatibility with recent factory_boy postgeneration.
## Code After:
import factory
from django.contrib.auth.models import User, Group
from django.contrib.auth.hashers import make_password
class GroupFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: "Group #%s" % n)
class Meta:
model = Group
class UserFactory(factory.django.DjangoModelFactory):
username = factory.Sequence(lambda n: 'user%s' % n)
first_name = factory.Sequence(lambda n: "user %03d" % n)
email = '[email protected]'
class Meta:
model = User
@factory.post_generation
def password(self, create, extracted, **kwargs):
if not create:
return
return make_password('password')
@factory.post_generation
def groups(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
# A list of groups were passed in, use them
for group in extracted:
self.groups.add(group)
| # ... existing code ...
from django.contrib.auth.models import User, Group
from django.contrib.auth.hashers import make_password
# ... modified code ...
email = '[email protected]'
...
model = User
@factory.post_generation
def password(self, create, extracted, **kwargs):
if not create:
return
return make_password('password')
# ... rest of the code ... |
8090fa9c072656497ff383e9b76d49af2955e420 | examples/hopv/hopv_graph_conv.py | examples/hopv/hopv_graph_conv.py | from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import numpy as np
from models import GraphConvTensorGraph
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from deepchem.molnet import load_hopv
# Load HOPV dataset
hopv_tasks, hopv_datasets, transformers = load_hopv(featurizer='GraphConv')
train_dataset, valid_dataset, test_dataset = hopv_datasets
# Fit models
metric = [
dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean, mode="regression"),
dc.metrics.Metric(
dc.metrics.mean_absolute_error, np.mean, mode="regression")
]
# Number of features on conv-mols
n_feat = 75
# Batch size of models
batch_size = 50
model = GraphConvTensorGraph(
len(hopv_tasks), batch_size=batch_size, mode='regression')
# Fit trained model
model.fit(train_dataset, nb_epoch=25)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, metric, transformers)
valid_scores = model.evaluate(valid_dataset, metric, transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
| from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import numpy as np
from models import GraphConvModel
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from deepchem.molnet import load_hopv
# Load HOPV dataset
hopv_tasks, hopv_datasets, transformers = load_hopv(featurizer='GraphConv')
train_dataset, valid_dataset, test_dataset = hopv_datasets
# Fit models
metric = [
dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean, mode="regression"),
dc.metrics.Metric(
dc.metrics.mean_absolute_error, np.mean, mode="regression")
]
# Number of features on conv-mols
n_feat = 75
# Batch size of models
batch_size = 50
model = GraphConvModel(
len(hopv_tasks), batch_size=batch_size, mode='regression')
# Fit trained model
model.fit(train_dataset, nb_epoch=25)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, metric, transformers)
valid_scores = model.evaluate(valid_dataset, metric, transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
| Fix GraphConvTensorGraph to GraphConvModel in hopv example | Fix GraphConvTensorGraph to GraphConvModel in hopv example
| Python | mit | Agent007/deepchem,lilleswing/deepchem,lilleswing/deepchem,Agent007/deepchem,peastman/deepchem,miaecle/deepchem,peastman/deepchem,ktaneishi/deepchem,miaecle/deepchem,Agent007/deepchem,deepchem/deepchem,ktaneishi/deepchem,deepchem/deepchem,ktaneishi/deepchem,miaecle/deepchem,lilleswing/deepchem | from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import numpy as np
- from models import GraphConvTensorGraph
+ from models import GraphConvModel
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from deepchem.molnet import load_hopv
# Load HOPV dataset
hopv_tasks, hopv_datasets, transformers = load_hopv(featurizer='GraphConv')
train_dataset, valid_dataset, test_dataset = hopv_datasets
# Fit models
metric = [
dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean, mode="regression"),
dc.metrics.Metric(
dc.metrics.mean_absolute_error, np.mean, mode="regression")
]
# Number of features on conv-mols
n_feat = 75
# Batch size of models
batch_size = 50
- model = GraphConvTensorGraph(
+ model = GraphConvModel(
len(hopv_tasks), batch_size=batch_size, mode='regression')
# Fit trained model
model.fit(train_dataset, nb_epoch=25)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, metric, transformers)
valid_scores = model.evaluate(valid_dataset, metric, transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
| Fix GraphConvTensorGraph to GraphConvModel in hopv example | ## Code Before:
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import numpy as np
from models import GraphConvTensorGraph
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from deepchem.molnet import load_hopv
# Load HOPV dataset
hopv_tasks, hopv_datasets, transformers = load_hopv(featurizer='GraphConv')
train_dataset, valid_dataset, test_dataset = hopv_datasets
# Fit models
metric = [
dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean, mode="regression"),
dc.metrics.Metric(
dc.metrics.mean_absolute_error, np.mean, mode="regression")
]
# Number of features on conv-mols
n_feat = 75
# Batch size of models
batch_size = 50
model = GraphConvTensorGraph(
len(hopv_tasks), batch_size=batch_size, mode='regression')
# Fit trained model
model.fit(train_dataset, nb_epoch=25)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, metric, transformers)
valid_scores = model.evaluate(valid_dataset, metric, transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
## Instruction:
Fix GraphConvTensorGraph to GraphConvModel in hopv example
## Code After:
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import numpy as np
from models import GraphConvModel
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from deepchem.molnet import load_hopv
# Load HOPV dataset
hopv_tasks, hopv_datasets, transformers = load_hopv(featurizer='GraphConv')
train_dataset, valid_dataset, test_dataset = hopv_datasets
# Fit models
metric = [
dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean, mode="regression"),
dc.metrics.Metric(
dc.metrics.mean_absolute_error, np.mean, mode="regression")
]
# Number of features on conv-mols
n_feat = 75
# Batch size of models
batch_size = 50
model = GraphConvModel(
len(hopv_tasks), batch_size=batch_size, mode='regression')
# Fit trained model
model.fit(train_dataset, nb_epoch=25)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, metric, transformers)
valid_scores = model.evaluate(valid_dataset, metric, transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
| # ... existing code ...
from models import GraphConvModel
# ... modified code ...
batch_size = 50
model = GraphConvModel(
len(hopv_tasks), batch_size=batch_size, mode='regression')
# ... rest of the code ... |
0d023a51283d477e4b3d02059361b003a91134e0 | jaspyx/scope.py | jaspyx/scope.py | class Scope(object):
tmp_index = 0
def __init__(self, parent=None):
self.parent = parent
self.prefix = []
self.declarations = {}
self.globals = set()
self.inherited = True
def prefixed(self, name):
return '.'.join(self.prefix + [name])
def declare(self, name, var=True):
self.declarations[name] = var
def get_scope(self, name, inherit=False):
if name in self.declarations and (not inherit or self.inherited):
return self
elif self.parent is not None:
return self.parent.get_scope(name, True)
else:
return None
def declare_global(self, name):
self.globals.add(name)
def is_global(self, name):
return name in self.globals
def get_global_scope(self):
if self.parent:
return self.parent.get_global_scope()
else:
return self
@classmethod
def alloc_temp(cls):
cls.tmp_index += 1
return '__jpx_tmp_%i' % cls.tmp_index
| class Scope(object):
def __init__(self, parent=None):
self.parent = parent
self.prefix = []
self.declarations = {}
self.globals = set()
self.inherited = True
def prefixed(self, name):
return '.'.join(self.prefix + [name])
def declare(self, name, var=True):
self.declarations[name] = var
def get_scope(self, name, inherit=False):
if name in self.declarations and (not inherit or self.inherited):
return self
elif self.parent is not None:
return self.parent.get_scope(name, True)
else:
return None
def declare_global(self, name):
self.globals.add(name)
def is_global(self, name):
return name in self.globals
def get_global_scope(self):
if self.parent:
return self.parent.get_global_scope()
else:
return self
| Remove temp var allocation code. | Remove temp var allocation code.
| Python | mit | ztane/jaspyx,iksteen/jaspyx | class Scope(object):
- tmp_index = 0
-
def __init__(self, parent=None):
self.parent = parent
self.prefix = []
self.declarations = {}
self.globals = set()
self.inherited = True
def prefixed(self, name):
return '.'.join(self.prefix + [name])
def declare(self, name, var=True):
self.declarations[name] = var
def get_scope(self, name, inherit=False):
if name in self.declarations and (not inherit or self.inherited):
return self
elif self.parent is not None:
return self.parent.get_scope(name, True)
else:
return None
def declare_global(self, name):
self.globals.add(name)
def is_global(self, name):
return name in self.globals
def get_global_scope(self):
if self.parent:
return self.parent.get_global_scope()
else:
return self
- @classmethod
- def alloc_temp(cls):
- cls.tmp_index += 1
- return '__jpx_tmp_%i' % cls.tmp_index
- | Remove temp var allocation code. | ## Code Before:
class Scope(object):
tmp_index = 0
def __init__(self, parent=None):
self.parent = parent
self.prefix = []
self.declarations = {}
self.globals = set()
self.inherited = True
def prefixed(self, name):
return '.'.join(self.prefix + [name])
def declare(self, name, var=True):
self.declarations[name] = var
def get_scope(self, name, inherit=False):
if name in self.declarations and (not inherit or self.inherited):
return self
elif self.parent is not None:
return self.parent.get_scope(name, True)
else:
return None
def declare_global(self, name):
self.globals.add(name)
def is_global(self, name):
return name in self.globals
def get_global_scope(self):
if self.parent:
return self.parent.get_global_scope()
else:
return self
@classmethod
def alloc_temp(cls):
cls.tmp_index += 1
return '__jpx_tmp_%i' % cls.tmp_index
## Instruction:
Remove temp var allocation code.
## Code After:
class Scope(object):
def __init__(self, parent=None):
self.parent = parent
self.prefix = []
self.declarations = {}
self.globals = set()
self.inherited = True
def prefixed(self, name):
return '.'.join(self.prefix + [name])
def declare(self, name, var=True):
self.declarations[name] = var
def get_scope(self, name, inherit=False):
if name in self.declarations and (not inherit or self.inherited):
return self
elif self.parent is not None:
return self.parent.get_scope(name, True)
else:
return None
def declare_global(self, name):
self.globals.add(name)
def is_global(self, name):
return name in self.globals
def get_global_scope(self):
if self.parent:
return self.parent.get_global_scope()
else:
return self
| # ... existing code ...
class Scope(object):
def __init__(self, parent=None):
# ... modified code ...
return self
# ... rest of the code ... |
eaa4de2ecbcf29c9e56ebf2fa69099055e469fbc | tests/test_conversion.py | tests/test_conversion.py | from asciisciit import conversions as conv
import numpy as np
def test_lookup_method_equivalency():
img = np.random.randint(0, 255, (300,300), dtype=np.uint8)
pil_ascii = conv.apply_lut_pil(img)
np_ascii = conv.apply_lut_numpy(img)
assert(pil_ascii == np_ascii)
pil_ascii = conv.apply_lut_pil(img, "binary")
np_ascii = conv.apply_lut_numpy(img, "binary")
assert(pil_ascii == np_ascii) | import itertools
from asciisciit import conversions as conv
import numpy as np
import pytest
@pytest.mark.parametrize("invert,equalize,lut,lookup_func",
itertools.product((True, False),
(True, False),
("simple", "binary"),
(None, conv.apply_lut_pil)))
def test_pil_to_ascii(invert, equalize, lut, lookup_func):
img = np.random.randint(0, 255, (480, 640), dtype=np.uint8)
h, w = img.shape
expected_len = int(h*0.5*conv.ASPECTCORRECTIONFACTOR)*(int(w*0.5)+1)+1
img = conv.numpy_to_pil(img)
text = conv.pil_to_ascii(img, 0.5, invert, equalize, lut, lookup_func)
assert(len(text) == expected_len)
@pytest.mark.parametrize("invert,equalize,lut",
itertools.product((True, False),
(True, False),
("simple", "binary")))
def test_numpy_to_ascii(invert, equalize, lut):
img = np.random.randint(0, 255, (480, 640), dtype=np.uint8)
h, w = img.shape
expected_len = int(h*0.5*conv.ASPECTCORRECTIONFACTOR)*(int(w*0.5)+1)+1
text = conv.numpy_to_ascii(img, 0.5, invert, equalize, lut)
assert(len(text) == expected_len)
def test_lookup_method_equivalency():
img = np.random.randint(0, 255, (300,300), dtype=np.uint8)
pil_ascii = conv.apply_lut_pil(img)
np_ascii = conv.apply_lut_numpy(img)
assert(pil_ascii == np_ascii)
pil_ascii = conv.apply_lut_pil(img, "binary")
np_ascii = conv.apply_lut_numpy(img, "binary")
assert(pil_ascii == np_ascii)
| Add tests to minimally exercise basic conversion functionality | Add tests to minimally exercise basic conversion functionality
| Python | mit | derricw/asciisciit | + import itertools
from asciisciit import conversions as conv
import numpy as np
+ import pytest
+
+
+ @pytest.mark.parametrize("invert,equalize,lut,lookup_func",
+ itertools.product((True, False),
+ (True, False),
+ ("simple", "binary"),
+ (None, conv.apply_lut_pil)))
+ def test_pil_to_ascii(invert, equalize, lut, lookup_func):
+ img = np.random.randint(0, 255, (480, 640), dtype=np.uint8)
+ h, w = img.shape
+ expected_len = int(h*0.5*conv.ASPECTCORRECTIONFACTOR)*(int(w*0.5)+1)+1
+ img = conv.numpy_to_pil(img)
+ text = conv.pil_to_ascii(img, 0.5, invert, equalize, lut, lookup_func)
+ assert(len(text) == expected_len)
+
+
+ @pytest.mark.parametrize("invert,equalize,lut",
+ itertools.product((True, False),
+ (True, False),
+ ("simple", "binary")))
+ def test_numpy_to_ascii(invert, equalize, lut):
+ img = np.random.randint(0, 255, (480, 640), dtype=np.uint8)
+ h, w = img.shape
+ expected_len = int(h*0.5*conv.ASPECTCORRECTIONFACTOR)*(int(w*0.5)+1)+1
+ text = conv.numpy_to_ascii(img, 0.5, invert, equalize, lut)
+ assert(len(text) == expected_len)
def test_lookup_method_equivalency():
img = np.random.randint(0, 255, (300,300), dtype=np.uint8)
pil_ascii = conv.apply_lut_pil(img)
np_ascii = conv.apply_lut_numpy(img)
assert(pil_ascii == np_ascii)
pil_ascii = conv.apply_lut_pil(img, "binary")
np_ascii = conv.apply_lut_numpy(img, "binary")
assert(pil_ascii == np_ascii)
+ | Add tests to minimally exercise basic conversion functionality | ## Code Before:
from asciisciit import conversions as conv
import numpy as np
def test_lookup_method_equivalency():
img = np.random.randint(0, 255, (300,300), dtype=np.uint8)
pil_ascii = conv.apply_lut_pil(img)
np_ascii = conv.apply_lut_numpy(img)
assert(pil_ascii == np_ascii)
pil_ascii = conv.apply_lut_pil(img, "binary")
np_ascii = conv.apply_lut_numpy(img, "binary")
assert(pil_ascii == np_ascii)
## Instruction:
Add tests to minimally exercise basic conversion functionality
## Code After:
import itertools
from asciisciit import conversions as conv
import numpy as np
import pytest
@pytest.mark.parametrize("invert,equalize,lut,lookup_func",
itertools.product((True, False),
(True, False),
("simple", "binary"),
(None, conv.apply_lut_pil)))
def test_pil_to_ascii(invert, equalize, lut, lookup_func):
img = np.random.randint(0, 255, (480, 640), dtype=np.uint8)
h, w = img.shape
expected_len = int(h*0.5*conv.ASPECTCORRECTIONFACTOR)*(int(w*0.5)+1)+1
img = conv.numpy_to_pil(img)
text = conv.pil_to_ascii(img, 0.5, invert, equalize, lut, lookup_func)
assert(len(text) == expected_len)
@pytest.mark.parametrize("invert,equalize,lut",
itertools.product((True, False),
(True, False),
("simple", "binary")))
def test_numpy_to_ascii(invert, equalize, lut):
img = np.random.randint(0, 255, (480, 640), dtype=np.uint8)
h, w = img.shape
expected_len = int(h*0.5*conv.ASPECTCORRECTIONFACTOR)*(int(w*0.5)+1)+1
text = conv.numpy_to_ascii(img, 0.5, invert, equalize, lut)
assert(len(text) == expected_len)
def test_lookup_method_equivalency():
img = np.random.randint(0, 255, (300,300), dtype=np.uint8)
pil_ascii = conv.apply_lut_pil(img)
np_ascii = conv.apply_lut_numpy(img)
assert(pil_ascii == np_ascii)
pil_ascii = conv.apply_lut_pil(img, "binary")
np_ascii = conv.apply_lut_numpy(img, "binary")
assert(pil_ascii == np_ascii)
| ...
import itertools
from asciisciit import conversions as conv
...
import numpy as np
import pytest
@pytest.mark.parametrize("invert,equalize,lut,lookup_func",
itertools.product((True, False),
(True, False),
("simple", "binary"),
(None, conv.apply_lut_pil)))
def test_pil_to_ascii(invert, equalize, lut, lookup_func):
img = np.random.randint(0, 255, (480, 640), dtype=np.uint8)
h, w = img.shape
expected_len = int(h*0.5*conv.ASPECTCORRECTIONFACTOR)*(int(w*0.5)+1)+1
img = conv.numpy_to_pil(img)
text = conv.pil_to_ascii(img, 0.5, invert, equalize, lut, lookup_func)
assert(len(text) == expected_len)
@pytest.mark.parametrize("invert,equalize,lut",
itertools.product((True, False),
(True, False),
("simple", "binary")))
def test_numpy_to_ascii(invert, equalize, lut):
img = np.random.randint(0, 255, (480, 640), dtype=np.uint8)
h, w = img.shape
expected_len = int(h*0.5*conv.ASPECTCORRECTIONFACTOR)*(int(w*0.5)+1)+1
text = conv.numpy_to_ascii(img, 0.5, invert, equalize, lut)
assert(len(text) == expected_len)
... |
711c992a89f9a6118d2b274e2a526be62e670a92 | examples/flask_server.py | examples/flask_server.py | from flask import Flask, request # type: ignore
from jsonrpcserver import method, dispatch, Result, Success
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return dispatch(request.get_data().decode())
if __name__ == "__main__":
app.run()
| from flask import Flask, Response, request # type: ignore
from jsonrpcserver import Result, Success, dispatch, method
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return Response(
dispatch(request.get_data().decode()), content_type="application/json"
)
if __name__ == "__main__":
app.run()
| Set content-type in flask example | Set content-type in flask example
| Python | mit | bcb/jsonrpcserver | - from flask import Flask, request # type: ignore
+ from flask import Flask, Response, request # type: ignore
- from jsonrpcserver import method, dispatch, Result, Success
+ from jsonrpcserver import Result, Success, dispatch, method
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
- return dispatch(request.get_data().decode())
+ return Response(
+ dispatch(request.get_data().decode()), content_type="application/json"
+ )
if __name__ == "__main__":
app.run()
| Set content-type in flask example | ## Code Before:
from flask import Flask, request # type: ignore
from jsonrpcserver import method, dispatch, Result, Success
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return dispatch(request.get_data().decode())
if __name__ == "__main__":
app.run()
## Instruction:
Set content-type in flask example
## Code After:
from flask import Flask, Response, request # type: ignore
from jsonrpcserver import Result, Success, dispatch, method
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return Response(
dispatch(request.get_data().decode()), content_type="application/json"
)
if __name__ == "__main__":
app.run()
| // ... existing code ...
from flask import Flask, Response, request # type: ignore
from jsonrpcserver import Result, Success, dispatch, method
// ... modified code ...
def index() -> str:
return Response(
dispatch(request.get_data().decode()), content_type="application/json"
)
// ... rest of the code ... |
fe715bb784326a661b14d01e02189074228e7c13 | securedrop/tests/test_manage.py | securedrop/tests/test_manage.py |
import manage
import unittest
class TestManagePy(unittest.TestCase):
def test_parse_args(self):
# just test that the arg parser is stable
manage.get_args()
|
import manage
import mock
from StringIO import StringIO
import sys
import unittest
import __builtin__
import utils
class TestManagePy(unittest.TestCase):
def test_parse_args(self):
# just test that the arg parser is stable
manage.get_args()
class TestManagementCommand(unittest.TestCase):
def setUp(self):
utils.env.setup()
def tearDown(self):
utils.env.teardown()
@mock.patch("__builtin__.raw_input", return_value='N')
@mock.patch("manage.getpass", return_value='testtesttest')
@mock.patch("sys.stdout", new_callable=StringIO)
def test_exception_handling_when_duplicate_username(self, mock_raw_input,
mock_getpass,
mock_stdout):
"""Regression test for duplicate username logic in manage.py"""
# Inserting the user for the first time should succeed
return_value = manage._add_user()
self.assertEqual(return_value, 0)
self.assertIn('successfully added', sys.stdout.getvalue())
# Inserting the user for a second time should fail
return_value = manage._add_user()
self.assertEqual(return_value, 1)
self.assertIn('ERROR: That username is already taken!',
sys.stdout.getvalue())
| Add unit test to check duplicate username error is handled in manage.py | Add unit test to check duplicate username error is handled in manage.py
| Python | agpl-3.0 | heartsucker/securedrop,conorsch/securedrop,ageis/securedrop,garrettr/securedrop,heartsucker/securedrop,conorsch/securedrop,conorsch/securedrop,ehartsuyker/securedrop,micahflee/securedrop,heartsucker/securedrop,micahflee/securedrop,ehartsuyker/securedrop,ehartsuyker/securedrop,ageis/securedrop,ehartsuyker/securedrop,garrettr/securedrop,garrettr/securedrop,micahflee/securedrop,ageis/securedrop,ehartsuyker/securedrop,conorsch/securedrop,ehartsuyker/securedrop,ageis/securedrop,garrettr/securedrop,heartsucker/securedrop,heartsucker/securedrop,micahflee/securedrop,conorsch/securedrop |
import manage
+ import mock
+ from StringIO import StringIO
+ import sys
import unittest
+ import __builtin__
+
+ import utils
class TestManagePy(unittest.TestCase):
def test_parse_args(self):
# just test that the arg parser is stable
manage.get_args()
+
+ class TestManagementCommand(unittest.TestCase):
+ def setUp(self):
+ utils.env.setup()
+
+ def tearDown(self):
+ utils.env.teardown()
+
+ @mock.patch("__builtin__.raw_input", return_value='N')
+ @mock.patch("manage.getpass", return_value='testtesttest')
+ @mock.patch("sys.stdout", new_callable=StringIO)
+ def test_exception_handling_when_duplicate_username(self, mock_raw_input,
+ mock_getpass,
+ mock_stdout):
+ """Regression test for duplicate username logic in manage.py"""
+
+ # Inserting the user for the first time should succeed
+ return_value = manage._add_user()
+ self.assertEqual(return_value, 0)
+ self.assertIn('successfully added', sys.stdout.getvalue())
+
+ # Inserting the user for a second time should fail
+ return_value = manage._add_user()
+ self.assertEqual(return_value, 1)
+ self.assertIn('ERROR: That username is already taken!',
+ sys.stdout.getvalue())
+ | Add unit test to check duplicate username error is handled in manage.py | ## Code Before:
import manage
import unittest
class TestManagePy(unittest.TestCase):
def test_parse_args(self):
# just test that the arg parser is stable
manage.get_args()
## Instruction:
Add unit test to check duplicate username error is handled in manage.py
## Code After:
import manage
import mock
from StringIO import StringIO
import sys
import unittest
import __builtin__
import utils
class TestManagePy(unittest.TestCase):
def test_parse_args(self):
# just test that the arg parser is stable
manage.get_args()
class TestManagementCommand(unittest.TestCase):
def setUp(self):
utils.env.setup()
def tearDown(self):
utils.env.teardown()
@mock.patch("__builtin__.raw_input", return_value='N')
@mock.patch("manage.getpass", return_value='testtesttest')
@mock.patch("sys.stdout", new_callable=StringIO)
def test_exception_handling_when_duplicate_username(self, mock_raw_input,
mock_getpass,
mock_stdout):
"""Regression test for duplicate username logic in manage.py"""
# Inserting the user for the first time should succeed
return_value = manage._add_user()
self.assertEqual(return_value, 0)
self.assertIn('successfully added', sys.stdout.getvalue())
# Inserting the user for a second time should fail
return_value = manage._add_user()
self.assertEqual(return_value, 1)
self.assertIn('ERROR: That username is already taken!',
sys.stdout.getvalue())
| # ... existing code ...
import manage
import mock
from StringIO import StringIO
import sys
import unittest
import __builtin__
import utils
# ... modified code ...
manage.get_args()
class TestManagementCommand(unittest.TestCase):
def setUp(self):
utils.env.setup()
def tearDown(self):
utils.env.teardown()
@mock.patch("__builtin__.raw_input", return_value='N')
@mock.patch("manage.getpass", return_value='testtesttest')
@mock.patch("sys.stdout", new_callable=StringIO)
def test_exception_handling_when_duplicate_username(self, mock_raw_input,
mock_getpass,
mock_stdout):
"""Regression test for duplicate username logic in manage.py"""
# Inserting the user for the first time should succeed
return_value = manage._add_user()
self.assertEqual(return_value, 0)
self.assertIn('successfully added', sys.stdout.getvalue())
# Inserting the user for a second time should fail
return_value = manage._add_user()
self.assertEqual(return_value, 1)
self.assertIn('ERROR: That username is already taken!',
sys.stdout.getvalue())
# ... rest of the code ... |
349bb1ce2c15239ae3f9c066ed774b20369b9c0d | src/ggrc/settings/app_engine.py | src/ggrc/settings/app_engine.py |
APP_ENGINE = True
ENABLE_JASMINE = False
LOGIN_MANAGER = 'ggrc.login.appengine'
FULLTEXT_INDEXER = 'ggrc.fulltext.mysql.MysqlIndexer'
# Cannot access filesystem on AppEngine or when using SDK
AUTOBUILD_ASSETS = False
SQLALCHEMY_RECORD_QUERIES = True
MEMCACHE_MECHANISM = True
|
APP_ENGINE = True
ENABLE_JASMINE = False
LOGIN_MANAGER = 'ggrc.login.appengine'
FULLTEXT_INDEXER = 'ggrc.fulltext.mysql.MysqlIndexer'
# Cannot access filesystem on AppEngine or when using SDK
AUTOBUILD_ASSETS = False
SQLALCHEMY_RECORD_QUERIES = True
MEMCACHE_MECHANISM = True
CALENDAR_MECHANISM = True
| Enable Calendar integration on App Engine deployments | Enable Calendar integration on App Engine deployments
| Python | apache-2.0 | NejcZupec/ggrc-core,uskudnik/ggrc-core,j0gurt/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,kr41/ggrc-core,hasanalom/ggrc-core,andrei-karalionak/ggrc-core,hyperNURb/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,vladan-m/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,vladan-m/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,plamut/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,prasannav7/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,prasannav7/ggrc-core,uskudnik/ggrc-core,VinnieJohns/ggrc-core,hyperNURb/ggrc-core,josthkko/ggrc-core,josthkko/ggrc-core,hasanalom/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,uskudnik/ggrc-core,josthkko/ggrc-core,prasannav7/ggrc-core,NejcZupec/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,hasanalom/ggrc-core,j0gurt/ggrc-core,NejcZupec/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,hasanalom/ggrc-core,uskudnik/ggrc-core,edofic/ggrc-core,uskudnik/ggrc-core,josthkko/ggrc-core,jmakov/ggrc-core,plamut/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,jmakov/ggrc-core,hasanalom/ggrc-core,jmakov/ggrc-core,hyperNURb/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,VinnieJohns/ggrc-core,hyperNURb/ggrc-core,vladan-m/ggrc-core,vladan-m/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core |
APP_ENGINE = True
ENABLE_JASMINE = False
LOGIN_MANAGER = 'ggrc.login.appengine'
FULLTEXT_INDEXER = 'ggrc.fulltext.mysql.MysqlIndexer'
# Cannot access filesystem on AppEngine or when using SDK
AUTOBUILD_ASSETS = False
SQLALCHEMY_RECORD_QUERIES = True
MEMCACHE_MECHANISM = True
+ CALENDAR_MECHANISM = True
| Enable Calendar integration on App Engine deployments | ## Code Before:
APP_ENGINE = True
ENABLE_JASMINE = False
LOGIN_MANAGER = 'ggrc.login.appengine'
FULLTEXT_INDEXER = 'ggrc.fulltext.mysql.MysqlIndexer'
# Cannot access filesystem on AppEngine or when using SDK
AUTOBUILD_ASSETS = False
SQLALCHEMY_RECORD_QUERIES = True
MEMCACHE_MECHANISM = True
## Instruction:
Enable Calendar integration on App Engine deployments
## Code After:
APP_ENGINE = True
ENABLE_JASMINE = False
LOGIN_MANAGER = 'ggrc.login.appengine'
FULLTEXT_INDEXER = 'ggrc.fulltext.mysql.MysqlIndexer'
# Cannot access filesystem on AppEngine or when using SDK
AUTOBUILD_ASSETS = False
SQLALCHEMY_RECORD_QUERIES = True
MEMCACHE_MECHANISM = True
CALENDAR_MECHANISM = True
| # ... existing code ...
MEMCACHE_MECHANISM = True
CALENDAR_MECHANISM = True
# ... rest of the code ... |
0d5c3b5f0c9278e834fc4df2a5d227972a1b513d | tests/unit/modules/file_test.py | tests/unit/modules/file_test.py | import tempfile
from saltunittest import TestCase, TestLoader, TextTestRunner
from salt import config as sconfig
from salt.modules import file as filemod
from salt.modules import cmdmod
filemod.__salt__ = {
'cmd.run': cmdmod.run,
}
SED_CONTENT = """test
some
content
/var/lib/foo/app/test
here
"""
class FileModuleTestCase(TestCase):
def test_sed_limit_escaped(self):
with tempfile.NamedTemporaryFile() as tfile:
tfile.write(SED_CONTENT)
tfile.seek(0, 0)
path = tfile.name
before = '/var/lib/foo'
after = ''
limit = '^{0}'.format(before)
filemod.sed(path, before, after, limit=limit)
with open(path, 'rb') as newfile:
self.assertEquals(SED_CONTENT.replace(before, ''), newfile.read())
if __name__ == "__main__":
loader = TestLoader()
tests = loader.loadTestsFromTestCase(FileModuleTestCase)
TextTestRunner(verbosity=1).run(tests)
| import tempfile
from saltunittest import TestCase, TestLoader, TextTestRunner
from salt import config as sconfig
from salt.modules import file as filemod
from salt.modules import cmdmod
filemod.__salt__ = {
'cmd.run': cmdmod.run,
'cmd.run_all': cmdmod.run_all
}
SED_CONTENT = """test
some
content
/var/lib/foo/app/test
here
"""
class FileModuleTestCase(TestCase):
def test_sed_limit_escaped(self):
with tempfile.NamedTemporaryFile() as tfile:
tfile.write(SED_CONTENT)
tfile.seek(0, 0)
path = tfile.name
before = '/var/lib/foo'
after = ''
limit = '^{0}'.format(before)
filemod.sed(path, before, after, limit=limit)
with open(path, 'rb') as newfile:
self.assertEquals(
SED_CONTENT.replace(before, ''),
newfile.read()
)
if __name__ == "__main__":
loader = TestLoader()
tests = loader.loadTestsFromTestCase(FileModuleTestCase)
TextTestRunner(verbosity=1).run(tests)
| Add `cmd.run_all` to `__salt__`. Required for the unit test. | Add `cmd.run_all` to `__salt__`. Required for the unit test.
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | import tempfile
from saltunittest import TestCase, TestLoader, TextTestRunner
from salt import config as sconfig
from salt.modules import file as filemod
from salt.modules import cmdmod
filemod.__salt__ = {
'cmd.run': cmdmod.run,
+ 'cmd.run_all': cmdmod.run_all
}
SED_CONTENT = """test
some
content
/var/lib/foo/app/test
here
"""
class FileModuleTestCase(TestCase):
def test_sed_limit_escaped(self):
with tempfile.NamedTemporaryFile() as tfile:
tfile.write(SED_CONTENT)
tfile.seek(0, 0)
path = tfile.name
before = '/var/lib/foo'
after = ''
limit = '^{0}'.format(before)
filemod.sed(path, before, after, limit=limit)
with open(path, 'rb') as newfile:
- self.assertEquals(SED_CONTENT.replace(before, ''), newfile.read())
+ self.assertEquals(
+ SED_CONTENT.replace(before, ''),
+ newfile.read()
+ )
if __name__ == "__main__":
loader = TestLoader()
tests = loader.loadTestsFromTestCase(FileModuleTestCase)
TextTestRunner(verbosity=1).run(tests)
| Add `cmd.run_all` to `__salt__`. Required for the unit test. | ## Code Before:
import tempfile
from saltunittest import TestCase, TestLoader, TextTestRunner
from salt import config as sconfig
from salt.modules import file as filemod
from salt.modules import cmdmod
filemod.__salt__ = {
'cmd.run': cmdmod.run,
}
SED_CONTENT = """test
some
content
/var/lib/foo/app/test
here
"""
class FileModuleTestCase(TestCase):
def test_sed_limit_escaped(self):
with tempfile.NamedTemporaryFile() as tfile:
tfile.write(SED_CONTENT)
tfile.seek(0, 0)
path = tfile.name
before = '/var/lib/foo'
after = ''
limit = '^{0}'.format(before)
filemod.sed(path, before, after, limit=limit)
with open(path, 'rb') as newfile:
self.assertEquals(SED_CONTENT.replace(before, ''), newfile.read())
if __name__ == "__main__":
loader = TestLoader()
tests = loader.loadTestsFromTestCase(FileModuleTestCase)
TextTestRunner(verbosity=1).run(tests)
## Instruction:
Add `cmd.run_all` to `__salt__`. Required for the unit test.
## Code After:
import tempfile
from saltunittest import TestCase, TestLoader, TextTestRunner
from salt import config as sconfig
from salt.modules import file as filemod
from salt.modules import cmdmod
filemod.__salt__ = {
'cmd.run': cmdmod.run,
'cmd.run_all': cmdmod.run_all
}
SED_CONTENT = """test
some
content
/var/lib/foo/app/test
here
"""
class FileModuleTestCase(TestCase):
def test_sed_limit_escaped(self):
with tempfile.NamedTemporaryFile() as tfile:
tfile.write(SED_CONTENT)
tfile.seek(0, 0)
path = tfile.name
before = '/var/lib/foo'
after = ''
limit = '^{0}'.format(before)
filemod.sed(path, before, after, limit=limit)
with open(path, 'rb') as newfile:
self.assertEquals(
SED_CONTENT.replace(before, ''),
newfile.read()
)
if __name__ == "__main__":
loader = TestLoader()
tests = loader.loadTestsFromTestCase(FileModuleTestCase)
TextTestRunner(verbosity=1).run(tests)
| # ... existing code ...
'cmd.run': cmdmod.run,
'cmd.run_all': cmdmod.run_all
}
# ... modified code ...
with open(path, 'rb') as newfile:
self.assertEquals(
SED_CONTENT.replace(before, ''),
newfile.read()
)
# ... rest of the code ... |
ba0ea7491fab383992013a8379592657eedfe1ce | scripts/contrib/model_info.py | scripts/contrib/model_info.py |
import sys
import argparse
import numpy as np
import yaml
DESC = "Prints version and model type from model.npz file."
S2S_SPECIAL_NODE = "special:model.yml"
def main():
args = parse_args()
model = np.load(args.model)
if S2S_SPECIAL_NODE not in model:
print("No special Marian YAML node found in the model")
exit(1)
yaml_text = bytes(model[S2S_SPECIAL_NODE]).decode('ascii')
if not args.key:
print(yaml_text)
exit(0)
# fix the invalid trailing unicode character '#x0000' added to the YAML
# string by the C++ cnpy library
try:
yaml_node = yaml.load(yaml_text)
except yaml.reader.ReaderError:
yaml_node = yaml.load(yaml_text[:-1])
print(yaml_node[args.key])
def parse_args():
parser = argparse.ArgumentParser(description=DESC)
parser.add_argument("-m", "--model", help="model file", required=True)
parser.add_argument("-k", "--key", help="print value for specific key")
return parser.parse_args()
if __name__ == "__main__":
main()
|
import sys
import argparse
import numpy as np
import yaml
DESC = "Prints keys and values from model.npz file."
S2S_SPECIAL_NODE = "special:model.yml"
def main():
args = parse_args()
model = np.load(args.model)
if args.special:
if S2S_SPECIAL_NODE not in model:
print("No special Marian YAML node found in the model")
exit(1)
yaml_text = bytes(model[S2S_SPECIAL_NODE]).decode('ascii')
if not args.key:
print(yaml_text)
exit(0)
# fix the invalid trailing unicode character '#x0000' added to the YAML
# string by the C++ cnpy library
try:
yaml_node = yaml.load(yaml_text)
except yaml.reader.ReaderError:
yaml_node = yaml.load(yaml_text[:-1])
print(yaml_node[args.key])
else:
if args.key:
if args.key not in model:
print("Key not found")
exit(1)
print(model[args.key])
else:
for key in model:
print(key)
def parse_args():
parser = argparse.ArgumentParser(description=DESC)
parser.add_argument("-m", "--model", help="model file", required=True)
parser.add_argument("-k", "--key", help="print value for specific key")
parser.add_argument("-s", "--special", action="store_true",
help="print values from special:model.yml node")
return parser.parse_args()
if __name__ == "__main__":
main()
| Add printing value for any key from model.npz | Add printing value for any key from model.npz
| Python | mit | emjotde/amunmt,emjotde/amunmt,marian-nmt/marian-train,emjotde/amunmt,amunmt/marian,emjotde/amunn,amunmt/marian,emjotde/amunn,emjotde/amunmt,marian-nmt/marian-train,emjotde/amunn,marian-nmt/marian-train,emjotde/amunn,marian-nmt/marian-train,emjotde/Marian,marian-nmt/marian-train,emjotde/Marian,amunmt/marian |
import sys
import argparse
import numpy as np
import yaml
- DESC = "Prints version and model type from model.npz file."
+ DESC = "Prints keys and values from model.npz file."
S2S_SPECIAL_NODE = "special:model.yml"
def main():
args = parse_args()
+ model = np.load(args.model)
- model = np.load(args.model)
+ if args.special:
- if S2S_SPECIAL_NODE not in model:
+ if S2S_SPECIAL_NODE not in model:
- print("No special Marian YAML node found in the model")
+ print("No special Marian YAML node found in the model")
- exit(1)
+ exit(1)
- yaml_text = bytes(model[S2S_SPECIAL_NODE]).decode('ascii')
+ yaml_text = bytes(model[S2S_SPECIAL_NODE]).decode('ascii')
- if not args.key:
+ if not args.key:
- print(yaml_text)
+ print(yaml_text)
- exit(0)
+ exit(0)
- # fix the invalid trailing unicode character '#x0000' added to the YAML
+ # fix the invalid trailing unicode character '#x0000' added to the YAML
- # string by the C++ cnpy library
+ # string by the C++ cnpy library
- try:
+ try:
- yaml_node = yaml.load(yaml_text)
+ yaml_node = yaml.load(yaml_text)
- except yaml.reader.ReaderError:
+ except yaml.reader.ReaderError:
- yaml_node = yaml.load(yaml_text[:-1])
+ yaml_node = yaml.load(yaml_text[:-1])
- print(yaml_node[args.key])
+ print(yaml_node[args.key])
+ else:
+ if args.key:
+ if args.key not in model:
+ print("Key not found")
+ exit(1)
+ print(model[args.key])
+ else:
+ for key in model:
+ print(key)
def parse_args():
parser = argparse.ArgumentParser(description=DESC)
parser.add_argument("-m", "--model", help="model file", required=True)
parser.add_argument("-k", "--key", help="print value for specific key")
+ parser.add_argument("-s", "--special", action="store_true",
+ help="print values from special:model.yml node")
return parser.parse_args()
if __name__ == "__main__":
main()
| Add printing value for any key from model.npz | ## Code Before:
import sys
import argparse
import numpy as np
import yaml
DESC = "Prints version and model type from model.npz file."
S2S_SPECIAL_NODE = "special:model.yml"
def main():
args = parse_args()
model = np.load(args.model)
if S2S_SPECIAL_NODE not in model:
print("No special Marian YAML node found in the model")
exit(1)
yaml_text = bytes(model[S2S_SPECIAL_NODE]).decode('ascii')
if not args.key:
print(yaml_text)
exit(0)
# fix the invalid trailing unicode character '#x0000' added to the YAML
# string by the C++ cnpy library
try:
yaml_node = yaml.load(yaml_text)
except yaml.reader.ReaderError:
yaml_node = yaml.load(yaml_text[:-1])
print(yaml_node[args.key])
def parse_args():
parser = argparse.ArgumentParser(description=DESC)
parser.add_argument("-m", "--model", help="model file", required=True)
parser.add_argument("-k", "--key", help="print value for specific key")
return parser.parse_args()
if __name__ == "__main__":
main()
## Instruction:
Add printing value for any key from model.npz
## Code After:
import sys
import argparse
import numpy as np
import yaml
DESC = "Prints keys and values from model.npz file."
S2S_SPECIAL_NODE = "special:model.yml"
def main():
args = parse_args()
model = np.load(args.model)
if args.special:
if S2S_SPECIAL_NODE not in model:
print("No special Marian YAML node found in the model")
exit(1)
yaml_text = bytes(model[S2S_SPECIAL_NODE]).decode('ascii')
if not args.key:
print(yaml_text)
exit(0)
# fix the invalid trailing unicode character '#x0000' added to the YAML
# string by the C++ cnpy library
try:
yaml_node = yaml.load(yaml_text)
except yaml.reader.ReaderError:
yaml_node = yaml.load(yaml_text[:-1])
print(yaml_node[args.key])
else:
if args.key:
if args.key not in model:
print("Key not found")
exit(1)
print(model[args.key])
else:
for key in model:
print(key)
def parse_args():
parser = argparse.ArgumentParser(description=DESC)
parser.add_argument("-m", "--model", help="model file", required=True)
parser.add_argument("-k", "--key", help="print value for specific key")
parser.add_argument("-s", "--special", action="store_true",
help="print values from special:model.yml node")
return parser.parse_args()
if __name__ == "__main__":
main()
| # ... existing code ...
DESC = "Prints keys and values from model.npz file."
S2S_SPECIAL_NODE = "special:model.yml"
# ... modified code ...
args = parse_args()
model = np.load(args.model)
if args.special:
if S2S_SPECIAL_NODE not in model:
print("No special Marian YAML node found in the model")
exit(1)
yaml_text = bytes(model[S2S_SPECIAL_NODE]).decode('ascii')
if not args.key:
print(yaml_text)
exit(0)
# fix the invalid trailing unicode character '#x0000' added to the YAML
# string by the C++ cnpy library
try:
yaml_node = yaml.load(yaml_text)
except yaml.reader.ReaderError:
yaml_node = yaml.load(yaml_text[:-1])
print(yaml_node[args.key])
else:
if args.key:
if args.key not in model:
print("Key not found")
exit(1)
print(model[args.key])
else:
for key in model:
print(key)
...
parser.add_argument("-k", "--key", help="print value for specific key")
parser.add_argument("-s", "--special", action="store_true",
help="print values from special:model.yml node")
return parser.parse_args()
# ... rest of the code ... |
d68935dfb34f7c5fc463f94e49f0c060717b17b8 | cmsplugin_contact_plus/checks.py | cmsplugin_contact_plus/checks.py | from django.core.checks import Warning, register
def warn_1_3_changes(app_configs, **kwargs):
return [
Warning(
'cmsplugin-contact-plus >= 1.3 has renamed the "input" field. Do not forget to migrate your '
'database and update your templates',
hint=None,
obj=None,
id='cmsplugin_contact_plus.W001',
)
]
def register_checks():
for check in [
warn_1_3_changes,
]:
register(check)
| from django.core.checks import Warning, register
def warn_1_3_changes(app_configs, **kwargs):
return [
Warning(
'cmsplugin-contact-plus >= 1.3 has renamed the "input" field. Do not forget to migrate your '
'database and update your templates',
hint=None,
obj=None,
id='cmsplugin_contact_plus.W001',
)
]
def register_checks():
for check in [
# warn_1_3_changes, # Might be more annoying than useful
]:
register(check)
| Comment out warning for renamed field | Comment out warning for renamed field
| Python | bsd-3-clause | arteria/cmsplugin-contact-plus,arteria/cmsplugin-contact-plus,worthwhile/cmsplugin-remote-form,worthwhile/cmsplugin-remote-form | from django.core.checks import Warning, register
def warn_1_3_changes(app_configs, **kwargs):
return [
Warning(
'cmsplugin-contact-plus >= 1.3 has renamed the "input" field. Do not forget to migrate your '
'database and update your templates',
hint=None,
obj=None,
id='cmsplugin_contact_plus.W001',
)
]
def register_checks():
for check in [
- warn_1_3_changes,
+ # warn_1_3_changes, # Might be more annoying than useful
]:
register(check)
| Comment out warning for renamed field | ## Code Before:
from django.core.checks import Warning, register
def warn_1_3_changes(app_configs, **kwargs):
return [
Warning(
'cmsplugin-contact-plus >= 1.3 has renamed the "input" field. Do not forget to migrate your '
'database and update your templates',
hint=None,
obj=None,
id='cmsplugin_contact_plus.W001',
)
]
def register_checks():
for check in [
warn_1_3_changes,
]:
register(check)
## Instruction:
Comment out warning for renamed field
## Code After:
from django.core.checks import Warning, register
def warn_1_3_changes(app_configs, **kwargs):
return [
Warning(
'cmsplugin-contact-plus >= 1.3 has renamed the "input" field. Do not forget to migrate your '
'database and update your templates',
hint=None,
obj=None,
id='cmsplugin_contact_plus.W001',
)
]
def register_checks():
for check in [
# warn_1_3_changes, # Might be more annoying than useful
]:
register(check)
| // ... existing code ...
for check in [
# warn_1_3_changes, # Might be more annoying than useful
]:
// ... rest of the code ... |
6422f6057d43dfb5259028291991f39c5b81b446 | spreadflow_core/flow.py | spreadflow_core/flow.py | from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from collections import defaultdict
class Flowmap(dict):
def __init__(self):
super(Flowmap, self).__init__()
self.decorators = []
self.annotations = {}
def graph(self):
result = defaultdict(set)
backlog = set()
processed = set()
for port_out, port_in in self.iteritems():
result[port_out].add(port_in)
backlog.add(port_in)
while len(backlog):
node = backlog.pop()
if node in processed:
continue
else:
processed.add(node)
try:
arcs = tuple(node.dependencies)
except AttributeError:
continue
for port_out, port_in in arcs:
result[port_out].add(port_in)
backlog.add(port_out)
backlog.add(port_in)
return result
| from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from collections import defaultdict, MutableMapping
class Flowmap(MutableMapping):
def __init__(self):
super(Flowmap, self).__init__()
self.annotations = {}
self.connections = {}
self.decorators = []
def __getitem__(self, key):
return self.connections[key]
def __setitem__(self, key, value):
self.connections[key] = value
def __delitem__(self, key):
del self.connections[key]
def __iter__(self):
return iter(self.connections)
def __len__(self):
return len(self.connections)
def graph(self):
result = defaultdict(set)
backlog = set()
processed = set()
for port_out, port_in in self.iteritems():
result[port_out].add(port_in)
backlog.add(port_in)
while len(backlog):
node = backlog.pop()
if node in processed:
continue
else:
processed.add(node)
try:
arcs = tuple(node.dependencies)
except AttributeError:
continue
for port_out, port_in in arcs:
result[port_out].add(port_in)
backlog.add(port_out)
backlog.add(port_in)
return result
| Refactor Flowmap into a MutableMapping | Refactor Flowmap into a MutableMapping
| Python | mit | spreadflow/spreadflow-core,znerol/spreadflow-core | from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
- from collections import defaultdict
+ from collections import defaultdict, MutableMapping
- class Flowmap(dict):
+ class Flowmap(MutableMapping):
def __init__(self):
super(Flowmap, self).__init__()
+ self.annotations = {}
+ self.connections = {}
self.decorators = []
- self.annotations = {}
+
+ def __getitem__(self, key):
+ return self.connections[key]
+
+ def __setitem__(self, key, value):
+ self.connections[key] = value
+
+ def __delitem__(self, key):
+ del self.connections[key]
+
+ def __iter__(self):
+ return iter(self.connections)
+
+ def __len__(self):
+ return len(self.connections)
def graph(self):
result = defaultdict(set)
backlog = set()
processed = set()
for port_out, port_in in self.iteritems():
result[port_out].add(port_in)
backlog.add(port_in)
while len(backlog):
node = backlog.pop()
if node in processed:
continue
else:
processed.add(node)
try:
arcs = tuple(node.dependencies)
except AttributeError:
continue
for port_out, port_in in arcs:
result[port_out].add(port_in)
backlog.add(port_out)
backlog.add(port_in)
return result
| Refactor Flowmap into a MutableMapping | ## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from collections import defaultdict
class Flowmap(dict):
def __init__(self):
super(Flowmap, self).__init__()
self.decorators = []
self.annotations = {}
def graph(self):
result = defaultdict(set)
backlog = set()
processed = set()
for port_out, port_in in self.iteritems():
result[port_out].add(port_in)
backlog.add(port_in)
while len(backlog):
node = backlog.pop()
if node in processed:
continue
else:
processed.add(node)
try:
arcs = tuple(node.dependencies)
except AttributeError:
continue
for port_out, port_in in arcs:
result[port_out].add(port_in)
backlog.add(port_out)
backlog.add(port_in)
return result
## Instruction:
Refactor Flowmap into a MutableMapping
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from collections import defaultdict, MutableMapping
class Flowmap(MutableMapping):
def __init__(self):
super(Flowmap, self).__init__()
self.annotations = {}
self.connections = {}
self.decorators = []
def __getitem__(self, key):
return self.connections[key]
def __setitem__(self, key, value):
self.connections[key] = value
def __delitem__(self, key):
del self.connections[key]
def __iter__(self):
return iter(self.connections)
def __len__(self):
return len(self.connections)
def graph(self):
result = defaultdict(set)
backlog = set()
processed = set()
for port_out, port_in in self.iteritems():
result[port_out].add(port_in)
backlog.add(port_in)
while len(backlog):
node = backlog.pop()
if node in processed:
continue
else:
processed.add(node)
try:
arcs = tuple(node.dependencies)
except AttributeError:
continue
for port_out, port_in in arcs:
result[port_out].add(port_in)
backlog.add(port_out)
backlog.add(port_in)
return result
| // ... existing code ...
from collections import defaultdict, MutableMapping
// ... modified code ...
class Flowmap(MutableMapping):
def __init__(self):
...
super(Flowmap, self).__init__()
self.annotations = {}
self.connections = {}
self.decorators = []
def __getitem__(self, key):
return self.connections[key]
def __setitem__(self, key, value):
self.connections[key] = value
def __delitem__(self, key):
del self.connections[key]
def __iter__(self):
return iter(self.connections)
def __len__(self):
return len(self.connections)
// ... rest of the code ... |
cd41fdbdb53008c9701213d4f223bb8df0514ecb | byceps/util/datetime/timezone.py | byceps/util/datetime/timezone.py |
from datetime import datetime
from flask import current_app
import pendulum
def local_tz_to_utc(dt: datetime):
"""Convert date/time object from configured default local time to UTC."""
tz_str = get_timezone_string()
return (pendulum.instance(dt)
.set(tz=tz_str)
.in_tz(pendulum.UTC)
# Keep SQLAlchemy from converting it to another zone.
.replace(tzinfo=None))
def utc_to_local_tz(dt: datetime) -> datetime:
"""Convert naive date/time object from UTC to configured time zone."""
tz_str = get_timezone_string()
return pendulum.instance(dt).in_tz(tz_str)
def get_timezone_string() -> str:
"""Return the configured default timezone as a string."""
return current_app.config['TIMEZONE']
|
from flask import current_app
def get_timezone_string() -> str:
"""Return the configured default timezone as a string."""
return current_app.config['TIMEZONE']
| Remove unused custom functions `local_tz_to_utc`, `utc_to_local_tz` | Remove unused custom functions `local_tz_to_utc`, `utc_to_local_tz`
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps | -
- from datetime import datetime
from flask import current_app
- import pendulum
-
-
-
- def local_tz_to_utc(dt: datetime):
- """Convert date/time object from configured default local time to UTC."""
- tz_str = get_timezone_string()
-
- return (pendulum.instance(dt)
- .set(tz=tz_str)
- .in_tz(pendulum.UTC)
- # Keep SQLAlchemy from converting it to another zone.
- .replace(tzinfo=None))
-
-
- def utc_to_local_tz(dt: datetime) -> datetime:
- """Convert naive date/time object from UTC to configured time zone."""
- tz_str = get_timezone_string()
- return pendulum.instance(dt).in_tz(tz_str)
def get_timezone_string() -> str:
"""Return the configured default timezone as a string."""
return current_app.config['TIMEZONE']
| Remove unused custom functions `local_tz_to_utc`, `utc_to_local_tz` | ## Code Before:
from datetime import datetime
from flask import current_app
import pendulum
def local_tz_to_utc(dt: datetime):
"""Convert date/time object from configured default local time to UTC."""
tz_str = get_timezone_string()
return (pendulum.instance(dt)
.set(tz=tz_str)
.in_tz(pendulum.UTC)
# Keep SQLAlchemy from converting it to another zone.
.replace(tzinfo=None))
def utc_to_local_tz(dt: datetime) -> datetime:
"""Convert naive date/time object from UTC to configured time zone."""
tz_str = get_timezone_string()
return pendulum.instance(dt).in_tz(tz_str)
def get_timezone_string() -> str:
"""Return the configured default timezone as a string."""
return current_app.config['TIMEZONE']
## Instruction:
Remove unused custom functions `local_tz_to_utc`, `utc_to_local_tz`
## Code After:
from flask import current_app
def get_timezone_string() -> str:
"""Return the configured default timezone as a string."""
return current_app.config['TIMEZONE']
| // ... existing code ...
// ... modified code ...
from flask import current_app
// ... rest of the code ... |
42f0c76664337af80d692fe7649f3643c237cc47 | Tests/MathFunctionsTest.py | Tests/MathFunctionsTest.py | from Math.MathFunctions import *
def pointTest():
point1 = (0, 0)
point2 = (2, 4)
print("Point 1: {}".format(point1))
print("Point 2: {}".format(point2))
print("Point distance: {}".format(pointDistance(point1[0],point1[1],point2[0],point2[1])))
angle = pointAngle(point1[0],point1[1],point2[0],point2[1]);
print("Point angle: {:.3f}, {:.3f} degrees".format(angle, angle*RAD_TO_DEG))
pointTest()
| from Math.MathFunctions import *
import unittest
class TestPointMethods(unittest.TestCase):
def test_point(self):
point1 = (0, 0)
point2 = (2, 4)
angle = pointAngle(point1[0], point1[1], point2[0], point2[1])
dist = pointDistance(point1[0], point1[1], point2[0], point2[1])
self.assertAlmostEqual(angle, 1.1071487177940904)
self.assertAlmostEqual(dist, 4.47213595499958)
class TestHelperMethods(unittest.TestCase):
def test_clamp(self):
self.assertEqual(clamp(10, 1, 5), 5)
self.assertEqual(clamp(0, 1, 5), 1)
self.assertEqual(clamp(3, 1, 5), 3)
self.assertEqual(clamp(5, 1, 5), 5)
if __name__ == '__main__':
unittest.main()
| Use python's unit testing framework | Use python's unit testing framework
| Python | mit | turtles/PythonScripts | from Math.MathFunctions import *
+ import unittest
- def pointTest():
+ class TestPointMethods(unittest.TestCase):
+ def test_point(self):
- point1 = (0, 0)
+ point1 = (0, 0)
- point2 = (2, 4)
+ point2 = (2, 4)
- print("Point 1: {}".format(point1))
- print("Point 2: {}".format(point2))
- print("Point distance: {}".format(pointDistance(point1[0],point1[1],point2[0],point2[1])))
- angle = pointAngle(point1[0],point1[1],point2[0],point2[1]);
+ angle = pointAngle(point1[0], point1[1], point2[0], point2[1])
- print("Point angle: {:.3f}, {:.3f} degrees".format(angle, angle*RAD_TO_DEG))
+ dist = pointDistance(point1[0], point1[1], point2[0], point2[1])
- pointTest()
+ self.assertAlmostEqual(angle, 1.1071487177940904)
+ self.assertAlmostEqual(dist, 4.47213595499958)
+ class TestHelperMethods(unittest.TestCase):
+ def test_clamp(self):
+ self.assertEqual(clamp(10, 1, 5), 5)
+ self.assertEqual(clamp(0, 1, 5), 1)
+ self.assertEqual(clamp(3, 1, 5), 3)
+ self.assertEqual(clamp(5, 1, 5), 5)
+
+ if __name__ == '__main__':
+ unittest.main()
+ | Use python's unit testing framework | ## Code Before:
from Math.MathFunctions import *
def pointTest():
point1 = (0, 0)
point2 = (2, 4)
print("Point 1: {}".format(point1))
print("Point 2: {}".format(point2))
print("Point distance: {}".format(pointDistance(point1[0],point1[1],point2[0],point2[1])))
angle = pointAngle(point1[0],point1[1],point2[0],point2[1]);
print("Point angle: {:.3f}, {:.3f} degrees".format(angle, angle*RAD_TO_DEG))
pointTest()
## Instruction:
Use python's unit testing framework
## Code After:
from Math.MathFunctions import *
import unittest
class TestPointMethods(unittest.TestCase):
def test_point(self):
point1 = (0, 0)
point2 = (2, 4)
angle = pointAngle(point1[0], point1[1], point2[0], point2[1])
dist = pointDistance(point1[0], point1[1], point2[0], point2[1])
self.assertAlmostEqual(angle, 1.1071487177940904)
self.assertAlmostEqual(dist, 4.47213595499958)
class TestHelperMethods(unittest.TestCase):
def test_clamp(self):
self.assertEqual(clamp(10, 1, 5), 5)
self.assertEqual(clamp(0, 1, 5), 1)
self.assertEqual(clamp(3, 1, 5), 3)
self.assertEqual(clamp(5, 1, 5), 5)
if __name__ == '__main__':
unittest.main()
| # ... existing code ...
from Math.MathFunctions import *
import unittest
class TestPointMethods(unittest.TestCase):
def test_point(self):
point1 = (0, 0)
point2 = (2, 4)
angle = pointAngle(point1[0], point1[1], point2[0], point2[1])
dist = pointDistance(point1[0], point1[1], point2[0], point2[1])
self.assertAlmostEqual(angle, 1.1071487177940904)
self.assertAlmostEqual(dist, 4.47213595499958)
class TestHelperMethods(unittest.TestCase):
def test_clamp(self):
self.assertEqual(clamp(10, 1, 5), 5)
self.assertEqual(clamp(0, 1, 5), 1)
self.assertEqual(clamp(3, 1, 5), 3)
self.assertEqual(clamp(5, 1, 5), 5)
if __name__ == '__main__':
unittest.main()
# ... rest of the code ... |
f333f29d4170527c985bc695cd7b8331041769d5 | eva/layers/out_channels.py | eva/layers/out_channels.py | from keras.layers import Convolution2D, Reshape, Lambda, Activation
from eva.layers.masked_convolution2d import MaskedConvolution2D
class OutChannels(object):
def __init__(self, height, width, channels, masked=False, palette=256):
self.height = height
self.width = width
self.channels = channels
self.cxp = MaskedConvolution2D if masked else Convolution2D
self.palette = palette
def __call__(self, model):
if self.channels == 1:
outs = Convolution2D(1, 1, 1, activation='sigmoid', border_mode='valid')(model)
else:
out = self.cxp(self.palette*self.channels, 1, 1, border_mode='valid', name='channels_mult_palette')(model)
out = Reshape((self.height, self.width, self.palette, self.channels), name='palette_channels')(out)
outs = [None] * self.channels
for i in range(self.channels):
outs[i] = Lambda(lambda x: x[:, :, :, :, i], name='channel'+str(i)+'_extract')(out)
outs[i] = Reshape((self.height * self.width, self.palette), name='hw_palette'+str(i))(outs[i])
outs[i] = Activation('softmax', name='channel'+str(i))(outs[i])
return outs | from keras.layers import Convolution2D, Reshape, Lambda, Activation
from eva.layers.masked_convolution2d import MaskedConvolution2D
class OutChannels(object):
def __init__(self, height, width, channels, masked=False, palette=256):
self.height = height
self.width = width
self.channels = channels
self.cxp = MaskedConvolution2D if masked else Convolution2D
self.palette = palette
def __call__(self, model):
if self.channels == 1:
outs = Convolution2D(1, 1, 1, border_mode='valid')(model)
outs = Activation('sigmoid')(outs)
else:
out = self.cxp(self.palette*self.channels, 1, 1, border_mode='valid', name='channels_mult_palette')(model)
out = Reshape((self.height, self.width, self.palette, self.channels), name='palette_channels')(out)
outs = [None] * self.channels
for i in range(self.channels):
outs[i] = Lambda(lambda x: x[:, :, :, :, i], name='channel'+str(i)+'_extract')(out)
outs[i] = Reshape((self.height * self.width, self.palette), name='hw_palette'+str(i))(outs[i])
outs[i] = Activation('softmax', name='channel'+str(i))(outs[i])
return outs | Make mono channel output activation more readable | Make mono channel output activation more readable
| Python | apache-2.0 | israelg99/eva | from keras.layers import Convolution2D, Reshape, Lambda, Activation
from eva.layers.masked_convolution2d import MaskedConvolution2D
class OutChannels(object):
def __init__(self, height, width, channels, masked=False, palette=256):
self.height = height
self.width = width
self.channels = channels
self.cxp = MaskedConvolution2D if masked else Convolution2D
self.palette = palette
def __call__(self, model):
if self.channels == 1:
- outs = Convolution2D(1, 1, 1, activation='sigmoid', border_mode='valid')(model)
+ outs = Convolution2D(1, 1, 1, border_mode='valid')(model)
+ outs = Activation('sigmoid')(outs)
else:
out = self.cxp(self.palette*self.channels, 1, 1, border_mode='valid', name='channels_mult_palette')(model)
out = Reshape((self.height, self.width, self.palette, self.channels), name='palette_channels')(out)
outs = [None] * self.channels
for i in range(self.channels):
outs[i] = Lambda(lambda x: x[:, :, :, :, i], name='channel'+str(i)+'_extract')(out)
outs[i] = Reshape((self.height * self.width, self.palette), name='hw_palette'+str(i))(outs[i])
outs[i] = Activation('softmax', name='channel'+str(i))(outs[i])
return outs | Make mono channel output activation more readable | ## Code Before:
from keras.layers import Convolution2D, Reshape, Lambda, Activation
from eva.layers.masked_convolution2d import MaskedConvolution2D
class OutChannels(object):
def __init__(self, height, width, channels, masked=False, palette=256):
self.height = height
self.width = width
self.channels = channels
self.cxp = MaskedConvolution2D if masked else Convolution2D
self.palette = palette
def __call__(self, model):
if self.channels == 1:
outs = Convolution2D(1, 1, 1, activation='sigmoid', border_mode='valid')(model)
else:
out = self.cxp(self.palette*self.channels, 1, 1, border_mode='valid', name='channels_mult_palette')(model)
out = Reshape((self.height, self.width, self.palette, self.channels), name='palette_channels')(out)
outs = [None] * self.channels
for i in range(self.channels):
outs[i] = Lambda(lambda x: x[:, :, :, :, i], name='channel'+str(i)+'_extract')(out)
outs[i] = Reshape((self.height * self.width, self.palette), name='hw_palette'+str(i))(outs[i])
outs[i] = Activation('softmax', name='channel'+str(i))(outs[i])
return outs
## Instruction:
Make mono channel output activation more readable
## Code After:
from keras.layers import Convolution2D, Reshape, Lambda, Activation
from eva.layers.masked_convolution2d import MaskedConvolution2D
class OutChannels(object):
def __init__(self, height, width, channels, masked=False, palette=256):
self.height = height
self.width = width
self.channels = channels
self.cxp = MaskedConvolution2D if masked else Convolution2D
self.palette = palette
def __call__(self, model):
if self.channels == 1:
outs = Convolution2D(1, 1, 1, border_mode='valid')(model)
outs = Activation('sigmoid')(outs)
else:
out = self.cxp(self.palette*self.channels, 1, 1, border_mode='valid', name='channels_mult_palette')(model)
out = Reshape((self.height, self.width, self.palette, self.channels), name='palette_channels')(out)
outs = [None] * self.channels
for i in range(self.channels):
outs[i] = Lambda(lambda x: x[:, :, :, :, i], name='channel'+str(i)+'_extract')(out)
outs[i] = Reshape((self.height * self.width, self.palette), name='hw_palette'+str(i))(outs[i])
outs[i] = Activation('softmax', name='channel'+str(i))(outs[i])
return outs | # ... existing code ...
if self.channels == 1:
outs = Convolution2D(1, 1, 1, border_mode='valid')(model)
outs = Activation('sigmoid')(outs)
else:
# ... rest of the code ... |
2fbd90a9995e8552e818e53d3b213e4cfef470de | molly/installer/dbcreate.py | molly/installer/dbcreate.py |
from molly.installer.utils import quiet_exec, CommandFailed
def create(dba_user, dba_pass, username, password, database):
creds = []
if dba_user:
creds += ['-U', dba_user]
if dba_pass:
os.environ['PGPASSWORD'] = dba_pass
try:
quiet_exec(['psql'] + creds + ['-c',"CREATE USER %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
except CommandFailed:
pass
quiet_exec(['psql'] + creds + ['-c',"ALTER ROLE %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
try:
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
except CommandFailed:
quiet_exec(['dropdb'] + creds + [database], 'dbcreate')
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
quiet_exec(['psql'] + creds + ['-c',"GRANT ALL ON DATABASE %s TO %s;" % (database, username)], 'dbcreate')
if dba_pass:
del os.environ['PGPASSWORD']
|
import os
from molly.installer.utils import quiet_exec, CommandFailed
def create(dba_user, dba_pass, username, password, database):
creds = []
if dba_user:
creds += ['-U', dba_user]
if dba_pass:
os.environ['PGPASSWORD'] = dba_pass
try:
quiet_exec(['psql'] + creds + ['-c',"CREATE USER %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
except CommandFailed:
pass
quiet_exec(['psql'] + creds + ['-c',"ALTER ROLE %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
try:
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
except CommandFailed:
quiet_exec(['dropdb'] + creds + [database], 'dbcreate')
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
quiet_exec(['psql'] + creds + ['-c',"GRANT ALL ON DATABASE %s TO %s;" % (database, username)], 'dbcreate')
if dba_pass:
del os.environ['PGPASSWORD']
| Fix broken setting of postgres password | Fix broken setting of postgres password
| Python | apache-2.0 | mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject | +
+ import os
from molly.installer.utils import quiet_exec, CommandFailed
def create(dba_user, dba_pass, username, password, database):
creds = []
if dba_user:
creds += ['-U', dba_user]
if dba_pass:
os.environ['PGPASSWORD'] = dba_pass
try:
quiet_exec(['psql'] + creds + ['-c',"CREATE USER %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
except CommandFailed:
pass
quiet_exec(['psql'] + creds + ['-c',"ALTER ROLE %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
try:
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
except CommandFailed:
quiet_exec(['dropdb'] + creds + [database], 'dbcreate')
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
quiet_exec(['psql'] + creds + ['-c',"GRANT ALL ON DATABASE %s TO %s;" % (database, username)], 'dbcreate')
if dba_pass:
del os.environ['PGPASSWORD']
| Fix broken setting of postgres password | ## Code Before:
from molly.installer.utils import quiet_exec, CommandFailed
def create(dba_user, dba_pass, username, password, database):
creds = []
if dba_user:
creds += ['-U', dba_user]
if dba_pass:
os.environ['PGPASSWORD'] = dba_pass
try:
quiet_exec(['psql'] + creds + ['-c',"CREATE USER %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
except CommandFailed:
pass
quiet_exec(['psql'] + creds + ['-c',"ALTER ROLE %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
try:
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
except CommandFailed:
quiet_exec(['dropdb'] + creds + [database], 'dbcreate')
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
quiet_exec(['psql'] + creds + ['-c',"GRANT ALL ON DATABASE %s TO %s;" % (database, username)], 'dbcreate')
if dba_pass:
del os.environ['PGPASSWORD']
## Instruction:
Fix broken setting of postgres password
## Code After:
import os
from molly.installer.utils import quiet_exec, CommandFailed
def create(dba_user, dba_pass, username, password, database):
creds = []
if dba_user:
creds += ['-U', dba_user]
if dba_pass:
os.environ['PGPASSWORD'] = dba_pass
try:
quiet_exec(['psql'] + creds + ['-c',"CREATE USER %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
except CommandFailed:
pass
quiet_exec(['psql'] + creds + ['-c',"ALTER ROLE %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
try:
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
except CommandFailed:
quiet_exec(['dropdb'] + creds + [database], 'dbcreate')
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
quiet_exec(['psql'] + creds + ['-c',"GRANT ALL ON DATABASE %s TO %s;" % (database, username)], 'dbcreate')
if dba_pass:
del os.environ['PGPASSWORD']
| # ... existing code ...
import os
# ... rest of the code ... |
72ee5d6949f25158b6cbd1deead45ee1e939be5b | sympy/series/__init__.py | sympy/series/__init__.py | from order import Order
from limits import limit, Limit
from gruntz import gruntz
from series import series
O = Order
__all__ = [gruntz, limit, series, O, Order, Limit]
| from order import Order
from limits import limit, Limit
from gruntz import gruntz
from series import series
O = Order
__all__ = ['gruntz', 'limit', 'series', 'O', 'Order', 'Limit']
| Fix __all__ usage for sympy/series | Fix __all__ usage for sympy/series
| Python | bsd-3-clause | hargup/sympy,Arafatk/sympy,chaffra/sympy,kaushik94/sympy,atreyv/sympy,jbbskinny/sympy,aktech/sympy,AunShiLord/sympy,AkademieOlympia/sympy,beni55/sympy,jerli/sympy,vipulroxx/sympy,Titan-C/sympy,yukoba/sympy,minrk/sympy,Shaswat27/sympy,abloomston/sympy,yashsharan/sympy,shipci/sympy,drufat/sympy,jerli/sympy,toolforger/sympy,Gadal/sympy,cswiercz/sympy,cswiercz/sympy,atsao72/sympy,MridulS/sympy,emon10005/sympy,shikil/sympy,aktech/sympy,iamutkarshtiwari/sympy,moble/sympy,Arafatk/sympy,VaibhavAgarwalVA/sympy,wanglongqi/sympy,kaichogami/sympy,Gadal/sympy,postvakje/sympy,ahhda/sympy,yukoba/sympy,sahmed95/sympy,hrashk/sympy,oliverlee/sympy,garvitr/sympy,farhaanbukhsh/sympy,jaimahajan1997/sympy,wanglongqi/sympy,kaushik94/sympy,shikil/sympy,grevutiu-gabriel/sympy,maniteja123/sympy,VaibhavAgarwalVA/sympy,drufat/sympy,abloomston/sympy,atsao72/sympy,Davidjohnwilson/sympy,jaimahajan1997/sympy,sahilshekhawat/sympy,moble/sympy,Davidjohnwilson/sympy,diofant/diofant,Sumith1896/sympy,meghana1995/sympy,AunShiLord/sympy,Vishluck/sympy,kmacinnis/sympy,toolforger/sympy,Davidjohnwilson/sympy,Curious72/sympy,grevutiu-gabriel/sympy,madan96/sympy,kumarkrishna/sympy,chaffra/sympy,souravsingh/sympy,MechCoder/sympy,jamesblunt/sympy,ga7g08/sympy,postvakje/sympy,jbbskinny/sympy,mcdaniel67/sympy,farhaanbukhsh/sympy,rahuldan/sympy,sunny94/temp,Gadal/sympy,hargup/sympy,garvitr/sympy,atreyv/sympy,lidavidm/sympy,abhiii5459/sympy,MechCoder/sympy,saurabhjn76/sympy,shipci/sympy,mcdaniel67/sympy,hrashk/sympy,yashsharan/sympy,bukzor/sympy,Shaswat27/sympy,sahmed95/sympy,Sumith1896/sympy,rahuldan/sympy,shipci/sympy,yukoba/sympy,cccfran/sympy,liangjiaxing/sympy,kaichogami/sympy,skidzo/sympy,bukzor/sympy,kaushik94/sympy,jerli/sympy,Designist/sympy,kmacinnis/sympy,asm666/sympy,Arafatk/sympy,yashsharan/sympy,sahilshekhawat/sympy,pandeyadarsh/sympy,Mitchkoens/sympy,jamesblunt/sympy,saurabhjn76/sympy,hargup/sympy,maniteja123/sympy,debugger22/sympy,saurabhjn76/sympy,wyom/sympy,kmacinnis/sympy
,MridulS/sympy,cswiercz/sympy,debugger22/sympy,cccfran/sympy,cccfran/sympy,iamutkarshtiwari/sympy,oliverlee/sympy,maniteja123/sympy,MechCoder/sympy,skirpichev/omg,lindsayad/sympy,VaibhavAgarwalVA/sympy,rahuldan/sympy,madan96/sympy,iamutkarshtiwari/sympy,asm666/sympy,asm666/sympy,pernici/sympy,Designist/sympy,Mitchkoens/sympy,grevutiu-gabriel/sympy,AkademieOlympia/sympy,garvitr/sympy,sampadsaha5/sympy,lindsayad/sympy,atsao72/sympy,drufat/sympy,ga7g08/sympy,Vishluck/sympy,hrashk/sympy,bukzor/sympy,pandeyadarsh/sympy,flacjacket/sympy,sahmed95/sympy,skidzo/sympy,beni55/sympy,emon10005/sympy,emon10005/sympy,pbrady/sympy,jamesblunt/sympy,Designist/sympy,chaffra/sympy,kumarkrishna/sympy,madan96/sympy,moble/sympy,beni55/sympy,Sumith1896/sympy,dqnykamp/sympy,mafiya69/sympy,pbrady/sympy,ChristinaZografou/sympy,ChristinaZografou/sympy,kevalds51/sympy,liangjiaxing/sympy,Titan-C/sympy,amitjamadagni/sympy,minrk/sympy,lidavidm/sympy,dqnykamp/sympy,mafiya69/sympy,sampadsaha5/sympy,liangjiaxing/sympy,ahhda/sympy,sampadsaha5/sympy,Curious72/sympy,aktech/sympy,lindsayad/sympy,wyom/sympy,vipulroxx/sympy,atreyv/sympy,jbbskinny/sympy,mafiya69/sympy,sunny94/temp,srjoglekar246/sympy,pbrady/sympy,shikil/sympy,wyom/sympy,debugger22/sympy,Titan-C/sympy,toolforger/sympy,mcdaniel67/sympy,kevalds51/sympy,MridulS/sympy,Vishluck/sympy,sahilshekhawat/sympy,lidavidm/sympy,sunny94/temp,Shaswat27/sympy,oliverlee/sympy,abhiii5459/sympy,ChristinaZografou/sympy,abloomston/sympy,AunShiLord/sympy,dqnykamp/sympy,jaimahajan1997/sympy,meghana1995/sympy,postvakje/sympy,souravsingh/sympy,souravsingh/sympy,AkademieOlympia/sympy,skidzo/sympy,wanglongqi/sympy,pandeyadarsh/sympy,kevalds51/sympy,farhaanbukhsh/sympy,kaichogami/sympy,meghana1995/sympy,amitjamadagni/sympy,ahhda/sympy,Curious72/sympy,abhiii5459/sympy,Mitchkoens/sympy,kumarkrishna/sympy,vipulroxx/sympy,ga7g08/sympy | from order import Order
from limits import limit, Limit
from gruntz import gruntz
from series import series
O = Order
- __all__ = [gruntz, limit, series, O, Order, Limit]
+ __all__ = ['gruntz', 'limit', 'series', 'O', 'Order', 'Limit']
| Fix __all__ usage for sympy/series | ## Code Before:
from order import Order
from limits import limit, Limit
from gruntz import gruntz
from series import series
O = Order
__all__ = [gruntz, limit, series, O, Order, Limit]
## Instruction:
Fix __all__ usage for sympy/series
## Code After:
from order import Order
from limits import limit, Limit
from gruntz import gruntz
from series import series
O = Order
__all__ = ['gruntz', 'limit', 'series', 'O', 'Order', 'Limit']
| # ... existing code ...
__all__ = ['gruntz', 'limit', 'series', 'O', 'Order', 'Limit']
# ... rest of the code ... |
b9ccbb2addd8dcaeb100bb5e95768caa2a97c280 | srttools/core/__init__.py | srttools/core/__init__.py | import warnings
try:
import matplotlib
# matplotlib.use('TkAgg')
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
| import warnings
DEFAULT_MPL_BACKEND = 'TkAgg'
try:
import matplotlib
# This is necessary. Random backends might respond incorrectly.
matplotlib.use(DEFAULT_MPL_BACKEND)
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
version = [int(i) for i in sm.version.version.split('.')]
# Minimum version 0.8.0
if version < (0, 8, 0):
warnings.warn("Please update statsmodels")
raise ImportError
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
| Set default backend, and minimum statsmodels version | Set default backend, and minimum statsmodels version
| Python | bsd-3-clause | matteobachetti/srt-single-dish-tools | import warnings
+ DEFAULT_MPL_BACKEND = 'TkAgg'
try:
import matplotlib
- # matplotlib.use('TkAgg')
+ # This is necessary. Random backends might respond incorrectly.
+ matplotlib.use(DEFAULT_MPL_BACKEND)
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
+ version = [int(i) for i in sm.version.version.split('.')]
+
+ # Minimum version 0.8.0
+ if version < (0, 8, 0):
+ warnings.warn("Please update statsmodels")
+ raise ImportError
+
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
| Set default backend, and minimum statsmodels version | ## Code Before:
import warnings
try:
import matplotlib
# matplotlib.use('TkAgg')
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
## Instruction:
Set default backend, and minimum statsmodels version
## Code After:
import warnings
DEFAULT_MPL_BACKEND = 'TkAgg'
try:
import matplotlib
# This is necessary. Random backends might respond incorrectly.
matplotlib.use(DEFAULT_MPL_BACKEND)
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
version = [int(i) for i in sm.version.version.split('.')]
# Minimum version 0.8.0
if version < (0, 8, 0):
warnings.warn("Please update statsmodels")
raise ImportError
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
| // ... existing code ...
import warnings
DEFAULT_MPL_BACKEND = 'TkAgg'
try:
// ... modified code ...
# This is necessary. Random backends might respond incorrectly.
matplotlib.use(DEFAULT_MPL_BACKEND)
HAS_MPL = True
...
import statsmodels.api as sm
version = [int(i) for i in sm.version.version.split('.')]
# Minimum version 0.8.0
if version < (0, 8, 0):
warnings.warn("Please update statsmodels")
raise ImportError
HAS_STATSM = True
// ... rest of the code ... |
ebf5e05acfb7f1edce0c0987576ee712f3fdea54 | test/scripts/test_sequana_coverage.py | test/scripts/test_sequana_coverage.py | from sequana.scripts import coverage
from nose.plugins.attrib import attr
from sequana import sequana_data
#@attr("skip")
class TestPipeline(object):
@classmethod
def setup_class(klass):
"""This method is run once for each class before any tests are run"""
klass.prog = "sequana_coverage"
klass.params = {'prog': klass.prog}
@classmethod
def teardown_class(klass):
"""This method is run once for each class _after_ all tests are run"""
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def _test_version(self):
coverage.main([self.prog, '--version'])
def test_help(self):
try:
coverage.main([self.prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(self):
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([self.prog, '-i', filename, "-o"]) # "-r", reference])
| from sequana.scripts import coverage
from sequana import sequana_data
import pytest
prog = "sequana_coverage"
@pytest.fixture
def coveragefix():
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def test_version():
try:
coverage.main([prog, '--version'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_help():
try:
coverage.main([prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(tmpdir):
import os
directory = tmpdir.mkdir("report")
name = directory.__str__()
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([prog, '-i', filename, "-o", "--output-directory", name])
assert os.path.exists(name + os.sep + "coverage_mapping.chrom1.html")
| Fix tests to use pytest | Fix tests to use pytest
| Python | bsd-3-clause | sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana | from sequana.scripts import coverage
- from nose.plugins.attrib import attr
from sequana import sequana_data
+ import pytest
+ prog = "sequana_coverage"
- #@attr("skip")
- class TestPipeline(object):
+ @pytest.fixture
+ def coveragefix():
- @classmethod
- def setup_class(klass):
- """This method is run once for each class before any tests are run"""
- klass.prog = "sequana_coverage"
- klass.params = {'prog': klass.prog}
-
- @classmethod
- def teardown_class(klass):
- """This method is run once for each class _after_ all tests are run"""
- import os
+ import os
- # local nosetests execution
+ # local nosetests execution
- try:os.remove('README')
+ try:os.remove('README')
- except:pass
+ except:pass
- try:os.remove('quality.rules')
+ try:os.remove('quality.rules')
- except:pass
+ except:pass
- try:os.remove('config.yaml')
+ try:os.remove('config.yaml')
- except:pass
+ except:pass
-
- def _test_version(self):
- coverage.main([self.prog, '--version'])
-
- def test_help(self):
- try:
- coverage.main([self.prog, '--help'])
- assert False
- except SystemExit:
- pass
- else:
- raise Exception
-
- def test_input(self):
- filename = sequana_data('virus.bed', 'data')
- reference = sequana_data('tofill.fa', 'data')
- coverage.main([self.prog, '-i', filename, "-o"]) # "-r", reference])
+ def test_version():
+ try:
+ coverage.main([prog, '--version'])
+ assert False
+ except SystemExit:
+ pass
+ else:
+ raise Exception
+
+ def test_help():
+ try:
+ coverage.main([prog, '--help'])
+ assert False
+ except SystemExit:
+ pass
+ else:
+ raise Exception
+
+
+ def test_input(tmpdir):
+
+ import os
+ directory = tmpdir.mkdir("report")
+ name = directory.__str__()
+
+ filename = sequana_data('virus.bed', 'data')
+ reference = sequana_data('tofill.fa', 'data')
+ coverage.main([prog, '-i', filename, "-o", "--output-directory", name])
+ assert os.path.exists(name + os.sep + "coverage_mapping.chrom1.html")
+ | Fix tests to use pytest | ## Code Before:
from sequana.scripts import coverage
from nose.plugins.attrib import attr
from sequana import sequana_data
#@attr("skip")
class TestPipeline(object):
@classmethod
def setup_class(klass):
"""This method is run once for each class before any tests are run"""
klass.prog = "sequana_coverage"
klass.params = {'prog': klass.prog}
@classmethod
def teardown_class(klass):
"""This method is run once for each class _after_ all tests are run"""
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def _test_version(self):
coverage.main([self.prog, '--version'])
def test_help(self):
try:
coverage.main([self.prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(self):
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([self.prog, '-i', filename, "-o"]) # "-r", reference])
## Instruction:
Fix tests to use pytest
## Code After:
from sequana.scripts import coverage
from sequana import sequana_data
import pytest
prog = "sequana_coverage"
@pytest.fixture
def coveragefix():
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def test_version():
try:
coverage.main([prog, '--version'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_help():
try:
coverage.main([prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(tmpdir):
import os
directory = tmpdir.mkdir("report")
name = directory.__str__()
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([prog, '-i', filename, "-o", "--output-directory", name])
assert os.path.exists(name + os.sep + "coverage_mapping.chrom1.html")
| ...
from sequana.scripts import coverage
from sequana import sequana_data
import pytest
prog = "sequana_coverage"
@pytest.fixture
def coveragefix():
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
...
def test_version():
try:
coverage.main([prog, '--version'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_help():
try:
coverage.main([prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(tmpdir):
import os
directory = tmpdir.mkdir("report")
name = directory.__str__()
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([prog, '-i', filename, "-o", "--output-directory", name])
assert os.path.exists(name + os.sep + "coverage_mapping.chrom1.html")
... |
6d18ff715a5fa3059ddb609c1abdbbb06b15ad63 | fuel/downloaders/celeba.py | fuel/downloaders/celeba.py | from fuel.downloaders.base import default_downloader
def fill_subparser(subparser):
"""Sets up a subparser to download the CelebA dataset file.
Parameters
----------
subparser : :class:`argparse.ArgumentParser`
Subparser handling the `celeba` command.
"""
urls = ['https://www.dropbox.com/sh/8oqt9vytwxb3s4r/'
'AAB7G69NLjRNqv_tyiULHSVUa/list_attr_celeba.txt?dl=1',
'https://www.dropbox.com/sh/8oqt9vytwxb3s4r/'
'AADVdnYbokd7TXhpvfWLL3sga/img_align_celeba.zip?dl=1']
filenames = ['list_attr_celeba.txt', 'img_align_celeba.zip']
subparser.set_defaults(urls=urls, filenames=filenames)
return default_downloader
| from fuel.downloaders.base import default_downloader
def fill_subparser(subparser):
"""Sets up a subparser to download the CelebA dataset file.
Parameters
----------
subparser : :class:`argparse.ArgumentParser`
Subparser handling the `celeba` command.
"""
urls = ['https://www.dropbox.com/sh/8oqt9vytwxb3s4r/'
'AAC7-uCaJkmPmvLX2_P5qy0ga/Anno/list_attr_celeba.txt?dl=1',
'https://www.dropbox.com/sh/8oqt9vytwxb3s4r/'
'AADIKlz8PR9zr6Y20qbkunrba/Img/img_align_celeba.zip?dl=1']
filenames = ['list_attr_celeba.txt', 'img_align_celeba.zip']
subparser.set_defaults(urls=urls, filenames=filenames)
return default_downloader
| Update download links for CelebA files | Update download links for CelebA files
| Python | mit | mila-udem/fuel,dmitriy-serdyuk/fuel,dmitriy-serdyuk/fuel,mila-udem/fuel,vdumoulin/fuel,vdumoulin/fuel | from fuel.downloaders.base import default_downloader
def fill_subparser(subparser):
"""Sets up a subparser to download the CelebA dataset file.
Parameters
----------
subparser : :class:`argparse.ArgumentParser`
Subparser handling the `celeba` command.
"""
urls = ['https://www.dropbox.com/sh/8oqt9vytwxb3s4r/'
- 'AAB7G69NLjRNqv_tyiULHSVUa/list_attr_celeba.txt?dl=1',
+ 'AAC7-uCaJkmPmvLX2_P5qy0ga/Anno/list_attr_celeba.txt?dl=1',
'https://www.dropbox.com/sh/8oqt9vytwxb3s4r/'
- 'AADVdnYbokd7TXhpvfWLL3sga/img_align_celeba.zip?dl=1']
+ 'AADIKlz8PR9zr6Y20qbkunrba/Img/img_align_celeba.zip?dl=1']
filenames = ['list_attr_celeba.txt', 'img_align_celeba.zip']
subparser.set_defaults(urls=urls, filenames=filenames)
return default_downloader
| Update download links for CelebA files | ## Code Before:
from fuel.downloaders.base import default_downloader
def fill_subparser(subparser):
"""Sets up a subparser to download the CelebA dataset file.
Parameters
----------
subparser : :class:`argparse.ArgumentParser`
Subparser handling the `celeba` command.
"""
urls = ['https://www.dropbox.com/sh/8oqt9vytwxb3s4r/'
'AAB7G69NLjRNqv_tyiULHSVUa/list_attr_celeba.txt?dl=1',
'https://www.dropbox.com/sh/8oqt9vytwxb3s4r/'
'AADVdnYbokd7TXhpvfWLL3sga/img_align_celeba.zip?dl=1']
filenames = ['list_attr_celeba.txt', 'img_align_celeba.zip']
subparser.set_defaults(urls=urls, filenames=filenames)
return default_downloader
## Instruction:
Update download links for CelebA files
## Code After:
from fuel.downloaders.base import default_downloader
def fill_subparser(subparser):
"""Sets up a subparser to download the CelebA dataset file.
Parameters
----------
subparser : :class:`argparse.ArgumentParser`
Subparser handling the `celeba` command.
"""
urls = ['https://www.dropbox.com/sh/8oqt9vytwxb3s4r/'
'AAC7-uCaJkmPmvLX2_P5qy0ga/Anno/list_attr_celeba.txt?dl=1',
'https://www.dropbox.com/sh/8oqt9vytwxb3s4r/'
'AADIKlz8PR9zr6Y20qbkunrba/Img/img_align_celeba.zip?dl=1']
filenames = ['list_attr_celeba.txt', 'img_align_celeba.zip']
subparser.set_defaults(urls=urls, filenames=filenames)
return default_downloader
| # ... existing code ...
urls = ['https://www.dropbox.com/sh/8oqt9vytwxb3s4r/'
'AAC7-uCaJkmPmvLX2_P5qy0ga/Anno/list_attr_celeba.txt?dl=1',
'https://www.dropbox.com/sh/8oqt9vytwxb3s4r/'
'AADIKlz8PR9zr6Y20qbkunrba/Img/img_align_celeba.zip?dl=1']
filenames = ['list_attr_celeba.txt', 'img_align_celeba.zip']
# ... rest of the code ... |
6336e8e13c01b6a81b8586499e7a3e8fc8b532a8 | launch_control/commands/interface.py | launch_control/commands/interface.py | from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplemented()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return cls.__doc__
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
| import inspect
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplemented()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
| Use inspect.getdoc() instead of plain __doc__ | Use inspect.getdoc() instead of plain __doc__
| Python | agpl-3.0 | Linaro/lava-server,Linaro/lava-server,OSSystems/lava-server,Linaro/lava-server,OSSystems/lava-server,OSSystems/lava-server,Linaro/lava-server | + import inspect
+
from launch_control.utils.registry import RegistryBase
+
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplemented()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
-
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
- return cls.__doc__
+ return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
| Use inspect.getdoc() instead of plain __doc__ | ## Code Before:
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplemented()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return cls.__doc__
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
## Instruction:
Use inspect.getdoc() instead of plain __doc__
## Code After:
import inspect
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplemented()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
| ...
import inspect
from launch_control.utils.registry import RegistryBase
...
@classmethod
...
"""
return inspect.getdoc(cls)
... |
e58c78fea4b604905333b490a22e640477d5e2d5 | django_pytest/test_runner.py | django_pytest/test_runner.py | def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
import sys
from pkg_resources import load_entry_point
sys.argv[1:] = sys.argv[2:]
# Remove stop word (--) from argument list again. This separates Django
# command options from py.test ones.
try:
del sys.argv[sys.argv.index('--')]
except ValueError:
pass
try:
entry_point = load_entry_point('py>=1.0.0', 'console_scripts', 'py.test')
except ImportError:
entry_point = load_entry_point('pytest>=2.0', 'console_scripts', 'py.test')
sys.exit(entry_point())
| class TestRunner(object):
def __init__(self, verbosity=1, interactive=True, failfast=True, **kwargs):
self.verbosity = verbosity
self.interactive = interactive
self.failfast = failfast
def run_tests(self, test_labels):
import pytest
import sys
if test_labels is None:
print ('Not yet implemented: py.test is still not able to '
'discover the tests in all the INSTALLED_APPS as Django '
'requires.')
exit(1)
pytest_args = []
if self.failfast:
pytest_args.append('--exitfirst')
if self.verbosity == 0:
pytest_args.append('--quiet')
elif self.verbosity > 1:
pytest_args.append('--verbose')
# Remove arguments before (--). This separates Django command options
# from py.test ones.
try:
pytest_args_index = sys.argv.index('--') + 1
pytest_args.extend(sys.argv[pytest_args_index:])
except ValueError:
pass
sys.exit(pytest.main(pytest_args))
# Keep the old name to be backwards-compatible
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
runner = TestRunner(verbosity, interactive, failfast=False)
runner.run_tests(test_labels)
| Add a new TestRunner class to remove Django deprecation warnings | Add a new TestRunner class to remove Django deprecation warnings
| Python | bsd-3-clause | buchuki/django-pytest,0101/django-pytest | + class TestRunner(object):
+ def __init__(self, verbosity=1, interactive=True, failfast=True, **kwargs):
+ self.verbosity = verbosity
+ self.interactive = interactive
+ self.failfast = failfast
+
+ def run_tests(self, test_labels):
+ import pytest
+ import sys
+
+ if test_labels is None:
+ print ('Not yet implemented: py.test is still not able to '
+ 'discover the tests in all the INSTALLED_APPS as Django '
+ 'requires.')
+ exit(1)
+
+ pytest_args = []
+ if self.failfast:
+ pytest_args.append('--exitfirst')
+ if self.verbosity == 0:
+ pytest_args.append('--quiet')
+ elif self.verbosity > 1:
+ pytest_args.append('--verbose')
+
+ # Remove arguments before (--). This separates Django command options
+ # from py.test ones.
+ try:
+ pytest_args_index = sys.argv.index('--') + 1
+ pytest_args.extend(sys.argv[pytest_args_index:])
+ except ValueError:
+ pass
+
+ sys.exit(pytest.main(pytest_args))
+
+
+ # Keep the old name to be backwards-compatible
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
+ runner = TestRunner(verbosity, interactive, failfast=False)
+ runner.run_tests(test_labels)
- import sys
- from pkg_resources import load_entry_point
- sys.argv[1:] = sys.argv[2:]
- # Remove stop word (--) from argument list again. This separates Django
- # command options from py.test ones.
- try:
- del sys.argv[sys.argv.index('--')]
- except ValueError:
- pass
-
- try:
- entry_point = load_entry_point('py>=1.0.0', 'console_scripts', 'py.test')
- except ImportError:
- entry_point = load_entry_point('pytest>=2.0', 'console_scripts', 'py.test')
-
- sys.exit(entry_point())
- | Add a new TestRunner class to remove Django deprecation warnings | ## Code Before:
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
import sys
from pkg_resources import load_entry_point
sys.argv[1:] = sys.argv[2:]
# Remove stop word (--) from argument list again. This separates Django
# command options from py.test ones.
try:
del sys.argv[sys.argv.index('--')]
except ValueError:
pass
try:
entry_point = load_entry_point('py>=1.0.0', 'console_scripts', 'py.test')
except ImportError:
entry_point = load_entry_point('pytest>=2.0', 'console_scripts', 'py.test')
sys.exit(entry_point())
## Instruction:
Add a new TestRunner class to remove Django deprecation warnings
## Code After:
class TestRunner(object):
def __init__(self, verbosity=1, interactive=True, failfast=True, **kwargs):
self.verbosity = verbosity
self.interactive = interactive
self.failfast = failfast
def run_tests(self, test_labels):
import pytest
import sys
if test_labels is None:
print ('Not yet implemented: py.test is still not able to '
'discover the tests in all the INSTALLED_APPS as Django '
'requires.')
exit(1)
pytest_args = []
if self.failfast:
pytest_args.append('--exitfirst')
if self.verbosity == 0:
pytest_args.append('--quiet')
elif self.verbosity > 1:
pytest_args.append('--verbose')
# Remove arguments before (--). This separates Django command options
# from py.test ones.
try:
pytest_args_index = sys.argv.index('--') + 1
pytest_args.extend(sys.argv[pytest_args_index:])
except ValueError:
pass
sys.exit(pytest.main(pytest_args))
# Keep the old name to be backwards-compatible
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
runner = TestRunner(verbosity, interactive, failfast=False)
runner.run_tests(test_labels)
| ...
class TestRunner(object):
def __init__(self, verbosity=1, interactive=True, failfast=True, **kwargs):
self.verbosity = verbosity
self.interactive = interactive
self.failfast = failfast
def run_tests(self, test_labels):
import pytest
import sys
if test_labels is None:
print ('Not yet implemented: py.test is still not able to '
'discover the tests in all the INSTALLED_APPS as Django '
'requires.')
exit(1)
pytest_args = []
if self.failfast:
pytest_args.append('--exitfirst')
if self.verbosity == 0:
pytest_args.append('--quiet')
elif self.verbosity > 1:
pytest_args.append('--verbose')
# Remove arguments before (--). This separates Django command options
# from py.test ones.
try:
pytest_args_index = sys.argv.index('--') + 1
pytest_args.extend(sys.argv[pytest_args_index:])
except ValueError:
pass
sys.exit(pytest.main(pytest_args))
# Keep the old name to be backwards-compatible
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
runner = TestRunner(verbosity, interactive, failfast=False)
runner.run_tests(test_labels)
... |
7e5d8eb0d6eabb427d7e9bd02bac3ee7b90d228d | src/config.py | src/config.py |
import urllib
import urllib.request
proxies = [
False,
False
] |
import urllib
import urllib.request
from pprint import pprint
proxies = [
'',
''
]
_tested_proxies = False
def test_proxies():
global _tested_proxies
if _tested_proxies:
return
_tested_proxies = {}
def _testproxy(proxyid):
if proxyid=='':
return True
if _tested_proxies.get(proxyid) is not None:
return _tested_proxies.get(proxyid)
print("Pretesting proxy",proxyid)
proxy = urllib.request.ProxyHandler( {'http': proxyid , 'https': proxyid } )
opener = urllib.request.build_opener(proxy)
#urllib.request.install_opener(opener)
try:
opened = opener.open('http://example.com')
if not opened:
_tested_proxies[proxyid] = False
return False
assert(opened.read().find(b"Example Domain")>-1)
except urllib.error.URLError as e:
try:
opened = opener.open('http://google.com')
if not opened:
_tested_proxies[proxyid] = False
return False
except urllib.error.URLError as e:
print("Proxy error",proxyid,e)
_tested_proxies[proxyid] = False
return False
_tested_proxies[proxyid] = True
return True
proxies[:] = [tup for tup in proxies if _testproxy(tup)]
_tested_proxies = True
| Test proxies before using them. | Test proxies before using them.
| Python | mit | koivunen/whoisabusetool |
import urllib
import urllib.request
+ from pprint import pprint
+ proxies = [
+ '',
+ ''
+ ]
- proxies = [
- False,
- False
- ]
+
+ _tested_proxies = False
+ def test_proxies():
+ global _tested_proxies
+
+ if _tested_proxies:
+ return
+
+ _tested_proxies = {}
+
+ def _testproxy(proxyid):
+ if proxyid=='':
+ return True
+
+ if _tested_proxies.get(proxyid) is not None:
+ return _tested_proxies.get(proxyid)
+
+ print("Pretesting proxy",proxyid)
+ proxy = urllib.request.ProxyHandler( {'http': proxyid , 'https': proxyid } )
+ opener = urllib.request.build_opener(proxy)
+ #urllib.request.install_opener(opener)
+ try:
+ opened = opener.open('http://example.com')
+ if not opened:
+ _tested_proxies[proxyid] = False
+ return False
+ assert(opened.read().find(b"Example Domain")>-1)
+
+ except urllib.error.URLError as e:
+ try:
+ opened = opener.open('http://google.com')
+ if not opened:
+ _tested_proxies[proxyid] = False
+ return False
+
+ except urllib.error.URLError as e:
+ print("Proxy error",proxyid,e)
+ _tested_proxies[proxyid] = False
+ return False
+
+ _tested_proxies[proxyid] = True
+ return True
+
+ proxies[:] = [tup for tup in proxies if _testproxy(tup)]
+
+ _tested_proxies = True
+ | Test proxies before using them. | ## Code Before:
import urllib
import urllib.request
proxies = [
False,
False
]
## Instruction:
Test proxies before using them.
## Code After:
import urllib
import urllib.request
from pprint import pprint
proxies = [
'',
''
]
_tested_proxies = False
def test_proxies():
global _tested_proxies
if _tested_proxies:
return
_tested_proxies = {}
def _testproxy(proxyid):
if proxyid=='':
return True
if _tested_proxies.get(proxyid) is not None:
return _tested_proxies.get(proxyid)
print("Pretesting proxy",proxyid)
proxy = urllib.request.ProxyHandler( {'http': proxyid , 'https': proxyid } )
opener = urllib.request.build_opener(proxy)
#urllib.request.install_opener(opener)
try:
opened = opener.open('http://example.com')
if not opened:
_tested_proxies[proxyid] = False
return False
assert(opened.read().find(b"Example Domain")>-1)
except urllib.error.URLError as e:
try:
opened = opener.open('http://google.com')
if not opened:
_tested_proxies[proxyid] = False
return False
except urllib.error.URLError as e:
print("Proxy error",proxyid,e)
_tested_proxies[proxyid] = False
return False
_tested_proxies[proxyid] = True
return True
proxies[:] = [tup for tup in proxies if _testproxy(tup)]
_tested_proxies = True
| ...
import urllib.request
from pprint import pprint
proxies = [
'',
''
]
_tested_proxies = False
def test_proxies():
global _tested_proxies
if _tested_proxies:
return
_tested_proxies = {}
def _testproxy(proxyid):
if proxyid=='':
return True
if _tested_proxies.get(proxyid) is not None:
return _tested_proxies.get(proxyid)
print("Pretesting proxy",proxyid)
proxy = urllib.request.ProxyHandler( {'http': proxyid , 'https': proxyid } )
opener = urllib.request.build_opener(proxy)
#urllib.request.install_opener(opener)
try:
opened = opener.open('http://example.com')
if not opened:
_tested_proxies[proxyid] = False
return False
assert(opened.read().find(b"Example Domain")>-1)
except urllib.error.URLError as e:
try:
opened = opener.open('http://google.com')
if not opened:
_tested_proxies[proxyid] = False
return False
except urllib.error.URLError as e:
print("Proxy error",proxyid,e)
_tested_proxies[proxyid] = False
return False
_tested_proxies[proxyid] = True
return True
proxies[:] = [tup for tup in proxies if _testproxy(tup)]
_tested_proxies = True
... |
a2c8ade4d73b6756fef2829c0e656acbe60f2b03 | fabfile.py | fabfile.py | from fabric.api import local
from fabric.api import warn_only
CMD_MANAGE = "python manage.py "
def auto_schema():
with warn_only():
schema('rockit.foundation.core')
schema('rockit.plugins.mailout')
schema('rockit.plugins.razberry')
def build():
migrate('rockit.foundation.core')
migrate('rockit.plugins.mailout')
migrate('rockit.plugins.razberry')
load_data('rockit/foundation/core/fixtures/settings.json')
load_data('rockit/plugins/mailout/fixtures/servers.json')
test()
def load_data(path):
local(CMD_MANAGE + 'loaddata %s' % path)
def migrate(app):
local(CMD_MANAGE + 'migrate %s' % app)
def runserver(localonly=True):
if localonly:
local(CMD_MANAGE + 'runserver')
else:
local(CMD_MANAGE + 'runserver 0.0.0.0')
def schema(app):
local(CMD_MANAGE + 'schemamigration %s --auto' % app)
def setup(environment):
local('pip install -r requirements/%s' % environment)
def test():
local(CMD_MANAGE + 'test') | from fabric.api import local
CMD_MANAGE = "python manage.py "
def auto_schema():
schema('rockit.foundation.core')
schema('rockit.plugins.mailout')
schema('rockit.plugins.razberry')
def build():
migrate('rockit.foundation.core')
migrate('rockit.plugins.mailout')
migrate('rockit.plugins.razberry')
load_data('rockit/foundation/core/fixtures/settings.json')
load_data('rockit/plugins/mailout/fixtures/servers.json')
test()
def load_data(path):
local(CMD_MANAGE + 'loaddata %s' % path)
def migrate(app):
local(CMD_MANAGE + 'migrate %s' % app)
def runserver(localonly=True):
if localonly:
local(CMD_MANAGE + 'runserver')
else:
local(CMD_MANAGE + 'runserver 0.0.0.0')
def schema(app):
local(CMD_MANAGE + 'schemamigration %s --auto' % app)
def setup(environment):
local('pip install -r requirements/%s' % environment)
def test():
local(CMD_MANAGE + 'test') | Remove warn only from fabric file | Remove warn only from fabric file
| Python | mit | acreations/rockit-server,acreations/rockit-server,acreations/rockit-server,acreations/rockit-server | from fabric.api import local
- from fabric.api import warn_only
CMD_MANAGE = "python manage.py "
def auto_schema():
- with warn_only():
- schema('rockit.foundation.core')
+ schema('rockit.foundation.core')
- schema('rockit.plugins.mailout')
+ schema('rockit.plugins.mailout')
- schema('rockit.plugins.razberry')
+ schema('rockit.plugins.razberry')
def build():
migrate('rockit.foundation.core')
migrate('rockit.plugins.mailout')
migrate('rockit.plugins.razberry')
load_data('rockit/foundation/core/fixtures/settings.json')
load_data('rockit/plugins/mailout/fixtures/servers.json')
test()
def load_data(path):
local(CMD_MANAGE + 'loaddata %s' % path)
def migrate(app):
local(CMD_MANAGE + 'migrate %s' % app)
def runserver(localonly=True):
if localonly:
local(CMD_MANAGE + 'runserver')
else:
local(CMD_MANAGE + 'runserver 0.0.0.0')
def schema(app):
local(CMD_MANAGE + 'schemamigration %s --auto' % app)
def setup(environment):
local('pip install -r requirements/%s' % environment)
def test():
local(CMD_MANAGE + 'test') | Remove warn only from fabric file | ## Code Before:
from fabric.api import local
from fabric.api import warn_only
CMD_MANAGE = "python manage.py "
def auto_schema():
with warn_only():
schema('rockit.foundation.core')
schema('rockit.plugins.mailout')
schema('rockit.plugins.razberry')
def build():
migrate('rockit.foundation.core')
migrate('rockit.plugins.mailout')
migrate('rockit.plugins.razberry')
load_data('rockit/foundation/core/fixtures/settings.json')
load_data('rockit/plugins/mailout/fixtures/servers.json')
test()
def load_data(path):
local(CMD_MANAGE + 'loaddata %s' % path)
def migrate(app):
local(CMD_MANAGE + 'migrate %s' % app)
def runserver(localonly=True):
if localonly:
local(CMD_MANAGE + 'runserver')
else:
local(CMD_MANAGE + 'runserver 0.0.0.0')
def schema(app):
local(CMD_MANAGE + 'schemamigration %s --auto' % app)
def setup(environment):
local('pip install -r requirements/%s' % environment)
def test():
local(CMD_MANAGE + 'test')
## Instruction:
Remove warn only from fabric file
## Code After:
from fabric.api import local
CMD_MANAGE = "python manage.py "
def auto_schema():
schema('rockit.foundation.core')
schema('rockit.plugins.mailout')
schema('rockit.plugins.razberry')
def build():
migrate('rockit.foundation.core')
migrate('rockit.plugins.mailout')
migrate('rockit.plugins.razberry')
load_data('rockit/foundation/core/fixtures/settings.json')
load_data('rockit/plugins/mailout/fixtures/servers.json')
test()
def load_data(path):
local(CMD_MANAGE + 'loaddata %s' % path)
def migrate(app):
local(CMD_MANAGE + 'migrate %s' % app)
def runserver(localonly=True):
if localonly:
local(CMD_MANAGE + 'runserver')
else:
local(CMD_MANAGE + 'runserver 0.0.0.0')
def schema(app):
local(CMD_MANAGE + 'schemamigration %s --auto' % app)
def setup(environment):
local('pip install -r requirements/%s' % environment)
def test():
local(CMD_MANAGE + 'test') | # ... existing code ...
from fabric.api import local
# ... modified code ...
def auto_schema():
schema('rockit.foundation.core')
schema('rockit.plugins.mailout')
schema('rockit.plugins.razberry')
# ... rest of the code ... |
e3c840567fae974b2a1f169b05b86de97b60c8d0 | gitcms/publications/urls.py | gitcms/publications/urls.py | from django.conf.urls.defaults import *
import settings
import views
urlpatterns = patterns('',
(r'^papers/(?P<paper>.+)$', views.papers),
(r'^publications/?$', views.publications, { 'collection' : 'luispedro' }),
(r'^publications/(?P<collection>.+)$', views.publications),
(r'^publications/files/(?P<file>.+)$', 'django.views.static.serve', {'document_root': settings._BASE_DIR + '/../media/publications/files'}),
)
| from django.conf.urls.defaults import *
import settings
import views
urlpatterns = patterns('',
(r'^papers/(?P<paper>.+)$', views.papers),
(r'^publications/?$', views.publications, { 'collection' : 'luispedro' }),
(r'^publications/(?P<collection>.+)$', views.publications),
(r'^publications/files/(?P<file>.+)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT + '/publications/files'}),
)
| Remove stay mention to BASE_URL | Remove stay mention to BASE_URL
| Python | agpl-3.0 | luispedro/django-gitcms,luispedro/django-gitcms | from django.conf.urls.defaults import *
import settings
import views
urlpatterns = patterns('',
(r'^papers/(?P<paper>.+)$', views.papers),
(r'^publications/?$', views.publications, { 'collection' : 'luispedro' }),
(r'^publications/(?P<collection>.+)$', views.publications),
- (r'^publications/files/(?P<file>.+)$', 'django.views.static.serve', {'document_root': settings._BASE_DIR + '/../media/publications/files'}),
+ (r'^publications/files/(?P<file>.+)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT + '/publications/files'}),
)
| Remove stay mention to BASE_URL | ## Code Before:
from django.conf.urls.defaults import *
import settings
import views
urlpatterns = patterns('',
(r'^papers/(?P<paper>.+)$', views.papers),
(r'^publications/?$', views.publications, { 'collection' : 'luispedro' }),
(r'^publications/(?P<collection>.+)$', views.publications),
(r'^publications/files/(?P<file>.+)$', 'django.views.static.serve', {'document_root': settings._BASE_DIR + '/../media/publications/files'}),
)
## Instruction:
Remove stay mention to BASE_URL
## Code After:
from django.conf.urls.defaults import *
import settings
import views
urlpatterns = patterns('',
(r'^papers/(?P<paper>.+)$', views.papers),
(r'^publications/?$', views.publications, { 'collection' : 'luispedro' }),
(r'^publications/(?P<collection>.+)$', views.publications),
(r'^publications/files/(?P<file>.+)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT + '/publications/files'}),
)
| ...
(r'^publications/(?P<collection>.+)$', views.publications),
(r'^publications/files/(?P<file>.+)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT + '/publications/files'}),
)
... |
464fc1e9a905df25b12975422d5b48cf8286306c | custom/icds_reports/utils/migrations.py | custom/icds_reports/utils/migrations.py | from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.sql_db.operations import RawSQLMigration
def get_view_migrations():
sql_views = [
'awc_location_months.sql',
'agg_awc_monthly.sql',
'agg_ccs_record_monthly.sql',
'agg_child_health_monthly.sql',
'daily_attendance.sql',
'agg_awc_daily.sql',
'child_health_monthly.sql',
'disha_indicators.sql',
'ccs_record_monthly_view.sql',
'agg_ls_monthly.sql',
'service_delivery_monthly.sql',
]
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
operations = []
for view in sql_views:
operations.append(migrator.get_migration(view))
return operations
| from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.sql_db.operations import RawSQLMigration
def get_view_migrations():
sql_views = [
'awc_location_months.sql',
'agg_awc_monthly.sql',
'agg_ccs_record_monthly.sql',
'agg_child_health_monthly.sql',
'daily_attendance.sql',
'agg_awc_daily.sql',
'child_health_monthly.sql',
'disha_indicators.sql',
'ccs_record_monthly_view.sql',
'agg_ls_monthly.sql',
'service_delivery_monthly.sql',
'aww_incentive_report_monthly.sql',
]
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
operations = []
for view in sql_views:
operations.append(migrator.get_migration(view))
return operations
| Add aww_incentive report view to migration util | Add aww_incentive report view to migration util
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.sql_db.operations import RawSQLMigration
def get_view_migrations():
sql_views = [
'awc_location_months.sql',
'agg_awc_monthly.sql',
'agg_ccs_record_monthly.sql',
'agg_child_health_monthly.sql',
'daily_attendance.sql',
'agg_awc_daily.sql',
'child_health_monthly.sql',
'disha_indicators.sql',
'ccs_record_monthly_view.sql',
'agg_ls_monthly.sql',
'service_delivery_monthly.sql',
+ 'aww_incentive_report_monthly.sql',
]
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
operations = []
for view in sql_views:
operations.append(migrator.get_migration(view))
return operations
| Add aww_incentive report view to migration util | ## Code Before:
from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.sql_db.operations import RawSQLMigration
def get_view_migrations():
sql_views = [
'awc_location_months.sql',
'agg_awc_monthly.sql',
'agg_ccs_record_monthly.sql',
'agg_child_health_monthly.sql',
'daily_attendance.sql',
'agg_awc_daily.sql',
'child_health_monthly.sql',
'disha_indicators.sql',
'ccs_record_monthly_view.sql',
'agg_ls_monthly.sql',
'service_delivery_monthly.sql',
]
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
operations = []
for view in sql_views:
operations.append(migrator.get_migration(view))
return operations
## Instruction:
Add aww_incentive report view to migration util
## Code After:
from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.sql_db.operations import RawSQLMigration
def get_view_migrations():
sql_views = [
'awc_location_months.sql',
'agg_awc_monthly.sql',
'agg_ccs_record_monthly.sql',
'agg_child_health_monthly.sql',
'daily_attendance.sql',
'agg_awc_daily.sql',
'child_health_monthly.sql',
'disha_indicators.sql',
'ccs_record_monthly_view.sql',
'agg_ls_monthly.sql',
'service_delivery_monthly.sql',
'aww_incentive_report_monthly.sql',
]
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
operations = []
for view in sql_views:
operations.append(migrator.get_migration(view))
return operations
| ...
'service_delivery_monthly.sql',
'aww_incentive_report_monthly.sql',
]
... |
69a94173a48d04bc9e409278574844ebbc43af8b | dadd/worker/__init__.py | dadd/worker/__init__.py | import os
from functools import partial
import click
from flask import Flask
from dadd import server
app = Flask(__name__)
app.config.from_object('dadd.worker.settings')
import dadd.worker.handlers # noqa
@click.command()
@click.pass_context
def run(ctx):
if os.environ.get('DEBUG') or (ctx.obj and ctx.obj.get('DEBUG')):
app.debug = True
if ctx.obj:
app.config.update(ctx.obj)
register = partial(dadd.worker.handlers.register,
app.config['HOST'],
app.config['PORT'])
server.monitor('Dadd_Heartbeat', register, 2)
server.mount(app, '/')
server.run(app.config)
| from functools import partial
import click
from flask import Flask
from dadd import server
from dadd.master.utils import update_config
app = Flask(__name__)
app.config.from_object('dadd.worker.settings')
import dadd.worker.handlers # noqa
@click.command()
@click.pass_context
def run(ctx):
if ctx.obj:
app.config.update(ctx.obj)
update_config(app)
register = partial(dadd.worker.handlers.register,
app.config['HOST'],
app.config['PORT'])
server.monitor('Dadd_Heartbeat', register, 2)
server.mount(app, '/')
server.run(app.config)
| Allow worker to use APP_SETTINGS_YAML correctly. | Allow worker to use APP_SETTINGS_YAML correctly.
| Python | bsd-3-clause | ionrock/dadd,ionrock/dadd,ionrock/dadd,ionrock/dadd | - import os
-
from functools import partial
import click
from flask import Flask
from dadd import server
+ from dadd.master.utils import update_config
app = Flask(__name__)
app.config.from_object('dadd.worker.settings')
import dadd.worker.handlers # noqa
@click.command()
@click.pass_context
def run(ctx):
- if os.environ.get('DEBUG') or (ctx.obj and ctx.obj.get('DEBUG')):
- app.debug = True
-
if ctx.obj:
app.config.update(ctx.obj)
+
+ update_config(app)
register = partial(dadd.worker.handlers.register,
app.config['HOST'],
app.config['PORT'])
server.monitor('Dadd_Heartbeat', register, 2)
server.mount(app, '/')
server.run(app.config)
| Allow worker to use APP_SETTINGS_YAML correctly. | ## Code Before:
import os
from functools import partial
import click
from flask import Flask
from dadd import server
app = Flask(__name__)
app.config.from_object('dadd.worker.settings')
import dadd.worker.handlers # noqa
@click.command()
@click.pass_context
def run(ctx):
if os.environ.get('DEBUG') or (ctx.obj and ctx.obj.get('DEBUG')):
app.debug = True
if ctx.obj:
app.config.update(ctx.obj)
register = partial(dadd.worker.handlers.register,
app.config['HOST'],
app.config['PORT'])
server.monitor('Dadd_Heartbeat', register, 2)
server.mount(app, '/')
server.run(app.config)
## Instruction:
Allow worker to use APP_SETTINGS_YAML correctly.
## Code After:
from functools import partial
import click
from flask import Flask
from dadd import server
from dadd.master.utils import update_config
app = Flask(__name__)
app.config.from_object('dadd.worker.settings')
import dadd.worker.handlers # noqa
@click.command()
@click.pass_context
def run(ctx):
if ctx.obj:
app.config.update(ctx.obj)
update_config(app)
register = partial(dadd.worker.handlers.register,
app.config['HOST'],
app.config['PORT'])
server.monitor('Dadd_Heartbeat', register, 2)
server.mount(app, '/')
server.run(app.config)
| # ... existing code ...
from functools import partial
# ... modified code ...
from dadd import server
from dadd.master.utils import update_config
...
def run(ctx):
if ctx.obj:
...
app.config.update(ctx.obj)
update_config(app)
# ... rest of the code ... |
25f69d929af9ef600f067343524272bcaef54a6b | KerbalStuff/celery.py | KerbalStuff/celery.py | import smtplib
from celery import Celery
from email.mime.text import MIMEText
from KerbalStuff.config import _cfg, _cfgi, _cfgb
app = Celery("tasks", broker="redis://localhost:6379/0")
def chunks(l, n):
""" Yield successive n-sized chunks from l.
"""
for i in range(0, len(l), n):
yield l[i:i+n]
@app.task
def send_mail(sender, recipients, subject, message, important=False):
if _cfg("smtp-host") == "":
return
smtp = smtplib.SMTP(host=_cfg("smtp-host"), port=_cfgi("smtp-port"))
message = MIMEText(message)
if important:
message['X-MC-Important'] = "true"
message['X-MC-PreserveRecipients'] = "false"
message['Subject'] = subject
message['From'] = sender
for group in chunks(recipients, 100):
message['To'] = ";".join(group)
print("Sending email from {} to {} recipients".format(sender, len(group)))
smtp.sendmail(sender, group, message.as_string())
smtp.quit()
| import smtplib
from celery import Celery
from email.mime.text import MIMEText
from KerbalStuff.config import _cfg, _cfgi, _cfgb
app = Celery("tasks", broker="redis://localhost:6379/0")
def chunks(l, n):
""" Yield successive n-sized chunks from l.
"""
for i in range(0, len(l), n):
yield l[i:i+n]
@app.task
def send_mail(sender, recipients, subject, message, important=False):
if _cfg("smtp-host") == "":
return
smtp = smtplib.SMTP(host=_cfg("smtp-host"), port=_cfgi("smtp-port"))
message = MIMEText(message)
if important:
message['X-MC-Important'] = "true"
message['X-MC-PreserveRecipients'] = "false"
message['Subject'] = subject
message['From'] = sender
for group in chunks(recipients, 100):
message['To'] = "undisclosed-recipients:;"
print("Sending email from {} to {} recipients".format(sender, len(group)))
smtp.sendmail(sender, group, message.as_string())
smtp.quit()
| Remove addresses from "To" field, BCC instead. | [proposal] Remove addresses from "To" field, BCC instead.
Closing privacy issue by hiding the "To" field altogether. Just commented out the "To" line. For issue #68 | Python | mit | EIREXE/SpaceDock,EIREXE/SpaceDock,EIREXE/SpaceDock,EIREXE/SpaceDock | import smtplib
from celery import Celery
from email.mime.text import MIMEText
from KerbalStuff.config import _cfg, _cfgi, _cfgb
app = Celery("tasks", broker="redis://localhost:6379/0")
def chunks(l, n):
""" Yield successive n-sized chunks from l.
"""
for i in range(0, len(l), n):
yield l[i:i+n]
@app.task
def send_mail(sender, recipients, subject, message, important=False):
if _cfg("smtp-host") == "":
return
smtp = smtplib.SMTP(host=_cfg("smtp-host"), port=_cfgi("smtp-port"))
message = MIMEText(message)
if important:
message['X-MC-Important'] = "true"
message['X-MC-PreserveRecipients'] = "false"
message['Subject'] = subject
message['From'] = sender
for group in chunks(recipients, 100):
- message['To'] = ";".join(group)
+ message['To'] = "undisclosed-recipients:;"
print("Sending email from {} to {} recipients".format(sender, len(group)))
smtp.sendmail(sender, group, message.as_string())
smtp.quit()
| Remove addresses from "To" field, BCC instead. | ## Code Before:
import smtplib
from celery import Celery
from email.mime.text import MIMEText
from KerbalStuff.config import _cfg, _cfgi, _cfgb
app = Celery("tasks", broker="redis://localhost:6379/0")
def chunks(l, n):
""" Yield successive n-sized chunks from l.
"""
for i in range(0, len(l), n):
yield l[i:i+n]
@app.task
def send_mail(sender, recipients, subject, message, important=False):
if _cfg("smtp-host") == "":
return
smtp = smtplib.SMTP(host=_cfg("smtp-host"), port=_cfgi("smtp-port"))
message = MIMEText(message)
if important:
message['X-MC-Important'] = "true"
message['X-MC-PreserveRecipients'] = "false"
message['Subject'] = subject
message['From'] = sender
for group in chunks(recipients, 100):
message['To'] = ";".join(group)
print("Sending email from {} to {} recipients".format(sender, len(group)))
smtp.sendmail(sender, group, message.as_string())
smtp.quit()
## Instruction:
Remove addresses from "To" field, BCC instead.
## Code After:
import smtplib
from celery import Celery
from email.mime.text import MIMEText
from KerbalStuff.config import _cfg, _cfgi, _cfgb
app = Celery("tasks", broker="redis://localhost:6379/0")
def chunks(l, n):
""" Yield successive n-sized chunks from l.
"""
for i in range(0, len(l), n):
yield l[i:i+n]
@app.task
def send_mail(sender, recipients, subject, message, important=False):
if _cfg("smtp-host") == "":
return
smtp = smtplib.SMTP(host=_cfg("smtp-host"), port=_cfgi("smtp-port"))
message = MIMEText(message)
if important:
message['X-MC-Important'] = "true"
message['X-MC-PreserveRecipients'] = "false"
message['Subject'] = subject
message['From'] = sender
for group in chunks(recipients, 100):
message['To'] = "undisclosed-recipients:;"
print("Sending email from {} to {} recipients".format(sender, len(group)))
smtp.sendmail(sender, group, message.as_string())
smtp.quit()
| ...
for group in chunks(recipients, 100):
message['To'] = "undisclosed-recipients:;"
print("Sending email from {} to {} recipients".format(sender, len(group)))
... |
0ede19a4f2c9c6f01db0040d9d108eb0a0b2558c | py/kafka-tmdb.py | py/kafka-tmdb.py | import json
from get_tmdb import GetTMDB
from kafka import KafkaConsumer
try:
from GLOBALS import KAFKA_BROKER, TMDB_API
except ImportError:
print('Get it somewhere else')
class CollectTMDB(object):
def __init__(self, ):
self.tmdb = GetTMDB(TMDB_API)
self.consumer = KafkaConsumer(group_id='tmdb',
bootstrap_servers=['{}:9092'.format(KAFKA_BROKER)])
self.consumer.subscribe(pattern='tmdb')
def run(self):
'''
Collects a message from the topic 'tmdb'. This message is a json containing all the
information collected from the apis further up the stream. We get the imdb_id from
this data and pass it to the tmdb api. We append the information from the tmdb_api
to the msg we collected, and then pass it to the next topic.
'''
for message in self.consumer:
# message value and key are raw bytes -- decode if necessary!
# e.g., for unicode: `message.value.decode('utf-8')
msg_data = json.loads(message.value)
imdb_id = msg_data['imdb_id']
tmdb_data = GetTMDB.get_info(imdb_id)
msg_data.extend(tmdb_data) | import json
from get_tmdb import GetTMDB
from kafka import KafkaConsumer, KafkaProducer
try:
from GLOBALS import KAFKA_BROKER, TMDB_API
except ImportError:
print('Get it somewhere else')
class CollectTMDB(object):
def __init__(self, ):
self.tmdb = GetTMDB(TMDB_API)
self.producer = KafkaProducer(bootstrap_servers=KAFKA_BROKER)
self.consumer = KafkaConsumer(group_id='tmdb',
bootstrap_servers=KAFKA_BROKER)
self.consumer.subscribe(pattern='omdb')
def run(self):
'''
Collects a message from the topic 'tmdb'. This message is a json containing all the
information collected from the apis further up the stream. We get the imdb_id from
this data and pass it to the tmdb api. We append the information from the tmdb_api
to the msg we collected, and then pass it to the next topic.
'''
for message in self.consumer:
# message value and key are raw bytes -- decode if necessary!
# e.g., for unicode: `message.value.decode('utf-8')
msg_data = json.loads(message.value)
imdb_id = msg_data['imdb_id']
tmdb_data = GetTMDB.get_info(imdb_id)
msg_data.extend(tmdb_data)
self.producer.send('tmdb', json.dumps(msg_data)) | Change KAFKA_BROKER parameter, added a send producer | Change KAFKA_BROKER parameter, added a send producer
| Python | mit | kinoreel/kino-gather | import json
from get_tmdb import GetTMDB
- from kafka import KafkaConsumer
+ from kafka import KafkaConsumer, KafkaProducer
try:
from GLOBALS import KAFKA_BROKER, TMDB_API
except ImportError:
print('Get it somewhere else')
class CollectTMDB(object):
def __init__(self, ):
self.tmdb = GetTMDB(TMDB_API)
+ self.producer = KafkaProducer(bootstrap_servers=KAFKA_BROKER)
self.consumer = KafkaConsumer(group_id='tmdb',
- bootstrap_servers=['{}:9092'.format(KAFKA_BROKER)])
+ bootstrap_servers=KAFKA_BROKER)
- self.consumer.subscribe(pattern='tmdb')
+ self.consumer.subscribe(pattern='omdb')
def run(self):
'''
Collects a message from the topic 'tmdb'. This message is a json containing all the
information collected from the apis further up the stream. We get the imdb_id from
this data and pass it to the tmdb api. We append the information from the tmdb_api
to the msg we collected, and then pass it to the next topic.
'''
for message in self.consumer:
# message value and key are raw bytes -- decode if necessary!
# e.g., for unicode: `message.value.decode('utf-8')
msg_data = json.loads(message.value)
imdb_id = msg_data['imdb_id']
tmdb_data = GetTMDB.get_info(imdb_id)
msg_data.extend(tmdb_data)
+ self.producer.send('tmdb', json.dumps(msg_data)) | Change KAFKA_BROKER parameter, added a send producer | ## Code Before:
import json
from get_tmdb import GetTMDB
from kafka import KafkaConsumer
try:
from GLOBALS import KAFKA_BROKER, TMDB_API
except ImportError:
print('Get it somewhere else')
class CollectTMDB(object):
def __init__(self, ):
self.tmdb = GetTMDB(TMDB_API)
self.consumer = KafkaConsumer(group_id='tmdb',
bootstrap_servers=['{}:9092'.format(KAFKA_BROKER)])
self.consumer.subscribe(pattern='tmdb')
def run(self):
'''
Collects a message from the topic 'tmdb'. This message is a json containing all the
information collected from the apis further up the stream. We get the imdb_id from
this data and pass it to the tmdb api. We append the information from the tmdb_api
to the msg we collected, and then pass it to the next topic.
'''
for message in self.consumer:
# message value and key are raw bytes -- decode if necessary!
# e.g., for unicode: `message.value.decode('utf-8')
msg_data = json.loads(message.value)
imdb_id = msg_data['imdb_id']
tmdb_data = GetTMDB.get_info(imdb_id)
msg_data.extend(tmdb_data)
## Instruction:
Change KAFKA_BROKER parameter, added a send producer
## Code After:
import json
from get_tmdb import GetTMDB
from kafka import KafkaConsumer, KafkaProducer
try:
from GLOBALS import KAFKA_BROKER, TMDB_API
except ImportError:
print('Get it somewhere else')
class CollectTMDB(object):
def __init__(self, ):
self.tmdb = GetTMDB(TMDB_API)
self.producer = KafkaProducer(bootstrap_servers=KAFKA_BROKER)
self.consumer = KafkaConsumer(group_id='tmdb',
bootstrap_servers=KAFKA_BROKER)
self.consumer.subscribe(pattern='omdb')
def run(self):
'''
Collects a message from the topic 'tmdb'. This message is a json containing all the
information collected from the apis further up the stream. We get the imdb_id from
this data and pass it to the tmdb api. We append the information from the tmdb_api
to the msg we collected, and then pass it to the next topic.
'''
for message in self.consumer:
# message value and key are raw bytes -- decode if necessary!
# e.g., for unicode: `message.value.decode('utf-8')
msg_data = json.loads(message.value)
imdb_id = msg_data['imdb_id']
tmdb_data = GetTMDB.get_info(imdb_id)
msg_data.extend(tmdb_data)
self.producer.send('tmdb', json.dumps(msg_data)) | ...
from get_tmdb import GetTMDB
from kafka import KafkaConsumer, KafkaProducer
...
self.tmdb = GetTMDB(TMDB_API)
self.producer = KafkaProducer(bootstrap_servers=KAFKA_BROKER)
self.consumer = KafkaConsumer(group_id='tmdb',
bootstrap_servers=KAFKA_BROKER)
self.consumer.subscribe(pattern='omdb')
...
msg_data.extend(tmdb_data)
self.producer.send('tmdb', json.dumps(msg_data))
... |
e5e6d4ac9e86aa7e44694cf4746c4c9ec91df107 | setup.py | setup.py |
from distutils.core import setup
setup(name = 'skeleton',
version = '0.0',
description = 'A python skeleton project',
long_description = '''
This project represents a basic python skeleton project that can be used as
the basis for other projects. Feel free to fork this project and use as you
see fit, but please update the information here.
Note: Licensing only applies to the skeleton itself, should you use this
skeleton as the basis for a new project, update the license accordingly.
''',
author = 'Chris Salch',
author_email = '[email protected]',
url = 'https://github.com/arlaneenalra/python-skeleton',
classifiers = [
'License :: OSI Approved :: BSD License'
],
license = 'License :: OSI Approved :: BSD License',
packages = [],
package_dir = { '': 'lib'},
scripts = [],
py_modules = [],
)
|
from distutils.core import setup
setup(name = 'skeleton',
version = '0.0',
description = 'A python skeleton project',
long_description = '''
This project represents a basic python skeleton project that can be used as
the basis for other projects. Feel free to fork this project and use as you
see fit, but please update the information here.
Note: Licensing only applies to the skeleton itself, should you use this
skeleton as the basis for a new project, update the license accordingly.
''',
author = 'Chris Salch',
author_email = '[email protected]',
url = 'https://github.com/arlaneenalra/python-skeleton',
classifiers = [
'License :: OSI Approved :: BSD License'
],
license = 'License :: OSI Approved :: BSD License',
packages = [],
scripts = [],
py_modules = [],
package_dir = { '': 'lib'},
)
| Move generic lib to botton of config. | Move generic lib to botton of config.
| Python | bsd-2-clause | arlaneenalra/python-skeleton |
from distutils.core import setup
setup(name = 'skeleton',
version = '0.0',
description = 'A python skeleton project',
long_description = '''
This project represents a basic python skeleton project that can be used as
the basis for other projects. Feel free to fork this project and use as you
see fit, but please update the information here.
Note: Licensing only applies to the skeleton itself, should you use this
skeleton as the basis for a new project, update the license accordingly.
''',
author = 'Chris Salch',
author_email = '[email protected]',
url = 'https://github.com/arlaneenalra/python-skeleton',
classifiers = [
'License :: OSI Approved :: BSD License'
],
license = 'License :: OSI Approved :: BSD License',
packages = [],
- package_dir = { '': 'lib'},
scripts = [],
py_modules = [],
+ package_dir = { '': 'lib'},
)
| Move generic lib to botton of config. | ## Code Before:
from distutils.core import setup
setup(name = 'skeleton',
version = '0.0',
description = 'A python skeleton project',
long_description = '''
This project represents a basic python skeleton project that can be used as
the basis for other projects. Feel free to fork this project and use as you
see fit, but please update the information here.
Note: Licensing only applies to the skeleton itself, should you use this
skeleton as the basis for a new project, update the license accordingly.
''',
author = 'Chris Salch',
author_email = '[email protected]',
url = 'https://github.com/arlaneenalra/python-skeleton',
classifiers = [
'License :: OSI Approved :: BSD License'
],
license = 'License :: OSI Approved :: BSD License',
packages = [],
package_dir = { '': 'lib'},
scripts = [],
py_modules = [],
)
## Instruction:
Move generic lib to botton of config.
## Code After:
from distutils.core import setup
setup(name = 'skeleton',
version = '0.0',
description = 'A python skeleton project',
long_description = '''
This project represents a basic python skeleton project that can be used as
the basis for other projects. Feel free to fork this project and use as you
see fit, but please update the information here.
Note: Licensing only applies to the skeleton itself, should you use this
skeleton as the basis for a new project, update the license accordingly.
''',
author = 'Chris Salch',
author_email = '[email protected]',
url = 'https://github.com/arlaneenalra/python-skeleton',
classifiers = [
'License :: OSI Approved :: BSD License'
],
license = 'License :: OSI Approved :: BSD License',
packages = [],
scripts = [],
py_modules = [],
package_dir = { '': 'lib'},
)
| // ... existing code ...
packages = [],
scripts = [],
// ... modified code ...
py_modules = [],
package_dir = { '': 'lib'},
)
// ... rest of the code ... |
61f06da13bef77f576a0c2dea77febf0d2d4b6fb | subl.py | subl.py | from .dependencies import dependencies
dependencies.load()
import sublime, sublime_plugin
from sublime import Region
import subl_source_kitten
# Sublime Text will will call `on_query_completions` itself
class SublCompletions(sublime_plugin.EventListener):
def on_query_completions(self, view, prefix, locations):
offset = locations[0]
file = view.file_name()
if not file.endswith(".swift"):
return None
project_directory = view.window().folders()[0]
text = view.substr(Region(0, view.size()))
suggestions = subl_source_kitten.complete(offset, file, project_directory, text)
return (suggestions, sublime.INHIBIT_WORD_COMPLETIONS | sublime.INHIBIT_EXPLICIT_COMPLETIONS)
| from .dependencies import dependencies
dependencies.load()
import sublime, sublime_plugin
from sublime import Region
import subl_source_kitten
# Sublime Text will will call `on_query_completions` itself
class SublCompletions(sublime_plugin.EventListener):
def on_query_completions(self, view, prefix, locations):
offset = locations[0]
file = view.file_name()
if file != None and not file.endswith(".swift"):
return None
project_directory = view.window().folders()[0]
text = view.substr(Region(0, view.size()))
suggestions = subl_source_kitten.complete(offset, file, project_directory, text)
return (suggestions, sublime.INHIBIT_WORD_COMPLETIONS | sublime.INHIBIT_EXPLICIT_COMPLETIONS)
| Allow autocomplete on non-persisted swift files | Allow autocomplete on non-persisted swift files
| Python | mit | Dan2552/SourceKittenSubl,Dan2552/SourceKittenSubl,Dan2552/SourceKittenSubl | from .dependencies import dependencies
dependencies.load()
import sublime, sublime_plugin
from sublime import Region
import subl_source_kitten
# Sublime Text will will call `on_query_completions` itself
class SublCompletions(sublime_plugin.EventListener):
def on_query_completions(self, view, prefix, locations):
offset = locations[0]
file = view.file_name()
+
- if not file.endswith(".swift"):
+ if file != None and not file.endswith(".swift"):
return None
+
project_directory = view.window().folders()[0]
text = view.substr(Region(0, view.size()))
suggestions = subl_source_kitten.complete(offset, file, project_directory, text)
return (suggestions, sublime.INHIBIT_WORD_COMPLETIONS | sublime.INHIBIT_EXPLICIT_COMPLETIONS)
| Allow autocomplete on non-persisted swift files | ## Code Before:
from .dependencies import dependencies
dependencies.load()
import sublime, sublime_plugin
from sublime import Region
import subl_source_kitten
# Sublime Text will will call `on_query_completions` itself
class SublCompletions(sublime_plugin.EventListener):
def on_query_completions(self, view, prefix, locations):
offset = locations[0]
file = view.file_name()
if not file.endswith(".swift"):
return None
project_directory = view.window().folders()[0]
text = view.substr(Region(0, view.size()))
suggestions = subl_source_kitten.complete(offset, file, project_directory, text)
return (suggestions, sublime.INHIBIT_WORD_COMPLETIONS | sublime.INHIBIT_EXPLICIT_COMPLETIONS)
## Instruction:
Allow autocomplete on non-persisted swift files
## Code After:
from .dependencies import dependencies
dependencies.load()
import sublime, sublime_plugin
from sublime import Region
import subl_source_kitten
# Sublime Text will will call `on_query_completions` itself
class SublCompletions(sublime_plugin.EventListener):
def on_query_completions(self, view, prefix, locations):
offset = locations[0]
file = view.file_name()
if file != None and not file.endswith(".swift"):
return None
project_directory = view.window().folders()[0]
text = view.substr(Region(0, view.size()))
suggestions = subl_source_kitten.complete(offset, file, project_directory, text)
return (suggestions, sublime.INHIBIT_WORD_COMPLETIONS | sublime.INHIBIT_EXPLICIT_COMPLETIONS)
| // ... existing code ...
file = view.file_name()
if file != None and not file.endswith(".swift"):
return None
project_directory = view.window().folders()[0]
// ... rest of the code ... |
bd3dad98976d5e02c4a941ae3c687174db78781d | src/WebCatch/catchLink.py | src/WebCatch/catchLink.py | import requests
import re
import os
url = "https://www.autohome.com.cn/shanghai/"
urlBox = []
def catchURL(url):
file = requests.get(url,timeout=2)
data = file.content
links = re.findall(r'(https?://[^\s)";]+\.(\w|/)*)',str(data))
for i in links:
try:
currentURL = i[0]
if currentURL not in urlBox:
urlBox.append(currentURL)
os.system("ssh [email protected] psql test -c \
'insert into url values(nextval('url_seq'), '"+ currentURL +"')'")
print(currentURL)
catchURL(currentURL)
except Exception as e:
pass
continue
catchURL(url) | import requests
import re
import os
url = "https://www.autohome.com.cn/shanghai/"
urlBox = []
def catchURL(url):
file = requests.get(url,timeout=5)
data = file.content
links = re.findall(r'(https?://[^\s)";]+\.(\w|/)*)',str(data))
for i in links:
try:
currentURL = i[0]
if currentURL not in urlBox:
urlBox.append(currentURL)
sql = """
ssh [email protected] psql test -U pgadmin << EOF
insert into url values(nextval(\'url_seq\'), \'"""+currentURL+"""\');
EOF
"""
print(sql)
os.popen(sql)
print(currentURL)
catchURL(currentURL)
except Exception as e:
pass
continue
catchURL(url) | Put the crawled link into the database | Put the crawled link into the database
| Python | mit | zhaodjie/py_learning | - import requests
import re
import os
url = "https://www.autohome.com.cn/shanghai/"
urlBox = []
def catchURL(url):
file = requests.get(url,timeout=2)
data = file.content
links = re.findall(r'(https?://[^\s)";]+\.(\w|/)*)',str(data))
for i in links:
try:
currentURL = i[0]
if currentURL not in urlBox:
urlBox.append(currentURL)
os.system("ssh [email protected] psql test -c \
'insert into url values(nextval('url_seq'), '"+ currentURL +"')'")
print(currentURL)
catchURL(currentURL)
except Exception as e:
pass
continue
catchURL(url)
+ import requests
+ import re
+ import os
+
+ url = "https://www.autohome.com.cn/shanghai/"
+ urlBox = []
+ def catchURL(url):
+ file = requests.get(url,timeout=5)
+ data = file.content
+ links = re.findall(r'(https?://[^\s)";]+\.(\w|/)*)',str(data))
+ for i in links:
+ try:
+ currentURL = i[0]
+ if currentURL not in urlBox:
+ urlBox.append(currentURL)
+ sql = """
+ ssh [email protected] psql test -U pgadmin << EOF
+ insert into url values(nextval(\'url_seq\'), \'"""+currentURL+"""\');
+ EOF
+ """
+ print(sql)
+ os.popen(sql)
+ print(currentURL)
+ catchURL(currentURL)
+ except Exception as e:
+ pass
+ continue
+
+
+
+
+ catchURL(url) | Put the crawled link into the database | ## Code Before:
import requests
import re
import os
url = "https://www.autohome.com.cn/shanghai/"
urlBox = []
def catchURL(url):
file = requests.get(url,timeout=2)
data = file.content
links = re.findall(r'(https?://[^\s)";]+\.(\w|/)*)',str(data))
for i in links:
try:
currentURL = i[0]
if currentURL not in urlBox:
urlBox.append(currentURL)
os.system("ssh [email protected] psql test -c \
'insert into url values(nextval('url_seq'), '"+ currentURL +"')'")
print(currentURL)
catchURL(currentURL)
except Exception as e:
pass
continue
catchURL(url)
## Instruction:
Put the crawled link into the database
## Code After:
import requests
import re
import os
url = "https://www.autohome.com.cn/shanghai/"
urlBox = []
def catchURL(url):
file = requests.get(url,timeout=5)
data = file.content
links = re.findall(r'(https?://[^\s)";]+\.(\w|/)*)',str(data))
for i in links:
try:
currentURL = i[0]
if currentURL not in urlBox:
urlBox.append(currentURL)
sql = """
ssh [email protected] psql test -U pgadmin << EOF
insert into url values(nextval(\'url_seq\'), \'"""+currentURL+"""\');
EOF
"""
print(sql)
os.popen(sql)
print(currentURL)
catchURL(currentURL)
except Exception as e:
pass
continue
catchURL(url) | ...
def catchURL(url):
file = requests.get(url,timeout=5)
data = file.content
...
urlBox.append(currentURL)
sql = """
ssh [email protected] psql test -U pgadmin << EOF
insert into url values(nextval(\'url_seq\'), \'"""+currentURL+"""\');
EOF
"""
print(sql)
os.popen(sql)
print(currentURL)
... |
6aec2246389934bca253a2fcd18f3ac24525c670 | molvs/utils.py | molvs/utils.py |
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import functools
from itertools import izip, tee
def memoized_property(fget):
"""Decorator to create memoized properties."""
attr_name = '_{}'.format(fget.__name__)
@functools.wraps(fget)
def fget_memoized(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fget(self))
return getattr(self, attr_name)
return property(fget_memoized)
def pairwise(iterable):
"""Utility function to iterate in a pairwise fashion."""
a, b = tee(iterable)
next(b, None)
return izip(a, b)
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import functools
from itertools import tee
try:
from itertools import izip
except ImportError:
izip = zip
def memoized_property(fget):
"""Decorator to create memoized properties."""
attr_name = '_{}'.format(fget.__name__)
@functools.wraps(fget)
def fget_memoized(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fget(self))
return getattr(self, attr_name)
return property(fget_memoized)
def pairwise(iterable):
"""Utility function to iterate in a pairwise fashion."""
a, b = tee(iterable)
next(b, None)
return izip(a, b)
| Fix izip import for python3 | Fix izip import for python3
| Python | mit | mcs07/MolVS |
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import functools
- from itertools import izip, tee
+ from itertools import tee
+
+ try:
+ from itertools import izip
+ except ImportError:
+ izip = zip
def memoized_property(fget):
"""Decorator to create memoized properties."""
attr_name = '_{}'.format(fget.__name__)
@functools.wraps(fget)
def fget_memoized(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fget(self))
return getattr(self, attr_name)
return property(fget_memoized)
def pairwise(iterable):
"""Utility function to iterate in a pairwise fashion."""
a, b = tee(iterable)
next(b, None)
return izip(a, b)
| Fix izip import for python3 | ## Code Before:
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import functools
from itertools import izip, tee
def memoized_property(fget):
"""Decorator to create memoized properties."""
attr_name = '_{}'.format(fget.__name__)
@functools.wraps(fget)
def fget_memoized(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fget(self))
return getattr(self, attr_name)
return property(fget_memoized)
def pairwise(iterable):
"""Utility function to iterate in a pairwise fashion."""
a, b = tee(iterable)
next(b, None)
return izip(a, b)
## Instruction:
Fix izip import for python3
## Code After:
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import functools
from itertools import tee
try:
from itertools import izip
except ImportError:
izip = zip
def memoized_property(fget):
"""Decorator to create memoized properties."""
attr_name = '_{}'.format(fget.__name__)
@functools.wraps(fget)
def fget_memoized(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fget(self))
return getattr(self, attr_name)
return property(fget_memoized)
def pairwise(iterable):
"""Utility function to iterate in a pairwise fashion."""
a, b = tee(iterable)
next(b, None)
return izip(a, b)
| // ... existing code ...
import functools
from itertools import tee
try:
from itertools import izip
except ImportError:
izip = zip
// ... rest of the code ... |
ba3282d4df890daa054be808dfbf503404b77c3c | src/dirtyfields/dirtyfields.py | src/dirtyfields/dirtyfields.py | from django.db.models.signals import post_save
class DirtyFieldsMixin(object):
def __init__(self, *args, **kwargs):
super(DirtyFieldsMixin, self).__init__(*args, **kwargs)
post_save.connect(reset_state, sender=self.__class__,
dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__)
reset_state(sender=self.__class__, instance=self)
def _as_dict(self):
return dict([(f.name, getattr(self, f.name)) for f in self._meta.local_fields if not f.rel])
def get_dirty_fields(self):
new_state = self._as_dict()
return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]])
def is_dirty(self):
# in order to be dirty we need to have been saved at least once, so we
# check for a primary key and we need our dirty fields to not be empty
if not self.pk:
return True
return {} != self.get_dirty_fields()
def reset_state(sender, instance, **kwargs):
instance._original_state = instance._as_dict()
| from django.db.models.signals import post_save
class DirtyFieldsMixin(object):
def __init__(self, *args, **kwargs):
super(DirtyFieldsMixin, self).__init__(*args, **kwargs)
post_save.connect(reset_state, sender=self.__class__,
dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__)
reset_state(sender=self.__class__, instance=self)
def _as_dict(self):
return dict([(f.name, f.to_python(getattr(self, f.name))) for f in self._meta.local_fields if not f.rel])
def get_dirty_fields(self):
new_state = self._as_dict()
return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]])
def is_dirty(self):
# in order to be dirty we need to have been saved at least once, so we
# check for a primary key and we need our dirty fields to not be empty
if not self.pk:
return True
return {} != self.get_dirty_fields()
def reset_state(sender, instance, **kwargs):
instance._original_state = instance._as_dict()
| Use field.to_python to do django type conversions on the field before checking if dirty. | Use field.to_python to do django type conversions on the field before checking if dirty.
This solves issues where you might have a decimal field that you write a string to, eg:
>>> m = MyModel.objects.get(id=1)
>>> m.my_decimal_field
Decimal('1.00')
>>> m.my_decimal_field = u'1.00' # from a form or something
>>> m.is_dirty() # currently evaluates to True, should evaluate to False
False
This pull request could probably use some unit testing, but it should be safe as the base class for django fields defines to_python as:
def to_python(self, value):
return value
So, any field type that does not have an explicit to_python method will behave as before this change. | Python | bsd-3-clause | romgar/django-dirtyfields,smn/django-dirtyfields,jdotjdot/django-dirtyfields | from django.db.models.signals import post_save
class DirtyFieldsMixin(object):
def __init__(self, *args, **kwargs):
super(DirtyFieldsMixin, self).__init__(*args, **kwargs)
post_save.connect(reset_state, sender=self.__class__,
dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__)
reset_state(sender=self.__class__, instance=self)
def _as_dict(self):
- return dict([(f.name, getattr(self, f.name)) for f in self._meta.local_fields if not f.rel])
+ return dict([(f.name, f.to_python(getattr(self, f.name))) for f in self._meta.local_fields if not f.rel])
def get_dirty_fields(self):
new_state = self._as_dict()
return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]])
def is_dirty(self):
# in order to be dirty we need to have been saved at least once, so we
# check for a primary key and we need our dirty fields to not be empty
if not self.pk:
return True
return {} != self.get_dirty_fields()
def reset_state(sender, instance, **kwargs):
instance._original_state = instance._as_dict()
| Use field.to_python to do django type conversions on the field before checking if dirty. | ## Code Before:
from django.db.models.signals import post_save
class DirtyFieldsMixin(object):
def __init__(self, *args, **kwargs):
super(DirtyFieldsMixin, self).__init__(*args, **kwargs)
post_save.connect(reset_state, sender=self.__class__,
dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__)
reset_state(sender=self.__class__, instance=self)
def _as_dict(self):
return dict([(f.name, getattr(self, f.name)) for f in self._meta.local_fields if not f.rel])
def get_dirty_fields(self):
new_state = self._as_dict()
return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]])
def is_dirty(self):
# in order to be dirty we need to have been saved at least once, so we
# check for a primary key and we need our dirty fields to not be empty
if not self.pk:
return True
return {} != self.get_dirty_fields()
def reset_state(sender, instance, **kwargs):
instance._original_state = instance._as_dict()
## Instruction:
Use field.to_python to do django type conversions on the field before checking if dirty.
## Code After:
from django.db.models.signals import post_save
class DirtyFieldsMixin(object):
def __init__(self, *args, **kwargs):
super(DirtyFieldsMixin, self).__init__(*args, **kwargs)
post_save.connect(reset_state, sender=self.__class__,
dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__)
reset_state(sender=self.__class__, instance=self)
def _as_dict(self):
return dict([(f.name, f.to_python(getattr(self, f.name))) for f in self._meta.local_fields if not f.rel])
def get_dirty_fields(self):
new_state = self._as_dict()
return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]])
def is_dirty(self):
# in order to be dirty we need to have been saved at least once, so we
# check for a primary key and we need our dirty fields to not be empty
if not self.pk:
return True
return {} != self.get_dirty_fields()
def reset_state(sender, instance, **kwargs):
instance._original_state = instance._as_dict()
| # ... existing code ...
def _as_dict(self):
return dict([(f.name, f.to_python(getattr(self, f.name))) for f in self._meta.local_fields if not f.rel])
# ... rest of the code ... |
8af3aef367135dbbc55e573c6a943a86ff3ccd9d | survey/tests/locale/test_locale_normalization.py | survey/tests/locale/test_locale_normalization.py | import os
import platform
import subprocess
import unittest
class TestLocaleNormalization(unittest.TestCase):
LOCALE_PATH = "survey/locale/"
def test_normalization(self):
""" We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """
if platform.system() == "Windows":
python_3 = ["py", "-3"]
else:
python_3 = ["python3"]
makemessages_command = python_3 + [
"manage.py",
"makemessages",
"--no-obsolete",
"--no-wrap",
"--ignore",
"venv",
]
number_of_language = len(os.listdir(self.LOCALE_PATH))
subprocess.check_call(makemessages_command)
git_diff_command = ["git", "diff", self.LOCALE_PATH]
git_diff = subprocess.check_output(git_diff_command).decode("utf8")
# In the diff we should have a change only for the date of the generation
# So 2 * @@ * number of language
number_of_change = git_diff.count("@@") / 2
msg = (
"You did not update the translation following your changes. Maybe you did not use the "
"normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're "
"working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH),
)
self.assertEqual(number_of_change, number_of_language, msg)
| import os
import platform
import subprocess
import unittest
from pathlib import Path
class TestLocaleNormalization(unittest.TestCase):
LOCALE_PATH = Path("survey", "locale").absolute()
def test_normalization(self):
""" We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """
if platform.system() == "Windows":
python_3 = ["py", "-3"]
else:
python_3 = ["python3"]
makemessages_command = python_3 + [
"manage.py",
"makemessages",
"--no-obsolete",
"--no-wrap",
"--ignore",
"venv",
]
number_of_language = len(os.listdir(self.LOCALE_PATH))
subprocess.check_call(makemessages_command)
git_diff_command = ["git", "diff", self.LOCALE_PATH]
git_diff = subprocess.check_output(git_diff_command).decode("utf8")
# In the diff we should have a change only for the date of the generation
# So 2 * @@ * number of language
number_of_change = git_diff.count("@@") / 2
msg = (
"You did not update the translation following your changes. Maybe you did not use the "
"normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're "
"working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH),
)
self.assertEqual(number_of_change, number_of_language, msg)
| Use an absolute Path for localization tests | Use an absolute Path for localization tests
| Python | agpl-3.0 | Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey | import os
import platform
import subprocess
import unittest
+ from pathlib import Path
class TestLocaleNormalization(unittest.TestCase):
- LOCALE_PATH = "survey/locale/"
+ LOCALE_PATH = Path("survey", "locale").absolute()
def test_normalization(self):
""" We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """
if platform.system() == "Windows":
python_3 = ["py", "-3"]
else:
python_3 = ["python3"]
makemessages_command = python_3 + [
"manage.py",
"makemessages",
"--no-obsolete",
"--no-wrap",
"--ignore",
"venv",
]
number_of_language = len(os.listdir(self.LOCALE_PATH))
subprocess.check_call(makemessages_command)
git_diff_command = ["git", "diff", self.LOCALE_PATH]
git_diff = subprocess.check_output(git_diff_command).decode("utf8")
# In the diff we should have a change only for the date of the generation
# So 2 * @@ * number of language
number_of_change = git_diff.count("@@") / 2
msg = (
"You did not update the translation following your changes. Maybe you did not use the "
"normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're "
"working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH),
)
self.assertEqual(number_of_change, number_of_language, msg)
| Use an absolute Path for localization tests | ## Code Before:
import os
import platform
import subprocess
import unittest
class TestLocaleNormalization(unittest.TestCase):
LOCALE_PATH = "survey/locale/"
def test_normalization(self):
""" We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """
if platform.system() == "Windows":
python_3 = ["py", "-3"]
else:
python_3 = ["python3"]
makemessages_command = python_3 + [
"manage.py",
"makemessages",
"--no-obsolete",
"--no-wrap",
"--ignore",
"venv",
]
number_of_language = len(os.listdir(self.LOCALE_PATH))
subprocess.check_call(makemessages_command)
git_diff_command = ["git", "diff", self.LOCALE_PATH]
git_diff = subprocess.check_output(git_diff_command).decode("utf8")
# In the diff we should have a change only for the date of the generation
# So 2 * @@ * number of language
number_of_change = git_diff.count("@@") / 2
msg = (
"You did not update the translation following your changes. Maybe you did not use the "
"normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're "
"working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH),
)
self.assertEqual(number_of_change, number_of_language, msg)
## Instruction:
Use an absolute Path for localization tests
## Code After:
import os
import platform
import subprocess
import unittest
from pathlib import Path
class TestLocaleNormalization(unittest.TestCase):
LOCALE_PATH = Path("survey", "locale").absolute()
def test_normalization(self):
""" We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """
if platform.system() == "Windows":
python_3 = ["py", "-3"]
else:
python_3 = ["python3"]
makemessages_command = python_3 + [
"manage.py",
"makemessages",
"--no-obsolete",
"--no-wrap",
"--ignore",
"venv",
]
number_of_language = len(os.listdir(self.LOCALE_PATH))
subprocess.check_call(makemessages_command)
git_diff_command = ["git", "diff", self.LOCALE_PATH]
git_diff = subprocess.check_output(git_diff_command).decode("utf8")
# In the diff we should have a change only for the date of the generation
# So 2 * @@ * number of language
number_of_change = git_diff.count("@@") / 2
msg = (
"You did not update the translation following your changes. Maybe you did not use the "
"normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're "
"working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH),
)
self.assertEqual(number_of_change, number_of_language, msg)
| # ... existing code ...
import unittest
from pathlib import Path
# ... modified code ...
LOCALE_PATH = Path("survey", "locale").absolute()
# ... rest of the code ... |
1dca7eeb036423d1d5889e5ec084f9f91f90eb74 | spacy/tests/regression/test_issue957.py | spacy/tests/regression/test_issue957.py | import pytest
from ... import load as load_spacy
def test_issue913(en_tokenizer):
'''Test that spaCy doesn't hang on many periods.'''
string = '0'
for i in range(1, 100):
string += '.%d' % i
doc = en_tokenizer(string)
# Don't want tests to fail if they haven't installed pytest-timeout plugin
try:
test_issue913 = pytest.mark.timeout(5)(test_issue913)
except NameError:
pass
| from __future__ import unicode_literals
import pytest
from ... import load as load_spacy
def test_issue957(en_tokenizer):
'''Test that spaCy doesn't hang on many periods.'''
string = '0'
for i in range(1, 100):
string += '.%d' % i
doc = en_tokenizer(string)
# Don't want tests to fail if they haven't installed pytest-timeout plugin
try:
test_issue913 = pytest.mark.timeout(5)(test_issue913)
except NameError:
pass
| Add unicode declaration on new regression test | Add unicode declaration on new regression test
| Python | mit | honnibal/spaCy,raphael0202/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,honnibal/spaCy,aikramer2/spaCy,aikramer2/spaCy,recognai/spaCy,recognai/spaCy,raphael0202/spaCy,Gregory-Howard/spaCy,Gregory-Howard/spaCy,recognai/spaCy,recognai/spaCy,explosion/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,recognai/spaCy,Gregory-Howard/spaCy,explosion/spaCy,spacy-io/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,honnibal/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,spacy-io/spaCy,aikramer2/spaCy,aikramer2/spaCy,explosion/spaCy,Gregory-Howard/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,spacy-io/spaCy,raphael0202/spaCy,explosion/spaCy,recognai/spaCy,spacy-io/spaCy | + from __future__ import unicode_literals
+
import pytest
from ... import load as load_spacy
- def test_issue913(en_tokenizer):
+ def test_issue957(en_tokenizer):
'''Test that spaCy doesn't hang on many periods.'''
string = '0'
for i in range(1, 100):
string += '.%d' % i
doc = en_tokenizer(string)
# Don't want tests to fail if they haven't installed pytest-timeout plugin
try:
test_issue913 = pytest.mark.timeout(5)(test_issue913)
except NameError:
pass
| Add unicode declaration on new regression test | ## Code Before:
import pytest
from ... import load as load_spacy
def test_issue913(en_tokenizer):
'''Test that spaCy doesn't hang on many periods.'''
string = '0'
for i in range(1, 100):
string += '.%d' % i
doc = en_tokenizer(string)
# Don't want tests to fail if they haven't installed pytest-timeout plugin
try:
test_issue913 = pytest.mark.timeout(5)(test_issue913)
except NameError:
pass
## Instruction:
Add unicode declaration on new regression test
## Code After:
from __future__ import unicode_literals
import pytest
from ... import load as load_spacy
def test_issue957(en_tokenizer):
'''Test that spaCy doesn't hang on many periods.'''
string = '0'
for i in range(1, 100):
string += '.%d' % i
doc = en_tokenizer(string)
# Don't want tests to fail if they haven't installed pytest-timeout plugin
try:
test_issue913 = pytest.mark.timeout(5)(test_issue913)
except NameError:
pass
| # ... existing code ...
from __future__ import unicode_literals
import pytest
# ... modified code ...
def test_issue957(en_tokenizer):
'''Test that spaCy doesn't hang on many periods.'''
# ... rest of the code ... |
d6bff0a4e632f0bda9a143acede58c0765066ada | attest/tests/hook.py | attest/tests/hook.py | from attest import Tests, assert_hook
from attest.hook import ExpressionEvaluator
suite = Tests()
@suite.test
def eval():
value = 1 + 1
valgen = (v for v in [value])
samples = {
'isinstance(value, int)': 'True',
'value == int("2")': "(2 == 2)",
'value.denominator': '1',
'value == 5 - 3': '(2 == 2)',
'{"value": value}': "{'value': 2}",
'[valgen.next() for _ in [value]] == [v for v in [value]]':
'([2] == [2])',
}
for expr, result in samples.iteritems():
ev = repr(ExpressionEvaluator(expr, globals(), locals()))
assert ev == result
assert bool(ev) is True
| from attest import Tests, assert_hook
from attest.hook import ExpressionEvaluator
suite = Tests()
@suite.test
def eval():
value = 1 + 1
valgen = (v for v in [value])
samples = {
'isinstance(value, int)': 'True',
'value == int("2")': "(2 == 2)",
'type(value).__name__': "'int'",
'value == 5 - 3': '(2 == 2)',
'{"value": value}': "{'value': 2}",
'[valgen.next() for _ in [value]] == [v for v in [value]]':
'([2] == [2])',
}
for expr, result in samples.iteritems():
ev = repr(ExpressionEvaluator(expr, globals(), locals()))
assert ev == result
assert bool(ev) is True
| Fix tests for visit_Attribute on 2.5/PyPy | Fix tests for visit_Attribute on 2.5/PyPy
| Python | bsd-2-clause | dag/attest | from attest import Tests, assert_hook
from attest.hook import ExpressionEvaluator
suite = Tests()
@suite.test
def eval():
value = 1 + 1
valgen = (v for v in [value])
samples = {
'isinstance(value, int)': 'True',
'value == int("2")': "(2 == 2)",
- 'value.denominator': '1',
+ 'type(value).__name__': "'int'",
'value == 5 - 3': '(2 == 2)',
'{"value": value}': "{'value': 2}",
'[valgen.next() for _ in [value]] == [v for v in [value]]':
'([2] == [2])',
}
for expr, result in samples.iteritems():
ev = repr(ExpressionEvaluator(expr, globals(), locals()))
assert ev == result
assert bool(ev) is True
| Fix tests for visit_Attribute on 2.5/PyPy | ## Code Before:
from attest import Tests, assert_hook
from attest.hook import ExpressionEvaluator
suite = Tests()
@suite.test
def eval():
value = 1 + 1
valgen = (v for v in [value])
samples = {
'isinstance(value, int)': 'True',
'value == int("2")': "(2 == 2)",
'value.denominator': '1',
'value == 5 - 3': '(2 == 2)',
'{"value": value}': "{'value': 2}",
'[valgen.next() for _ in [value]] == [v for v in [value]]':
'([2] == [2])',
}
for expr, result in samples.iteritems():
ev = repr(ExpressionEvaluator(expr, globals(), locals()))
assert ev == result
assert bool(ev) is True
## Instruction:
Fix tests for visit_Attribute on 2.5/PyPy
## Code After:
from attest import Tests, assert_hook
from attest.hook import ExpressionEvaluator
suite = Tests()
@suite.test
def eval():
value = 1 + 1
valgen = (v for v in [value])
samples = {
'isinstance(value, int)': 'True',
'value == int("2")': "(2 == 2)",
'type(value).__name__': "'int'",
'value == 5 - 3': '(2 == 2)',
'{"value": value}': "{'value': 2}",
'[valgen.next() for _ in [value]] == [v for v in [value]]':
'([2] == [2])',
}
for expr, result in samples.iteritems():
ev = repr(ExpressionEvaluator(expr, globals(), locals()))
assert ev == result
assert bool(ev) is True
| ...
'value == int("2")': "(2 == 2)",
'type(value).__name__': "'int'",
'value == 5 - 3': '(2 == 2)',
... |
dcecd75cae428bb27ec8759a21e52267a55f149a | django_comments/signals.py | django_comments/signals.py | from django.dispatch import Signal
# Sent just before a comment will be posted (after it's been approved and
# moderated; this can be used to modify the comment (in place) with posting
# details or other such actions. If any receiver returns False the comment will be
# discarded and a 400 response. This signal is sent at more or less
# the same time (just before, actually) as the Comment object's pre-save signal,
# except that the HTTP request is sent along with this signal.
comment_will_be_posted = Signal(providing_args=["comment", "request"])
# Sent just after a comment was posted. See above for how this differs
# from the Comment object's post-save signal.
comment_was_posted = Signal(providing_args=["comment", "request"])
# Sent after a comment was "flagged" in some way. Check the flag to see if this
# was a user requesting removal of a comment, a moderator approving/removing a
# comment, or some other custom user flag.
comment_was_flagged = Signal(providing_args=["comment", "flag", "created", "request"])
| from django.dispatch import Signal
# Sent just before a comment will be posted (after it's been approved and
# moderated; this can be used to modify the comment (in place) with posting
# details or other such actions. If any receiver returns False the comment will be
# discarded and a 400 response. This signal is sent at more or less
# the same time (just before, actually) as the Comment object's pre-save signal,
# except that the HTTP request is sent along with this signal.
# Arguments: "comment", "request"
comment_will_be_posted = Signal()
# Sent just after a comment was posted. See above for how this differs
# from the Comment object's post-save signal.
# Arguments: "comment", "request"
comment_was_posted = Signal()
# Sent after a comment was "flagged" in some way. Check the flag to see if this
# was a user requesting removal of a comment, a moderator approving/removing a
# comment, or some other custom user flag.
# Arguments: "comment", "flag", "created", "request"
comment_was_flagged = Signal()
| Remove Signal(providing_args) argument b/c it is deprecated | Remove Signal(providing_args) argument b/c it is deprecated
RemovedInDjango40Warning: The providing_args argument is deprecated.
As it is purely documentational, it has no replacement. If you rely
on this argument as documentation, you can move the text to a code
comment or docstring.
| Python | bsd-3-clause | django/django-contrib-comments,django/django-contrib-comments | from django.dispatch import Signal
# Sent just before a comment will be posted (after it's been approved and
# moderated; this can be used to modify the comment (in place) with posting
# details or other such actions. If any receiver returns False the comment will be
# discarded and a 400 response. This signal is sent at more or less
# the same time (just before, actually) as the Comment object's pre-save signal,
# except that the HTTP request is sent along with this signal.
- comment_will_be_posted = Signal(providing_args=["comment", "request"])
+ # Arguments: "comment", "request"
+ comment_will_be_posted = Signal()
# Sent just after a comment was posted. See above for how this differs
# from the Comment object's post-save signal.
- comment_was_posted = Signal(providing_args=["comment", "request"])
+ # Arguments: "comment", "request"
+ comment_was_posted = Signal()
# Sent after a comment was "flagged" in some way. Check the flag to see if this
# was a user requesting removal of a comment, a moderator approving/removing a
# comment, or some other custom user flag.
- comment_was_flagged = Signal(providing_args=["comment", "flag", "created", "request"])
+ # Arguments: "comment", "flag", "created", "request"
+ comment_was_flagged = Signal()
| Remove Signal(providing_args) argument b/c it is deprecated | ## Code Before:
from django.dispatch import Signal
# Sent just before a comment will be posted (after it's been approved and
# moderated; this can be used to modify the comment (in place) with posting
# details or other such actions. If any receiver returns False the comment will be
# discarded and a 400 response. This signal is sent at more or less
# the same time (just before, actually) as the Comment object's pre-save signal,
# except that the HTTP request is sent along with this signal.
comment_will_be_posted = Signal(providing_args=["comment", "request"])
# Sent just after a comment was posted. See above for how this differs
# from the Comment object's post-save signal.
comment_was_posted = Signal(providing_args=["comment", "request"])
# Sent after a comment was "flagged" in some way. Check the flag to see if this
# was a user requesting removal of a comment, a moderator approving/removing a
# comment, or some other custom user flag.
comment_was_flagged = Signal(providing_args=["comment", "flag", "created", "request"])
## Instruction:
Remove Signal(providing_args) argument b/c it is deprecated
## Code After:
from django.dispatch import Signal
# Sent just before a comment will be posted (after it's been approved and
# moderated; this can be used to modify the comment (in place) with posting
# details or other such actions. If any receiver returns False the comment will be
# discarded and a 400 response. This signal is sent at more or less
# the same time (just before, actually) as the Comment object's pre-save signal,
# except that the HTTP request is sent along with this signal.
# Arguments: "comment", "request"
comment_will_be_posted = Signal()
# Sent just after a comment was posted. See above for how this differs
# from the Comment object's post-save signal.
# Arguments: "comment", "request"
comment_was_posted = Signal()
# Sent after a comment was "flagged" in some way. Check the flag to see if this
# was a user requesting removal of a comment, a moderator approving/removing a
# comment, or some other custom user flag.
# Arguments: "comment", "flag", "created", "request"
comment_was_flagged = Signal()
| // ... existing code ...
# except that the HTTP request is sent along with this signal.
# Arguments: "comment", "request"
comment_will_be_posted = Signal()
// ... modified code ...
# from the Comment object's post-save signal.
# Arguments: "comment", "request"
comment_was_posted = Signal()
...
# comment, or some other custom user flag.
# Arguments: "comment", "flag", "created", "request"
comment_was_flagged = Signal()
// ... rest of the code ... |
f5cc3275a11c809bb6f5ab097414d0a5ccda2341 | main.py | main.py | def main():
website = input("Input website(cnn, nytimes, bbc, nzherald): ")
url = input("Input url: ")
scraper(website, url)
def scraper(website, url):
print("%s, %s" % (website, url))
if __name__ == '__main__':
main() | def main():
website = input("Input website(cnn, nytimes, bbc, nzherald): ")
url = input("Input url: ")
scraper(website, url)
def scraper(website, url):
if ".com" not in url:
print("Invalid url")
exit()
print("%s, %s" % (website, url))
if __name__ == '__main__':
main() | Check for .com in url | Check for .com in url
| Python | mit | Alex-Gurung/ScrapeTheNews | def main():
website = input("Input website(cnn, nytimes, bbc, nzherald): ")
url = input("Input url: ")
scraper(website, url)
def scraper(website, url):
+ if ".com" not in url:
+ print("Invalid url")
+ exit()
print("%s, %s" % (website, url))
if __name__ == '__main__':
main() | Check for .com in url | ## Code Before:
def main():
website = input("Input website(cnn, nytimes, bbc, nzherald): ")
url = input("Input url: ")
scraper(website, url)
def scraper(website, url):
print("%s, %s" % (website, url))
if __name__ == '__main__':
main()
## Instruction:
Check for .com in url
## Code After:
def main():
website = input("Input website(cnn, nytimes, bbc, nzherald): ")
url = input("Input url: ")
scraper(website, url)
def scraper(website, url):
if ".com" not in url:
print("Invalid url")
exit()
print("%s, %s" % (website, url))
if __name__ == '__main__':
main() | // ... existing code ...
def scraper(website, url):
if ".com" not in url:
print("Invalid url")
exit()
print("%s, %s" % (website, url))
// ... rest of the code ... |
126c58d78360e69c2d16a40f9396a8158844e2b1 | tests/test_creators.py | tests/test_creators.py |
def test_matrix_creation_endpoint(client):
response = client.post('/matrix', {
'bibliography': '12312312',
'fields': 'title,description',
})
print(response.json())
assert response.status_code == 200
| from condor.models import Bibliography
def test_matrix_creation_endpoint(client, session):
bib = Bibliography(eid='123', description='lorem')
session.add(bib)
session.flush()
response = client.post('/matrix', {
'bibliography': '123',
'fields': 'title,description',
})
response = client.get(f"/matrix/{response.json().get('eid')}")
assert response.status_code == 200
assert response.json().get('bibliography_eid') == '123'
| Create test for matrix post endpoint | Create test for matrix post endpoint
| Python | mit | odarbelaeze/condor-api | + from condor.models import Bibliography
- def test_matrix_creation_endpoint(client):
+ def test_matrix_creation_endpoint(client, session):
+ bib = Bibliography(eid='123', description='lorem')
+ session.add(bib)
+ session.flush()
+
response = client.post('/matrix', {
- 'bibliography': '12312312',
+ 'bibliography': '123',
'fields': 'title,description',
})
- print(response.json())
+
+ response = client.get(f"/matrix/{response.json().get('eid')}")
+
assert response.status_code == 200
+ assert response.json().get('bibliography_eid') == '123'
| Create test for matrix post endpoint | ## Code Before:
def test_matrix_creation_endpoint(client):
response = client.post('/matrix', {
'bibliography': '12312312',
'fields': 'title,description',
})
print(response.json())
assert response.status_code == 200
## Instruction:
Create test for matrix post endpoint
## Code After:
from condor.models import Bibliography
def test_matrix_creation_endpoint(client, session):
bib = Bibliography(eid='123', description='lorem')
session.add(bib)
session.flush()
response = client.post('/matrix', {
'bibliography': '123',
'fields': 'title,description',
})
response = client.get(f"/matrix/{response.json().get('eid')}")
assert response.status_code == 200
assert response.json().get('bibliography_eid') == '123'
| # ... existing code ...
from condor.models import Bibliography
# ... modified code ...
def test_matrix_creation_endpoint(client, session):
bib = Bibliography(eid='123', description='lorem')
session.add(bib)
session.flush()
response = client.post('/matrix', {
'bibliography': '123',
'fields': 'title,description',
...
})
response = client.get(f"/matrix/{response.json().get('eid')}")
assert response.status_code == 200
assert response.json().get('bibliography_eid') == '123'
# ... rest of the code ... |
fe6c924532750f646303fe82728795717b830819 | piper/version.py | piper/version.py | from piper.abc import DynamicItem
from piper.utils import oneshot
class Version(DynamicItem):
"""
Base for versioning classes
"""
def __str__(self): # pragma: nocover
return self.get_version()
def get_version(self):
raise NotImplementedError()
class StaticVersion(Version):
"""
Static versioning, set inside the piper.yml configuration file
"""
@property
def schema(self):
if not hasattr(self, '_schema'):
self._schema = super(StaticVersion, self).schema
self._schema['required'].append('version')
self._schema['properties']['version'] = {
'description': 'Static version to use',
'type': 'string',
}
return self._schema
def get_version(self):
return self.config.version
class GitVersion(Version):
"""
Versioning based on the output of `git describe`
"""
def __init__(self, ns, config):
super(GitVersion, self).__init__(ns, config)
if 'arguments' not in config:
self.config.arguments = None
@property
def schema(self):
if not hasattr(self, '_schema'):
self._schema = super(GitVersion, self).schema
self._schema['properties']['arguments'] = {
'description':
'Space separated arguments passed directly to the '
'`git describe` call.',
'default': "--tags",
'type': 'string',
}
return self._schema
def get_version(self):
cmd = 'git describe'
if self.config.arguments:
cmd += ' ' + self.config.arguments
return oneshot(cmd)
| from piper.abc import DynamicItem
from piper.utils import oneshot
class Version(DynamicItem):
"""
Base for versioning classes
"""
def __str__(self): # pragma: nocover
return self.get_version()
def get_version(self):
raise NotImplementedError()
class StaticVersion(Version):
"""
Static versioning, set inside the piper.yml configuration file
"""
@property
def schema(self):
if not hasattr(self, '_schema'):
self._schema = super(StaticVersion, self).schema
self._schema['required'].append('version')
self._schema['properties']['version'] = {
'description': 'Static version to use',
'type': 'string',
}
return self._schema
def get_version(self):
return self.config.version
class GitVersion(Version):
"""
Versioning based on the output of `git describe`
"""
@property
def schema(self):
if not hasattr(self, '_schema'):
self._schema = super(GitVersion, self).schema
self._schema['properties']['arguments'] = {
'description':
'Space separated arguments passed directly to the '
'`git describe` call.',
'default': "--tags",
'type': 'string',
}
return self._schema
def get_version(self):
cmd = 'git describe'
if self.config.arguments:
cmd += ' ' + self.config.arguments
return oneshot(cmd)
| Remove argument defaulting from Version() | Remove argument defaulting from Version()
It was moved to the ABC and subsequently the check was left behind.
| Python | mit | thiderman/piper | from piper.abc import DynamicItem
from piper.utils import oneshot
class Version(DynamicItem):
"""
Base for versioning classes
"""
def __str__(self): # pragma: nocover
return self.get_version()
def get_version(self):
raise NotImplementedError()
class StaticVersion(Version):
"""
Static versioning, set inside the piper.yml configuration file
"""
@property
def schema(self):
if not hasattr(self, '_schema'):
self._schema = super(StaticVersion, self).schema
self._schema['required'].append('version')
self._schema['properties']['version'] = {
'description': 'Static version to use',
'type': 'string',
}
return self._schema
def get_version(self):
return self.config.version
class GitVersion(Version):
"""
Versioning based on the output of `git describe`
"""
- def __init__(self, ns, config):
- super(GitVersion, self).__init__(ns, config)
- if 'arguments' not in config:
- self.config.arguments = None
-
@property
def schema(self):
if not hasattr(self, '_schema'):
self._schema = super(GitVersion, self).schema
self._schema['properties']['arguments'] = {
'description':
'Space separated arguments passed directly to the '
'`git describe` call.',
'default': "--tags",
'type': 'string',
}
return self._schema
def get_version(self):
cmd = 'git describe'
if self.config.arguments:
cmd += ' ' + self.config.arguments
return oneshot(cmd)
| Remove argument defaulting from Version() | ## Code Before:
from piper.abc import DynamicItem
from piper.utils import oneshot
class Version(DynamicItem):
"""
Base for versioning classes
"""
def __str__(self): # pragma: nocover
return self.get_version()
def get_version(self):
raise NotImplementedError()
class StaticVersion(Version):
"""
Static versioning, set inside the piper.yml configuration file
"""
@property
def schema(self):
if not hasattr(self, '_schema'):
self._schema = super(StaticVersion, self).schema
self._schema['required'].append('version')
self._schema['properties']['version'] = {
'description': 'Static version to use',
'type': 'string',
}
return self._schema
def get_version(self):
return self.config.version
class GitVersion(Version):
"""
Versioning based on the output of `git describe`
"""
def __init__(self, ns, config):
super(GitVersion, self).__init__(ns, config)
if 'arguments' not in config:
self.config.arguments = None
@property
def schema(self):
if not hasattr(self, '_schema'):
self._schema = super(GitVersion, self).schema
self._schema['properties']['arguments'] = {
'description':
'Space separated arguments passed directly to the '
'`git describe` call.',
'default': "--tags",
'type': 'string',
}
return self._schema
def get_version(self):
cmd = 'git describe'
if self.config.arguments:
cmd += ' ' + self.config.arguments
return oneshot(cmd)
## Instruction:
Remove argument defaulting from Version()
## Code After:
from piper.abc import DynamicItem
from piper.utils import oneshot
class Version(DynamicItem):
"""
Base for versioning classes
"""
def __str__(self): # pragma: nocover
return self.get_version()
def get_version(self):
raise NotImplementedError()
class StaticVersion(Version):
"""
Static versioning, set inside the piper.yml configuration file
"""
@property
def schema(self):
if not hasattr(self, '_schema'):
self._schema = super(StaticVersion, self).schema
self._schema['required'].append('version')
self._schema['properties']['version'] = {
'description': 'Static version to use',
'type': 'string',
}
return self._schema
def get_version(self):
return self.config.version
class GitVersion(Version):
"""
Versioning based on the output of `git describe`
"""
@property
def schema(self):
if not hasattr(self, '_schema'):
self._schema = super(GitVersion, self).schema
self._schema['properties']['arguments'] = {
'description':
'Space separated arguments passed directly to the '
'`git describe` call.',
'default': "--tags",
'type': 'string',
}
return self._schema
def get_version(self):
cmd = 'git describe'
if self.config.arguments:
cmd += ' ' + self.config.arguments
return oneshot(cmd)
| // ... existing code ...
@property
// ... rest of the code ... |
452924faafcfb4dcb1eb960ea30ab000f1f93962 | migrations/versions/0245_archived_flag_jobs.py | migrations/versions/0245_archived_flag_jobs.py | from alembic import op
import sqlalchemy as sa
revision = '0245_archived_flag_jobs'
down_revision = '0244_another_letter_org'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('jobs', sa.Column('archived', sa.Boolean(), nullable=False, server_default=sa.false()))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('jobs', 'archived')
# ### end Alembic commands ###
| from alembic import op
import sqlalchemy as sa
revision = '0245_archived_flag_jobs'
down_revision = '0244_another_letter_org'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('jobs', sa.Column('archived', sa.Boolean(), nullable=True))
op.execute('update jobs set archived = false')
op.alter_column('jobs', 'archived', nullable=False, server_default=sa.false())
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('jobs', 'archived')
# ### end Alembic commands ###
| Update jobs archived flag before setting the default value | Update jobs archived flag before setting the default value
Running an update before setting the column default value reduces
the time the table is locked (since most rows don't have a NULL
value anymore), but the migration takes slightly longer to run
overall.
| Python | mit | alphagov/notifications-api,alphagov/notifications-api | from alembic import op
import sqlalchemy as sa
revision = '0245_archived_flag_jobs'
down_revision = '0244_another_letter_org'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
+ op.add_column('jobs', sa.Column('archived', sa.Boolean(), nullable=True))
+ op.execute('update jobs set archived = false')
- op.add_column('jobs', sa.Column('archived', sa.Boolean(), nullable=False, server_default=sa.false()))
+ op.alter_column('jobs', 'archived', nullable=False, server_default=sa.false())
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('jobs', 'archived')
# ### end Alembic commands ###
| Update jobs archived flag before setting the default value | ## Code Before:
from alembic import op
import sqlalchemy as sa
revision = '0245_archived_flag_jobs'
down_revision = '0244_another_letter_org'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('jobs', sa.Column('archived', sa.Boolean(), nullable=False, server_default=sa.false()))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('jobs', 'archived')
# ### end Alembic commands ###
## Instruction:
Update jobs archived flag before setting the default value
## Code After:
from alembic import op
import sqlalchemy as sa
revision = '0245_archived_flag_jobs'
down_revision = '0244_another_letter_org'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('jobs', sa.Column('archived', sa.Boolean(), nullable=True))
op.execute('update jobs set archived = false')
op.alter_column('jobs', 'archived', nullable=False, server_default=sa.false())
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('jobs', 'archived')
# ### end Alembic commands ###
| # ... existing code ...
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('jobs', sa.Column('archived', sa.Boolean(), nullable=True))
op.execute('update jobs set archived = false')
op.alter_column('jobs', 'archived', nullable=False, server_default=sa.false())
# ... rest of the code ... |
7a04bb7692b4838e0abe9ba586fc4748ed9cd5d4 | tests/integration/blueprints/site/test_homepage.py | tests/integration/blueprints/site/test_homepage.py |
import pytest
from tests.helpers import http_client
def test_homepage(site_app, site):
with http_client(site_app) as client:
response = client.get('/')
# By default, nothing is mounted on `/`, but at least check that
# the application boots up and doesn't return a server error.
assert response.status_code == 404
|
import pytest
from tests.helpers import http_client
def test_homepage(site_app, site):
with http_client(site_app) as client:
response = client.get('/')
# By default, nothing is mounted on `/`, but at least check that
# the application boots up and doesn't return a server error.
assert response.status_code == 404
assert response.location is None
def test_homepage_with_root_redirect(make_site_app, site):
site_app = make_site_app(ROOT_REDIRECT_TARGET='welcome')
with http_client(site_app) as client:
response = client.get('/')
assert response.status_code == 307
assert response.location == 'http://www.acmecon.test/welcome'
| Test custom root path redirect | Test custom root path redirect
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps |
import pytest
from tests.helpers import http_client
def test_homepage(site_app, site):
with http_client(site_app) as client:
response = client.get('/')
# By default, nothing is mounted on `/`, but at least check that
# the application boots up and doesn't return a server error.
assert response.status_code == 404
+ assert response.location is None
+
+ def test_homepage_with_root_redirect(make_site_app, site):
+ site_app = make_site_app(ROOT_REDIRECT_TARGET='welcome')
+
+ with http_client(site_app) as client:
+ response = client.get('/')
+
+ assert response.status_code == 307
+ assert response.location == 'http://www.acmecon.test/welcome'
+ | Test custom root path redirect | ## Code Before:
import pytest
from tests.helpers import http_client
def test_homepage(site_app, site):
with http_client(site_app) as client:
response = client.get('/')
# By default, nothing is mounted on `/`, but at least check that
# the application boots up and doesn't return a server error.
assert response.status_code == 404
## Instruction:
Test custom root path redirect
## Code After:
import pytest
from tests.helpers import http_client
def test_homepage(site_app, site):
with http_client(site_app) as client:
response = client.get('/')
# By default, nothing is mounted on `/`, but at least check that
# the application boots up and doesn't return a server error.
assert response.status_code == 404
assert response.location is None
def test_homepage_with_root_redirect(make_site_app, site):
site_app = make_site_app(ROOT_REDIRECT_TARGET='welcome')
with http_client(site_app) as client:
response = client.get('/')
assert response.status_code == 307
assert response.location == 'http://www.acmecon.test/welcome'
| ...
assert response.status_code == 404
assert response.location is None
def test_homepage_with_root_redirect(make_site_app, site):
site_app = make_site_app(ROOT_REDIRECT_TARGET='welcome')
with http_client(site_app) as client:
response = client.get('/')
assert response.status_code == 307
assert response.location == 'http://www.acmecon.test/welcome'
... |
a3b119e14df4aff213231492470587f88457a241 | setuptools/command/upload.py | setuptools/command/upload.py | import getpass
from distutils.command import upload as orig
class upload(orig.upload):
"""
Override default upload behavior to obtain password
in a variety of different ways.
"""
def finalize_options(self):
orig.upload.finalize_options(self)
# Attempt to obtain password. Short circuit evaluation at the first
# sign of success.
self.password = (
self.password or self._load_password_from_keyring() or
self._prompt_for_password()
)
def _load_password_from_keyring(self):
"""
Attempt to load password from keyring. Suppress Exceptions.
"""
try:
keyring = __import__('keyring')
password = keyring.get_password(self.repository, self.username)
except Exception:
password = None
finally:
return password
def _prompt_for_password(self):
"""
Prompt for a password on the tty. Suppress Exceptions.
"""
password = None
try:
while not password:
password = getpass.getpass()
except (Exception, KeyboardInterrupt):
password = None
finally:
return password
| import getpass
from distutils.command import upload as orig
class upload(orig.upload):
"""
Override default upload behavior to obtain password
in a variety of different ways.
"""
def finalize_options(self):
orig.upload.finalize_options(self)
# Attempt to obtain password. Short circuit evaluation at the first
# sign of success.
self.password = (
self.password or
self._load_password_from_keyring() or
self._prompt_for_password()
)
def _load_password_from_keyring(self):
"""
Attempt to load password from keyring. Suppress Exceptions.
"""
try:
keyring = __import__('keyring')
password = keyring.get_password(self.repository, self.username)
except Exception:
password = None
finally:
return password
def _prompt_for_password(self):
"""
Prompt for a password on the tty. Suppress Exceptions.
"""
password = None
try:
while not password:
password = getpass.getpass()
except (Exception, KeyboardInterrupt):
password = None
finally:
return password
| Add carriage return for symmetry | Add carriage return for symmetry
| Python | mit | pypa/setuptools,pypa/setuptools,pypa/setuptools | import getpass
from distutils.command import upload as orig
class upload(orig.upload):
"""
Override default upload behavior to obtain password
in a variety of different ways.
"""
def finalize_options(self):
orig.upload.finalize_options(self)
# Attempt to obtain password. Short circuit evaluation at the first
# sign of success.
self.password = (
+ self.password or
- self.password or self._load_password_from_keyring() or
+ self._load_password_from_keyring() or
self._prompt_for_password()
)
def _load_password_from_keyring(self):
"""
Attempt to load password from keyring. Suppress Exceptions.
"""
try:
keyring = __import__('keyring')
password = keyring.get_password(self.repository, self.username)
except Exception:
password = None
finally:
return password
def _prompt_for_password(self):
"""
Prompt for a password on the tty. Suppress Exceptions.
"""
password = None
try:
while not password:
password = getpass.getpass()
except (Exception, KeyboardInterrupt):
password = None
finally:
return password
| Add carriage return for symmetry | ## Code Before:
import getpass
from distutils.command import upload as orig
class upload(orig.upload):
"""
Override default upload behavior to obtain password
in a variety of different ways.
"""
def finalize_options(self):
orig.upload.finalize_options(self)
# Attempt to obtain password. Short circuit evaluation at the first
# sign of success.
self.password = (
self.password or self._load_password_from_keyring() or
self._prompt_for_password()
)
def _load_password_from_keyring(self):
"""
Attempt to load password from keyring. Suppress Exceptions.
"""
try:
keyring = __import__('keyring')
password = keyring.get_password(self.repository, self.username)
except Exception:
password = None
finally:
return password
def _prompt_for_password(self):
"""
Prompt for a password on the tty. Suppress Exceptions.
"""
password = None
try:
while not password:
password = getpass.getpass()
except (Exception, KeyboardInterrupt):
password = None
finally:
return password
## Instruction:
Add carriage return for symmetry
## Code After:
import getpass
from distutils.command import upload as orig
class upload(orig.upload):
"""
Override default upload behavior to obtain password
in a variety of different ways.
"""
def finalize_options(self):
orig.upload.finalize_options(self)
# Attempt to obtain password. Short circuit evaluation at the first
# sign of success.
self.password = (
self.password or
self._load_password_from_keyring() or
self._prompt_for_password()
)
def _load_password_from_keyring(self):
"""
Attempt to load password from keyring. Suppress Exceptions.
"""
try:
keyring = __import__('keyring')
password = keyring.get_password(self.repository, self.username)
except Exception:
password = None
finally:
return password
def _prompt_for_password(self):
"""
Prompt for a password on the tty. Suppress Exceptions.
"""
password = None
try:
while not password:
password = getpass.getpass()
except (Exception, KeyboardInterrupt):
password = None
finally:
return password
| # ... existing code ...
self.password = (
self.password or
self._load_password_from_keyring() or
self._prompt_for_password()
# ... rest of the code ... |
a1cf304f9941b811b33e1b2d786b6f38bc514546 | anafero/templatetags/anafero_tags.py | anafero/templatetags/anafero_tags.py | from django import template
from django.contrib.contenttypes.models import ContentType
from anafero.models import ReferralResponse, ACTION_DISPLAY
register = template.Library()
@register.inclusion_tag("anafero/_create_referral_form.html")
def create_referral(url, obj=None):
if obj:
return {"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
else:
return {"url": url, "obj": "", "obj_ct": ""}
@register.assignment_tag
def referral_responses(user):
return ReferralResponse.objects.filter(
referral__user=user
).order_by("-created_at")
@register.filter
def action_display(value):
return ACTION_DISPLAY.get(value, value)
| from django import template
from django.contrib.contenttypes.models import ContentType
from anafero.models import ReferralResponse, ACTION_DISPLAY
register = template.Library()
@register.inclusion_tag("anafero/_create_referral_form.html", takes_context=True)
def create_referral(context, url, obj=None):
if obj:
context.update(
{"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
)
else:
context.update(
{"url": url, "obj": "", "obj_ct": ""}
)
return context
@register.assignment_tag
def referral_responses(user):
return ReferralResponse.objects.filter(
referral__user=user
).order_by("-created_at")
@register.filter
def action_display(value):
return ACTION_DISPLAY.get(value, value)
| Add full context to the create_referral tag | Add full context to the create_referral tag | Python | mit | pinax/pinax-referrals,pinax/pinax-referrals | from django import template
from django.contrib.contenttypes.models import ContentType
from anafero.models import ReferralResponse, ACTION_DISPLAY
register = template.Library()
- @register.inclusion_tag("anafero/_create_referral_form.html")
+ @register.inclusion_tag("anafero/_create_referral_form.html", takes_context=True)
- def create_referral(url, obj=None):
+ def create_referral(context, url, obj=None):
if obj:
+ context.update(
- return {"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
+ {"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
+ )
else:
+ context.update(
- return {"url": url, "obj": "", "obj_ct": ""}
+ {"url": url, "obj": "", "obj_ct": ""}
+ )
+ return context
@register.assignment_tag
def referral_responses(user):
return ReferralResponse.objects.filter(
referral__user=user
).order_by("-created_at")
@register.filter
def action_display(value):
return ACTION_DISPLAY.get(value, value)
| Add full context to the create_referral tag | ## Code Before:
from django import template
from django.contrib.contenttypes.models import ContentType
from anafero.models import ReferralResponse, ACTION_DISPLAY
register = template.Library()
@register.inclusion_tag("anafero/_create_referral_form.html")
def create_referral(url, obj=None):
if obj:
return {"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
else:
return {"url": url, "obj": "", "obj_ct": ""}
@register.assignment_tag
def referral_responses(user):
return ReferralResponse.objects.filter(
referral__user=user
).order_by("-created_at")
@register.filter
def action_display(value):
return ACTION_DISPLAY.get(value, value)
## Instruction:
Add full context to the create_referral tag
## Code After:
from django import template
from django.contrib.contenttypes.models import ContentType
from anafero.models import ReferralResponse, ACTION_DISPLAY
register = template.Library()
@register.inclusion_tag("anafero/_create_referral_form.html", takes_context=True)
def create_referral(context, url, obj=None):
if obj:
context.update(
{"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
)
else:
context.update(
{"url": url, "obj": "", "obj_ct": ""}
)
return context
@register.assignment_tag
def referral_responses(user):
return ReferralResponse.objects.filter(
referral__user=user
).order_by("-created_at")
@register.filter
def action_display(value):
return ACTION_DISPLAY.get(value, value)
| # ... existing code ...
@register.inclusion_tag("anafero/_create_referral_form.html", takes_context=True)
def create_referral(context, url, obj=None):
if obj:
context.update(
{"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
)
else:
context.update(
{"url": url, "obj": "", "obj_ct": ""}
)
return context
# ... rest of the code ... |
941ccc65bd14e65f7e877d107f67ee3bfe8e68a3 | thecure/sprites/enemies.py | thecure/sprites/enemies.py | from pygame.locals import *
from thecure import get_engine
from thecure.sprites.base import WalkingSprite
class Enemy(WalkingSprite):
DEFAULT_HEALTH = 10
class InfectedHuman(Enemy):
MOVE_SPEED = 2
def tick(self):
super(InfectedHuman, self).tick()
if self.started:
# Figure out how close we are to the player.
player = get_engine().player
if player.rect.x > self.rect.x:
x = 1
elif player.rect.x < self.rect.x:
x = -1
else:
x = 0
if player.rect.y > self.rect.y:
y = 1
elif player.rect.y < self.rect.y:
y = -1
else:
y = 0
self.velocity = (x * self.MOVE_SPEED, y * self.MOVE_SPEED)
if self.velocity != (0, 0):
self.frame_state = 'walking'
self.anim_timer.start()
self.recompute_direction()
else:
self.frame_state = 'default'
self.anim_timer.stop()
| from pygame.locals import *
from thecure import get_engine
from thecure.sprites.base import Direction, WalkingSprite
class Enemy(WalkingSprite):
DEFAULT_HEALTH = 10
class InfectedHuman(Enemy):
MOVE_SPEED = 2
APPROACH_DISTANCE = 400
def tick(self):
super(InfectedHuman, self).tick()
if self.started:
# Figure out how close we are to the player.
player = get_engine().player
distance_x = abs(player.rect.x - self.rect.x)
distance_y = abs(player.rect.y - self.rect.y)
if (self.frame_state == 'walking' or
(distance_x <= self.APPROACH_DISTANCE and
distance_y <= self.APPROACH_DISTANCE)):
x_dir = None
y_dir = None
if player.rect.x > self.rect.x:
x = 1
x_dir = Direction.RIGHT
elif player.rect.x < self.rect.x:
x = -1
x_dir = Direction.LEFT
else:
x = 0
if player.rect.y > self.rect.y:
y = 1
y_dir = Direction.DOWN
elif player.rect.y < self.rect.y:
y = -1
y_dir = Direction.UP
else:
y = 0
self.velocity = (x * self.MOVE_SPEED, y * self.MOVE_SPEED)
if self.velocity != (0, 0):
self.frame_state = 'walking'
self.anim_timer.start()
if distance_x > distance_y:
self.set_direction(x_dir)
elif distance_y > distance_x:
self.set_direction(y_dir)
else:
self.frame_state = 'default'
self.anim_timer.stop()
| Improve infected human approach AI. | Improve infected human approach AI.
The approach AI now only kicks in when the player is within 400 pixels
of the enemy.
The direction it chooses to look in is a bit more sane now. It will
figure out whether the distance is greater in the X or Y location, and
pick a direction based on that. Now they actually appear to walk toward
the player correctly.
| Python | mit | chipx86/the-cure | from pygame.locals import *
from thecure import get_engine
- from thecure.sprites.base import WalkingSprite
+ from thecure.sprites.base import Direction, WalkingSprite
class Enemy(WalkingSprite):
DEFAULT_HEALTH = 10
class InfectedHuman(Enemy):
MOVE_SPEED = 2
+ APPROACH_DISTANCE = 400
def tick(self):
super(InfectedHuman, self).tick()
if self.started:
# Figure out how close we are to the player.
player = get_engine().player
- if player.rect.x > self.rect.x:
+ distance_x = abs(player.rect.x - self.rect.x)
+ distance_y = abs(player.rect.y - self.rect.y)
- x = 1
- elif player.rect.x < self.rect.x:
- x = -1
- else:
- x = 0
- if player.rect.y > self.rect.y:
+ if (self.frame_state == 'walking' or
+ (distance_x <= self.APPROACH_DISTANCE and
+ distance_y <= self.APPROACH_DISTANCE)):
+ x_dir = None
- y = 1
+ y_dir = None
- elif player.rect.y < self.rect.y:
- y = -1
- else:
- y = 0
- self.velocity = (x * self.MOVE_SPEED, y * self.MOVE_SPEED)
+ if player.rect.x > self.rect.x:
+ x = 1
+ x_dir = Direction.RIGHT
+ elif player.rect.x < self.rect.x:
+ x = -1
+ x_dir = Direction.LEFT
+ else:
+ x = 0
- if self.velocity != (0, 0):
- self.frame_state = 'walking'
- self.anim_timer.start()
- self.recompute_direction()
+ if player.rect.y > self.rect.y:
+ y = 1
+ y_dir = Direction.DOWN
+ elif player.rect.y < self.rect.y:
+ y = -1
+ y_dir = Direction.UP
- else:
+ else:
+ y = 0
- self.frame_state = 'default'
- self.anim_timer.stop()
+ self.velocity = (x * self.MOVE_SPEED, y * self.MOVE_SPEED)
+
+ if self.velocity != (0, 0):
+ self.frame_state = 'walking'
+ self.anim_timer.start()
+
+ if distance_x > distance_y:
+ self.set_direction(x_dir)
+ elif distance_y > distance_x:
+ self.set_direction(y_dir)
+ else:
+ self.frame_state = 'default'
+ self.anim_timer.stop()
+ | Improve infected human approach AI. | ## Code Before:
from pygame.locals import *
from thecure import get_engine
from thecure.sprites.base import WalkingSprite
class Enemy(WalkingSprite):
DEFAULT_HEALTH = 10
class InfectedHuman(Enemy):
MOVE_SPEED = 2
def tick(self):
super(InfectedHuman, self).tick()
if self.started:
# Figure out how close we are to the player.
player = get_engine().player
if player.rect.x > self.rect.x:
x = 1
elif player.rect.x < self.rect.x:
x = -1
else:
x = 0
if player.rect.y > self.rect.y:
y = 1
elif player.rect.y < self.rect.y:
y = -1
else:
y = 0
self.velocity = (x * self.MOVE_SPEED, y * self.MOVE_SPEED)
if self.velocity != (0, 0):
self.frame_state = 'walking'
self.anim_timer.start()
self.recompute_direction()
else:
self.frame_state = 'default'
self.anim_timer.stop()
## Instruction:
Improve infected human approach AI.
## Code After:
from pygame.locals import *
from thecure import get_engine
from thecure.sprites.base import Direction, WalkingSprite
class Enemy(WalkingSprite):
DEFAULT_HEALTH = 10
class InfectedHuman(Enemy):
MOVE_SPEED = 2
APPROACH_DISTANCE = 400
def tick(self):
super(InfectedHuman, self).tick()
if self.started:
# Figure out how close we are to the player.
player = get_engine().player
distance_x = abs(player.rect.x - self.rect.x)
distance_y = abs(player.rect.y - self.rect.y)
if (self.frame_state == 'walking' or
(distance_x <= self.APPROACH_DISTANCE and
distance_y <= self.APPROACH_DISTANCE)):
x_dir = None
y_dir = None
if player.rect.x > self.rect.x:
x = 1
x_dir = Direction.RIGHT
elif player.rect.x < self.rect.x:
x = -1
x_dir = Direction.LEFT
else:
x = 0
if player.rect.y > self.rect.y:
y = 1
y_dir = Direction.DOWN
elif player.rect.y < self.rect.y:
y = -1
y_dir = Direction.UP
else:
y = 0
self.velocity = (x * self.MOVE_SPEED, y * self.MOVE_SPEED)
if self.velocity != (0, 0):
self.frame_state = 'walking'
self.anim_timer.start()
if distance_x > distance_y:
self.set_direction(x_dir)
elif distance_y > distance_x:
self.set_direction(y_dir)
else:
self.frame_state = 'default'
self.anim_timer.stop()
| ...
from thecure import get_engine
from thecure.sprites.base import Direction, WalkingSprite
...
MOVE_SPEED = 2
APPROACH_DISTANCE = 400
...
distance_x = abs(player.rect.x - self.rect.x)
distance_y = abs(player.rect.y - self.rect.y)
if (self.frame_state == 'walking' or
(distance_x <= self.APPROACH_DISTANCE and
distance_y <= self.APPROACH_DISTANCE)):
x_dir = None
y_dir = None
if player.rect.x > self.rect.x:
x = 1
x_dir = Direction.RIGHT
elif player.rect.x < self.rect.x:
x = -1
x_dir = Direction.LEFT
else:
x = 0
if player.rect.y > self.rect.y:
y = 1
y_dir = Direction.DOWN
elif player.rect.y < self.rect.y:
y = -1
y_dir = Direction.UP
else:
y = 0
self.velocity = (x * self.MOVE_SPEED, y * self.MOVE_SPEED)
if self.velocity != (0, 0):
self.frame_state = 'walking'
self.anim_timer.start()
if distance_x > distance_y:
self.set_direction(x_dir)
elif distance_y > distance_x:
self.set_direction(y_dir)
else:
self.frame_state = 'default'
self.anim_timer.stop()
... |
7bf4083ef44585116f0eff86753080612a26b374 | src/__init__.py | src/__init__.py | from bayeslite.api import barplot
from bayeslite.api import cardinality
from bayeslite.api import draw_crosscat
from bayeslite.api import estimate_log_likelihood
from bayeslite.api import heatmap
from bayeslite.api import histogram
from bayeslite.api import mi_hist
from bayeslite.api import nullify
from bayeslite.api import pairplot
from bayeslite.api import plot_crosscat_chain_diagnostics
"""Main bdbcontrib API.
The bdbcontrib module servers a sandbox for experimental and semi-stable
features that are not yet ready for integreation to the bayeslite repository.
"""
__all__ = [
'barplot',
'cardinality',
'draw_crosscat',
'estimate_log_likelihood',
'heatmap',
'histogram',
'mi_hist',
'nullify',
'pairplot',
'plot_crosscat_chain_diagnostics'
] | from bdbcontrib.api import barplot
from bdbcontrib.api import cardinality
from bdbcontrib.api import draw_crosscat
from bdbcontrib.api import estimate_log_likelihood
from bdbcontrib.api import heatmap
from bdbcontrib.api import histogram
from bdbcontrib.api import mi_hist
from bdbcontrib.api import nullify
from bdbcontrib.api import pairplot
from bdbcontrib.api import plot_crosscat_chain_diagnostics
"""Main bdbcontrib API.
The bdbcontrib module servers a sandbox for experimental and semi-stable
features that are not yet ready for integreation to the bayeslite repository.
"""
__all__ = [
'barplot',
'cardinality',
'draw_crosscat',
'estimate_log_likelihood',
'heatmap',
'histogram',
'mi_hist',
'nullify',
'pairplot',
'plot_crosscat_chain_diagnostics'
] | Fix big from bayeslite to bdbcontrib. | Fix big from bayeslite to bdbcontrib.
| Python | apache-2.0 | probcomp/bdbcontrib,probcomp/bdbcontrib | - from bayeslite.api import barplot
+ from bdbcontrib.api import barplot
- from bayeslite.api import cardinality
+ from bdbcontrib.api import cardinality
- from bayeslite.api import draw_crosscat
+ from bdbcontrib.api import draw_crosscat
- from bayeslite.api import estimate_log_likelihood
+ from bdbcontrib.api import estimate_log_likelihood
- from bayeslite.api import heatmap
+ from bdbcontrib.api import heatmap
- from bayeslite.api import histogram
+ from bdbcontrib.api import histogram
- from bayeslite.api import mi_hist
+ from bdbcontrib.api import mi_hist
- from bayeslite.api import nullify
+ from bdbcontrib.api import nullify
- from bayeslite.api import pairplot
+ from bdbcontrib.api import pairplot
- from bayeslite.api import plot_crosscat_chain_diagnostics
+ from bdbcontrib.api import plot_crosscat_chain_diagnostics
"""Main bdbcontrib API.
The bdbcontrib module servers a sandbox for experimental and semi-stable
features that are not yet ready for integreation to the bayeslite repository.
"""
__all__ = [
'barplot',
'cardinality',
'draw_crosscat',
'estimate_log_likelihood',
'heatmap',
'histogram',
'mi_hist',
'nullify',
'pairplot',
'plot_crosscat_chain_diagnostics'
] | Fix big from bayeslite to bdbcontrib. | ## Code Before:
from bayeslite.api import barplot
from bayeslite.api import cardinality
from bayeslite.api import draw_crosscat
from bayeslite.api import estimate_log_likelihood
from bayeslite.api import heatmap
from bayeslite.api import histogram
from bayeslite.api import mi_hist
from bayeslite.api import nullify
from bayeslite.api import pairplot
from bayeslite.api import plot_crosscat_chain_diagnostics
"""Main bdbcontrib API.
The bdbcontrib module servers a sandbox for experimental and semi-stable
features that are not yet ready for integreation to the bayeslite repository.
"""
__all__ = [
'barplot',
'cardinality',
'draw_crosscat',
'estimate_log_likelihood',
'heatmap',
'histogram',
'mi_hist',
'nullify',
'pairplot',
'plot_crosscat_chain_diagnostics'
]
## Instruction:
Fix big from bayeslite to bdbcontrib.
## Code After:
from bdbcontrib.api import barplot
from bdbcontrib.api import cardinality
from bdbcontrib.api import draw_crosscat
from bdbcontrib.api import estimate_log_likelihood
from bdbcontrib.api import heatmap
from bdbcontrib.api import histogram
from bdbcontrib.api import mi_hist
from bdbcontrib.api import nullify
from bdbcontrib.api import pairplot
from bdbcontrib.api import plot_crosscat_chain_diagnostics
"""Main bdbcontrib API.
The bdbcontrib module servers a sandbox for experimental and semi-stable
features that are not yet ready for integreation to the bayeslite repository.
"""
__all__ = [
'barplot',
'cardinality',
'draw_crosscat',
'estimate_log_likelihood',
'heatmap',
'histogram',
'mi_hist',
'nullify',
'pairplot',
'plot_crosscat_chain_diagnostics'
] | # ... existing code ...
from bdbcontrib.api import barplot
from bdbcontrib.api import cardinality
from bdbcontrib.api import draw_crosscat
from bdbcontrib.api import estimate_log_likelihood
from bdbcontrib.api import heatmap
from bdbcontrib.api import histogram
from bdbcontrib.api import mi_hist
from bdbcontrib.api import nullify
from bdbcontrib.api import pairplot
from bdbcontrib.api import plot_crosscat_chain_diagnostics
# ... rest of the code ... |
a06e6cc3c0b0440d3adedd1ccce78309d8fae9a9 | feincms/module/page/extensions/navigationgroups.py | feincms/module/page/extensions/navigationgroups.py |
from __future__ import absolute_import, unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from feincms import extensions
class Extension(extensions.Extension):
ident = 'navigationgroups'
groups = [
('default', _('Default')),
('footer', _('Footer')),
]
def handle_model(self):
self.model.add_to_class(
'navigation_group',
models.CharField(
_('navigation group'),
choices=self.groups,
default=self.groups[0][0],
max_length=20,
db_index=True))
def handle_modeladmin(self, modeladmin):
modeladmin.add_extension_options('navigation_group')
modeladmin.extend_list('list_display', ['navigation_group'])
modeladmin.extend_list('list_filter', ['navigation_group'])
|
from __future__ import absolute_import, unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from feincms import extensions
class Extension(extensions.Extension):
ident = 'navigationgroups'
groups = [
('default', _('Default')),
('footer', _('Footer')),
]
def handle_model(self):
self.model.add_to_class(
'navigation_group',
models.CharField(
_('navigation group'),
choices=self.groups,
default=self.groups[0][0],
max_length=20,
blank=True,
db_index=True))
def handle_modeladmin(self, modeladmin):
modeladmin.add_extension_options('navigation_group')
modeladmin.extend_list('list_display', ['navigation_group'])
modeladmin.extend_list('list_filter', ['navigation_group'])
| Allow navigationgroup to be blank | Allow navigationgroup to be blank
| Python | bsd-3-clause | joshuajonah/feincms,feincms/feincms,joshuajonah/feincms,joshuajonah/feincms,feincms/feincms,joshuajonah/feincms,feincms/feincms,mjl/feincms,mjl/feincms,mjl/feincms |
from __future__ import absolute_import, unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from feincms import extensions
class Extension(extensions.Extension):
ident = 'navigationgroups'
groups = [
('default', _('Default')),
('footer', _('Footer')),
]
def handle_model(self):
self.model.add_to_class(
'navigation_group',
models.CharField(
_('navigation group'),
choices=self.groups,
default=self.groups[0][0],
max_length=20,
+ blank=True,
db_index=True))
def handle_modeladmin(self, modeladmin):
modeladmin.add_extension_options('navigation_group')
modeladmin.extend_list('list_display', ['navigation_group'])
modeladmin.extend_list('list_filter', ['navigation_group'])
| Allow navigationgroup to be blank | ## Code Before:
from __future__ import absolute_import, unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from feincms import extensions
class Extension(extensions.Extension):
ident = 'navigationgroups'
groups = [
('default', _('Default')),
('footer', _('Footer')),
]
def handle_model(self):
self.model.add_to_class(
'navigation_group',
models.CharField(
_('navigation group'),
choices=self.groups,
default=self.groups[0][0],
max_length=20,
db_index=True))
def handle_modeladmin(self, modeladmin):
modeladmin.add_extension_options('navigation_group')
modeladmin.extend_list('list_display', ['navigation_group'])
modeladmin.extend_list('list_filter', ['navigation_group'])
## Instruction:
Allow navigationgroup to be blank
## Code After:
from __future__ import absolute_import, unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from feincms import extensions
class Extension(extensions.Extension):
ident = 'navigationgroups'
groups = [
('default', _('Default')),
('footer', _('Footer')),
]
def handle_model(self):
self.model.add_to_class(
'navigation_group',
models.CharField(
_('navigation group'),
choices=self.groups,
default=self.groups[0][0],
max_length=20,
blank=True,
db_index=True))
def handle_modeladmin(self, modeladmin):
modeladmin.add_extension_options('navigation_group')
modeladmin.extend_list('list_display', ['navigation_group'])
modeladmin.extend_list('list_filter', ['navigation_group'])
| // ... existing code ...
max_length=20,
blank=True,
db_index=True))
// ... rest of the code ... |
0f7816676eceb42f13786408f1d1a09527919a1e | Modules/Biophotonics/python/iMC/msi/io/spectrometerreader.py | Modules/Biophotonics/python/iMC/msi/io/spectrometerreader.py |
import numpy as np
from msi.io.reader import Reader
from msi.msi import Msi
class SpectrometerReader(Reader):
def __init__(self):
pass
def read(self, file_to_read):
# our spectrometer like to follow german standards in files, we need
# to switch to english ones
transformed=""
replacements = {',': '.', '\r\n': ''}
with open(file_to_read) as infile:
for line in infile:
for src, target in replacements.iteritems():
line = line.replace(src, target)
transformed = "\n".join([transformed, line])
for num, line in enumerate(transformed.splitlines(), 1):
if ">>>>>Begin Spectral Data<<<<<" in line:
break
string_only_spectrum = "\n".join(transformed.splitlines()[num:])
data_vector = np.fromstring(string_only_spectrum,
sep="\t").reshape(-1, 2)
msi = Msi(data_vector[:, 1],
{'wavelengths': data_vector[:, 0] * 10 ** -9})
return msi
|
import numpy as np
from msi.io.reader import Reader
from msi.msi import Msi
class SpectrometerReader(Reader):
def __init__(self):
pass
def read(self, file_to_read):
# our spectrometer like to follow german standards in files, we need
# to switch to english ones
transformed=""
replacements = {',': '.', '\r\n': ''}
with open(file_to_read) as infile:
for line in infile:
for src, target in replacements.iteritems():
line = line.replace(src, target)
transformed = "\n".join([transformed, line])
for num, line in enumerate(transformed.splitlines(), 1):
if ">>>>>Begin" in line:
break
for num_end, line in enumerate(transformed.splitlines(), 1):
if ">>>>>End" in line:
num_end -= 1
break
string_only_spectrum = "\n".join(transformed.splitlines()[num:num_end])
data_vector = np.fromstring(string_only_spectrum,
sep="\t").reshape(-1, 2)
msi = Msi(data_vector[:, 1],
{'wavelengths': data_vector[:, 0] * 10 ** -9})
return msi
| Change SpectrometerReader a little so it can handle more data formats. | Change SpectrometerReader a little so it can handle more data formats.
| Python | bsd-3-clause | MITK/MITK,iwegner/MITK,RabadanLab/MITKats,RabadanLab/MITKats,iwegner/MITK,fmilano/mitk,fmilano/mitk,RabadanLab/MITKats,RabadanLab/MITKats,fmilano/mitk,fmilano/mitk,MITK/MITK,RabadanLab/MITKats,RabadanLab/MITKats,fmilano/mitk,fmilano/mitk,iwegner/MITK,fmilano/mitk,MITK/MITK,iwegner/MITK,iwegner/MITK,MITK/MITK,MITK/MITK,iwegner/MITK,MITK/MITK |
import numpy as np
from msi.io.reader import Reader
from msi.msi import Msi
class SpectrometerReader(Reader):
def __init__(self):
pass
def read(self, file_to_read):
# our spectrometer like to follow german standards in files, we need
# to switch to english ones
transformed=""
replacements = {',': '.', '\r\n': ''}
with open(file_to_read) as infile:
for line in infile:
for src, target in replacements.iteritems():
line = line.replace(src, target)
transformed = "\n".join([transformed, line])
for num, line in enumerate(transformed.splitlines(), 1):
- if ">>>>>Begin Spectral Data<<<<<" in line:
+ if ">>>>>Begin" in line:
break
+
+ for num_end, line in enumerate(transformed.splitlines(), 1):
+ if ">>>>>End" in line:
+ num_end -= 1
+ break
- string_only_spectrum = "\n".join(transformed.splitlines()[num:])
+ string_only_spectrum = "\n".join(transformed.splitlines()[num:num_end])
data_vector = np.fromstring(string_only_spectrum,
sep="\t").reshape(-1, 2)
msi = Msi(data_vector[:, 1],
{'wavelengths': data_vector[:, 0] * 10 ** -9})
return msi
| Change SpectrometerReader a little so it can handle more data formats. | ## Code Before:
import numpy as np
from msi.io.reader import Reader
from msi.msi import Msi
class SpectrometerReader(Reader):
def __init__(self):
pass
def read(self, file_to_read):
# our spectrometer like to follow german standards in files, we need
# to switch to english ones
transformed=""
replacements = {',': '.', '\r\n': ''}
with open(file_to_read) as infile:
for line in infile:
for src, target in replacements.iteritems():
line = line.replace(src, target)
transformed = "\n".join([transformed, line])
for num, line in enumerate(transformed.splitlines(), 1):
if ">>>>>Begin Spectral Data<<<<<" in line:
break
string_only_spectrum = "\n".join(transformed.splitlines()[num:])
data_vector = np.fromstring(string_only_spectrum,
sep="\t").reshape(-1, 2)
msi = Msi(data_vector[:, 1],
{'wavelengths': data_vector[:, 0] * 10 ** -9})
return msi
## Instruction:
Change SpectrometerReader a little so it can handle more data formats.
## Code After:
import numpy as np
from msi.io.reader import Reader
from msi.msi import Msi
class SpectrometerReader(Reader):
def __init__(self):
pass
def read(self, file_to_read):
# our spectrometer like to follow german standards in files, we need
# to switch to english ones
transformed=""
replacements = {',': '.', '\r\n': ''}
with open(file_to_read) as infile:
for line in infile:
for src, target in replacements.iteritems():
line = line.replace(src, target)
transformed = "\n".join([transformed, line])
for num, line in enumerate(transformed.splitlines(), 1):
if ">>>>>Begin" in line:
break
for num_end, line in enumerate(transformed.splitlines(), 1):
if ">>>>>End" in line:
num_end -= 1
break
string_only_spectrum = "\n".join(transformed.splitlines()[num:num_end])
data_vector = np.fromstring(string_only_spectrum,
sep="\t").reshape(-1, 2)
msi = Msi(data_vector[:, 1],
{'wavelengths': data_vector[:, 0] * 10 ** -9})
return msi
| # ... existing code ...
for num, line in enumerate(transformed.splitlines(), 1):
if ">>>>>Begin" in line:
break
for num_end, line in enumerate(transformed.splitlines(), 1):
if ">>>>>End" in line:
num_end -= 1
break
string_only_spectrum = "\n".join(transformed.splitlines()[num:num_end])
data_vector = np.fromstring(string_only_spectrum,
# ... rest of the code ... |
172372000f121b31daa0965dca3bf28976b6cba9 | aiodocker/exceptions.py | aiodocker/exceptions.py | class DockerError(Exception):
def __init__(self, status, data, *args):
super().__init__(*args)
self.status = status
self.message = data['message']
def __repr__(self):
return 'DockerError({self.status}, {self.message!r})'.format(self=self)
def __str__(self):
return 'DockerError({self.status}, {self.message!r})'.format(self=self)
class DockerContainerError(DockerError):
def __init__(self, status, data, container_id, *args):
super().__init__(status, data, *args)
self.container_id = container_id
def __repr__(self):
return 'DockerContainerError({self.status}, {self.message!r}, {self.container_id!r})'.format(self=self)
def __str__(self):
return 'DockerContainerError({self.status}, {self.message!r}, {self.container_id!r})'.format(self=self)
| class DockerError(Exception):
def __init__(self, status, data, *args):
super().__init__(*args)
self.status = status
self.message = data['message']
def __repr__(self):
return 'DockerError({self.status}, {self.message!r})'.format(self=self)
def __str__(self):
return 'DockerError({self.status}, {self.message!r})'.format(self=self)
class DockerContainerError(DockerError):
def __init__(self, status, data, container_id, *args):
super().__init__(status, data, *args)
self.container_id = container_id
def __repr__(self):
return ('DockerContainerError('
'{self.status}, {self.message!r}, '
'{self.container_id!r})').format(self=self)
def __str__(self):
return ('DockerContainerError('
'{self.status}, {self.message!r}, '
'{self.container_id!r})').format(self=self)
| Fix flake8 error (too long line) | Fix flake8 error (too long line)
| Python | mit | barrachri/aiodocker,gaopeiliang/aiodocker,paultag/aiodocker,barrachri/aiodocker,gaopeiliang/aiodocker,barrachri/aiodocker,gaopeiliang/aiodocker | class DockerError(Exception):
def __init__(self, status, data, *args):
super().__init__(*args)
self.status = status
self.message = data['message']
def __repr__(self):
return 'DockerError({self.status}, {self.message!r})'.format(self=self)
def __str__(self):
return 'DockerError({self.status}, {self.message!r})'.format(self=self)
class DockerContainerError(DockerError):
def __init__(self, status, data, container_id, *args):
super().__init__(status, data, *args)
self.container_id = container_id
def __repr__(self):
- return 'DockerContainerError({self.status}, {self.message!r}, {self.container_id!r})'.format(self=self)
+ return ('DockerContainerError('
+ '{self.status}, {self.message!r}, '
+ '{self.container_id!r})').format(self=self)
def __str__(self):
- return 'DockerContainerError({self.status}, {self.message!r}, {self.container_id!r})'.format(self=self)
+ return ('DockerContainerError('
+ '{self.status}, {self.message!r}, '
+ '{self.container_id!r})').format(self=self)
| Fix flake8 error (too long line) | ## Code Before:
class DockerError(Exception):
def __init__(self, status, data, *args):
super().__init__(*args)
self.status = status
self.message = data['message']
def __repr__(self):
return 'DockerError({self.status}, {self.message!r})'.format(self=self)
def __str__(self):
return 'DockerError({self.status}, {self.message!r})'.format(self=self)
class DockerContainerError(DockerError):
def __init__(self, status, data, container_id, *args):
super().__init__(status, data, *args)
self.container_id = container_id
def __repr__(self):
return 'DockerContainerError({self.status}, {self.message!r}, {self.container_id!r})'.format(self=self)
def __str__(self):
return 'DockerContainerError({self.status}, {self.message!r}, {self.container_id!r})'.format(self=self)
## Instruction:
Fix flake8 error (too long line)
## Code After:
class DockerError(Exception):
def __init__(self, status, data, *args):
super().__init__(*args)
self.status = status
self.message = data['message']
def __repr__(self):
return 'DockerError({self.status}, {self.message!r})'.format(self=self)
def __str__(self):
return 'DockerError({self.status}, {self.message!r})'.format(self=self)
class DockerContainerError(DockerError):
def __init__(self, status, data, container_id, *args):
super().__init__(status, data, *args)
self.container_id = container_id
def __repr__(self):
return ('DockerContainerError('
'{self.status}, {self.message!r}, '
'{self.container_id!r})').format(self=self)
def __str__(self):
return ('DockerContainerError('
'{self.status}, {self.message!r}, '
'{self.container_id!r})').format(self=self)
| // ... existing code ...
def __repr__(self):
return ('DockerContainerError('
'{self.status}, {self.message!r}, '
'{self.container_id!r})').format(self=self)
// ... modified code ...
def __str__(self):
return ('DockerContainerError('
'{self.status}, {self.message!r}, '
'{self.container_id!r})').format(self=self)
// ... rest of the code ... |
aee157ce27aa4f00a798b87e07583dc795265eb4 | methodAndKnottiness/reliability.py | methodAndKnottiness/reliability.py | import sys
count = 0
for line in sys.stdin:
if count == 0:
B = int(line)
elif count == 1:
N = int(line)
else:
c, r = line.rstrip().split(' ')
cost = int(c)
reliability = float(r)
count+=1
print("Fin")
| import sys, math
count = 0
cost=[]
reliability=[]
# Read input. Budget, number of machines, cost and reliability
for line in sys.stdin:
if count == 0:
B = int(line)
elif count == 1:
N = int(line)
else:
c, r = line.rstrip().split(' ')
cost.append(int(c))
reliability.append(float(r))
count+=1
M = [[0 for i in range(B)] for i in range(N)]
for i in range(B):
M[0][i]=1
print(cost)
#for i in range(1,N):
for i in range(1,3):
for b in range(0,B):
max = 0
# break
for k in range(0, math.floor(b/cost[i])):
m = M[i-1][b-k*cost[i]]*(1-reliability[i])**k
if m > max:
max = m
print("new max",max)
print("Budget:", B)
print("Number machines:", N)
# print("\nIterated Version:")
# print(M[0:3])
print("Fin")
| Save point, got code written but need to figure out the base probabilities | Save point, got code written but need to figure out the base probabilities
| Python | mit | scrasmussen/ProsaicOeuvre,scrasmussen/ProsaicOeuvre,scrasmussen/ProsaicOeuvre | - import sys
+ import sys, math
count = 0
+ cost=[]
+ reliability=[]
+
+ # Read input. Budget, number of machines, cost and reliability
for line in sys.stdin:
if count == 0:
B = int(line)
elif count == 1:
N = int(line)
else:
c, r = line.rstrip().split(' ')
- cost = int(c)
+ cost.append(int(c))
- reliability = float(r)
+ reliability.append(float(r))
-
+
count+=1
+ M = [[0 for i in range(B)] for i in range(N)]
+ for i in range(B):
+ M[0][i]=1
+
+
+ print(cost)
+ #for i in range(1,N):
+ for i in range(1,3):
+ for b in range(0,B):
+ max = 0
+ # break
+ for k in range(0, math.floor(b/cost[i])):
+ m = M[i-1][b-k*cost[i]]*(1-reliability[i])**k
+ if m > max:
+ max = m
+ print("new max",max)
+
+ print("Budget:", B)
+ print("Number machines:", N)
+ # print("\nIterated Version:")
+ # print(M[0:3])
print("Fin")
| Save point, got code written but need to figure out the base probabilities | ## Code Before:
import sys
count = 0
for line in sys.stdin:
if count == 0:
B = int(line)
elif count == 1:
N = int(line)
else:
c, r = line.rstrip().split(' ')
cost = int(c)
reliability = float(r)
count+=1
print("Fin")
## Instruction:
Save point, got code written but need to figure out the base probabilities
## Code After:
import sys, math
count = 0
cost=[]
reliability=[]
# Read input. Budget, number of machines, cost and reliability
for line in sys.stdin:
if count == 0:
B = int(line)
elif count == 1:
N = int(line)
else:
c, r = line.rstrip().split(' ')
cost.append(int(c))
reliability.append(float(r))
count+=1
M = [[0 for i in range(B)] for i in range(N)]
for i in range(B):
M[0][i]=1
print(cost)
#for i in range(1,N):
for i in range(1,3):
for b in range(0,B):
max = 0
# break
for k in range(0, math.floor(b/cost[i])):
m = M[i-1][b-k*cost[i]]*(1-reliability[i])**k
if m > max:
max = m
print("new max",max)
print("Budget:", B)
print("Number machines:", N)
# print("\nIterated Version:")
# print(M[0:3])
print("Fin")
| ...
import sys, math
...
count = 0
cost=[]
reliability=[]
# Read input. Budget, number of machines, cost and reliability
for line in sys.stdin:
...
c, r = line.rstrip().split(' ')
cost.append(int(c))
reliability.append(float(r))
count+=1
...
M = [[0 for i in range(B)] for i in range(N)]
for i in range(B):
M[0][i]=1
print(cost)
#for i in range(1,N):
for i in range(1,3):
for b in range(0,B):
max = 0
# break
for k in range(0, math.floor(b/cost[i])):
m = M[i-1][b-k*cost[i]]*(1-reliability[i])**k
if m > max:
max = m
print("new max",max)
print("Budget:", B)
print("Number machines:", N)
# print("\nIterated Version:")
# print(M[0:3])
... |
8442e89d005af039252b0f8ab757bb54fa4ed71c | tests.py | tests.py | import unittest
from pollster.pollster import Pollster, Chart
class TestBasic(unittest.TestCase):
def test_basic_setup(self):
p = Pollster()
self.assertIsNotNone(p)
def test_charts(self):
c = Pollster().charts()
self.assertIsNotNone(c)
self.assertIsInstance(c, list)
self.assertGreater(len(c), 0)
def test_chart(self):
c = Pollster().charts()[0]
self.assertIsInstance(c, Chart)
cc = Pollster().chart(c.slug)
self.assertEqual(c.slug, cc.slug)
for attr in ['last_updated',
'title',
'url',
'estimates',
'poll_count',
'topic',
'state',
'slug', ]:
self.assertIsNotNone(getattr(c, attr))
self.assertIsNotNone(getattr(cc, attr))
self.assertEqual(getattr(c, attr), getattr(cc, attr))
self.assertIsInstance(c.estimates_by_date(), list)
def test_polls(self):
polls = Pollster().polls(topic='2016-president')
self.assertGreater(len(polls), 0)
| import unittest
from pollster.pollster import Pollster, Chart
class TestBasic(unittest.TestCase):
def test_basic_setup(self):
p = Pollster()
self.assertIsNotNone(p)
def test_charts(self):
c = Pollster().charts()
self.assertIsNotNone(c)
self.assertIsInstance(c, list)
self.assertGreater(len(c), 0)
def test_chart(self):
c = Pollster().charts()[0]
self.assertIsInstance(c, Chart)
cc = Pollster().chart(c.slug)
self.assertEqual(c.slug, cc.slug)
for attr in ['last_updated',
'title',
'url',
'estimates',
'poll_count',
'topic',
'state',
'slug', ]:
self.assertIsNotNone(getattr(c, attr))
self.assertIsNotNone(getattr(cc, attr))
self.assertEqual(getattr(c, attr), getattr(cc, attr))
self.assertIsInstance(c.estimates_by_date(), list)
def test_polls(self):
polls = Pollster().polls(topic='2016-president')
self.assertGreater(len(polls), 0)
poll = polls[0]
for attr in ['id',
'pollster',
'start_date',
'end_date',
'method',
'source',
'questions',
'survey_houses',
'sponsors',
'partisan',
'affiliation']:
self.assertIsNotNone(getattr(poll, attr))
| Update Poll test to check members. | Update Poll test to check members.
| Python | bsd-2-clause | huffpostdata/python-pollster,ternus/python-pollster | import unittest
from pollster.pollster import Pollster, Chart
class TestBasic(unittest.TestCase):
def test_basic_setup(self):
p = Pollster()
self.assertIsNotNone(p)
def test_charts(self):
c = Pollster().charts()
self.assertIsNotNone(c)
self.assertIsInstance(c, list)
self.assertGreater(len(c), 0)
def test_chart(self):
c = Pollster().charts()[0]
self.assertIsInstance(c, Chart)
cc = Pollster().chart(c.slug)
self.assertEqual(c.slug, cc.slug)
for attr in ['last_updated',
'title',
'url',
'estimates',
'poll_count',
'topic',
'state',
'slug', ]:
self.assertIsNotNone(getattr(c, attr))
self.assertIsNotNone(getattr(cc, attr))
self.assertEqual(getattr(c, attr), getattr(cc, attr))
self.assertIsInstance(c.estimates_by_date(), list)
def test_polls(self):
polls = Pollster().polls(topic='2016-president')
self.assertGreater(len(polls), 0)
+ poll = polls[0]
+ for attr in ['id',
+ 'pollster',
+ 'start_date',
+ 'end_date',
+ 'method',
+ 'source',
+ 'questions',
+ 'survey_houses',
+ 'sponsors',
+ 'partisan',
+ 'affiliation']:
+ self.assertIsNotNone(getattr(poll, attr))
| Update Poll test to check members. | ## Code Before:
import unittest
from pollster.pollster import Pollster, Chart
class TestBasic(unittest.TestCase):
def test_basic_setup(self):
p = Pollster()
self.assertIsNotNone(p)
def test_charts(self):
c = Pollster().charts()
self.assertIsNotNone(c)
self.assertIsInstance(c, list)
self.assertGreater(len(c), 0)
def test_chart(self):
c = Pollster().charts()[0]
self.assertIsInstance(c, Chart)
cc = Pollster().chart(c.slug)
self.assertEqual(c.slug, cc.slug)
for attr in ['last_updated',
'title',
'url',
'estimates',
'poll_count',
'topic',
'state',
'slug', ]:
self.assertIsNotNone(getattr(c, attr))
self.assertIsNotNone(getattr(cc, attr))
self.assertEqual(getattr(c, attr), getattr(cc, attr))
self.assertIsInstance(c.estimates_by_date(), list)
def test_polls(self):
polls = Pollster().polls(topic='2016-president')
self.assertGreater(len(polls), 0)
## Instruction:
Update Poll test to check members.
## Code After:
import unittest
from pollster.pollster import Pollster, Chart
class TestBasic(unittest.TestCase):
def test_basic_setup(self):
p = Pollster()
self.assertIsNotNone(p)
def test_charts(self):
c = Pollster().charts()
self.assertIsNotNone(c)
self.assertIsInstance(c, list)
self.assertGreater(len(c), 0)
def test_chart(self):
c = Pollster().charts()[0]
self.assertIsInstance(c, Chart)
cc = Pollster().chart(c.slug)
self.assertEqual(c.slug, cc.slug)
for attr in ['last_updated',
'title',
'url',
'estimates',
'poll_count',
'topic',
'state',
'slug', ]:
self.assertIsNotNone(getattr(c, attr))
self.assertIsNotNone(getattr(cc, attr))
self.assertEqual(getattr(c, attr), getattr(cc, attr))
self.assertIsInstance(c.estimates_by_date(), list)
def test_polls(self):
polls = Pollster().polls(topic='2016-president')
self.assertGreater(len(polls), 0)
poll = polls[0]
for attr in ['id',
'pollster',
'start_date',
'end_date',
'method',
'source',
'questions',
'survey_houses',
'sponsors',
'partisan',
'affiliation']:
self.assertIsNotNone(getattr(poll, attr))
| # ... existing code ...
self.assertGreater(len(polls), 0)
poll = polls[0]
for attr in ['id',
'pollster',
'start_date',
'end_date',
'method',
'source',
'questions',
'survey_houses',
'sponsors',
'partisan',
'affiliation']:
self.assertIsNotNone(getattr(poll, attr))
# ... rest of the code ... |
b32b047656abd28dd794ee16dfab682337a753b1 | accounts/tests.py | accounts/tests.py | from django.test import TestCase
# Create your tests here.
| from django.test import TestCase
class WelcomePageTest(TestCase):
def test_uses_welcome_template(self):
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
| Add first unit test for welcome page | Add first unit test for welcome page
| Python | mit | randomic/aniauth-tdd,randomic/aniauth-tdd | from django.test import TestCase
- # Create your tests here.
+ class WelcomePageTest(TestCase):
+ def test_uses_welcome_template(self):
+ response = self.client.get('/')
+ self.assertTemplateUsed(response, 'accounts/welcome.html')
+ | Add first unit test for welcome page | ## Code Before:
from django.test import TestCase
# Create your tests here.
## Instruction:
Add first unit test for welcome page
## Code After:
from django.test import TestCase
class WelcomePageTest(TestCase):
def test_uses_welcome_template(self):
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
| # ... existing code ...
class WelcomePageTest(TestCase):
def test_uses_welcome_template(self):
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
# ... rest of the code ... |
1cb201c57c592ebd014910fe225fa594cd87c745 | opendebates/middleware.py | opendebates/middleware.py | from opendebates.utils import get_site_mode
class SiteModeMiddleware(object):
"""
Gets or creates a SiteMode for the request, based on the hostname.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
request.site_mode = get_site_mode(request)
| from opendebates.utils import get_site_mode
class SiteModeMiddleware(object):
"""
Gets or creates a SiteMode for the request, based on the hostname.
"""
def process_request(self, request):
request.site_mode = get_site_mode(request)
| Make sure that the site mode is populated on the request | Make sure that the site mode is populated on the request
even if the request winds up getting dispatched to a flatpage.
| Python | apache-2.0 | caktus/django-opendebates,caktus/django-opendebates,caktus/django-opendebates,caktus/django-opendebates | from opendebates.utils import get_site_mode
class SiteModeMiddleware(object):
"""
Gets or creates a SiteMode for the request, based on the hostname.
"""
- def process_view(self, request, view_func, view_args, view_kwargs):
+ def process_request(self, request):
request.site_mode = get_site_mode(request)
| Make sure that the site mode is populated on the request | ## Code Before:
from opendebates.utils import get_site_mode
class SiteModeMiddleware(object):
"""
Gets or creates a SiteMode for the request, based on the hostname.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
request.site_mode = get_site_mode(request)
## Instruction:
Make sure that the site mode is populated on the request
## Code After:
from opendebates.utils import get_site_mode
class SiteModeMiddleware(object):
"""
Gets or creates a SiteMode for the request, based on the hostname.
"""
def process_request(self, request):
request.site_mode = get_site_mode(request)
| ...
def process_request(self, request):
request.site_mode = get_site_mode(request)
... |
6f3b45cd6b5558a7d81472c0298aae7d04f64846 | jsonit/utils.py | jsonit/utils.py | import os
from django.http import HttpResponse
from django.template import RequestContext, loader
def ajax_aware_render(request, template_list, extra_context=None, **kwargs):
if isinstance(template_list, basestring):
template_list = [template_list]
if request.is_ajax():
new_template_list = []
for name in template_list:
new_template_list.append('%s.ajax.%s' % os.path.splitext(name))
new_template_list.append(name)
template_list = new_template_list
c = RequestContext(request, extra_context)
t = loader.select_template(template_list)
return HttpResponse(t.render(c), **kwargs)
| import os
from django.http import HttpResponse
from django.template import RequestContext, loader
def ajax_aware_render(request, template_list, extra_context=None, **kwargs):
if isinstance(template_list, basestring):
template_list = [template_list]
if request.is_ajax():
ajax_template_list = []
for name in template_list:
ajax_template_list.append('%s.ajax.%s' % os.path.splitext(name))
template_list = ajax_template_list + list(template_list)
c = RequestContext(request, extra_context)
t = loader.select_template(template_list)
return HttpResponse(t.render(c), **kwargs)
| Change the ordering of templates to pick from for the ajax render helper | Change the ordering of templates to pick from for the ajax render helper
| Python | bsd-3-clause | lincolnloop/django-jsonit | import os
from django.http import HttpResponse
from django.template import RequestContext, loader
def ajax_aware_render(request, template_list, extra_context=None, **kwargs):
if isinstance(template_list, basestring):
template_list = [template_list]
if request.is_ajax():
- new_template_list = []
+ ajax_template_list = []
for name in template_list:
- new_template_list.append('%s.ajax.%s' % os.path.splitext(name))
+ ajax_template_list.append('%s.ajax.%s' % os.path.splitext(name))
+ template_list = ajax_template_list + list(template_list)
- new_template_list.append(name)
- template_list = new_template_list
c = RequestContext(request, extra_context)
t = loader.select_template(template_list)
return HttpResponse(t.render(c), **kwargs)
| Change the ordering of templates to pick from for the ajax render helper | ## Code Before:
import os
from django.http import HttpResponse
from django.template import RequestContext, loader
def ajax_aware_render(request, template_list, extra_context=None, **kwargs):
if isinstance(template_list, basestring):
template_list = [template_list]
if request.is_ajax():
new_template_list = []
for name in template_list:
new_template_list.append('%s.ajax.%s' % os.path.splitext(name))
new_template_list.append(name)
template_list = new_template_list
c = RequestContext(request, extra_context)
t = loader.select_template(template_list)
return HttpResponse(t.render(c), **kwargs)
## Instruction:
Change the ordering of templates to pick from for the ajax render helper
## Code After:
import os
from django.http import HttpResponse
from django.template import RequestContext, loader
def ajax_aware_render(request, template_list, extra_context=None, **kwargs):
if isinstance(template_list, basestring):
template_list = [template_list]
if request.is_ajax():
ajax_template_list = []
for name in template_list:
ajax_template_list.append('%s.ajax.%s' % os.path.splitext(name))
template_list = ajax_template_list + list(template_list)
c = RequestContext(request, extra_context)
t = loader.select_template(template_list)
return HttpResponse(t.render(c), **kwargs)
| # ... existing code ...
if request.is_ajax():
ajax_template_list = []
for name in template_list:
ajax_template_list.append('%s.ajax.%s' % os.path.splitext(name))
template_list = ajax_template_list + list(template_list)
c = RequestContext(request, extra_context)
# ... rest of the code ... |
f5253c7f458a5ce4390b5e967f45bd2f0b9a1de2 | dosagelib/__init__.py | dosagelib/__init__.py | from __future__ import absolute_import, division, print_function
try:
from importlib.metadata import version, PackageNotFoundError
except ImportError:
from importlib_metadata import version, PackageNotFoundError
from .output import out
AppName = u'dosage'
try:
__version__ = version(AppName) # PEP 396
except PackageNotFoundError:
# package is not installed
pass
| from __future__ import absolute_import, division, print_function
try:
from importlib.metadata import version, PackageNotFoundError
except ImportError:
from importlib_metadata import version, PackageNotFoundError
from .output import out
AppName = u'dosage'
try:
__version__ = version(AppName) # PEP 396
except PackageNotFoundError:
# package is not installed
out.warn('{} is not installed, no version available.'
' Use at least {!r} or {!r} to fix this.'.format(
AppName, 'pip install -e .', 'setup.py egg_info'))
__version__ = 'ERR.NOT.INSTALLED'
| Return a fallback "version" if dosage is not installed | Return a fallback "version" if dosage is not installed
Additionally, inform the user on how to fix the problem. Thanks to twb
for noticing this.
| Python | mit | webcomics/dosage,peterjanes/dosage,peterjanes/dosage,webcomics/dosage | from __future__ import absolute_import, division, print_function
try:
from importlib.metadata import version, PackageNotFoundError
except ImportError:
from importlib_metadata import version, PackageNotFoundError
from .output import out
AppName = u'dosage'
try:
__version__ = version(AppName) # PEP 396
except PackageNotFoundError:
# package is not installed
- pass
+ out.warn('{} is not installed, no version available.'
+ ' Use at least {!r} or {!r} to fix this.'.format(
+ AppName, 'pip install -e .', 'setup.py egg_info'))
+ __version__ = 'ERR.NOT.INSTALLED'
| Return a fallback "version" if dosage is not installed | ## Code Before:
from __future__ import absolute_import, division, print_function
try:
from importlib.metadata import version, PackageNotFoundError
except ImportError:
from importlib_metadata import version, PackageNotFoundError
from .output import out
AppName = u'dosage'
try:
__version__ = version(AppName) # PEP 396
except PackageNotFoundError:
# package is not installed
pass
## Instruction:
Return a fallback "version" if dosage is not installed
## Code After:
from __future__ import absolute_import, division, print_function
try:
from importlib.metadata import version, PackageNotFoundError
except ImportError:
from importlib_metadata import version, PackageNotFoundError
from .output import out
AppName = u'dosage'
try:
__version__ = version(AppName) # PEP 396
except PackageNotFoundError:
# package is not installed
out.warn('{} is not installed, no version available.'
' Use at least {!r} or {!r} to fix this.'.format(
AppName, 'pip install -e .', 'setup.py egg_info'))
__version__ = 'ERR.NOT.INSTALLED'
| // ... existing code ...
# package is not installed
out.warn('{} is not installed, no version available.'
' Use at least {!r} or {!r} to fix this.'.format(
AppName, 'pip install -e .', 'setup.py egg_info'))
__version__ = 'ERR.NOT.INSTALLED'
// ... rest of the code ... |
76b40a801b69023f5983dcfa4ecd5e904792f131 | paypal/standard/pdt/forms.py | paypal/standard/pdt/forms.py | from __future__ import unicode_literals
import django
from paypal.standard.forms import PayPalStandardBaseForm
from paypal.standard.pdt.models import PayPalPDT
class PayPalPDTForm(PayPalStandardBaseForm):
class Meta:
model = PayPalPDT
if django.VERSION >= (1, 6):
fields = '__all__'
| from __future__ import unicode_literals
import django
from paypal.standard.forms import PayPalStandardBaseForm
from paypal.standard.pdt.models import PayPalPDT
class PayPalPDTForm(PayPalStandardBaseForm):
class Meta:
model = PayPalPDT
if django.VERSION >= (1, 6):
exclude = ('ipaddress', 'flag', 'flag_code', 'flag_info', 'query', 'response', 'created_at', 'updated', 'form_view',)
| Add non-PayPal fields to exclude | Add non-PayPal fields to exclude
All the non-paypal fields are blanked if you don't exclude them from the form.
| Python | mit | spookylukey/django-paypal,rsalmaso/django-paypal,spookylukey/django-paypal,rsalmaso/django-paypal,rsalmaso/django-paypal,GamesDoneQuick/django-paypal,spookylukey/django-paypal,GamesDoneQuick/django-paypal | from __future__ import unicode_literals
import django
from paypal.standard.forms import PayPalStandardBaseForm
from paypal.standard.pdt.models import PayPalPDT
class PayPalPDTForm(PayPalStandardBaseForm):
class Meta:
model = PayPalPDT
if django.VERSION >= (1, 6):
- fields = '__all__'
+ exclude = ('ipaddress', 'flag', 'flag_code', 'flag_info', 'query', 'response', 'created_at', 'updated', 'form_view',)
| Add non-PayPal fields to exclude | ## Code Before:
from __future__ import unicode_literals
import django
from paypal.standard.forms import PayPalStandardBaseForm
from paypal.standard.pdt.models import PayPalPDT
class PayPalPDTForm(PayPalStandardBaseForm):
class Meta:
model = PayPalPDT
if django.VERSION >= (1, 6):
fields = '__all__'
## Instruction:
Add non-PayPal fields to exclude
## Code After:
from __future__ import unicode_literals
import django
from paypal.standard.forms import PayPalStandardBaseForm
from paypal.standard.pdt.models import PayPalPDT
class PayPalPDTForm(PayPalStandardBaseForm):
class Meta:
model = PayPalPDT
if django.VERSION >= (1, 6):
exclude = ('ipaddress', 'flag', 'flag_code', 'flag_info', 'query', 'response', 'created_at', 'updated', 'form_view',)
| // ... existing code ...
if django.VERSION >= (1, 6):
exclude = ('ipaddress', 'flag', 'flag_code', 'flag_info', 'query', 'response', 'created_at', 'updated', 'form_view',)
// ... rest of the code ... |
4a0491fb018cd96e510f25141dda5e7ceff423b4 | client/test/server_tests.py | client/test/server_tests.py | from mockito import *
import unittest
from source.server import *
from source.exception import *
from source.commands.system import *
class ServerTestCase(unittest.TestCase):
def createCommandResponse(self, command, parameters = {}, timeout = None):
response = mock()
response.status_code = 200
json = { 'command': command, 'parameters': parameters }
if timeout is not None:
json['timeout'] = timeout
when(response).json().thenReturn({ 'command': command, 'timeout': timeout, 'parameters': parameters })
return response
def setResponse(self, response):
when(self.server._requests).get('').thenReturn(response)
def setUp(self):
self.server = Server('')
self.server._requests = mock()
def tearDown(self):
pass
def testGet(self):
self.setResponse(self.createCommandResponse('copy', parameters = {'src': 'source', 'dst': 'destination' }, timeout = 10))
response = self.server.get()
self.assertIsInstance(response, Copy)
self.assertEqual(response.parameters, {'src': 'source', 'dst': 'destination', })
self.assertIs(response.timeout, 10)
def testGetCommandNotFound(self):
self.setResponse(self.createCommandResponse('Not found command'))
self.assertRaises(CommandNotFoundException, self.server.get) | from mockito import *
import unittest
from source.server import *
from source.exception import *
from source.commands.system import *
class ServerTestCase(unittest.TestCase):
def createCommandResponse(self, command, parameters = {}, timeout = None):
response = mock()
response.status_code = 200
json = { 'command': command, 'parameters': parameters }
if timeout is not None:
json['timeout'] = timeout
when(response).json().thenReturn({ 'command': command, 'timeout': timeout, 'parameters': parameters })
return response
def setResponse(self, response):
when(self.server._requests).get('').thenReturn(response)
def setUp(self):
self.server = Server('')
self.server._requests = mock()
def testGet(self):
self.setResponse(self.createCommandResponse('copy', parameters = {'src': 'source', 'dst': 'destination' }, timeout = 10))
response = self.server.get()
self.assertIsInstance(response, Copy)
self.assertEqual(response.parameters, {'src': 'source', 'dst': 'destination', })
self.assertIs(response.timeout, 10)
def testGetCommandNotFound(self):
self.setResponse(self.createCommandResponse('Not found command'))
self.assertRaises(CommandNotFoundException, self.server.get) | Remove tearDown not used method | Remove tearDown not used method
| Python | mit | CaminsTECH/owncloud-test | from mockito import *
import unittest
from source.server import *
from source.exception import *
from source.commands.system import *
class ServerTestCase(unittest.TestCase):
def createCommandResponse(self, command, parameters = {}, timeout = None):
response = mock()
response.status_code = 200
json = { 'command': command, 'parameters': parameters }
if timeout is not None:
json['timeout'] = timeout
when(response).json().thenReturn({ 'command': command, 'timeout': timeout, 'parameters': parameters })
return response
def setResponse(self, response):
when(self.server._requests).get('').thenReturn(response)
def setUp(self):
self.server = Server('')
self.server._requests = mock()
- def tearDown(self):
- pass
-
def testGet(self):
self.setResponse(self.createCommandResponse('copy', parameters = {'src': 'source', 'dst': 'destination' }, timeout = 10))
response = self.server.get()
self.assertIsInstance(response, Copy)
self.assertEqual(response.parameters, {'src': 'source', 'dst': 'destination', })
self.assertIs(response.timeout, 10)
def testGetCommandNotFound(self):
self.setResponse(self.createCommandResponse('Not found command'))
self.assertRaises(CommandNotFoundException, self.server.get) | Remove tearDown not used method | ## Code Before:
from mockito import *
import unittest
from source.server import *
from source.exception import *
from source.commands.system import *
class ServerTestCase(unittest.TestCase):
def createCommandResponse(self, command, parameters = {}, timeout = None):
response = mock()
response.status_code = 200
json = { 'command': command, 'parameters': parameters }
if timeout is not None:
json['timeout'] = timeout
when(response).json().thenReturn({ 'command': command, 'timeout': timeout, 'parameters': parameters })
return response
def setResponse(self, response):
when(self.server._requests).get('').thenReturn(response)
def setUp(self):
self.server = Server('')
self.server._requests = mock()
def tearDown(self):
pass
def testGet(self):
self.setResponse(self.createCommandResponse('copy', parameters = {'src': 'source', 'dst': 'destination' }, timeout = 10))
response = self.server.get()
self.assertIsInstance(response, Copy)
self.assertEqual(response.parameters, {'src': 'source', 'dst': 'destination', })
self.assertIs(response.timeout, 10)
def testGetCommandNotFound(self):
self.setResponse(self.createCommandResponse('Not found command'))
self.assertRaises(CommandNotFoundException, self.server.get)
## Instruction:
Remove tearDown not used method
## Code After:
from mockito import *
import unittest
from source.server import *
from source.exception import *
from source.commands.system import *
class ServerTestCase(unittest.TestCase):
def createCommandResponse(self, command, parameters = {}, timeout = None):
response = mock()
response.status_code = 200
json = { 'command': command, 'parameters': parameters }
if timeout is not None:
json['timeout'] = timeout
when(response).json().thenReturn({ 'command': command, 'timeout': timeout, 'parameters': parameters })
return response
def setResponse(self, response):
when(self.server._requests).get('').thenReturn(response)
def setUp(self):
self.server = Server('')
self.server._requests = mock()
def testGet(self):
self.setResponse(self.createCommandResponse('copy', parameters = {'src': 'source', 'dst': 'destination' }, timeout = 10))
response = self.server.get()
self.assertIsInstance(response, Copy)
self.assertEqual(response.parameters, {'src': 'source', 'dst': 'destination', })
self.assertIs(response.timeout, 10)
def testGetCommandNotFound(self):
self.setResponse(self.createCommandResponse('Not found command'))
self.assertRaises(CommandNotFoundException, self.server.get) | # ... existing code ...
def testGet(self):
# ... rest of the code ... |
64d599d6f7ca0aae6d95bf753a8421c7978276a2 | subliminal/__init__.py | subliminal/__init__.py | __title__ = 'subliminal'
__version__ = '1.0.dev0'
__author__ = 'Antoine Bertin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015, Antoine Bertin'
import logging
from .api import (ProviderPool, check_video, provider_manager, download_best_subtitles, download_subtitles,
list_subtitles, save_subtitles)
from .cache import region
from .exceptions import Error, ProviderError
from .providers import Provider
from .subtitle import Subtitle
from .video import SUBTITLE_EXTENSIONS, VIDEO_EXTENSIONS, Episode, Movie, Video, scan_video, scan_videos
logging.getLogger(__name__).addHandler(logging.NullHandler())
| __title__ = 'subliminal'
__version__ = '1.0.dev0'
__author__ = 'Antoine Bertin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015, Antoine Bertin'
import logging
from .api import (ProviderPool, check_video, provider_manager, download_best_subtitles, download_subtitles,
list_subtitles, save_subtitles)
from .cache import region
from .exceptions import Error, ProviderError
from .providers import Provider
from .subtitle import Subtitle, compute_score
from .video import SUBTITLE_EXTENSIONS, VIDEO_EXTENSIONS, Episode, Movie, Video, scan_video, scan_videos
logging.getLogger(__name__).addHandler(logging.NullHandler())
| Add compute_score to subliminal namespace | Add compute_score to subliminal namespace
| Python | mit | juanmhidalgo/subliminal,h3llrais3r/subliminal,getzze/subliminal,hpsbranco/subliminal,kbkailashbagaria/subliminal,oxan/subliminal,ratoaq2/subliminal,ofir123/subliminal,SickRage/subliminal,pums974/subliminal,Elettronik/subliminal,goll/subliminal,bogdal/subliminal,fernandog/subliminal,Diaoul/subliminal,neo1691/subliminal,t4lwh/subliminal | __title__ = 'subliminal'
__version__ = '1.0.dev0'
__author__ = 'Antoine Bertin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015, Antoine Bertin'
import logging
from .api import (ProviderPool, check_video, provider_manager, download_best_subtitles, download_subtitles,
list_subtitles, save_subtitles)
from .cache import region
from .exceptions import Error, ProviderError
from .providers import Provider
- from .subtitle import Subtitle
+ from .subtitle import Subtitle, compute_score
from .video import SUBTITLE_EXTENSIONS, VIDEO_EXTENSIONS, Episode, Movie, Video, scan_video, scan_videos
logging.getLogger(__name__).addHandler(logging.NullHandler())
| Add compute_score to subliminal namespace | ## Code Before:
__title__ = 'subliminal'
__version__ = '1.0.dev0'
__author__ = 'Antoine Bertin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015, Antoine Bertin'
import logging
from .api import (ProviderPool, check_video, provider_manager, download_best_subtitles, download_subtitles,
list_subtitles, save_subtitles)
from .cache import region
from .exceptions import Error, ProviderError
from .providers import Provider
from .subtitle import Subtitle
from .video import SUBTITLE_EXTENSIONS, VIDEO_EXTENSIONS, Episode, Movie, Video, scan_video, scan_videos
logging.getLogger(__name__).addHandler(logging.NullHandler())
## Instruction:
Add compute_score to subliminal namespace
## Code After:
__title__ = 'subliminal'
__version__ = '1.0.dev0'
__author__ = 'Antoine Bertin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015, Antoine Bertin'
import logging
from .api import (ProviderPool, check_video, provider_manager, download_best_subtitles, download_subtitles,
list_subtitles, save_subtitles)
from .cache import region
from .exceptions import Error, ProviderError
from .providers import Provider
from .subtitle import Subtitle, compute_score
from .video import SUBTITLE_EXTENSIONS, VIDEO_EXTENSIONS, Episode, Movie, Video, scan_video, scan_videos
logging.getLogger(__name__).addHandler(logging.NullHandler())
| # ... existing code ...
from .providers import Provider
from .subtitle import Subtitle, compute_score
from .video import SUBTITLE_EXTENSIONS, VIDEO_EXTENSIONS, Episode, Movie, Video, scan_video, scan_videos
# ... rest of the code ... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.