Dataset schema:

| column        | dtype                | stats                |
|---------------|----------------------|----------------------|
| commit        | string               | lengths 40 to 40     |
| old_file      | string               | lengths 4 to 106     |
| new_file      | string               | lengths 4 to 106     |
| old_contents  | string               | lengths 10 to 2.94k  |
| new_contents  | string               | lengths 21 to 2.95k  |
| subject       | string               | lengths 16 to 444    |
| message       | string               | lengths 17 to 2.63k  |
| lang          | string (categorical) | 1 value              |
| license       | string (categorical) | 13 values            |
| repos         | string               | lengths 7 to 43k     |
| ndiff         | string               | lengths 52 to 3.31k  |
| instruction   | string               | lengths 16 to 444    |
| content       | string               | lengths 133 to 4.32k |
| diff          | string               | lengths 49 to 3.61k  |

commit: bc5abf988956235b48aeb1234d9944fe70be619a
old_file / new_file: pytest_hidecaptured.py

old_contents:

```python
def pytest_runtest_logreport(report):
    """Overwrite report by removing any captured stderr."""
    # print("PLUGIN SAYS -> report -> {0}".format(report))
    # print("PLUGIN SAYS -> report.sections -> {0}".format(report.sections))
    # print("PLUGIN SAYS -> dir(report) -> {0}".format(dir(report)))
    # print("PLUGIN SAYS -> type(report) -> {0}".format(type(report)))
    sections = [item for item in report.sections if item[0] not in (
        "Captured stdout call", "Captured stderr call",
        "Captured stdout setup", "Captured stderr setup",
        "Captured stdout teardown", "Captured stderr teardown")]
    # print("PLUGIN SAYS -> sections -> {0}".format(sections))
    report.sections = sections
```

new_contents:

```python
import pytest


@pytest.mark.tryfirst
def pytest_runtest_logreport(report):
    """Overwrite report by removing any captured stderr."""
    # print("PLUGIN SAYS -> report -> {0}".format(report))
    # print("PLUGIN SAYS -> report.sections -> {0}".format(report.sections))
    # print("PLUGIN SAYS -> dir(report) -> {0}".format(dir(report)))
    # print("PLUGIN SAYS -> type(report) -> {0}".format(type(report)))
    sections = [item for item in report.sections if item[0] not in (
        "Captured stdout call", "Captured stderr call",
        "Captured stdout setup", "Captured stderr setup",
        "Captured stdout teardown", "Captured stderr teardown")]
    # print("PLUGIN SAYS -> sections -> {0}".format(sections))
    report.sections = sections
```

subject: Fix interop issues with pytest-instafail
lang: Python
license: mit
repos: hamzasheikh/pytest-hidecaptured
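
A note on the fix above: `@pytest.mark.tryfirst` asks pytest to run this `pytest_runtest_logreport` implementation before other plugins' hooks (such as pytest-instafail's), so the captured-output sections are stripped before anything prints them. Current pytest deprecates the marker form in favor of `pytest.hookimpl`; a sketch of the same idea with that API, as an illustration rather than part of the commit:

```python
import pytest

@pytest.hookimpl(tryfirst=True)
def pytest_runtest_logreport(report):
    # Drop captured stdout/stderr sections before later hooks see them.
    report.sections = [
        (title, content)
        for title, content in report.sections
        if not title.startswith(("Captured stdout", "Captured stderr"))
    ]
```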

commit: cc93d6b9ade1d15236904978f012f91b0a9d567d
old_file / new_file: examples/manage.py

old_contents:

```python
import logging

from aio_manager import Manager
from aioapp.app import build_application

logging.basicConfig(level=logging.WARNING)

app = build_application()
manager = Manager(app)

# To support SQLAlchemy commands, use this
#
# from aio_manager.commands.ext import sqlalchemy
# sqlalchemy.configure_manager(manager, app, Base,
#                              settings.DATABASE_USERNAME,
#                              settings.DATABASE_NAME,
#                              settings.DATABASE_HOST,
#                              settings.DATABASE_PASSWORD)

if __name__ == "__main__":
    manager.run()
```

new_contents:

```python
import logging

from aio_manager import Manager
from aioapp.app import build_application

logging.basicConfig(level=logging.WARNING)

app = build_application()
manager = Manager(app)

# To support SQLAlchemy commands, use this
#
# from aio_manager.commands.ext import sqlalchemy
# [from aiopg.sa import create_engine]
# sqlalchemy.configure_manager(manager, app, Base,
#                              settings.DATABASE_USERNAME,
#                              settings.DATABASE_PASSWORD,
#                              settings.DATABASE_NAME,
#                              settings.DATABASE_HOST,
#                              settings.DATABASE_PORT[,
#                              create_engine])

if __name__ == "__main__":
    manager.run()
```

subject: Update sqlalchemy command configuration example
lang: Python
license: bsd-3-clause
repos: rrader/aio_manager

commit: 47d9a8df136e235f49921d4782c5e392b0101107
old_file / new_file: migrations/versions/147_add_cleaned_subject.py

old_contents:

```python
# revision identifiers, used by Alembic.
revision = '486c7fa5b533'
down_revision = 'c77a90d524'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import text


def upgrade():
    conn = op.get_bind()
    conn.execute(text("set @@lock_wait_timeout = 20;"))
    op.add_column('thread', sa.Column('_cleaned_subject',
                                      sa.String(length=255), nullable=True))
    op.create_index('ix_cleaned_subject', 'thread',
                    ['namespace_id', '_cleaned_subject'], unique=False)


def downgrade():
    conn = op.get_bind()
    conn.execute(text("set @@lock_wait_timeout = 20;"))
    op.drop_index('ix_cleaned_subject', table_name='thread')
    op.drop_column('thread', '_cleaned_subject')
```

new_contents:

```python
# revision identifiers, used by Alembic.
revision = '486c7fa5b533'
down_revision = 'c77a90d524'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import text


def upgrade():
    conn = op.get_bind()
    conn.execute(text("set @@lock_wait_timeout = 20;"))
    op.add_column('thread', sa.Column('_cleaned_subject',
                                      sa.String(length=255), nullable=True))
    op.create_index('ix_cleaned_subject', 'thread', ['_cleaned_subject'],
                    unique=False)


def downgrade():
    conn = op.get_bind()
    conn.execute(text("set @@lock_wait_timeout = 20;"))
    op.drop_index('ix_cleaned_subject', table_name='thread')
    op.drop_column('thread', '_cleaned_subject')
```

subject: Make _cleaned_subject migration match declared schema.
message:
Make _cleaned_subject migration match declared schema.

Test Plan: Upgrade old database to head.
Reviewers: kav-ya
Reviewed By: kav-ya
Differential Revision: https://review.inboxapp.com/D1394
lang: Python
license: agpl-3.0
repos: Eagles2F/sync-engine, EthanBlackburn/sync-engine, PriviPK/privipk-sync-engine, nylas/sync-engine, closeio/nylas, jobscore/sync-engine, wakermahmud/sync-engine, gale320/sync-engine, ErinCall/sync-engine
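
For context, the migration now creates a single-column index, presumably to match the index declared on the ORM model. A sketch of what the matching declaration could look like on the SQLAlchemy side; this is inferred, not taken from the repository:

```python
import sqlalchemy as sa

metadata = sa.MetaData()
thread = sa.Table(
    'thread', metadata,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('_cleaned_subject', sa.String(255)),
    # Single-column index, matching the migration's create_index call.
    sa.Index('ix_cleaned_subject', '_cleaned_subject'),
)
```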

commit: 1ef76b4f4395c9b5e3c2338822947999d5581013
old_file / new_file: labs/lab-3/ex-3-2.events.py

old_contents:

```python
import tspapi

api = tspapi.API()

source = tspapi.Source(ref='myhost')

api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
```

new_contents:

```python
import tspapi

api = tspapi.API()

source = tspapi.Source(ref='myhost', _type='host')

api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
```

subject: Add type field to source
lang: Python
license: apache-2.0
repos: jdgwartney/tsi-lab, boundary/tsi-lab

commit: 6ea9d0c4b4e2a117e3e74c34cc77f83d262e62d8
old_file / new_file: sendgrid_events/models.py

old_contents:

```python
import json

from django.db import models
from django.utils import timezone
from jsonfield import JSONField

from sendgrid_events.signals import batch_processed


class Event(models.Model):

    kind = models.CharField(max_length=75)
    email = models.CharField(max_length=150)
    data = JSONField(blank=True)
    created_at = models.DateTimeField(default=timezone.now)

    @classmethod
    def process_batch(cls, data):
        events = []
        for line in data.split("\r\n"):
            if line:
                d = json.loads(line.strip())
                events.append(Event.objects.create(
                    kind=d["event"],
                    email=d["email"],
                    data=d
                ))
        batch_processed.send(sender=Event, events=events)
        return events
```

new_contents:

```python
import json

from django.db import models
from django.utils import timezone
from jsonfield import JSONField

from sendgrid_events.signals import batch_processed


class Event(models.Model):

    kind = models.CharField(max_length=75)
    email = models.CharField(max_length=150)
    data = JSONField(blank=True)
    created_at = models.DateTimeField(default=timezone.now)

    @classmethod
    def process_batch(cls, data):
        events = []
        for event in json.loads(data):
            events.append(Event.objects.create(
                kind=event["event"],
                email=event["email"],
                data=event
            ))
        batch_processed.send(sender=Event, events=events)
        return events
```

subject: Update for latest Sendgrid webhook format
lang: Python
license: bsd-3-clause
repos: digital-eskimo/django-sendgrid-events, kronok/django-sendgrid-events, eldarion/django-sendgrid-events, rorito/django-sendgrid-events
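
The change above tracks SendGrid's event webhook moving from newline-delimited JSON (one event object per line) to a single JSON array per POST. A standalone illustration with a made-up payload, independent of the model code:

```python
import json

# Newer webhook format: one JSON array holding every event in the batch.
payload = '[{"event": "open", "email": "a@example.com"}, ' \
          '{"event": "click", "email": "b@example.com"}]'

for event in json.loads(payload):
    print(event["event"], event["email"])
```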

commit: e14b3fad26dce8dad3ca97c06e624f1d6b0764f9
old_file / new_file: mqueue/__init__.py

old_contents:

```python
__version__ = '0.5.5'

default_app_config = 'mqueue.apps.MqueueConfig'
```

new_contents:

```python
__version__ = '0.5.5'

default_app_config = 'mqueue.apps.MqueueConfig'

import sys
reload(sys)
sys.setdefaultencoding("utf-8")
```

subject: Set default encoding to fix unicode errors
lang: Python
license: mit
repos: synw/django-mqueue
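
Worth knowing when reading this commit: the builtin `reload` and `sys.setdefaultencoding` only exist on Python 2, where `site.py` deliberately deletes `setdefaultencoding` at startup; reloading `sys` restores it. A sketch of the mechanism, Python 2 only:

```python
# Python 2 only: site.py removes sys.setdefaultencoding at startup,
# so sys must be reloaded to get the function back before calling it.
import sys
reload(sys)                      # NameError on Python 3
sys.setdefaultencoding("utf-8")
print(sys.getdefaultencoding())  # -> 'utf-8'
```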

commit: 49e95022577eb40bcf9e1d1c9f95be7269fd0e3b
old_file / new_file: scripts/update_acq_stats.py

old_contents:

```python
from mica.stats import update_acq_stats

update_acq_stats.main()

import os
table_file = mica.stats.acq_stats.table_file
file_stat = os.stat(table_file)
if file_stat.st_size > 50e6:
    print("""
Warning: {tfile} is larger than 50MB and may need
Warning: to be manually repacked (i.e.):
Warning:
Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5
Warning: cp compressed.h5 {tfile}
""".format(tfile=table_file))
```

new_contents:

```python
import os
from mica.stats import update_acq_stats
import mica.stats.acq_stats
update_acq_stats.main()

table_file = mica.stats.acq_stats.TABLE_FILE
file_stat = os.stat(table_file)
if file_stat.st_size > 50e6:
    print("""
Warning: {tfile} is larger than 50MB and may need
Warning: to be manually repacked (i.e.):
Warning:
Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5
Warning: cp compressed.h5 {tfile}
""".format(tfile=table_file))
```

subject: Fix reference to acq table file in script
lang: Python
license: bsd-3-clause
repos: sot/mica
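
The underlying bug: `from mica.stats import update_acq_stats` binds only the name `update_acq_stats`, so the old script's `mica.stats.acq_stats.table_file` lookup raised `NameError`; adding `import mica.stats.acq_stats` binds the full dotted path (and `TABLE_FILE` presumably matches the module's actual constant name). Illustrated with the standard library:

```python
# `from a.b import c` binds only `c`; the package name `a` stays unbound.
from os import path          # binds `path`, not `os`
try:
    os.path.join("x", "y")
except NameError as exc:
    print(exc)               # name 'os' is not defined

import os.path               # binds `os` (and os.path) too
print(os.path.join("x", "y"))
```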

commit: 9fb89f885dd26b530b4cc95427373f06ddc7d13d
old_file / new_file: emptiness.py

old_contents:

```python
import argparse
import requests
import timetable

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--day", default='', required=True, help="Day to check the timetable on. eg: Thursday")
    parser.add_argument("-t", "--time", default='', required=True, help="The time the block must be empty (HH:MM (24h))")
    args = parser.parse_args()

    htmlRequest = requests.get("http://upnet.up.ac.za/tt/hatfield_timetable.html")
    timeTableObject = timetable.parseHTMLFile(htmlRequest.text)

    # Method 1 ; Elimination
    venueList = timetable.getVenueList(timeTableObject)
    filteredTimetable = timetable.getFilteredTimetable(args.day, args.time, timeTableObject, venueList)

    #for el in filteredTimetable:
    #    print(el.venue)

    empty = timetable.getEmptyVenues(filteredTimetable, venueList)

    for el in empty:
        print(el)
```

new_contents:

```python
import argparse
import requests
import timetable
import datetime
import time

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--day", default='', required=False, help="Day to check the timetable on. eg: Thursday")
    parser.add_argument("-t", "--time", default='', required=False, help="The time the block must be empty (HH:MM (24h))")
    args = parser.parse_args()

    time = args.time
    day = args.day

    if args.time == '':
        time = datetime.datetime.now().strftime("%H:%M")

    if args.day == '':
        day = datetime.datetime.now().strftime("%A")

    # print('Using ' + day + ' - ' + time)

    htmlRequest = requests.get("http://upnet.up.ac.za/tt/hatfield_timetable.html")
    timeTableObject = timetable.parseHTMLFile(htmlRequest.text)

    # Method 1 ; Elimination
    venueList = timetable.getVenueList(timeTableObject)
    filteredTimetable = timetable.getFilteredTimetable(day, time, timeTableObject, venueList)

    #for el in filteredTimetable:
    #    print(el.venue)

    empty = timetable.getEmptyVenues(filteredTimetable, venueList)

    for el in empty:
        print(el)
```

subject: Use current time if no arguments given
lang: Python
license: mit
repos: egeldenhuys/emptiness
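
The fallback above leans on two `strftime` codes; a quick standalone illustration of what they produce:

```python
import datetime

now = datetime.datetime.now()
print(now.strftime("%H:%M"))  # 24h wall-clock time, e.g. "14:05"
print(now.strftime("%A"))     # full weekday name, e.g. "Thursday"
```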

commit: 592ffbcd7fbbc29bfd377b5abadb39aa29f1c88d
old_file / new_file: foyer/tests/conftest.py

old_contents:

```python
import pytest


@pytest.fixture(scope="session")
def initdir(tmpdir):
    tmpdir.chdir()
```

new_contents:

```python
import pytest


@pytest.fixture(autouse=True)
def initdir(tmpdir):
    tmpdir.chdir()
```

subject: Switch from scope="session" to autouse=True
lang: Python
license: mit
repos: iModels/foyer, mosdef-hub/foyer
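
The two fixture spellings behave quite differently: `scope="session"` shares one fixture instance but only with tests that request `initdir` by name, while `autouse=True` (at the default function scope) runs the fixture for every test automatically, so each test starts inside a fresh temporary directory. A minimal illustration, separate from the commit:

```python
import pytest

@pytest.fixture(autouse=True)
def initdir(tmpdir):
    # Runs before every test in scope without being listed as a parameter.
    tmpdir.chdir()

def test_writes_into_tmpdir():
    with open("out.txt", "w") as f:  # lands in this test's tmpdir
        f.write("ok")
```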

commit: 1e90db8de39bd8c4b1a4d58148b991af8b5c32dd
old_file / new_file: storage/models/fighter.py

old_contents:

```python
from storage.models.base import *


class Fighter(Base):
    __tablename__ = 'fighters'

    id = Column(Integer, primary_key=True)
    ref = Column(String(STR_SIZE), unique=True, nullable=False)

    name = Column(String(STR_SIZE), nullable=False)
    country = Column(String(STR_SIZE))
    city = Column(String(STR_SIZE))
    birthday = Column(Date)
    height = Column(Integer)  # centimeters
    weight = Column(Integer)  # kg
    reach = Column(Integer)  # centimeters
    specialization = Column(String)

    fights = relationship(
        "Fight",
        primaryjoin="or_(Fighter.id == Fight.fighter1_id, Fighter.id == Fight.fighter2_id)")
```

new_contents:

```python
from storage.models.base import *


class Fighter(Base):
    __tablename__ = 'fighters'

    id = Column(Integer, primary_key=True)
    ref = Column(String(STR_SIZE), unique=True, nullable=False)

    name = Column(String(STR_SIZE), nullable=False)
    country = Column(String(STR_SIZE))
    city = Column(String(STR_SIZE))
    birthday = Column(Date)
    height = Column(Integer)  # centimeters
    weight = Column(Integer)  # kg
    reach = Column(Integer)  # centimeters
    specialization = Column(String(STR_SIZE))

    fights = relationship(
        "Fight",
        primaryjoin="or_(Fighter.id == Fight.fighter1_id, Fighter.id == Fight.fighter2_id)")
```

subject: Add restriction for specialization string in db
lang: Python
license: apache-2.0
repos: Some1Nebo/ufcpy
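
The practical reason for the length restriction: some backends, MySQL in particular, refuse a `VARCHAR` with no length, so a bare `String` works on SQLite but fails at `CREATE TABLE` time elsewhere. A generic sketch, not the project's model:

```python
import sqlalchemy as sa

# Length-less String: accepted by SQLite, rejected by MySQL at DDL time.
loose = sa.Column('specialization', sa.String)
# Explicit length compiles portably as VARCHAR(255).
strict = sa.Column('specialization', sa.String(255))
```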

commit: d70ccd856bb4ddb061ff608716ef15f778380d62
old_file / new_file: gnsq/stream/defalte.py

old_contents:

```python
from __future__ import absolute_import

import zlib

from .compression import CompressionSocket


class DefalteSocket(CompressionSocket):
    def __init__(self, socket, level):
        self._decompressor = zlib.decompressobj(level)
        self._compressor = zlib.compressobj(level)
        super(DefalteSocket, self).__init__(socket)

    def compress(self, data):
        return self._compressor.compress(data)

    def decompress(self, data):
        return self._decompressor.decompress(data)
```

new_contents:

```python
from __future__ import absolute_import

import zlib

from .compression import CompressionSocket


class DefalteSocket(CompressionSocket):
    def __init__(self, socket, level):
        wbits = -zlib.MAX_WBITS
        self._decompressor = zlib.decompressobj(wbits)
        self._compressor = zlib.compressobj(level, zlib.DEFLATED, wbits)
        super(DefalteSocket, self).__init__(socket)

    def compress(self, data):
        return self._compressor.compress(data)

    def decompress(self, data):
        return self._decompressor.decompress(data)
```

subject: Set correct waits for deflate.
lang: Python
license: bsd-3-clause
repos: wtolson/gnsq, hiringsolved/gnsq
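
The key detail in this fix is the window-bits parameter: a negative `wbits` selects raw DEFLATE streams with no zlib header or checksum, matching the raw framing the peer negotiates, and the old code additionally passed the compression *level* where `decompressobj` expects `wbits`. A standalone round-trip showing the convention:

```python
import zlib

wbits = -zlib.MAX_WBITS  # negative wbits -> raw deflate, no zlib header
compressor = zlib.compressobj(6, zlib.DEFLATED, wbits)
decompressor = zlib.decompressobj(wbits)

data = b"hello nsq" * 100
chunk = compressor.compress(data) + compressor.flush(zlib.Z_SYNC_FLUSH)
assert decompressor.decompress(chunk) == data
```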

commit: eaa13f9005a8aaf8c748a98de697b03eee9e675b
old_file / new_file: salt/client/netapi.py

old_contents:

```python
'''
The main entry point for salt-api
'''
from __future__ import absolute_import

# Import python libs
import logging

# Import salt-api libs
import salt.loader
import salt.utils.process

logger = logging.getLogger(__name__)


class NetapiClient(object):
    '''
    Start each netapi module that is configured to run
    '''
    def __init__(self, opts):
        self.opts = opts
        self.process_manager = salt.utils.process.ProcessManager()
        self.netapi = salt.loader.netapi(self.opts)

    def run(self):
        '''
        Load and start all available api modules
        '''
        for fun in self.netapi:
            if fun.endswith('.start'):
                logger.info('Starting {0} netapi module'.format(fun))
                self.process_manager.add_process(self.netapi[fun])

        self.process_manager.run()
```

new_contents:

```python
'''
The main entry point for salt-api
'''
from __future__ import absolute_import

# Import python libs
import logging

# Import salt-api libs
import salt.loader
import salt.utils.process

logger = logging.getLogger(__name__)


class NetapiClient(object):
    '''
    Start each netapi module that is configured to run
    '''
    def __init__(self, opts):
        self.opts = opts
        self.process_manager = salt.utils.process.ProcessManager()
        self.netapi = salt.loader.netapi(self.opts)

    def run(self):
        '''
        Load and start all available api modules
        '''
        if not len(self.netapi):
            logger.error("Did not find any netapi configurations, nothing to start")

        for fun in self.netapi:
            if fun.endswith('.start'):
                logger.info('Starting {0} netapi module'.format(fun))
                self.process_manager.add_process(self.netapi[fun])

        self.process_manager.run()
```

subject: Add log error if we run salt-api w/ no config
message:
Add log error if we run salt-api w/ no config

Currently, the salt-api script will exit with no error or hint of why it
failed if there is no netapi module configured. Added a short line if we
find no api modules to start, warning the user that the config may be
missing.

Fixes #28240
lang: Python
license: apache-2.0
repos: saltstack/salt

commit: 888584a49e697551c4f680cc8651be2fe80fc65d
old_file / new_file: configgen/generators/ppsspp/ppssppGenerator.py

old_contents:

```python
import Command
#~ import reicastControllers
import recalboxFiles
from generators.Generator import Generator
import ppssppControllers
import shutil
import os.path
import ConfigParser


class PPSSPPGenerator(Generator):
    # Main entry of the module
    # Configure fba and return a command
    def generate(self, system, rom, playersControllers):
        if not system.config['configfile']:
            # Write emu.cfg to map joysticks, init with the default emu.cfg
            Config = ConfigParser.ConfigParser()
            Config.read(recalboxFiles.reicastConfigInit)
            section = "input"
            # For each pad detected
            for index in playersControllers :
                controller = playersControllers[index]
                # we only care about player 1
                if controller.player != "1":
                    continue
                ppssppControllers.generateControllerConfig(controller)

        # the command to run
        #~ commandArray = [recalboxFiles.ppssppBin, rom, "--escape-exit"]
        commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom]
        return Command.Command(videomode=system.config['videomode'], array=commandArray, env={"XDG_CONFIG_HOME":recalboxFiles.CONF, "SDL_VIDEO_GL_DRIVER": "/usr/lib/libGLESv2.so"}, delay=1)
```

new_contents:

```python
import Command
#~ import reicastControllers
import recalboxFiles
from generators.Generator import Generator
import ppssppControllers
import shutil
import os.path
import ConfigParser


class PPSSPPGenerator(Generator):
    # Main entry of the module
    # Configure fba and return a command
    def generate(self, system, rom, playersControllers):
        if not system.config['configfile']:
            for index in playersControllers :
                controller = playersControllers[index]
                # we only care about player 1
                if controller.player != "1":
                    continue
                ppssppControllers.generateControllerConfig(controller)
                break

        # the command to run
        commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom]
        # The next line is a reminder on how to quit PPSSPP with just the HK
        #commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom, "--escape-exit"]
        return Command.Command(videomode=system.config['videomode'], array=commandArray, env={"XDG_CONFIG_HOME":recalboxFiles.CONF, "SDL_VIDEO_GL_DRIVER": "/usr/lib/libGLESv2.so"}, delay=1)
```

subject: Remove a bad typo from reicast
lang: Python
license: mit
repos: nadenislamarre/recalbox-configgen, recalbox/recalbox-configgen, digitalLumberjack/recalbox-configgen

commit: cc09da295d61965af1552b35b7ece0caf4e5a399
old_file / new_file: accountant/interface/forms.py

old_contents:

```python
from django import forms
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import ugettext_lazy as _

from core import models

DUPLICATE_PLAYER_ERROR = \
    _('There is already a player with this name in your game')


class CreateGameForm(forms.Form):
    bank_cash = forms.IntegerField(required=False, initial=12000)

    def clean_bank_cash(self):
        data = self.cleaned_data['bank_cash']
        if data == None:
            data = 0
        return data


class AddPlayerForm(forms.ModelForm):
    class Meta:
        model = models.Player
        fields = ('game', 'name', 'cash')
        error_messages = {
            NON_FIELD_ERRORS: {'unique_together': DUPLICATE_PLAYER_ERROR},
        }
```

new_contents:

```python
from django import forms
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import ugettext_lazy as _

from core import models

DUPLICATE_PLAYER_ERROR = \
    _('There is already a player with this name in your game')


class CreateGameForm(forms.Form):
    bank_cash = forms.IntegerField(required=False, initial=12000)

    def clean_bank_cash(self):
        data = self.cleaned_data['bank_cash']
        if data == None:
            data = 0
        return data


class AddPlayerForm(forms.ModelForm):
    class Meta:
        model = models.Player
        fields = ('game', 'name', 'cash')
        error_messages = {
            NON_FIELD_ERRORS: {'unique_together': DUPLICATE_PLAYER_ERROR},
        }
        widgets = {
            'game': forms.HiddenInput(),
        }
```

subject: Hide Game ID input since it is automatically set
lang: Python
license: mit
repos: XeryusTC/18xx-accountant
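
For context, a `Meta.widgets` override only changes how the bound field renders (here, as `<input type="hidden">`); validation and the unique-together check are untouched. A standalone sketch with a hypothetical form, not the project's:

```python
from django import forms

class ExampleForm(forms.Form):
    # Rendered as a hidden input, but still validated like any field.
    game = forms.IntegerField(widget=forms.HiddenInput())
    name = forms.CharField()
```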

commit: 2f635e890414f777fbe3ddde1aea74ab13558313
old_file / new_file: llvmlite/tests/test_dylib.py

old_contents:

```python
import unittest

from . import TestCase
from llvmlite import binding as llvm
from llvmlite.binding import dylib
import platform


class TestDylib(TestCase):

    def setUp(self):
        llvm.initialize()
        llvm.initialize_native_target()
        llvm.initialize_native_asmprinter()

    def test_bad_library(self):
        with self.assertRaises(Exception) as context:
            dylib.load_library_permanently("zzzasdkf;jasd;l")
        system = platform.system()
        if system == "Linux":
            self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory' in str(context.exception))
        elif system == "Darwin":
            self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found' in str(context.exception))
```
from . import TestCase from llvmlite import binding as llvm from llvmlite.binding import dylib import platform from ctypes.util import find_library import unittest @unittest.skipUnless(platform.system() in {"Linux", "Darwin"}, "Unsupport test for current OS") class TestDylib(TestCase): def setUp(self): llvm.initialize() llvm.initialize_native_target() llvm.initialize_native_asmprinter() self.system = platform.system() def test_bad_library(self): with self.assertRaises(Exception) as context: dylib.load_library_permanently("zzzasdkf;jasd;l") if self.system == "Linux": self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory' in str(context.exception)) elif self.system == "Darwin": self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found' in str(context.exception)) def test_libm(self): try: if self.system == "Linux": libm = find_library("m") elif self.system == "Darwin": libm = find_library("libm") dylib.load_library_permanently(libm) except Exception: self.fail("Valid call to link library should not fail.")
Add tests to check loading library.
Add tests to check loading library.
Python
bsd-2-clause
m-labs/llvmlite,pitrou/llvmlite,ssarangi/llvmlite,m-labs/llvmlite,markdewing/llvmlite,pitrou/llvmlite,numba/llvmlite,markdewing/llvmlite,sklam/llvmlite,sklam/llvmlite,pitrou/llvmlite,numba/llvmlite,ssarangi/llvmlite,markdewing/llvmlite,squisher/llvmlite,ssarangi/llvmlite,m-labs/llvmlite,numba/llvmlite,numba/llvmlite,squisher/llvmlite,squisher/llvmlite,sklam/llvmlite,ssarangi/llvmlite,sklam/llvmlite,squisher/llvmlite,markdewing/llvmlite,m-labs/llvmlite,pitrou/llvmlite
- import unittest
from . import TestCase
from llvmlite import binding as llvm
from llvmlite.binding import dylib
import platform
+ from ctypes.util import find_library
+ import unittest
-
+
+ @unittest.skipUnless(platform.system() in {"Linux", "Darwin"}, "Unsupport test for current OS")
class TestDylib(TestCase):
-
    def setUp(self):
        llvm.initialize()
        llvm.initialize_native_target()
        llvm.initialize_native_asmprinter()
+         self.system = platform.system()

    def test_bad_library(self):
        with self.assertRaises(Exception) as context:
            dylib.load_library_permanently("zzzasdkf;jasd;l")
-         system = platform.system()
-         if system == "Linux":
+         if self.system == "Linux":
            self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory' in str(context.exception))
-         elif system == "Darwin":
+         elif self.system == "Darwin":
            self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found' in str(context.exception))
+
+     def test_libm(self):
+         try:
+             if self.system == "Linux":
+                 libm = find_library("m")
+             elif self.system == "Darwin":
+                 libm = find_library("libm")
+             dylib.load_library_permanently(libm)
+         except Exception:
+             self.fail("Valid call to link library should not fail.")
+
Add tests to check loading library.
## Code Before:
import unittest
from . import TestCase
from llvmlite import binding as llvm
from llvmlite.binding import dylib
import platform


class TestDylib(TestCase):

    def setUp(self):
        llvm.initialize()
        llvm.initialize_native_target()
        llvm.initialize_native_asmprinter()

    def test_bad_library(self):
        with self.assertRaises(Exception) as context:
            dylib.load_library_permanently("zzzasdkf;jasd;l")
        system = platform.system()
        if system == "Linux":
            self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory' in str(context.exception))
        elif system == "Darwin":
            self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found' in str(context.exception))
## Instruction:
Add tests to check loading library.
## Code After:
from . import TestCase
from llvmlite import binding as llvm
from llvmlite.binding import dylib
import platform
from ctypes.util import find_library
import unittest


@unittest.skipUnless(platform.system() in {"Linux", "Darwin"}, "Unsupport test for current OS")
class TestDylib(TestCase):
    def setUp(self):
        llvm.initialize()
        llvm.initialize_native_target()
        llvm.initialize_native_asmprinter()
        self.system = platform.system()

    def test_bad_library(self):
        with self.assertRaises(Exception) as context:
            dylib.load_library_permanently("zzzasdkf;jasd;l")
        if self.system == "Linux":
            self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory' in str(context.exception))
        elif self.system == "Darwin":
            self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found' in str(context.exception))

    def test_libm(self):
        try:
            if self.system == "Linux":
                libm = find_library("m")
            elif self.system == "Darwin":
                libm = find_library("libm")
            dylib.load_library_permanently(libm)
        except Exception:
            self.fail("Valid call to link library should not fail.")
- import unittest
from . import TestCase
from llvmlite import binding as llvm
from llvmlite.binding import dylib
import platform
+ from ctypes.util import find_library
+ import unittest
-
+
+ @unittest.skipUnless(platform.system() in {"Linux", "Darwin"}, "Unsupport test for current OS")
class TestDylib(TestCase):
-
    def setUp(self):
        llvm.initialize()
        llvm.initialize_native_target()
        llvm.initialize_native_asmprinter()
+         self.system = platform.system()

    def test_bad_library(self):
        with self.assertRaises(Exception) as context:
            dylib.load_library_permanently("zzzasdkf;jasd;l")
-         system = platform.system()
-         if system == "Linux":
+         if self.system == "Linux":
?            +++++
            self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory' in str(context.exception))
-         elif system == "Darwin":
+         elif self.system == "Darwin":
?              +++++
            self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found' in str(context.exception))
+
+     def test_libm(self):
+         try:
+             if self.system == "Linux":
+                 libm = find_library("m")
+             elif self.system == "Darwin":
+                 libm = find_library("libm")
+             dylib.load_library_permanently(libm)
+         except Exception:
+             self.fail("Valid call to link library should not fail.")
77a5ecc7c406e4a6acf814a2f0381dc605e0d14c
leds/led_dance.py
leds/led_dance.py
import pyb


def led_dance(delay):
    dots = {}
    control = pyb.Switch(1)
    while True:
        if not control.value():
            dots[pyb.millis() % 25] = 16
        for d in dots:
            pyb.pixel(d, dots[d])
            if dots[d] == 0:
                del(dots[d])
            else:
                dots[d] = int(dots[d]/2)
        pyb.delay(delay)

led_dance(101)

import microbit


def led_dance(delay):
    dots = [ [0]*5, [0]*5, [0]*5, [0]*5, [0]*5 ]
    microbit.display.set_display_mode(1)
    while True:
        dots[microbit.random(5)][microbit.random(5)] = 128
        for i in range(5):
            for j in range(5):
                microbit.display.image.set_pixel_value(i, j, dots[i][j])
                dots[i][j] = int(dots[i][j]/2)
        microbit.sleep(delay)

led_dance(100)
Update for new version of micropython for microbit
Update for new version of micropython for microbit
Python
mit
jrmhaig/microbit_playground
- import pyb
+ import microbit

def led_dance(delay):
-     dots = {}
-     control = pyb.Switch(1)
+     dots = [ [0]*5, [0]*5, [0]*5, [0]*5, [0]*5 ]
+     microbit.display.set_display_mode(1)
    while True:
+         dots[microbit.random(5)][microbit.random(5)] = 128
+         for i in range(5):
+             for j in range(5):
+                 microbit.display.image.set_pixel_value(i, j, dots[i][j])
-         if not control.value():
-             dots[pyb.millis() % 25] = 16
-         for d in dots:
-             pyb.pixel(d, dots[d])
-             if dots[d] == 0:
-                 del(dots[d])
-             else:
-                 dots[d] = int(dots[d]/2)
+                 dots[i][j] = int(dots[i][j]/2)
-         pyb.delay(delay)
+         microbit.sleep(delay)

- led_dance(101)
+ led_dance(100)
Update for new version of micropython for microbit
## Code Before:
import pyb


def led_dance(delay):
    dots = {}
    control = pyb.Switch(1)
    while True:
        if not control.value():
            dots[pyb.millis() % 25] = 16
        for d in dots:
            pyb.pixel(d, dots[d])
            if dots[d] == 0:
                del(dots[d])
            else:
                dots[d] = int(dots[d]/2)
        pyb.delay(delay)

led_dance(101)
## Instruction:
Update for new version of micropython for microbit
## Code After:
import microbit


def led_dance(delay):
    dots = [ [0]*5, [0]*5, [0]*5, [0]*5, [0]*5 ]
    microbit.display.set_display_mode(1)
    while True:
        dots[microbit.random(5)][microbit.random(5)] = 128
        for i in range(5):
            for j in range(5):
                microbit.display.image.set_pixel_value(i, j, dots[i][j])
                dots[i][j] = int(dots[i][j]/2)
        microbit.sleep(delay)

led_dance(100)
- import pyb
+ import microbit

def led_dance(delay):
-     dots = {}
-     control = pyb.Switch(1)
+     dots = [ [0]*5, [0]*5, [0]*5, [0]*5, [0]*5 ]
+     microbit.display.set_display_mode(1)
    while True:
+         dots[microbit.random(5)][microbit.random(5)] = 128
+         for i in range(5):
+             for j in range(5):
+                 microbit.display.image.set_pixel_value(i, j, dots[i][j])
-         if not control.value():
-             dots[pyb.millis() % 25] = 16
-         for d in dots:
-             pyb.pixel(d, dots[d])
-             if dots[d] == 0:
-                 del(dots[d])
-             else:
-                 dots[d] = int(dots[d]/2)
?                      ^       ^
+                 dots[i][j] = int(dots[i][j]/2)
?                      ^^^^        ^^^^
-         pyb.delay(delay)
+         microbit.sleep(delay)

- led_dance(101)
?             ^
+ led_dance(100)
?             ^
606b2b6c84e9f9f67606a4d7e521cf4805855a98
migrations/versions/0311_populate_returned_letters.py
migrations/versions/0311_populate_returned_letters.py
from alembic import op

from app.dao.returned_letters_dao import insert_or_update_returned_letters

revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'


def upgrade():
    conn = op.get_bind()
    sql = """
        select id, service_id, reference
        from notification_history
        where notification_type = 'letter'
        and notification_status = 'returned-letter'"""
    results = conn.execute(sql)
    returned_letters = results.fetchall()
    references = [x.reference for x in returned_letters]
    insert_or_update_returned_letters(references)


def downgrade():
    pass

from alembic import op

revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'


def upgrade():
    conn = op.get_bind()
    sql = """
        select id, service_id, reference, updated_at
        from notification_history
        where notification_type = 'letter'
        and notification_status = 'returned-letter'"""
    insert_sql = """
        insert into returned_letters(id, reported_at, service_id, notification_id, created_at, updated_at)
        values(uuid_in(md5(random()::text)::cstring), '{}', '{}', '{}', now(), null)
    """

    results = conn.execute(sql)
    returned_letters = results.fetchall()
    for x in returned_letters:
        f = insert_sql.format(x.updated_at.date(), x.service_id, x.id)
        conn.execute(f)


def downgrade():
    pass
Change the insert to use updated_at as the reported_at date
Change the insert to use updated_at as the reported_at date
Python
mit
alphagov/notifications-api,alphagov/notifications-api
from alembic import op
-
- from app.dao.returned_letters_dao import insert_or_update_returned_letters

revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'


def upgrade():
    conn = op.get_bind()
    sql = """
-         select id, service_id, reference
+         select id, service_id, reference, updated_at
        from notification_history
        where notification_type = 'letter'
        and notification_status = 'returned-letter'"""
+     insert_sql = """
+         insert into returned_letters(id, reported_at, service_id, notification_id, created_at, updated_at)
+         values(uuid_in(md5(random()::text)::cstring), '{}', '{}', '{}', now(), null)
+     """
+
    results = conn.execute(sql)
    returned_letters = results.fetchall()
-     references = [x.reference for x in returned_letters]
-     insert_or_update_returned_letters(references)
+     for x in returned_letters:
+         f = insert_sql.format(x.updated_at.date(), x.service_id, x.id)
+         conn.execute(f)


def downgrade():
    pass
Change the insert to use updated_at as the reported_at date
## Code Before:
from alembic import op

from app.dao.returned_letters_dao import insert_or_update_returned_letters

revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'


def upgrade():
    conn = op.get_bind()
    sql = """
        select id, service_id, reference
        from notification_history
        where notification_type = 'letter'
        and notification_status = 'returned-letter'"""
    results = conn.execute(sql)
    returned_letters = results.fetchall()
    references = [x.reference for x in returned_letters]
    insert_or_update_returned_letters(references)


def downgrade():
    pass
## Instruction:
Change the insert to use updated_at as the reported_at date
## Code After:
from alembic import op

revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'


def upgrade():
    conn = op.get_bind()
    sql = """
        select id, service_id, reference, updated_at
        from notification_history
        where notification_type = 'letter'
        and notification_status = 'returned-letter'"""
    insert_sql = """
        insert into returned_letters(id, reported_at, service_id, notification_id, created_at, updated_at)
        values(uuid_in(md5(random()::text)::cstring), '{}', '{}', '{}', now(), null)
    """

    results = conn.execute(sql)
    returned_letters = results.fetchall()
    for x in returned_letters:
        f = insert_sql.format(x.updated_at.date(), x.service_id, x.id)
        conn.execute(f)


def downgrade():
    pass
from alembic import op
-
- from app.dao.returned_letters_dao import insert_or_update_returned_letters

revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'


def upgrade():
    conn = op.get_bind()
    sql = """
-         select id, service_id, reference
+         select id, service_id, reference, updated_at
?                                         + ++++++++++
        from notification_history
        where notification_type = 'letter'
        and notification_status = 'returned-letter'"""
+     insert_sql = """
+         insert into returned_letters(id, reported_at, service_id, notification_id, created_at, updated_at)
+         values(uuid_in(md5(random()::text)::cstring), '{}', '{}', '{}', now(), null)
+     """
+
    results = conn.execute(sql)
    returned_letters = results.fetchall()
-     references = [x.reference for x in returned_letters]
-     insert_or_update_returned_letters(references)
+     for x in returned_letters:
+         f = insert_sql.format(x.updated_at.date(), x.service_id, x.id)
+         conn.execute(f)


def downgrade():
    pass
7a21d2bccbcff2eb6a8b7cfd00c38a28553c0bcd
gratipay/models/country.py
gratipay/models/country.py
from __future__ import absolute_import, division, print_function, unicode_literals

from postgres.orm import Model


class Country(Model):
    """Represent country records from our database (read-only).

    :var int id: the record's primary key in our ``countries`` table
    :var unicode code: the country's `ISO 3166-1 alpha-2`_ code

    .. _ISO 3166-1 alpha-2 : https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2

    """

    typname = 'countries'

from __future__ import absolute_import, division, print_function, unicode_literals

from postgres.orm import Model


class Country(Model):
    """Represent country records from our database (read-only).

    :var int id: the record's primary key in our ``countries`` table
    :var unicode code: the country's `ISO 3166-1 alpha-2`_ code

    .. _ISO 3166-1 alpha-2 : https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2

    """

    typname = 'countries'

    @classmethod
    def from_code2(cls, code2):
        return cls.db.one("SELECT countries.*::countries FROM countries WHERE code2=%s", (code2,))
Add a helper to Country; should go upstream prolly
Add a helper to Country; should go upstream prolly
Python
mit
gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com
from __future__ import absolute_import, division, print_function, unicode_literals

from postgres.orm import Model


class Country(Model):
    """Represent country records from our database (read-only).

    :var int id: the record's primary key in our ``countries`` table
    :var unicode code: the country's `ISO 3166-1 alpha-2`_ code

    .. _ISO 3166-1 alpha-2 : https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2

    """

    typname = 'countries'
+     @classmethod
+     def from_code2(cls, code2):
+         return cls.db.one("SELECT countries.*::countries FROM countries WHERE code2=%s", (code2,))
+
Add a helper to Country; should go upstream prolly
## Code Before:
from __future__ import absolute_import, division, print_function, unicode_literals

from postgres.orm import Model


class Country(Model):
    """Represent country records from our database (read-only).

    :var int id: the record's primary key in our ``countries`` table
    :var unicode code: the country's `ISO 3166-1 alpha-2`_ code

    .. _ISO 3166-1 alpha-2 : https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2

    """

    typname = 'countries'
## Instruction:
Add a helper to Country; should go upstream prolly
## Code After:
from __future__ import absolute_import, division, print_function, unicode_literals

from postgres.orm import Model


class Country(Model):
    """Represent country records from our database (read-only).

    :var int id: the record's primary key in our ``countries`` table
    :var unicode code: the country's `ISO 3166-1 alpha-2`_ code

    .. _ISO 3166-1 alpha-2 : https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2

    """

    typname = 'countries'

    @classmethod
    def from_code2(cls, code2):
        return cls.db.one("SELECT countries.*::countries FROM countries WHERE code2=%s", (code2,))
from __future__ import absolute_import, division, print_function, unicode_literals

from postgres.orm import Model


class Country(Model):
    """Represent country records from our database (read-only).

    :var int id: the record's primary key in our ``countries`` table
    :var unicode code: the country's `ISO 3166-1 alpha-2`_ code

    .. _ISO 3166-1 alpha-2 : https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2

    """

    typname = 'countries'
+
+     @classmethod
+     def from_code2(cls, code2):
+         return cls.db.one("SELECT countries.*::countries FROM countries WHERE code2=%s", (code2,))
0eb20c8025a838d93a5854442640550d5bf05b0b
settings.py
settings.py
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '757224007118-0lblpo8abqeantp8mvckmabupik9edk4.apps.googleusercontent.com'
ANDROID_CLIENT_ID = 'replace with Android client ID'
IOS_CLIENT_ID = 'replace with iOS client ID'
ANDROID_AUDIENCE = WEB_CLIENT_ID

# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '757224007118-0lblpo8abqeantp8mvckmabupik9edk4.apps.googleusercontent.com'
ANDROID_CLIENT_ID = '757224007118-dpqfa375ra8rgbpslig7beh4jb6qd03s.apps.googleusercontent.com'
IOS_CLIENT_ID = '757224007118-nfgr65ic7dpiv5inbvta8a2b4j2h7d09.apps.googleusercontent.com'
ANDROID_AUDIENCE = WEB_CLIENT_ID
Add android and ios client IDs
Add android and ios client IDs
Python
apache-2.0
elbernante/conference-central,elbernante/conference-central,elbernante/conference-central
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '757224007118-0lblpo8abqeantp8mvckmabupik9edk4.apps.googleusercontent.com'
- ANDROID_CLIENT_ID = 'replace with Android client ID'
- IOS_CLIENT_ID = 'replace with iOS client ID'
+ ANDROID_CLIENT_ID = '757224007118-dpqfa375ra8rgbpslig7beh4jb6qd03s.apps.googleusercontent.com'
+ IOS_CLIENT_ID = '757224007118-nfgr65ic7dpiv5inbvta8a2b4j2h7d09.apps.googleusercontent.com'
ANDROID_AUDIENCE = WEB_CLIENT_ID
Add android and ios client IDs
## Code Before:
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '757224007118-0lblpo8abqeantp8mvckmabupik9edk4.apps.googleusercontent.com'
ANDROID_CLIENT_ID = 'replace with Android client ID'
IOS_CLIENT_ID = 'replace with iOS client ID'
ANDROID_AUDIENCE = WEB_CLIENT_ID
## Instruction:
Add android and ios client IDs
## Code After:
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '757224007118-0lblpo8abqeantp8mvckmabupik9edk4.apps.googleusercontent.com'
ANDROID_CLIENT_ID = '757224007118-dpqfa375ra8rgbpslig7beh4jb6qd03s.apps.googleusercontent.com'
IOS_CLIENT_ID = '757224007118-nfgr65ic7dpiv5inbvta8a2b4j2h7d09.apps.googleusercontent.com'
ANDROID_AUDIENCE = WEB_CLIENT_ID
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '757224007118-0lblpo8abqeantp8mvckmabupik9edk4.apps.googleusercontent.com'
- ANDROID_CLIENT_ID = 'replace with Android client ID'
- IOS_CLIENT_ID = 'replace with iOS client ID'
+ ANDROID_CLIENT_ID = '757224007118-dpqfa375ra8rgbpslig7beh4jb6qd03s.apps.googleusercontent.com'
+ IOS_CLIENT_ID = '757224007118-nfgr65ic7dpiv5inbvta8a2b4j2h7d09.apps.googleusercontent.com'
ANDROID_AUDIENCE = WEB_CLIENT_ID
2eb07ae9b98c36dc94e143003a7c44c7fbfb54f7
stronghold/middleware.py
stronghold/middleware.py
from django.contrib.auth.decorators import login_required

from stronghold import conf, utils


class LoginRequiredMiddleware(object):
    """
    Force all views to use login required

    View is deemed to be public if the @public decorator is applied to the view

    View is also deemed to be Public if listed in in django settings in the
    STRONGHOLD_PUBLIC_URLS dictionary
    each url in STRONGHOLD_PUBLIC_URLS must be a valid regex

    """
    def __init__(self, *args, **kwargs):
        self.public_view_urls = getattr(conf, 'STRONGHOLD_PUBLIC_URLS', ())

    def process_view(self, request, view_func, view_args, view_kwargs):
        # if request is authenticated, dont process it
        if request.user.is_authenticated():
            return None

        # if its a public view, don't process it
        if utils.is_view_func_public(view_func):
            return None

        # if this view matches a whitelisted regex, don't process it
        if any(view_url.match(request.path_info) for view_url in self.public_view_urls):
            return None

        return login_required(view_func)(request, *view_args, **view_kwargs)

from django.contrib.auth.decorators import login_required

from stronghold import conf, utils


class LoginRequiredMiddleware(object):
    """
    Force all views to use login required

    View is deemed to be public if the @public decorator is applied to the view

    View is also deemed to be Public if listed in in django settings in the
    STRONGHOLD_PUBLIC_URLS dictionary
    each url in STRONGHOLD_PUBLIC_URLS must be a valid regex

    """
    def __init__(self, *args, **kwargs):
        self.public_view_urls = getattr(conf, 'STRONGHOLD_PUBLIC_URLS', ())

    def process_view(self, request, view_func, view_args, view_kwargs):
        if request.user.is_authenticated() or utils.is_view_func_public(view_func) \
        or self.is_public_url(request.path_info):
            return None

        return login_required(view_func)(request, *view_args, **view_kwargs)

    def is_public_url(self, url):
        return any(public_url.match(url) for public_url in self.public_view_urls)
Refactor away unnecessary multiple return None
Refactor away unnecessary multiple return None
Python
mit
SunilMohanAdapa/django-stronghold,SunilMohanAdapa/django-stronghold,mgrouchy/django-stronghold
from django.contrib.auth.decorators import login_required

from stronghold import conf, utils


class LoginRequiredMiddleware(object):
    """
    Force all views to use login required

    View is deemed to be public if the @public decorator is applied to the view

    View is also deemed to be Public if listed in in django settings in the
    STRONGHOLD_PUBLIC_URLS dictionary
    each url in STRONGHOLD_PUBLIC_URLS must be a valid regex

    """
    def __init__(self, *args, **kwargs):
        self.public_view_urls = getattr(conf, 'STRONGHOLD_PUBLIC_URLS', ())

    def process_view(self, request, view_func, view_args, view_kwargs):
+         if request.user.is_authenticated() or utils.is_view_func_public(view_func) \
+         or self.is_public_url(request.path_info):
-         # if request is authenticated, dont process it
-         if request.user.is_authenticated():
-             return None
-
-         # if its a public view, don't process it
-         if utils.is_view_func_public(view_func):
-             return None
-
-         # if this view matches a whitelisted regex, don't process it
-         if any(view_url.match(request.path_info) for view_url in self.public_view_urls):
            return None

        return login_required(view_func)(request, *view_args, **view_kwargs)

+     def is_public_url(self, url):
+         return any(public_url.match(url) for public_url in self.public_view_urls)
+
Refactor away unnecessary multiple return None
## Code Before:
from django.contrib.auth.decorators import login_required

from stronghold import conf, utils


class LoginRequiredMiddleware(object):
    """
    Force all views to use login required

    View is deemed to be public if the @public decorator is applied to the view

    View is also deemed to be Public if listed in in django settings in the
    STRONGHOLD_PUBLIC_URLS dictionary
    each url in STRONGHOLD_PUBLIC_URLS must be a valid regex

    """
    def __init__(self, *args, **kwargs):
        self.public_view_urls = getattr(conf, 'STRONGHOLD_PUBLIC_URLS', ())

    def process_view(self, request, view_func, view_args, view_kwargs):
        # if request is authenticated, dont process it
        if request.user.is_authenticated():
            return None

        # if its a public view, don't process it
        if utils.is_view_func_public(view_func):
            return None

        # if this view matches a whitelisted regex, don't process it
        if any(view_url.match(request.path_info) for view_url in self.public_view_urls):
            return None

        return login_required(view_func)(request, *view_args, **view_kwargs)
## Instruction:
Refactor away unnecessary multiple return None
## Code After:
from django.contrib.auth.decorators import login_required

from stronghold import conf, utils


class LoginRequiredMiddleware(object):
    """
    Force all views to use login required

    View is deemed to be public if the @public decorator is applied to the view

    View is also deemed to be Public if listed in in django settings in the
    STRONGHOLD_PUBLIC_URLS dictionary
    each url in STRONGHOLD_PUBLIC_URLS must be a valid regex

    """
    def __init__(self, *args, **kwargs):
        self.public_view_urls = getattr(conf, 'STRONGHOLD_PUBLIC_URLS', ())

    def process_view(self, request, view_func, view_args, view_kwargs):
        if request.user.is_authenticated() or utils.is_view_func_public(view_func) \
        or self.is_public_url(request.path_info):
            return None

        return login_required(view_func)(request, *view_args, **view_kwargs)

    def is_public_url(self, url):
        return any(public_url.match(url) for public_url in self.public_view_urls)
from django.contrib.auth.decorators import login_required

from stronghold import conf, utils


class LoginRequiredMiddleware(object):
    """
    Force all views to use login required

    View is deemed to be public if the @public decorator is applied to the view

    View is also deemed to be Public if listed in in django settings in the
    STRONGHOLD_PUBLIC_URLS dictionary
    each url in STRONGHOLD_PUBLIC_URLS must be a valid regex

    """
    def __init__(self, *args, **kwargs):
        self.public_view_urls = getattr(conf, 'STRONGHOLD_PUBLIC_URLS', ())

    def process_view(self, request, view_func, view_args, view_kwargs):
+         if request.user.is_authenticated() or utils.is_view_func_public(view_func) \
+         or self.is_public_url(request.path_info):
-         # if request is authenticated, dont process it
-         if request.user.is_authenticated():
-             return None
-
-         # if its a public view, don't process it
-         if utils.is_view_func_public(view_func):
-             return None
-
-         # if this view matches a whitelisted regex, don't process it
-         if any(view_url.match(request.path_info) for view_url in self.public_view_urls):
            return None

        return login_required(view_func)(request, *view_args, **view_kwargs)
+
+     def is_public_url(self, url):
+         return any(public_url.match(url) for public_url in self.public_view_urls)
39d0c335759781de8cf1644cdf499588441b434d
tviserrys/urls.py
tviserrys/urls.py
from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT

urlpatterns = [
    url(r'^$', views.IndexView.as_view(), name='index'),
    url(r'^admin/', admin.site.urls),
    url(r'^login/$', auth_views.login),
    url(r'^logout/$', auth_views.logout),
    url(r'^password_change/$', auth_views.password_change),
    url(r'^password_change/done/$', auth_views.password_change_done),
    url(r'^password_reset/$', auth_views.password_reset),
    url(r'^password_reset/done/$', auth_views.password_reset_done),
    url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
    url(r'^reset/done/$', auth_views.password_reset_complete),
    url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': MEDIA_ROOT, 'show_indexes': False}),
]

from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT

urlpatterns = [
    url(r'^$', views.IndexView.as_view(), name='index'),
    url(r'^tviit/', include('tviit.urls', namespace='tviit')),
    url(r'^admin/', admin.site.urls),
    url(r'^login/$', auth_views.login),
    url(r'^logout/$', auth_views.logout),
    url(r'^password_change/$', auth_views.password_change),
    url(r'^password_change/done/$', auth_views.password_change_done),
    url(r'^password_reset/$', auth_views.password_reset),
    url(r'^password_reset/done/$', auth_views.password_reset_done),
    url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
    url(r'^reset/done/$', auth_views.password_reset_complete),
    url(r'^profile/', include('user_profile.urls', namespace='profile')),
    url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
Add Tviit and profile url patterns
Add Tviit and profile url patterns
Python
mit
DeWaster/Tviserrys,DeWaster/Tviserrys
from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT

urlpatterns = [
    url(r'^$', views.IndexView.as_view(), name='index'),
+     url(r'^tviit/', include('tviit.urls', namespace='tviit')),
    url(r'^admin/', admin.site.urls),
    url(r'^login/$', auth_views.login),
    url(r'^logout/$', auth_views.logout),
    url(r'^password_change/$', auth_views.password_change),
    url(r'^password_change/done/$', auth_views.password_change_done),
    url(r'^password_reset/$', auth_views.password_reset),
    url(r'^password_reset/done/$', auth_views.password_reset_done),
    url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
    url(r'^reset/done/$', auth_views.password_reset_complete),
+     url(r'^profile/', include('user_profile.urls', namespace='profile')),
    url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
Add Tviit and profile url patterns
## Code Before:
from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT

urlpatterns = [
    url(r'^$', views.IndexView.as_view(), name='index'),
    url(r'^admin/', admin.site.urls),
    url(r'^login/$', auth_views.login),
    url(r'^logout/$', auth_views.logout),
    url(r'^password_change/$', auth_views.password_change),
    url(r'^password_change/done/$', auth_views.password_change_done),
    url(r'^password_reset/$', auth_views.password_reset),
    url(r'^password_reset/done/$', auth_views.password_reset_done),
    url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
    url(r'^reset/done/$', auth_views.password_reset_complete),
    url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
## Instruction:
Add Tviit and profile url patterns
## Code After:
from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT

urlpatterns = [
    url(r'^$', views.IndexView.as_view(), name='index'),
    url(r'^tviit/', include('tviit.urls', namespace='tviit')),
    url(r'^admin/', admin.site.urls),
    url(r'^login/$', auth_views.login),
    url(r'^logout/$', auth_views.logout),
    url(r'^password_change/$', auth_views.password_change),
    url(r'^password_change/done/$', auth_views.password_change_done),
    url(r'^password_reset/$', auth_views.password_reset),
    url(r'^password_reset/done/$', auth_views.password_reset_done),
    url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
    url(r'^reset/done/$', auth_views.password_reset_complete),
    url(r'^profile/', include('user_profile.urls', namespace='profile')),
    url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT

urlpatterns = [
    url(r'^$', views.IndexView.as_view(), name='index'),
+     url(r'^tviit/', include('tviit.urls', namespace='tviit')),
    url(r'^admin/', admin.site.urls),
    url(r'^login/$', auth_views.login),
    url(r'^logout/$', auth_views.logout),
    url(r'^password_change/$', auth_views.password_change),
    url(r'^password_change/done/$', auth_views.password_change_done),
    url(r'^password_reset/$', auth_views.password_reset),
    url(r'^password_reset/done/$', auth_views.password_reset_done),
    url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
    url(r'^reset/done/$', auth_views.password_reset_complete),
+     url(r'^profile/', include('user_profile.urls', namespace='profile')),
    url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
df8ae0415f9bf10c04472fb3009e91d7c3d7e24f
teuthology/sentry.py
teuthology/sentry.py
from raven import Client

client = None


def get_client(ctx):
    if client:
        return client
    dsn = ctx.teuthology_config.get('sentry_dsn')
    if dsn:
        client = Client(dsn=dsn)
    return client

from raven import Client

client = None


def get_client(ctx):
    global client
    if client:
        return client
    dsn = ctx.teuthology_config.get('sentry_dsn')
    if dsn:
        client = Client(dsn=dsn)
    return client
Make client a global variable
Make client a global variable
Python
mit
robbat2/teuthology,ceph/teuthology,tchaikov/teuthology,zhouyuan/teuthology,dmick/teuthology,michaelsevilla/teuthology,dreamhost/teuthology,SUSE/teuthology,t-miyamae/teuthology,caibo2014/teuthology,yghannam/teuthology,SUSE/teuthology,SUSE/teuthology,tchaikov/teuthology,michaelsevilla/teuthology,dmick/teuthology,ktdreyer/teuthology,robbat2/teuthology,ivotron/teuthology,zhouyuan/teuthology,dreamhost/teuthology,t-miyamae/teuthology,ktdreyer/teuthology,ivotron/teuthology,yghannam/teuthology,ceph/teuthology,dmick/teuthology,caibo2014/teuthology
from raven import Client

client = None


def get_client(ctx):
+     global client
    if client:
        return client
    dsn = ctx.teuthology_config.get('sentry_dsn')
    if dsn:
        client = Client(dsn=dsn)
    return client
Make client a global variable
## Code Before:
from raven import Client

client = None


def get_client(ctx):
    if client:
        return client
    dsn = ctx.teuthology_config.get('sentry_dsn')
    if dsn:
        client = Client(dsn=dsn)
    return client
## Instruction:
Make client a global variable
## Code After:
from raven import Client

client = None


def get_client(ctx):
    global client
    if client:
        return client
    dsn = ctx.teuthology_config.get('sentry_dsn')
    if dsn:
        client = Client(dsn=dsn)
    return client
from raven import Client

client = None


def get_client(ctx):
+     global client
    if client:
        return client
    dsn = ctx.teuthology_config.get('sentry_dsn')
    if dsn:
        client = Client(dsn=dsn)
    return client
76166f243b9f5f21582c95a843ddfa174ded8602
PyFVCOM/__init__.py
PyFVCOM/__init__.py
__version__ = '1.6.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = '[email protected]'

import inspect
from warnings import warn

# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tide_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import plot
from PyFVCOM import utilities

__version__ = '1.6.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = '[email protected]'

import inspect
from warnings import warn

# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide_tools
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import plot
from PyFVCOM import utilities
Put things in in alphabetical order.
Put things in in alphabetical order.
Python
mit
pwcazenave/PyFVCOM
__version__ = '1.6.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = '[email protected]'

import inspect
from warnings import warn

# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
+ from PyFVCOM import tidal_ellipse
from PyFVCOM import tide_tools
- from PyFVCOM import tidal_ellipse
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import plot
from PyFVCOM import utilities
Put things in in alphabetical order.
## Code Before:
__version__ = '1.6.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = '[email protected]'

import inspect
from warnings import warn

# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tide_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import plot
from PyFVCOM import utilities
## Instruction:
Put things in in alphabetical order.
## Code After:
__version__ = '1.6.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = '[email protected]'

import inspect
from warnings import warn

# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide_tools
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import plot
from PyFVCOM import utilities
__version__ = '1.6.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = '[email protected]'

import inspect
from warnings import warn

# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
+ from PyFVCOM import tidal_ellipse
from PyFVCOM import tide_tools
- from PyFVCOM import tidal_ellipse
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import plot
from PyFVCOM import utilities
c97e5cf11fc21e2ef4ee04779a424e4d6a2b96ae
tools/perf/metrics/__init__.py
tools/perf/metrics/__init__.py
class Metric(object):
  """Base class for all the metrics that are used by telemetry measurements.

  The Metric class represents a way of measuring something. Metrics are
  helper classes used by PageMeasurements. Each PageMeasurement may use
  multiple metrics; each metric should be focussed on collecting data
  about one thing.
  """

  def Start(self, page, tab):
    """Start collecting data for this metric."""
    raise NotImplementedError()

  def Stop(self, page, tab):
    """Stop collecting data for this metric (if applicable)."""
    raise NotImplementedError()

  def AddResults(self, tab, results):
    """Add the data collected into the results object for a measurement.

    Metrics may implement AddResults to provide a common way to add results
    to the PageMeasurementResults in PageMeasurement.AddMeasurement --
    results should be added with results.Add(trace_name, unit, value).
    """
    raise NotImplementedError()

class Metric(object):
  """Base class for all the metrics that are used by telemetry measurements.

  The Metric class represents a way of measuring something. Metrics are
  helper classes used by PageMeasurements. Each PageMeasurement may use
  multiple metrics; each metric should be focussed on collecting data
  about one thing.
  """

  def CustomizeBrowserOptions(self, options):
    """Add browser options that are required by this metric.

    Some metrics do not have any special browser options that need
    to be added, and they do not need to override this method; by
    default, no browser options are added.

    To add options here, call options.AppendExtraBrowserArg(arg).
    """
    pass

  def Start(self, page, tab):
    """Start collecting data for this metric."""
    raise NotImplementedError()

  def Stop(self, page, tab):
    """Stop collecting data for this metric (if applicable)."""
    raise NotImplementedError()

  def AddResults(self, tab, results):
    """Add the data collected into the results object for a measurement.

    Metrics may implement AddResults to provide a common way to add results
    to the PageMeasurementResults in PageMeasurement.AddMeasurement --
    results should be added with results.Add(trace_name, unit, value).
    """
    raise NotImplementedError()
Add CustomizeBrowserOptions method to Metric base class
Add CustomizeBrowserOptions method to Metric base class

BUG=271177

Review URL: https://chromiumcodereview.appspot.com/22938004

git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@217198 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
mogoweb/chromium-crosswalk,Just-D/chromium-1,M4sse/chromium.src,dushu1203/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,ChromiumWebApps/chromium,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,M4sse/chromium.src,dednal/chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,ltilve/chromium,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,dushu1203/chromium.src,markYoungH/chromium.src,M4sse/chromium.src,jaruba/chromium.src,ondra-novak/chromium.src,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,dednal/chromium.src,mogoweb/chromium-crosswalk,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,anirudhSK/chromium,Jonekee/chromium.src,Just-D/chromium-1,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,ltilve/chromium,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,markYoungH/chromium.src,M4sse/chromium.src,jaruba/chromium.src,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,patrickm/chromium.src,Just-D/chromium-1,dushu1203/chromium.src,axinging/chromium-crosswalk,dednal/chromium.src,dednal/chromium.src,jaruba/chromium.src,ltilve/chromium,Fireblend/chromium-crosswalk,anirudhSK/chromium,markYoungH/chromium.src,markYoungH/chromium.src,jaruba/chromium.src,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,Chilledheart/chromium,mogoweb/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,patrickm/chromium.src,ltilve/chromium,patrickm/chromium.src,ondra-novak/chromium.src,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,ltilve/chromium,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,ltilve/chromium,jaruba/chromium.src,patrickm/chromium.src,M4sse/chromium.src,Chilledheart/chromium,krieger-od/nwjs_chromium.src,anirudhSK/chromium,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,ltilve/chromium,dednal/chromium.src,anirudhSK/chromium,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,mogoweb/chromium-crosswalk,anirudhSK/chromium,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,krieger-od/nwjs_chromium.src,dednal/chromium.src,jaruba/chromium.src,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,dednal/chromium.src,bright-sparks/chromium-spacewalk,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,bright-sparks/chromium-spacewalk,dednal/chromium.src,dushu1203/chromium.src,ltilve/chromium,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,chuan9/chromium-crosswalk,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,ondra-novak/chromium.src,Chilledheart/chromium,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,Just-D/chromium-1,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,ondra-novak/chromium.src,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,littlstar/chromium.src,dednal/chromium.src,dednal/chromium.src,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,ltilve/chromium,patrickm/chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src
class Metric(object):
  """Base class for all the metrics that are used by telemetry measurements.

  The Metric class represents a way of measuring something. Metrics are
  helper classes used by PageMeasurements. Each PageMeasurement may use
  multiple metrics; each metric should be focussed on collecting data
  about one thing.
  """
+
+   def CustomizeBrowserOptions(self, options):
+     """Add browser options that are required by this metric.
+
+     Some metrics do not have any special browser options that need
+     to be added, and they do not need to override this method; by
+     default, no browser options are added.
+
+     To add options here, call options.AppendExtraBrowserArg(arg).
+     """
+     pass

  def Start(self, page, tab):
    """Start collecting data for this metric."""
    raise NotImplementedError()

  def Stop(self, page, tab):
    """Stop collecting data for this metric (if applicable)."""
    raise NotImplementedError()

  def AddResults(self, tab, results):
    """Add the data collected into the results object for a measurement.

    Metrics may implement AddResults to provide a common way to add results
    to the PageMeasurementResults in PageMeasurement.AddMeasurement --
    results should be added with results.Add(trace_name, unit, value).
    """
    raise NotImplementedError()
Add CustomizeBrowserOptions method to Metric base class
## Code Before:
class Metric(object):
  """Base class for all the metrics that are used by telemetry measurements.

  The Metric class represents a way of measuring something. Metrics are
  helper classes used by PageMeasurements. Each PageMeasurement may use
  multiple metrics; each metric should be focussed on collecting data
  about one thing.
  """

  def Start(self, page, tab):
    """Start collecting data for this metric."""
    raise NotImplementedError()

  def Stop(self, page, tab):
    """Stop collecting data for this metric (if applicable)."""
    raise NotImplementedError()

  def AddResults(self, tab, results):
    """Add the data collected into the results object for a measurement.

    Metrics may implement AddResults to provide a common way to add results
    to the PageMeasurementResults in PageMeasurement.AddMeasurement --
    results should be added with results.Add(trace_name, unit, value).
    """
    raise NotImplementedError()
## Instruction:
Add CustomizeBrowserOptions method to Metric base class
## Code After:
class Metric(object):
  """Base class for all the metrics that are used by telemetry measurements.

  The Metric class represents a way of measuring something. Metrics are
  helper classes used by PageMeasurements. Each PageMeasurement may use
  multiple metrics; each metric should be focussed on collecting data
  about one thing.
  """

  def CustomizeBrowserOptions(self, options):
    """Add browser options that are required by this metric.

    Some metrics do not have any special browser options that need
    to be added, and they do not need to override this method; by
    default, no browser options are added.

    To add options here, call options.AppendExtraBrowserArg(arg).
    """
    pass

  def Start(self, page, tab):
    """Start collecting data for this metric."""
    raise NotImplementedError()

  def Stop(self, page, tab):
    """Stop collecting data for this metric (if applicable)."""
    raise NotImplementedError()

  def AddResults(self, tab, results):
    """Add the data collected into the results object for a measurement.

    Metrics may implement AddResults to provide a common way to add results
    to the PageMeasurementResults in PageMeasurement.AddMeasurement --
    results should be added with results.Add(trace_name, unit, value).
    """
    raise NotImplementedError()
class Metric(object):
  """Base class for all the metrics that are used by telemetry measurements.

  The Metric class represents a way of measuring something. Metrics are
  helper classes used by PageMeasurements. Each PageMeasurement may use
  multiple metrics; each metric should be focussed on collecting data
  about one thing.
  """
+
+   def CustomizeBrowserOptions(self, options):
+     """Add browser options that are required by this metric.
+
+     Some metrics do not have any special browser options that need
+     to be added, and they do not need to override this method; by
+     default, no browser options are added.
+
+     To add options here, call options.AppendExtraBrowserArg(arg).
+     """
+     pass

  def Start(self, page, tab):
    """Start collecting data for this metric."""
    raise NotImplementedError()

  def Stop(self, page, tab):
    """Stop collecting data for this metric (if applicable)."""
    raise NotImplementedError()

  def AddResults(self, tab, results):
    """Add the data collected into the results object for a measurement.

    Metrics may implement AddResults to provide a common way to add results
    to the PageMeasurementResults in PageMeasurement.AddMeasurement --
    results should be added with results.Add(trace_name, unit, value).
    """
    raise NotImplementedError()
a8601d8a17c9ba8e87b8336870e0d52f79e0ffa2
indra/tests/test_omnipath.py
indra/tests/test_omnipath.py
from __future__ import unicode_literals
from builtins import dict, str
from indra.statements import Phosphorylation
from indra.databases import omnipath as op


def test_query_ptms():
    stmts = op.get_ptms(['Q13873'])
    assert len(stmts) == 1
    assert isinstance(stmts[0], Phosphorylation)
    assert stmts[0].enz.name == 'CSNK2A1'
    assert stmts[0].sub.name == 'BMPR2'
    assert stmts[0].residue == 'S'
    assert stmts[0].position == '757'

import requests
from indra.sources.omnipath import OmniPathModificationProcessor,\
    OmniPathLiganReceptorProcessor
from indra.sources.omnipath.api import op_url
from indra.statements import Agent, Phosphorylation
from indra.preassembler.grounding_mapper import GroundingMapper

BRAF_UPID = 'P15056'
JAK2_UPID = 'O60674'
BRAF_AG = Agent(None, db_refs={'UP': BRAF_UPID})
GroundingMapper.standardize_agent_name(BRAF_AG)
JAK2_AG = Agent(None, db_refs={'UP': JAK2_UPID})
GroundingMapper.standardize_agent_name(JAK2_AG)


def test_omnipath_web_api():
    query_url = '%s/queries'
    res = requests.get(query_url)
    assert res.status_code == 200


def test_query_ptms():
    stmts = op.get_ptms(['Q13873'])
    assert len(stmts) == 1
    assert isinstance(stmts[0], Phosphorylation)
    assert stmts[0].enz.name == 'CSNK2A1'
    assert stmts[0].sub.name == 'BMPR2'
    assert stmts[0].residue == 'S'
    assert stmts[0].position == '757'
Update imports, test general web api
Update imports, test general web api
Python
bsd-2-clause
johnbachman/indra,johnbachman/indra,johnbachman/belpy,johnbachman/indra,johnbachman/belpy,sorgerlab/belpy,sorgerlab/indra,sorgerlab/indra,bgyori/indra,johnbachman/belpy,bgyori/indra,sorgerlab/belpy,bgyori/indra,sorgerlab/indra,sorgerlab/belpy
- from __future__ import unicode_literals
- from builtins import dict, str
+ import requests
+ from indra.sources.omnipath import OmniPathModificationProcessor,\
+     OmniPathLiganReceptorProcessor
+ from indra.sources.omnipath.api import op_url
- from indra.statements import Phosphorylation
+ from indra.statements import Agent, Phosphorylation
- from indra.databases import omnipath as op
+ from indra.preassembler.grounding_mapper import GroundingMapper
+
+ BRAF_UPID = 'P15056'
+ JAK2_UPID = 'O60674'
+ BRAF_AG = Agent(None, db_refs={'UP': BRAF_UPID})
+ GroundingMapper.standardize_agent_name(BRAF_AG)
+ JAK2_AG = Agent(None, db_refs={'UP': JAK2_UPID})
+ GroundingMapper.standardize_agent_name(JAK2_AG)
+
+
+ def test_omnipath_web_api():
+     query_url = '%s/queries'
+     res = requests.get(query_url)
+     assert res.status_code == 200


def test_query_ptms():
    stmts = op.get_ptms(['Q13873'])
    assert len(stmts) == 1
    assert isinstance(stmts[0], Phosphorylation)
    assert stmts[0].enz.name == 'CSNK2A1'
    assert stmts[0].sub.name == 'BMPR2'
    assert stmts[0].residue == 'S'
    assert stmts[0].position == '757'
Update imports, test general web api
## Code Before:
from __future__ import unicode_literals
from builtins import dict, str
from indra.statements import Phosphorylation
from indra.databases import omnipath as op


def test_query_ptms():
    stmts = op.get_ptms(['Q13873'])
    assert len(stmts) == 1
    assert isinstance(stmts[0], Phosphorylation)
    assert stmts[0].enz.name == 'CSNK2A1'
    assert stmts[0].sub.name == 'BMPR2'
    assert stmts[0].residue == 'S'
    assert stmts[0].position == '757'

## Instruction:
Update imports, test general web api

## Code After:
import requests
from indra.sources.omnipath import OmniPathModificationProcessor,\
    OmniPathLiganReceptorProcessor
from indra.sources.omnipath.api import op_url
from indra.statements import Agent, Phosphorylation
from indra.preassembler.grounding_mapper import GroundingMapper

BRAF_UPID = 'P15056'
JAK2_UPID = 'O60674'
BRAF_AG = Agent(None, db_refs={'UP': BRAF_UPID})
GroundingMapper.standardize_agent_name(BRAF_AG)
JAK2_AG = Agent(None, db_refs={'UP': JAK2_UPID})
GroundingMapper.standardize_agent_name(JAK2_AG)


def test_omnipath_web_api():
    query_url = '%s/queries'
    res = requests.get(query_url)
    assert res.status_code == 200


def test_query_ptms():
    stmts = op.get_ptms(['Q13873'])
    assert len(stmts) == 1
    assert isinstance(stmts[0], Phosphorylation)
    assert stmts[0].enz.name == 'CSNK2A1'
    assert stmts[0].sub.name == 'BMPR2'
    assert stmts[0].residue == 'S'
    assert stmts[0].position == '757'
- from __future__ import unicode_literals
- from builtins import dict, str
+ import requests
+ from indra.sources.omnipath import OmniPathModificationProcessor,\
+     OmniPathLiganReceptorProcessor
+ from indra.sources.omnipath.api import op_url
- from indra.statements import Phosphorylation
+ from indra.statements import Agent, Phosphorylation
?                              +++++++
- from indra.databases import omnipath as op
+ from indra.preassembler.grounding_mapper import GroundingMapper
+
+ BRAF_UPID = 'P15056'
+ JAK2_UPID = 'O60674'
+ BRAF_AG = Agent(None, db_refs={'UP': BRAF_UPID})
+ GroundingMapper.standardize_agent_name(BRAF_AG)
+ JAK2_AG = Agent(None, db_refs={'UP': JAK2_UPID})
+ GroundingMapper.standardize_agent_name(JAK2_AG)
+
+
+ def test_omnipath_web_api():
+     query_url = '%s/queries'
+     res = requests.get(query_url)
+     assert res.status_code == 200


def test_query_ptms():
    stmts = op.get_ptms(['Q13873'])
    assert len(stmts) == 1
    assert isinstance(stmts[0], Phosphorylation)
    assert stmts[0].enz.name == 'CSNK2A1'
    assert stmts[0].sub.name == 'BMPR2'
    assert stmts[0].residue == 'S'
    assert stmts[0].position == '757'
ba23f58f7359b943d8d8ae7f05e15419c6918c6f
test/blacklist.py
test/blacklist.py
blacklist = {}
blacklist = {'BasicExprCommandsTestCase.test_evaluate_expression_python': 'Crashed while running the entire test suite with CC=clang'
             # To reproduce the crash: CC=clang ./dotest.py -v -w 2> ~/Developer/Log/lldbtest.log
             # The clang version used is clang-126.
             # Two radars filed for the crashes: rdar://problem/8769826 and rdar://problem/8773329.
             # To skip this test case: CC=clang ./dotest.py -b blacklist.py -v -w 2> ~/Developer/Log/lldbtest.log
             }
Add an entry for test case BasicExprCommandsTestCase.test_evaluate_expression_python, due to crashes while running the entire test suite with clang-126.
Add an entry for test case BasicExprCommandsTestCase.test_evaluate_expression_python,
due to crashes while running the entire test suite with clang-126.

To reproduce: CC=clang ./dotest.py -v -w 2> ~/Developer/Log/lldbtest.log

To skip this test case: CC=clang ./dotest.py -b blacklist.py -v -w 2> ~/Developer/Log/lldbtest.log

git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@121887 91177308-0d34-0410-b5e6-96231b3b80d8
Python
apache-2.0
llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb
- blacklist = {}
+ blacklist = {'BasicExprCommandsTestCase.test_evaluate_expression_python': 'Crashed while running the entire test suite with CC=clang'
+              # To reproduce the crash: CC=clang ./dotest.py -v -w 2> ~/Developer/Log/lldbtest.log
+              # The clang version used is clang-126.
+              # Two radars filed for the crashes: rdar://problem/8769826 and rdar://problem/8773329.
+              # To skip this test case: CC=clang ./dotest.py -b blacklist.py -v -w 2> ~/Developer/Log/lldbtest.log
+              }
Add an entry for test case BasicExprCommandsTestCase.test_evaluate_expression_python, due to crashes while running the entire test suite with clang-126.
## Code Before:
blacklist = {}

## Instruction:
Add an entry for test case BasicExprCommandsTestCase.test_evaluate_expression_python, due to crashes while running the entire test suite with clang-126.

## Code After:
blacklist = {'BasicExprCommandsTestCase.test_evaluate_expression_python': 'Crashed while running the entire test suite with CC=clang'
             # To reproduce the crash: CC=clang ./dotest.py -v -w 2> ~/Developer/Log/lldbtest.log
             # The clang version used is clang-126.
             # Two radars filed for the crashes: rdar://problem/8769826 and rdar://problem/8773329.
             # To skip this test case: CC=clang ./dotest.py -b blacklist.py -v -w 2> ~/Developer/Log/lldbtest.log
             }
- blacklist = {}
+ blacklist = {'BasicExprCommandsTestCase.test_evaluate_expression_python': 'Crashed while running the entire test suite with CC=clang'
+              # To reproduce the crash: CC=clang ./dotest.py -v -w 2> ~/Developer/Log/lldbtest.log
+              # The clang version used is clang-126.
+              # Two radars filed for the crashes: rdar://problem/8769826 and rdar://problem/8773329.
+              # To skip this test case: CC=clang ./dotest.py -b blacklist.py -v -w 2> ~/Developer/Log/lldbtest.log
+              }
3d1521892ba17120ca4461335713b9d2254311fe
marble/tests/test_clustering.py
marble/tests/test_clustering.py
""" Tests for the clustering computation """ from nose.tools import * import marble as mb # Test c = 0 in the checkerboard case # Test c = 1 in the fully clustered case # Test an intermediate situation with known result
""" Tests for the clustering computation """ from nose.tools import * import itertools from shapely.geometry import Polygon import marble as mb # # Synthetic data for tests # def grid(): """ Areal units arranged in a grid """ au = [i*3+j for i,j in itertools.product(range(3), repeat=2)] units = {a:Polygon([(a%3, a/3), (a%3, 1+a/3), (1+a%3, 1+a/3), (1+a%3, a/3)]) for a in au} return units def checkerboard_city(): city = {0: {"A":100, "B":1}, 1: {"A":1, "B":100}, 2: {"A":100, "B":1}, 3: {"A":1, "B":100}, 4: {"A":100, "B":1}, 5: {"A":1, "B":100}, 6: {"A":100, "B":1}, 7: {"A":1, "B":100}, 8: {"A":100, "B":1}} return city def clustered_city(): city = {0: {"A":100, "B":1}, 1: {"A":100, "B":1}, 2: {"A":1, "B":100}, 3: {"A":100, "B":1}, 4: {"A":1, "B":100}, 5: {"A":1, "B":100}, 6: {"A":100, "B":1}, 7: {"A":1, "B":100}, 8: {"A":1, "B":100}} return city # # Perform tests # class TestClustering(object): def test_clustering_checkerboard(self): units = grid() city = checkerboard_city() c = mb.clustering(city, units) assert c["A"] == 0.0 assert c["B"] == 0.0 def test_clustering_checkerboard(self): units = grid() city = clustered_city() c = mb.clustering(city, units) assert c["A"] == 1.0 assert c["B"] == 1.0
Add tests for the clustering of cities
Add tests for the clustering of cities
Python
bsd-3-clause
walkerke/marble,scities/marble
""" Tests for the clustering computation """ from nose.tools import * + import itertools + from shapely.geometry import Polygon import marble as mb - # Test c = 0 in the checkerboard case - # Test c = 1 in the fully clustered case - # Test an intermediate situation with known result + # + # Synthetic data for tests + # + def grid(): + """ Areal units arranged in a grid """ + au = [i*3+j for i,j in itertools.product(range(3), repeat=2)] + units = {a:Polygon([(a%3, a/3), + (a%3, 1+a/3), + (1+a%3, 1+a/3), + (1+a%3, a/3)]) for a in au} + return units + + def checkerboard_city(): + city = {0: {"A":100, "B":1}, + 1: {"A":1, "B":100}, + 2: {"A":100, "B":1}, + 3: {"A":1, "B":100}, + 4: {"A":100, "B":1}, + 5: {"A":1, "B":100}, + 6: {"A":100, "B":1}, + 7: {"A":1, "B":100}, + 8: {"A":100, "B":1}} + return city + + def clustered_city(): + city = {0: {"A":100, "B":1}, + 1: {"A":100, "B":1}, + 2: {"A":1, "B":100}, + 3: {"A":100, "B":1}, + 4: {"A":1, "B":100}, + 5: {"A":1, "B":100}, + 6: {"A":100, "B":1}, + 7: {"A":1, "B":100}, + 8: {"A":1, "B":100}} + return city + + # + # Perform tests + # + class TestClustering(object): + + def test_clustering_checkerboard(self): + units = grid() + city = checkerboard_city() + c = mb.clustering(city, units) + + assert c["A"] == 0.0 + assert c["B"] == 0.0 + + def test_clustering_checkerboard(self): + units = grid() + city = clustered_city() + c = mb.clustering(city, units) + + assert c["A"] == 1.0 + assert c["B"] == 1.0 + +
Add tests for the clustering of cities
## Code Before:
""" Tests for the clustering computation """
from nose.tools import *

import marble as mb


# Test c = 0 in the checkerboard case
# Test c = 1 in the fully clustered case
# Test an intermediate situation with known result

## Instruction:
Add tests for the clustering of cities

## Code After:
""" Tests for the clustering computation """
from nose.tools import *
import itertools
from shapely.geometry import Polygon

import marble as mb


#
# Synthetic data for tests
#
def grid():
    """ Areal units arranged in a grid """
    au = [i*3+j for i,j in itertools.product(range(3), repeat=2)]
    units = {a:Polygon([(a%3, a/3),
                        (a%3, 1+a/3),
                        (1+a%3, 1+a/3),
                        (1+a%3, a/3)]) for a in au}
    return units

def checkerboard_city():
    city = {0: {"A":100, "B":1},
            1: {"A":1, "B":100},
            2: {"A":100, "B":1},
            3: {"A":1, "B":100},
            4: {"A":100, "B":1},
            5: {"A":1, "B":100},
            6: {"A":100, "B":1},
            7: {"A":1, "B":100},
            8: {"A":100, "B":1}}
    return city

def clustered_city():
    city = {0: {"A":100, "B":1},
            1: {"A":100, "B":1},
            2: {"A":1, "B":100},
            3: {"A":100, "B":1},
            4: {"A":1, "B":100},
            5: {"A":1, "B":100},
            6: {"A":100, "B":1},
            7: {"A":1, "B":100},
            8: {"A":1, "B":100}}
    return city


#
# Perform tests
#
class TestClustering(object):

    def test_clustering_checkerboard(self):
        units = grid()
        city = checkerboard_city()
        c = mb.clustering(city, units)

        assert c["A"] == 0.0
        assert c["B"] == 0.0

    def test_clustering_checkerboard(self):
        units = grid()
        city = clustered_city()
        c = mb.clustering(city, units)

        assert c["A"] == 1.0
        assert c["B"] == 1.0
""" Tests for the clustering computation """ from nose.tools import * + import itertools + from shapely.geometry import Polygon import marble as mb - # Test c = 0 in the checkerboard case - # Test c = 1 in the fully clustered case - # Test an intermediate situation with known result + # + # Synthetic data for tests + # + def grid(): + """ Areal units arranged in a grid """ + au = [i*3+j for i,j in itertools.product(range(3), repeat=2)] + units = {a:Polygon([(a%3, a/3), + (a%3, 1+a/3), + (1+a%3, 1+a/3), + (1+a%3, a/3)]) for a in au} + return units + def checkerboard_city(): + city = {0: {"A":100, "B":1}, + 1: {"A":1, "B":100}, + 2: {"A":100, "B":1}, + 3: {"A":1, "B":100}, + 4: {"A":100, "B":1}, + 5: {"A":1, "B":100}, + 6: {"A":100, "B":1}, + 7: {"A":1, "B":100}, + 8: {"A":100, "B":1}} + return city + + def clustered_city(): + city = {0: {"A":100, "B":1}, + 1: {"A":100, "B":1}, + 2: {"A":1, "B":100}, + 3: {"A":100, "B":1}, + 4: {"A":1, "B":100}, + 5: {"A":1, "B":100}, + 6: {"A":100, "B":1}, + 7: {"A":1, "B":100}, + 8: {"A":1, "B":100}} + return city + + + + # + # Perform tests + # + class TestClustering(object): + + def test_clustering_checkerboard(self): + units = grid() + city = checkerboard_city() + c = mb.clustering(city, units) + + assert c["A"] == 0.0 + assert c["B"] == 0.0 + + def test_clustering_checkerboard(self): + units = grid() + city = clustered_city() + c = mb.clustering(city, units) + + assert c["A"] == 1.0 + assert c["B"] == 1.0 +
6dfed291a253174672d7003700ab770aabcacae4
backend/breach/models/__init__.py
backend/breach/models/__init__.py
from .victim import Victim
from .target import Target
from .round import Round
from .sampleset import SampleSet
__all__ = ['victim', 'target', 'round', 'sampleset']
from .victim import Victim
from .target import Target
from .round import Round
from .sampleset import SampleSet
Add __all__ to models init file
Add __all__ to models init file
Python
mit
dimriou/rupture,esarafianou/rupture,dimriou/rupture,dimkarakostas/rupture,dionyziz/rupture,dimkarakostas/rupture,esarafianou/rupture,esarafianou/rupture,dimkarakostas/rupture,dimriou/rupture,dionyziz/rupture,esarafianou/rupture,dionyziz/rupture,dimkarakostas/rupture,dionyziz/rupture,dimriou/rupture,dimriou/rupture,dionyziz/rupture,dimkarakostas/rupture
+ __all__ = ['victim', 'target', 'round', 'sampleset']
from .victim import Victim
from .target import Target
from .round import Round
from .sampleset import SampleSet
Add __all__ to models init file
## Code Before:
from .victim import Victim
from .target import Target
from .round import Round
from .sampleset import SampleSet

## Instruction:
Add __all__ to models init file

## Code After:
__all__ = ['victim', 'target', 'round', 'sampleset']
from .victim import Victim
from .target import Target
from .round import Round
from .sampleset import SampleSet
+ __all__ = ['victim', 'target', 'round', 'sampleset']
from .victim import Victim
from .target import Target
from .round import Round
from .sampleset import SampleSet
895d51105cd51387e3ac5db595333ff794f3e2a7
yotta/lib/ordered_json.py
yotta/lib/ordered_json.py
import json
import os
import stat
from collections import OrderedDict

# provide read & write methods for json files that maintain the order of
# dictionary keys, and indent consistently

# Internals
def load(path):
    with open(path, 'r') as f:
        # using an ordered dictionary for objects so that we preserve the order
        # of keys in objects (including, for example, dependencies)
        return json.load(f, object_pairs_hook=OrderedDict)

def dump(path, obj):
    with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as f:
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
        json.dump(obj, f, indent=2, separators=(',', ': '))
        f.truncate()

def loads(string):
    return json.loads(string, object_pairs_hook=OrderedDict)
import json
import os
import stat
from collections import OrderedDict

# provide read & write methods for json files that maintain the order of
# dictionary keys, and indent consistently

# Internals
def load(path):
    with open(path, 'r') as f:
        # using an ordered dictionary for objects so that we preserve the order
        # of keys in objects (including, for example, dependencies)
        return json.load(f, object_pairs_hook=OrderedDict)

def dump(path, obj):
    with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as f:
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
        json.dump(obj, f, indent=2, separators=(',', ': '))
        f.write(u'\n')
        f.truncate()

def loads(string):
    return json.loads(string, object_pairs_hook=OrderedDict)
Add a newline at the end of json files when writing them.
Add a newline at the end of json files when writing them.

This fixes the really irritating ping-pong of newline/nonewline when editing json files with an editor, and with `yotta version` commands.
Python
apache-2.0
BlackstoneEngineering/yotta,autopulated/yotta,ARMmbed/yotta,stevenewey/yotta,ARMmbed/yotta,autopulated/yotta,ntoll/yotta,BlackstoneEngineering/yotta,stevenewey/yotta,eyeye/yotta,ntoll/yotta,eyeye/yotta
import json
import os
import stat
from collections import OrderedDict

# provide read & write methods for json files that maintain the order of
# dictionary keys, and indent consistently

# Internals
def load(path):
    with open(path, 'r') as f:
        # using an ordered dictionary for objects so that we preserve the order
        # of keys in objects (including, for example, dependencies)
        return json.load(f, object_pairs_hook=OrderedDict)

def dump(path, obj):
    with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as f:
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
        json.dump(obj, f, indent=2, separators=(',', ': '))
+         f.write(u'\n')
        f.truncate()

def loads(string):
    return json.loads(string, object_pairs_hook=OrderedDict)
Add a newline at the end of json files when writing them.
## Code Before:
import json
import os
import stat
from collections import OrderedDict

# provide read & write methods for json files that maintain the order of
# dictionary keys, and indent consistently

# Internals
def load(path):
    with open(path, 'r') as f:
        # using an ordered dictionary for objects so that we preserve the order
        # of keys in objects (including, for example, dependencies)
        return json.load(f, object_pairs_hook=OrderedDict)

def dump(path, obj):
    with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as f:
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
        json.dump(obj, f, indent=2, separators=(',', ': '))
        f.truncate()

def loads(string):
    return json.loads(string, object_pairs_hook=OrderedDict)

## Instruction:
Add a newline at the end of json files when writing them.

## Code After:
import json
import os
import stat
from collections import OrderedDict

# provide read & write methods for json files that maintain the order of
# dictionary keys, and indent consistently

# Internals
def load(path):
    with open(path, 'r') as f:
        # using an ordered dictionary for objects so that we preserve the order
        # of keys in objects (including, for example, dependencies)
        return json.load(f, object_pairs_hook=OrderedDict)

def dump(path, obj):
    with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as f:
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
        json.dump(obj, f, indent=2, separators=(',', ': '))
        f.write(u'\n')
        f.truncate()

def loads(string):
    return json.loads(string, object_pairs_hook=OrderedDict)
import json
import os
import stat
from collections import OrderedDict

# provide read & write methods for json files that maintain the order of
# dictionary keys, and indent consistently

# Internals
def load(path):
    with open(path, 'r') as f:
        # using an ordered dictionary for objects so that we preserve the order
        # of keys in objects (including, for example, dependencies)
        return json.load(f, object_pairs_hook=OrderedDict)

def dump(path, obj):
    with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as f:
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
        json.dump(obj, f, indent=2, separators=(',', ': '))
+         f.write(u'\n')
        f.truncate()

def loads(string):
    return json.loads(string, object_pairs_hook=OrderedDict)
f86c925604356b25a8c5c0c71644f0df6f1b48f8
setup_directory.py
setup_directory.py
from __future__ import division, print_function, absolute_import

import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
    import urllib.request as urllib2
except ImportError:
    import urllib2

MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'


@contextmanager
def change_directory(path):
    old_cwd = os.getcwd()
    try:
        os.chdir(path)
        yield
    finally:
        os.chdir(old_cwd)


def download_install_script():
    location = os.path.join(
        tempfile.gettempdir(),
        os.path.split(MINICONDA_URL)[-1])
    with open(location, 'wb') as outfile:
        response = urllib2.urlopen(MINICONDA_URL)
        data = response.read()
        outfile.write(data)

    return location
from __future__ import division, print_function, absolute_import

import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
    import urllib.request as urllib2
except ImportError:
    import urllib2

MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'


@contextmanager
def change_directory(path):
    old_cwd = os.getcwd()
    try:
        os.chdir(path)
        yield
    finally:
        os.chdir(old_cwd)


def download_install_script():
    location = os.path.join(
        tempfile.gettempdir(),
        os.path.split(MINICONDA_URL)[-1])
    with open(location, 'wb') as outfile:
        response = urllib2.urlopen(MINICONDA_URL)
        data = response.read()
        outfile.write(data)

    return location


def install_miniconda(script_path, name):
    dest = os.path.join(
        os.getcwd(), name)
    cmd = ['bash', script_path, '-b', '-f', '-p', dest]
    sp.check_call(cmd)
Add function to install miniconda
Add function to install miniconda
Python
mit
NGTS/pipeline-output-analysis-setup-script
from __future__ import division, print_function, absolute_import

import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
    import urllib.request as urllib2
except ImportError:
    import urllib2

MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'


@contextmanager
def change_directory(path):
    old_cwd = os.getcwd()
    try:
        os.chdir(path)
        yield
    finally:
        os.chdir(old_cwd)


def download_install_script():
    location = os.path.join(
        tempfile.gettempdir(),
        os.path.split(MINICONDA_URL)[-1])
    with open(location, 'wb') as outfile:
        response = urllib2.urlopen(MINICONDA_URL)
        data = response.read()
        outfile.write(data)

    return location

+ def install_miniconda(script_path, name):
+     dest = os.path.join(
+         os.getcwd(), name)
+     cmd = ['bash', script_path, '-b', '-f', '-p', dest]
+     sp.check_call(cmd)
+
Add function to install miniconda
## Code Before:
from __future__ import division, print_function, absolute_import

import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
    import urllib.request as urllib2
except ImportError:
    import urllib2

MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'


@contextmanager
def change_directory(path):
    old_cwd = os.getcwd()
    try:
        os.chdir(path)
        yield
    finally:
        os.chdir(old_cwd)


def download_install_script():
    location = os.path.join(
        tempfile.gettempdir(),
        os.path.split(MINICONDA_URL)[-1])
    with open(location, 'wb') as outfile:
        response = urllib2.urlopen(MINICONDA_URL)
        data = response.read()
        outfile.write(data)

    return location

## Instruction:
Add function to install miniconda

## Code After:
from __future__ import division, print_function, absolute_import

import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
    import urllib.request as urllib2
except ImportError:
    import urllib2

MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'


@contextmanager
def change_directory(path):
    old_cwd = os.getcwd()
    try:
        os.chdir(path)
        yield
    finally:
        os.chdir(old_cwd)


def download_install_script():
    location = os.path.join(
        tempfile.gettempdir(),
        os.path.split(MINICONDA_URL)[-1])
    with open(location, 'wb') as outfile:
        response = urllib2.urlopen(MINICONDA_URL)
        data = response.read()
        outfile.write(data)

    return location


def install_miniconda(script_path, name):
    dest = os.path.join(
        os.getcwd(), name)
    cmd = ['bash', script_path, '-b', '-f', '-p', dest]
    sp.check_call(cmd)
from __future__ import division, print_function, absolute_import

import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
    import urllib.request as urllib2
except ImportError:
    import urllib2

MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'


@contextmanager
def change_directory(path):
    old_cwd = os.getcwd()
    try:
        os.chdir(path)
        yield
    finally:
        os.chdir(old_cwd)


def download_install_script():
    location = os.path.join(
        tempfile.gettempdir(),
        os.path.split(MINICONDA_URL)[-1])
    with open(location, 'wb') as outfile:
        response = urllib2.urlopen(MINICONDA_URL)
        data = response.read()
        outfile.write(data)

    return location

+ def install_miniconda(script_path, name):
+     dest = os.path.join(
+         os.getcwd(), name)
+     cmd = ['bash', script_path, '-b', '-f', '-p', dest]
+     sp.check_call(cmd)
+
+
ebd6d12ca16003e771a7015505be1b42d96483a3
roles/gvl.commandline-utilities/templates/jupyterhub_config.py
roles/gvl.commandline-utilities/templates/jupyterhub_config.py
c.JupyterHub.ip = '127.0.0.1'

# The ip for the proxy API handlers
c.JupyterHub.proxy_api_ip = '127.0.0.1'

# The public facing port of the proxy
c.JupyterHub.port = 9510

# The base URL of the entire application
c.JupyterHub.base_url = '/jupyterhub'

# The ip for this process
c.JupyterHub.hub_ip = '127.0.0.1'

# put the log file in /var/log
c.JupyterHub.extra_log_file = '/var/log/jupyterhub.log'

#------------------------------------------------------------------------------
# Spawner configuration
#------------------------------------------------------------------------------

# The IP address (or hostname) the single-user server should listen on
c.Spawner.ip = '127.0.0.1'

#------------------------------------------------------------------------------
# Authenticator configuration
#------------------------------------------------------------------------------

# A class for authentication.
#
# The API is one method, `authenticate`, a tornado gen.coroutine.

# set of usernames of admin users
#
# If unspecified, only the user that launches the server will be admin.
c.Authenticator.admin_users = {'root', 'ubuntu'}
c.JupyterHub.ip = '127.0.0.1'

# The ip for the proxy API handlers
c.JupyterHub.proxy_api_ip = '127.0.0.1'

# The public facing port of the proxy
c.JupyterHub.port = 9510

# The base URL of the entire application
c.JupyterHub.base_url = '/jupyterhub'

# The ip for this process
c.JupyterHub.hub_ip = '127.0.0.1'

# put the log file in /var/log
c.JupyterHub.extra_log_file = '/var/log/jupyterhub.log'

c.JupyterHub.log_level = 'WARN'

#------------------------------------------------------------------------------
# Spawner configuration
#------------------------------------------------------------------------------

# The IP address (or hostname) the single-user server should listen on
c.Spawner.ip = '127.0.0.1'

#------------------------------------------------------------------------------
# Authenticator configuration
#------------------------------------------------------------------------------

# A class for authentication.
#
# The API is one method, `authenticate`, a tornado gen.coroutine.

# set of usernames of admin users
#
# If unspecified, only the user that launches the server will be admin.
c.Authenticator.admin_users = {'root', 'ubuntu'}
Set log level to 'WARN'
Set log level to 'WARN'
Python
mit
gvlproject/gvl_commandline_utilities,nuwang/gvl_commandline_utilities,claresloggett/gvl_commandline_utilities,nuwang/gvl_commandline_utilities,claresloggett/gvl_commandline_utilities,gvlproject/gvl_commandline_utilities
c.JupyterHub.ip = '127.0.0.1'

# The ip for the proxy API handlers
c.JupyterHub.proxy_api_ip = '127.0.0.1'

# The public facing port of the proxy
c.JupyterHub.port = 9510

# The base URL of the entire application
c.JupyterHub.base_url = '/jupyterhub'

# The ip for this process
c.JupyterHub.hub_ip = '127.0.0.1'

# put the log file in /var/log
c.JupyterHub.extra_log_file = '/var/log/jupyterhub.log'
+
+ c.JupyterHub.log_level = 'WARN'

#------------------------------------------------------------------------------
# Spawner configuration
#------------------------------------------------------------------------------

# The IP address (or hostname) the single-user server should listen on
c.Spawner.ip = '127.0.0.1'

#------------------------------------------------------------------------------
# Authenticator configuration
#------------------------------------------------------------------------------

# A class for authentication.
#
# The API is one method, `authenticate`, a tornado gen.coroutine.

# set of usernames of admin users
#
# If unspecified, only the user that launches the server will be admin.
c.Authenticator.admin_users = {'root', 'ubuntu'}
Set log level to 'WARN'
## Code Before:
c.JupyterHub.ip = '127.0.0.1'

# The ip for the proxy API handlers
c.JupyterHub.proxy_api_ip = '127.0.0.1'

# The public facing port of the proxy
c.JupyterHub.port = 9510

# The base URL of the entire application
c.JupyterHub.base_url = '/jupyterhub'

# The ip for this process
c.JupyterHub.hub_ip = '127.0.0.1'

# put the log file in /var/log
c.JupyterHub.extra_log_file = '/var/log/jupyterhub.log'

#------------------------------------------------------------------------------
# Spawner configuration
#------------------------------------------------------------------------------

# The IP address (or hostname) the single-user server should listen on
c.Spawner.ip = '127.0.0.1'

#------------------------------------------------------------------------------
# Authenticator configuration
#------------------------------------------------------------------------------

# A class for authentication.
#
# The API is one method, `authenticate`, a tornado gen.coroutine.

# set of usernames of admin users
#
# If unspecified, only the user that launches the server will be admin.
c.Authenticator.admin_users = {'root', 'ubuntu'}

## Instruction:
Set log level to 'WARN'

## Code After:
c.JupyterHub.ip = '127.0.0.1'

# The ip for the proxy API handlers
c.JupyterHub.proxy_api_ip = '127.0.0.1'

# The public facing port of the proxy
c.JupyterHub.port = 9510

# The base URL of the entire application
c.JupyterHub.base_url = '/jupyterhub'

# The ip for this process
c.JupyterHub.hub_ip = '127.0.0.1'

# put the log file in /var/log
c.JupyterHub.extra_log_file = '/var/log/jupyterhub.log'

c.JupyterHub.log_level = 'WARN'

#------------------------------------------------------------------------------
# Spawner configuration
#------------------------------------------------------------------------------

# The IP address (or hostname) the single-user server should listen on
c.Spawner.ip = '127.0.0.1'

#------------------------------------------------------------------------------
# Authenticator configuration
#------------------------------------------------------------------------------

# A class for authentication.
#
# The API is one method, `authenticate`, a tornado gen.coroutine.

# set of usernames of admin users
#
# If unspecified, only the user that launches the server will be admin.
c.Authenticator.admin_users = {'root', 'ubuntu'}
c.JupyterHub.ip = '127.0.0.1'

# The ip for the proxy API handlers
c.JupyterHub.proxy_api_ip = '127.0.0.1'

# The public facing port of the proxy
c.JupyterHub.port = 9510

# The base URL of the entire application
c.JupyterHub.base_url = '/jupyterhub'

# The ip for this process
c.JupyterHub.hub_ip = '127.0.0.1'

# put the log file in /var/log
c.JupyterHub.extra_log_file = '/var/log/jupyterhub.log'
+
+ c.JupyterHub.log_level = 'WARN'

#------------------------------------------------------------------------------
# Spawner configuration
#------------------------------------------------------------------------------

# The IP address (or hostname) the single-user server should listen on
c.Spawner.ip = '127.0.0.1'

#------------------------------------------------------------------------------
# Authenticator configuration
#------------------------------------------------------------------------------

# A class for authentication.
#
# The API is one method, `authenticate`, a tornado gen.coroutine.

# set of usernames of admin users
#
# If unspecified, only the user that launches the server will be admin.
c.Authenticator.admin_users = {'root', 'ubuntu'}
e7b853c667b5785355214380954c83b843c46f05
tests/modules/contrib/test_publicip.py
tests/modules/contrib/test_publicip.py
import pytest
from unittest import TestCase, mock

import core.config
import core.widget
import modules.contrib.publicip

def build_module():
    config = core.config.Config([])
    return modules.contrib.publicip.Module(config=config, theme=None)

def widget(module):
    return module.widgets()[0]

class PublicIPTest(TestCase):
    def test_load_module(self):
        __import__("modules.contrib.publicip")

    @mock.patch('util.location.public_ip')
    def test_public_ip(self, public_ip_mock):
        public_ip_mock.return_value = '5.12.220.2'

        module = build_module()
        module.update()

        assert widget(module).full_text() == '5.12.220.2'

    @mock.patch('util.location.public_ip')
    def test_public_ip_with_exception(self, public_ip_mock):
        public_ip_mock.side_effect = Exception

        module = build_module()
        module.update()

        assert widget(module).full_text() == 'n/a'

    @mock.patch('util.location.public_ip')
    def test_interval_seconds(self, public_ip_mock):
        public_ip_mock.side_effect = Exception

        module = build_module()

        assert module.parameter('interval') == 3600
import pytest
from unittest import TestCase, mock

import core.config
import core.widget
import modules.contrib.publicip

def build_module():
    config = core.config.Config([])
    return modules.contrib.publicip.Module(config=config, theme=None)

def widget(module):
    return module.widgets()[0]

class PublicIPTest(TestCase):
    def test_load_module(self):
        __import__("modules.contrib.publicip")

    @mock.patch('util.location.public_ip')
    def test_public_ip(self, public_ip_mock):
        public_ip_mock.return_value = '5.12.220.2'

        module = build_module()
        module.update()

        assert widget(module).full_text() == '5.12.220.2'

    @mock.patch('util.location.public_ip')
    def test_public_ip_with_exception(self, public_ip_mock):
        public_ip_mock.side_effect = Exception

        module = build_module()
        module.update()

        assert widget(module).full_text() == 'n/a'

    def test_interval_seconds(self):
        module = build_module()

        assert module.parameter('interval') == 3600
Remove useless mock side effect
Remove useless mock side effect
Python
mit
tobi-wan-kenobi/bumblebee-status,tobi-wan-kenobi/bumblebee-status
import pytest
from unittest import TestCase, mock

import core.config
import core.widget
import modules.contrib.publicip

def build_module():
    config = core.config.Config([])
    return modules.contrib.publicip.Module(config=config, theme=None)

def widget(module):
    return module.widgets()[0]

class PublicIPTest(TestCase):
    def test_load_module(self):
        __import__("modules.contrib.publicip")

    @mock.patch('util.location.public_ip')
    def test_public_ip(self, public_ip_mock):
        public_ip_mock.return_value = '5.12.220.2'

        module = build_module()
        module.update()

        assert widget(module).full_text() == '5.12.220.2'

    @mock.patch('util.location.public_ip')
    def test_public_ip_with_exception(self, public_ip_mock):
        public_ip_mock.side_effect = Exception

        module = build_module()
        module.update()

        assert widget(module).full_text() == 'n/a'

-     @mock.patch('util.location.public_ip')
-     def test_interval_seconds(self, public_ip_mock):
+     def test_interval_seconds(self):
-         public_ip_mock.side_effect = Exception
-
        module = build_module()

        assert module.parameter('interval') == 3600
Remove useless mock side effect
## Code Before:
import pytest
from unittest import TestCase, mock

import core.config
import core.widget
import modules.contrib.publicip

def build_module():
    config = core.config.Config([])
    return modules.contrib.publicip.Module(config=config, theme=None)

def widget(module):
    return module.widgets()[0]

class PublicIPTest(TestCase):
    def test_load_module(self):
        __import__("modules.contrib.publicip")

    @mock.patch('util.location.public_ip')
    def test_public_ip(self, public_ip_mock):
        public_ip_mock.return_value = '5.12.220.2'

        module = build_module()
        module.update()

        assert widget(module).full_text() == '5.12.220.2'

    @mock.patch('util.location.public_ip')
    def test_public_ip_with_exception(self, public_ip_mock):
        public_ip_mock.side_effect = Exception

        module = build_module()
        module.update()

        assert widget(module).full_text() == 'n/a'

    @mock.patch('util.location.public_ip')
    def test_interval_seconds(self, public_ip_mock):
        public_ip_mock.side_effect = Exception

        module = build_module()

        assert module.parameter('interval') == 3600

## Instruction:
Remove useless mock side effect

## Code After:
import pytest
from unittest import TestCase, mock

import core.config
import core.widget
import modules.contrib.publicip

def build_module():
    config = core.config.Config([])
    return modules.contrib.publicip.Module(config=config, theme=None)

def widget(module):
    return module.widgets()[0]

class PublicIPTest(TestCase):
    def test_load_module(self):
        __import__("modules.contrib.publicip")

    @mock.patch('util.location.public_ip')
    def test_public_ip(self, public_ip_mock):
        public_ip_mock.return_value = '5.12.220.2'

        module = build_module()
        module.update()

        assert widget(module).full_text() == '5.12.220.2'

    @mock.patch('util.location.public_ip')
    def test_public_ip_with_exception(self, public_ip_mock):
        public_ip_mock.side_effect = Exception

        module = build_module()
        module.update()

        assert widget(module).full_text() == 'n/a'

    def test_interval_seconds(self):
        module = build_module()

        assert module.parameter('interval') == 3600
import pytest
from unittest import TestCase, mock

import core.config
import core.widget
import modules.contrib.publicip

def build_module():
    config = core.config.Config([])
    return modules.contrib.publicip.Module(config=config, theme=None)

def widget(module):
    return module.widgets()[0]

class PublicIPTest(TestCase):
    def test_load_module(self):
        __import__("modules.contrib.publicip")

    @mock.patch('util.location.public_ip')
    def test_public_ip(self, public_ip_mock):
        public_ip_mock.return_value = '5.12.220.2'

        module = build_module()
        module.update()

        assert widget(module).full_text() == '5.12.220.2'

    @mock.patch('util.location.public_ip')
    def test_public_ip_with_exception(self, public_ip_mock):
        public_ip_mock.side_effect = Exception

        module = build_module()
        module.update()

        assert widget(module).full_text() == 'n/a'

-     @mock.patch('util.location.public_ip')
-     def test_interval_seconds(self, public_ip_mock):
?                                    ----------------
+     def test_interval_seconds(self):
-         public_ip_mock.side_effect = Exception
-
        module = build_module()

        assert module.parameter('interval') == 3600
a1c87c491bf936d441ef7fd79b531384fa174138
simpleubjson/version.py
simpleubjson/version.py
__version_info__ = (0, 6, 0, 'dev', 0)
__version__ = '{version}{tag}{build}'.format(
    version='.'.join(map(str, __version_info__[:3])),
    tag='-' + __version_info__[3] if __version_info__[3] else '',
    build='.' + str(__version_info__[4]) if __version_info__[4] else ''
)
__version_info__ = (0, 6, 0, 'dev', 0)
__version__ = '%(version)s%(tag)s%(build)s' % {
    'version': '.'.join(map(str, __version_info__[:3])),
    'tag': '-' + __version_info__[3] if __version_info__[3] else '',
    'build': '.' + str(__version_info__[4]) if __version_info__[4] else ''
}
Fix compatibility with Python 2.5
Fix compatibility with Python 2.5
Python
bsd-2-clause
kxepal/simpleubjson,brainwater/simpleubjson,samipshah/simpleubjson,498888197/simpleubjson
__version_info__ = (0, 6, 0, 'dev', 0)

- __version__ = '{version}{tag}{build}'.format(
+ __version__ = '%(version)s%(tag)s%(build)s' % {
-     version='.'.join(map(str, __version_info__[:3])),
+     'version': '.'.join(map(str, __version_info__[:3])),
-     tag='-' + __version_info__[3] if __version_info__[3] else '',
+     'tag': '-' + __version_info__[3] if __version_info__[3] else '',
-     build='.' + str(__version_info__[4]) if __version_info__[4] else ''
+     'build': '.' + str(__version_info__[4]) if __version_info__[4] else ''
- )
+ }
Fix compatibility with Python 2.5
## Code Before:
__version_info__ = (0, 6, 0, 'dev', 0)
__version__ = '{version}{tag}{build}'.format(
    version='.'.join(map(str, __version_info__[:3])),
    tag='-' + __version_info__[3] if __version_info__[3] else '',
    build='.' + str(__version_info__[4]) if __version_info__[4] else ''
)

## Instruction:
Fix compatibility with Python 2.5

## Code After:
__version_info__ = (0, 6, 0, 'dev', 0)
__version__ = '%(version)s%(tag)s%(build)s' % {
    'version': '.'.join(map(str, __version_info__[:3])),
    'tag': '-' + __version_info__[3] if __version_info__[3] else '',
    'build': '.' + str(__version_info__[4]) if __version_info__[4] else ''
}
__version_info__ = (0, 6, 0, 'dev', 0)

- __version__ = '{version}{tag}{build}'.format(
+ __version__ = '%(version)s%(tag)s%(build)s' % {
-     version='.'.join(map(str, __version_info__[:3])),
?            ^
+     'version': '.'.join(map(str, __version_info__[:3])),
?     +       ^^^
-     tag='-' + __version_info__[3] if __version_info__[3] else '',
?        ^
+     'tag': '-' + __version_info__[3] if __version_info__[3] else '',
?     +   ^^^
-     build='.' + str(__version_info__[4]) if __version_info__[4] else ''
?          ^
+     'build': '.' + str(__version_info__[4]) if __version_info__[4] else ''
?     +     ^^^
- )
+ }
5547e59360126baa20e1684a22e7f88fdacb530a
s2v2.py
s2v2.py
from s2v1 import *

def number_of_records(data_sample):
    return len(data_sample)

number_of_ties = number_of_records(data_from_csv) - 1 # minus header row
# print(number_of_ties, "ties in our data sample")

def number_of_records2(data_sample):
    return data_sample.size

number_of_ties_my_csv = number_of_records2(my_csv)
# print(number_of_ties_my_csv, "ties in our data sample")
from s2v1 import *

def number_of_records(data_sample):
    return len(data_sample)

def number_of_records_ignore_header(data_sample, header=True):
    if header:
        return len(data_sample) - 1
    else:
        return len(data_sample)

number_of_ties = number_of_records(data_from_csv) - 1 # minus header row
# print(number_of_ties, "ties in our data sample")

def number_of_records2(data_sample):
    return data_sample.size

number_of_ties_my_csv = number_of_records2(my_csv)
# print(number_of_ties_my_csv, "ties in our data sample")
Create new function for number of records and do a header check
Create new function for number of records and do a header check
Python
mit
alexmilesyounger/ds_basics
from s2v1 import *

def number_of_records(data_sample):
    return len(data_sample)

+ def number_of_records_ignore_header(data_sample, header=True):
+     if header:
+         return len(data_sample) - 1
+     else:
+         return len(data_sample)
+
number_of_ties = number_of_records(data_from_csv) - 1 # minus header row
# print(number_of_ties, "ties in our data sample")

-
def number_of_records2(data_sample):
    return data_sample.size

number_of_ties_my_csv = number_of_records2(my_csv)
# print(number_of_ties_my_csv, "ties in our data sample")
Create new function for number of records and do a header check
## Code Before:
from s2v1 import *

def number_of_records(data_sample):
    return len(data_sample)

number_of_ties = number_of_records(data_from_csv) - 1 # minus header row
# print(number_of_ties, "ties in our data sample")

def number_of_records2(data_sample):
    return data_sample.size

number_of_ties_my_csv = number_of_records2(my_csv)
# print(number_of_ties_my_csv, "ties in our data sample")

## Instruction:
Create new function for number of records and do a header check

## Code After:
from s2v1 import *

def number_of_records(data_sample):
    return len(data_sample)

def number_of_records_ignore_header(data_sample, header=True):
    if header:
        return len(data_sample) - 1
    else:
        return len(data_sample)

number_of_ties = number_of_records(data_from_csv) - 1 # minus header row
# print(number_of_ties, "ties in our data sample")

def number_of_records2(data_sample):
    return data_sample.size

number_of_ties_my_csv = number_of_records2(my_csv)
# print(number_of_ties_my_csv, "ties in our data sample")
from s2v1 import *

def number_of_records(data_sample):
    return len(data_sample)

+ def number_of_records_ignore_header(data_sample, header=True):
+     if header:
+         return len(data_sample) - 1
+     else:
+         return len(data_sample)
+
number_of_ties = number_of_records(data_from_csv) - 1 # minus header row
# print(number_of_ties, "ties in our data sample")

-
def number_of_records2(data_sample):
    return data_sample.size

number_of_ties_my_csv = number_of_records2(my_csv)
# print(number_of_ties_my_csv, "ties in our data sample")
9bb312c505c2749862372c0ff56ba47e087a9edc
searx/engines/semantic_scholar.py
searx/engines/semantic_scholar.py
from json import dumps, loads


search_url = 'https://www.semanticscholar.org/api/1/search'


def request(query, params):
    params['url'] = search_url
    params['method'] = 'POST'
    params['headers']['content-type'] = 'application/json'
    params['data'] = dumps({
        "queryString": query,
        "page": params['pageno'],
        "pageSize": 10,
        "sort": "relevance",
        "useFallbackRankerService": False,
        "useFallbackSearchCluster": False,
        "performTitleMatch": True,
        "getQuerySuggestions": False,
        "authors": [],
        "coAuthors": [],
        "venues": [],
        "performTitleMatch": True,
    })

    return params


def response(resp):
    res = loads(resp.text)
    results = []
    for result in res['results']:
        results.append({
            'url': result['primaryPaperLink']['url'],
            'title': result['title']['text'],
            'content': result['paperAbstractTruncated']
        })
    return results
from json import dumps, loads


search_url = 'https://www.semanticscholar.org/api/1/search'


def request(query, params):
    params['url'] = search_url
    params['method'] = 'POST'
    params['headers']['content-type'] = 'application/json'
    params['data'] = dumps({
        "queryString": query,
        "page": params['pageno'],
        "pageSize": 10,
        "sort": "relevance",
        "useFallbackRankerService": False,
        "useFallbackSearchCluster": False,
        "getQuerySuggestions": False,
        "authors": [],
        "coAuthors": [],
        "venues": [],
        "performTitleMatch": True,
    })

    return params


def response(resp):
    res = loads(resp.text)
    results = []
    for result in res['results']:
        results.append({
            'url': result['primaryPaperLink']['url'],
            'title': result['title']['text'],
            'content': result['paperAbstractTruncated']
        })
    return results
Remove duplicated key from dict in Semantic Scholar
Remove duplicated key from dict in Semantic Scholar
Python
agpl-3.0
dalf/searx,dalf/searx,dalf/searx,dalf/searx
from json import dumps, loads


search_url = 'https://www.semanticscholar.org/api/1/search'


def request(query, params):
    params['url'] = search_url
    params['method'] = 'POST'
    params['headers']['content-type'] = 'application/json'
    params['data'] = dumps({
        "queryString": query,
        "page": params['pageno'],
        "pageSize": 10,
        "sort": "relevance",
        "useFallbackRankerService": False,
        "useFallbackSearchCluster": False,
-         "performTitleMatch": True,
        "getQuerySuggestions": False,
        "authors": [],
        "coAuthors": [],
        "venues": [],
        "performTitleMatch": True,
    })

    return params


def response(resp):
    res = loads(resp.text)
    results = []
    for result in res['results']:
        results.append({
            'url': result['primaryPaperLink']['url'],
            'title': result['title']['text'],
            'content': result['paperAbstractTruncated']
        })
    return results
Remove duplicated key from dict in Semantic Scholar
## Code Before:
from json import dumps, loads


search_url = 'https://www.semanticscholar.org/api/1/search'


def request(query, params):
    params['url'] = search_url
    params['method'] = 'POST'
    params['headers']['content-type'] = 'application/json'
    params['data'] = dumps({
        "queryString": query,
        "page": params['pageno'],
        "pageSize": 10,
        "sort": "relevance",
        "useFallbackRankerService": False,
        "useFallbackSearchCluster": False,
        "performTitleMatch": True,
        "getQuerySuggestions": False,
        "authors": [],
        "coAuthors": [],
        "venues": [],
        "performTitleMatch": True,
    })

    return params


def response(resp):
    res = loads(resp.text)
    results = []
    for result in res['results']:
        results.append({
            'url': result['primaryPaperLink']['url'],
            'title': result['title']['text'],
            'content': result['paperAbstractTruncated']
        })
    return results

## Instruction:
Remove duplicated key from dict in Semantic Scholar

## Code After:
from json import dumps, loads


search_url = 'https://www.semanticscholar.org/api/1/search'


def request(query, params):
    params['url'] = search_url
    params['method'] = 'POST'
    params['headers']['content-type'] = 'application/json'
    params['data'] = dumps({
        "queryString": query,
        "page": params['pageno'],
        "pageSize": 10,
        "sort": "relevance",
        "useFallbackRankerService": False,
        "useFallbackSearchCluster": False,
        "getQuerySuggestions": False,
        "authors": [],
        "coAuthors": [],
        "venues": [],
        "performTitleMatch": True,
    })

    return params


def response(resp):
    res = loads(resp.text)
    results = []
    for result in res['results']:
        results.append({
            'url': result['primaryPaperLink']['url'],
            'title': result['title']['text'],
            'content': result['paperAbstractTruncated']
        })
    return results
from json import dumps, loads


search_url = 'https://www.semanticscholar.org/api/1/search'


def request(query, params):
    params['url'] = search_url
    params['method'] = 'POST'
    params['headers']['content-type'] = 'application/json'
    params['data'] = dumps({
        "queryString": query,
        "page": params['pageno'],
        "pageSize": 10,
        "sort": "relevance",
        "useFallbackRankerService": False,
        "useFallbackSearchCluster": False,
-         "performTitleMatch": True,
        "getQuerySuggestions": False,
        "authors": [],
        "coAuthors": [],
        "venues": [],
        "performTitleMatch": True,
    })

    return params


def response(resp):
    res = loads(resp.text)
    results = []
    for result in res['results']:
        results.append({
            'url': result['primaryPaperLink']['url'],
            'title': result['title']['text'],
            'content': result['paperAbstractTruncated']
        })
    return results