Dataset schema (column, dtype, observed value range or number of classes):

column                 dtype          min    max
---------------------  -------------  -----  ------
commit                 stringlengths  40     40
old_file               stringlengths  4      184
new_file               stringlengths  4      184
old_contents           stringlengths  1      3.6k
new_contents           stringlengths  5      3.38k
subject                stringlengths  15     778
message                stringlengths  16     6.74k
lang                   stringclasses  201 values
license                stringclasses  13 values
repos                  stringlengths  6      116k
config                 stringclasses  201 values
content                stringlengths  137    7.24k
diff                   stringlengths  26     5.55k
diff_length            int64          1      123
relative_diff_length   float64        0.01   89
n_lines_added          int64          0      108
n_lines_deleted        int64          0      106
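As a minimal sketch, assuming these columns describe a Hugging Face dataset split, a table of this shape could be loaded and queried with the `datasets` library. The dataset identifier and split name below are placeholders, not the real path.

```python
from datasets import load_dataset

# Placeholder identifier and split; substitute the actual dataset path.
ds = load_dataset("org/commit-instruction-pairs", split="train")

print(ds.column_names)                  # should match the schema above
row = ds[0]
print(row["subject"], row["lang"], row["diff_length"])

# Example query: keep only small Python edits.
small_python_edits = ds.filter(
    lambda r: r["config"] == "python" and r["diff_length"] <= 10
)
print(len(small_python_edits))
```

The sample rows below list each record's fields in the column order given in the schema.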
b1153bc6e8b8b132c146076aeeb6b86ec4f54365
old_file: __init__.py | new_file: __init__.py
if 'loaded' in locals(): import imp imp.reload(blendergltf) from .blendergltf import * else: loaded = True from .blendergltf import *
bl_info = { "name": "glTF format", "author": "Daniel Stokes", "version": (0, 1, 0), "blender": (2, 76, 0), "location": "File > Import-Export", "description": "Export glTF", "warning": "", "wiki_url": "" "", "support": 'TESTING', "category": "Import-Export"} # Treat as module if '.' in __name__: if 'loaded' in locals(): import imp imp.reload(blendergltf) from .blendergltf import * else: loaded = True from .blendergltf import * # Treat as addon else: if "bpy" in locals(): import importlib importlib.reload(blendergltf) import json import bpy from bpy.props import ( StringProperty, ) from bpy_extras.io_utils import ( ExportHelper, ) from . import blendergltf class ExportGLTF(bpy.types.Operator, ExportHelper): """Save a Khronos glTF File""" bl_idname = "export_scene.gltf" bl_label = 'Export glTF' filename_ext = ".gltf" filter_glob = StringProperty( default="*.gltf", options={'HIDDEN'}, ) check_extension = True def execute(self, context): scene = { 'camera': bpy.data.cameras, 'lamps': bpy.data.lamps, 'images': bpy.data.images, 'materials': bpy.data.materials, 'meshes': bpy.data.meshes, 'objects': bpy.data.objects, 'scenes': bpy.data.scenes, 'textures': bpy.data.textures, } gltf = blendergltf.export_gltf(scene) with open(self.filepath, 'w') as fout: json.dump(gltf, fout, indent=4) return {'FINISHED'} def menu_func_export(self, context): self.layout.operator(ExportGLTF.bl_idname, text="glTF (.gltf)") def register(): bpy.utils.register_module(__name__) bpy.types.INFO_MT_file_export.append(menu_func_export) def unregister(): bpy.utils.unregister_module(__name__) bpy.types.INFO_MT_file_export.remove(menu_func_export)
Add experimental support to run module as Blender addon
Add experimental support to run module as Blender addon
Python
apache-2.0
Kupoman/blendergltf,lukesanantonio/blendergltf
python
## Code Before: if 'loaded' in locals(): import imp imp.reload(blendergltf) from .blendergltf import * else: loaded = True from .blendergltf import * ## Instruction: Add experimental support to run module as Blender addon ## Code After: bl_info = { "name": "glTF format", "author": "Daniel Stokes", "version": (0, 1, 0), "blender": (2, 76, 0), "location": "File > Import-Export", "description": "Export glTF", "warning": "", "wiki_url": "" "", "support": 'TESTING', "category": "Import-Export"} # Treat as module if '.' in __name__: if 'loaded' in locals(): import imp imp.reload(blendergltf) from .blendergltf import * else: loaded = True from .blendergltf import * # Treat as addon else: if "bpy" in locals(): import importlib importlib.reload(blendergltf) import json import bpy from bpy.props import ( StringProperty, ) from bpy_extras.io_utils import ( ExportHelper, ) from . import blendergltf class ExportGLTF(bpy.types.Operator, ExportHelper): """Save a Khronos glTF File""" bl_idname = "export_scene.gltf" bl_label = 'Export glTF' filename_ext = ".gltf" filter_glob = StringProperty( default="*.gltf", options={'HIDDEN'}, ) check_extension = True def execute(self, context): scene = { 'camera': bpy.data.cameras, 'lamps': bpy.data.lamps, 'images': bpy.data.images, 'materials': bpy.data.materials, 'meshes': bpy.data.meshes, 'objects': bpy.data.objects, 'scenes': bpy.data.scenes, 'textures': bpy.data.textures, } gltf = blendergltf.export_gltf(scene) with open(self.filepath, 'w') as fout: json.dump(gltf, fout, indent=4) return {'FINISHED'} def menu_func_export(self, context): self.layout.operator(ExportGLTF.bl_idname, text="glTF (.gltf)") def register(): bpy.utils.register_module(__name__) bpy.types.INFO_MT_file_export.append(menu_func_export) def unregister(): bpy.utils.unregister_module(__name__) bpy.types.INFO_MT_file_export.remove(menu_func_export)
+ bl_info = { + "name": "glTF format", + "author": "Daniel Stokes", + "version": (0, 1, 0), + "blender": (2, 76, 0), + "location": "File > Import-Export", + "description": "Export glTF", + "warning": "", + "wiki_url": "" + "", + "support": 'TESTING', + "category": "Import-Export"} + + + # Treat as module + if '.' in __name__: - if 'loaded' in locals(): + if 'loaded' in locals(): ? ++++ - import imp + import imp ? ++++ - imp.reload(blendergltf) + imp.reload(blendergltf) ? ++++ - from .blendergltf import * + from .blendergltf import * ? ++++ + else: + loaded = True + from .blendergltf import * + + # Treat as addon else: - loaded = True - from .blendergltf import * + if "bpy" in locals(): + import importlib + importlib.reload(blendergltf) + + + import json + + import bpy + from bpy.props import ( + StringProperty, + ) + from bpy_extras.io_utils import ( + ExportHelper, + ) + + from . import blendergltf + + + class ExportGLTF(bpy.types.Operator, ExportHelper): + """Save a Khronos glTF File""" + + bl_idname = "export_scene.gltf" + bl_label = 'Export glTF' + + filename_ext = ".gltf" + filter_glob = StringProperty( + default="*.gltf", + options={'HIDDEN'}, + ) + + check_extension = True + + def execute(self, context): + scene = { + 'camera': bpy.data.cameras, + 'lamps': bpy.data.lamps, + 'images': bpy.data.images, + 'materials': bpy.data.materials, + 'meshes': bpy.data.meshes, + 'objects': bpy.data.objects, + 'scenes': bpy.data.scenes, + 'textures': bpy.data.textures, + } + gltf = blendergltf.export_gltf(scene) + with open(self.filepath, 'w') as fout: + json.dump(gltf, fout, indent=4) + return {'FINISHED'} + + + def menu_func_export(self, context): + self.layout.operator(ExportGLTF.bl_idname, text="glTF (.gltf)") + + + def register(): + bpy.utils.register_module(__name__) + + bpy.types.INFO_MT_file_export.append(menu_func_export) + + + def unregister(): + bpy.utils.unregister_module(__name__) + + bpy.types.INFO_MT_file_export.remove(menu_func_export)
diff_length: 94 | relative_diff_length: 13.428571 | n_lines_added: 88 | n_lines_deleted: 6
e9bc9b8f18adb402a77a969b0868e8a560c1ca98
old_file: .github/workflows/publish.yml | new_file: .github/workflows/publish.yml
name: Publish package on: push: tags: - v8.* jobs: test-build-publish: runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 - uses: actions/setup-node@v1 with: node-version: 12 registry-url: https://registry.npmjs.org/ - name: Get version name id: get_tag_name run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//} - uses: actions/checkout@v1 with: ref: ${{ steps.get_tag_name.outputs.VERSION }} - name: Install Dependencies run: yarn install - name: Lint Files run: yarn workspace react-day-picker coverage - name: Run Unit Tests run: yarn workspace react-day-picker test - name: Build run: yarn workspace react-day-picker build - name: Publish on npm run: yarn npm publish --tag next env: NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
name: Publish package on: push: tags: - v8.* jobs: test-build-publish: runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 - uses: actions/setup-node@v1 with: node-version: 12 registry-url: https://registry.npmjs.org/ - name: Get version name id: get_tag_name run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//} - uses: actions/checkout@v1 with: ref: ${{ steps.get_tag_name.outputs.VERSION }} - name: Get yarn cache directory path id: yarn-cache-dir-path run: echo "::set-output name=dir::$(yarn cache dir)" - uses: actions/cache@v1 id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`) with: path: ${{ steps.yarn-cache-dir-path.outputs.dir }} key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} restore-keys: | ${{ runner.os }}-yarn- - name: Install Dependencies run: yarn install - name: Lint Files run: yarn workspace react-day-picker coverage - name: Run Unit Tests run: yarn workspace react-day-picker test - name: Build run: yarn workspace react-day-picker build - name: Publish on npm run: yarn npm publish --tag next env: NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
Add yarn cache when installing dependencies
Add yarn cache when installing dependencies
YAML
mit
gpbl/react-day-picker,gpbl/react-day-picker,gpbl/react-day-picker
yaml
## Code Before: name: Publish package on: push: tags: - v8.* jobs: test-build-publish: runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 - uses: actions/setup-node@v1 with: node-version: 12 registry-url: https://registry.npmjs.org/ - name: Get version name id: get_tag_name run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//} - uses: actions/checkout@v1 with: ref: ${{ steps.get_tag_name.outputs.VERSION }} - name: Install Dependencies run: yarn install - name: Lint Files run: yarn workspace react-day-picker coverage - name: Run Unit Tests run: yarn workspace react-day-picker test - name: Build run: yarn workspace react-day-picker build - name: Publish on npm run: yarn npm publish --tag next env: NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}} ## Instruction: Add yarn cache when installing dependencies ## Code After: name: Publish package on: push: tags: - v8.* jobs: test-build-publish: runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 - uses: actions/setup-node@v1 with: node-version: 12 registry-url: https://registry.npmjs.org/ - name: Get version name id: get_tag_name run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//} - uses: actions/checkout@v1 with: ref: ${{ steps.get_tag_name.outputs.VERSION }} - name: Get yarn cache directory path id: yarn-cache-dir-path run: echo "::set-output name=dir::$(yarn cache dir)" - uses: actions/cache@v1 id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`) with: path: ${{ steps.yarn-cache-dir-path.outputs.dir }} key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} restore-keys: | ${{ runner.os }}-yarn- - name: Install Dependencies run: yarn install - name: Lint Files run: yarn workspace react-day-picker coverage - name: Run Unit Tests run: yarn workspace react-day-picker test - name: Build run: yarn workspace react-day-picker build - name: Publish on npm run: yarn npm publish --tag next env: NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
name: Publish package on: push: tags: - v8.* jobs: test-build-publish: runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 - uses: actions/setup-node@v1 with: node-version: 12 registry-url: https://registry.npmjs.org/ - name: Get version name id: get_tag_name run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//} - uses: actions/checkout@v1 with: ref: ${{ steps.get_tag_name.outputs.VERSION }} + - name: Get yarn cache directory path + id: yarn-cache-dir-path + run: echo "::set-output name=dir::$(yarn cache dir)" + + - uses: actions/cache@v1 + id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`) + with: + path: ${{ steps.yarn-cache-dir-path.outputs.dir }} + key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} + restore-keys: | + ${{ runner.os }}-yarn- + - name: Install Dependencies run: yarn install - name: Lint Files run: yarn workspace react-day-picker coverage - name: Run Unit Tests run: yarn workspace react-day-picker test - name: Build run: yarn workspace react-day-picker build - name: Publish on npm run: yarn npm publish --tag next env: NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
diff_length: 12 | relative_diff_length: 0.3 | n_lines_added: 12 | n_lines_deleted: 0
af8d6e8f8bf5d0224555a72efde6fe4f4eefc0d2
old_file: spec/unit/middleware/logging_spec.rb | new_file: spec/unit/middleware/logging_spec.rb
describe Travis::Api::App::Middleware::Logging do it 'configures ActiveRecord' do expect(ActiveRecord::Base.logger).to eq(Travis.logger) end it 'sets the logger' do mock_app do use Travis::Api::App::Middleware::Logging get '/check_logger' do expect(logger).to eq(Travis.logger) 'ok' end end expect(get('/check_logger')).to be_ok end end
describe Travis::Api::App::Middleware::Logging do it 'configures ActiveRecord' do expect(ActiveRecord::Base.logger).to eq(Travis.logger) end it 'sets the logger' do mock_app do use Travis::Api::App::Middleware::Logging get '/check_logger' do throw unless logger == Travis.logger 'ok' end end expect(get('/check_logger')).to be_ok end end
Fix test checking that logger is injected
Fix test checking that logger is injected
Ruby
mit
travis-ci/travis-api,travis-ci/travis-api,travis-ci/travis-api
ruby
## Code Before: describe Travis::Api::App::Middleware::Logging do it 'configures ActiveRecord' do expect(ActiveRecord::Base.logger).to eq(Travis.logger) end it 'sets the logger' do mock_app do use Travis::Api::App::Middleware::Logging get '/check_logger' do expect(logger).to eq(Travis.logger) 'ok' end end expect(get('/check_logger')).to be_ok end end ## Instruction: Fix test checking that logger is injected ## Code After: describe Travis::Api::App::Middleware::Logging do it 'configures ActiveRecord' do expect(ActiveRecord::Base.logger).to eq(Travis.logger) end it 'sets the logger' do mock_app do use Travis::Api::App::Middleware::Logging get '/check_logger' do throw unless logger == Travis.logger 'ok' end end expect(get('/check_logger')).to be_ok end end
describe Travis::Api::App::Middleware::Logging do it 'configures ActiveRecord' do expect(ActiveRecord::Base.logger).to eq(Travis.logger) end it 'sets the logger' do mock_app do use Travis::Api::App::Middleware::Logging get '/check_logger' do - expect(logger).to eq(Travis.logger) + throw unless logger == Travis.logger 'ok' end end expect(get('/check_logger')).to be_ok end end
diff_length: 2 | relative_diff_length: 0.117647 | n_lines_added: 1 | n_lines_deleted: 1
f6b4b16c26ee97d48ba524027a96d17fba63dc80
old_file: project/models.py | new_file: project/models.py
import datetime from project import db, bcrypt class User(db.Model): __tablename__ = "users" id = db.Column(db.Integer, primary_key=True) email = db.Column(db.String, unique=True, nullable=False) password = db.Column(db.String, nullable=False) registered_on = db.Column(db.DateTime, nullable=False) admin = db.Column(db.Boolean, nullable=False, default=False) def __init__(self, email, password, paid=False, admin=False): self.email = email self.password = bcrypt.generate_password_hash(password) self.registered_on = datetime.datetime.now() self.admin = admin def is_authenticated(self): return True def is_active(self): return True def is_anonymous(self): return False def get_id(self): return self.id def __repr__(self): return '<email {}'.format(self.email)
import datetime from project import db, bcrypt class User(db.Model): __tablename__ = "users" id = db.Column(db.Integer, primary_key=True) email = db.Column(db.String, unique=True, nullable=False) password = db.Column(db.String, nullable=False) registered_on = db.Column(db.DateTime, nullable=False) admin = db.Column(db.Boolean, nullable=False, default=False) confirmed = db.Column(db.Boolean, nullable=False, default=False) confirmed_on = db.Column(db.DateTime, nullable=True) def __init__(self, email, password, paid=False, admin=False): self.email = email self.password = bcrypt.generate_password_hash(password) self.registered_on = datetime.datetime.now() self.admin = admin self.confirmed = confirmed self.confirmed_on = confirmed_on def is_authenticated(self): return True def is_active(self): return True def is_anonymous(self): return False def get_id(self): return self.id def __repr__(self): return '<email {}'.format(self.email)
Update user model with confirmed and confirmed_at
Update user model with confirmed and confirmed_at
Python
mit
dylanshine/streamschool,dylanshine/streamschool
python
## Code Before: import datetime from project import db, bcrypt class User(db.Model): __tablename__ = "users" id = db.Column(db.Integer, primary_key=True) email = db.Column(db.String, unique=True, nullable=False) password = db.Column(db.String, nullable=False) registered_on = db.Column(db.DateTime, nullable=False) admin = db.Column(db.Boolean, nullable=False, default=False) def __init__(self, email, password, paid=False, admin=False): self.email = email self.password = bcrypt.generate_password_hash(password) self.registered_on = datetime.datetime.now() self.admin = admin def is_authenticated(self): return True def is_active(self): return True def is_anonymous(self): return False def get_id(self): return self.id def __repr__(self): return '<email {}'.format(self.email) ## Instruction: Update user model with confirmed and confirmed_at ## Code After: import datetime from project import db, bcrypt class User(db.Model): __tablename__ = "users" id = db.Column(db.Integer, primary_key=True) email = db.Column(db.String, unique=True, nullable=False) password = db.Column(db.String, nullable=False) registered_on = db.Column(db.DateTime, nullable=False) admin = db.Column(db.Boolean, nullable=False, default=False) confirmed = db.Column(db.Boolean, nullable=False, default=False) confirmed_on = db.Column(db.DateTime, nullable=True) def __init__(self, email, password, paid=False, admin=False): self.email = email self.password = bcrypt.generate_password_hash(password) self.registered_on = datetime.datetime.now() self.admin = admin self.confirmed = confirmed self.confirmed_on = confirmed_on def is_authenticated(self): return True def is_active(self): return True def is_anonymous(self): return False def get_id(self): return self.id def __repr__(self): return '<email {}'.format(self.email)
import datetime from project import db, bcrypt class User(db.Model): __tablename__ = "users" id = db.Column(db.Integer, primary_key=True) email = db.Column(db.String, unique=True, nullable=False) password = db.Column(db.String, nullable=False) registered_on = db.Column(db.DateTime, nullable=False) admin = db.Column(db.Boolean, nullable=False, default=False) + confirmed = db.Column(db.Boolean, nullable=False, default=False) + confirmed_on = db.Column(db.DateTime, nullable=True) def __init__(self, email, password, paid=False, admin=False): self.email = email self.password = bcrypt.generate_password_hash(password) self.registered_on = datetime.datetime.now() self.admin = admin + self.confirmed = confirmed + self.confirmed_on = confirmed_on def is_authenticated(self): return True def is_active(self): return True def is_anonymous(self): return False def get_id(self): return self.id def __repr__(self): return '<email {}'.format(self.email)
diff_length: 4 | relative_diff_length: 0.114286 | n_lines_added: 4 | n_lines_deleted: 0
a18b452a07104e13d9e544c92f93f5ab18349d7c
old_file: src/styles/components/_board.sass | new_file: src/styles/components/_board.sass
position: relative +set-dims(100vw, 100vh) background: $board-color .nano-pane display: block !important .board-header position: relative text-align: center .posts position: relative
position: relative +set-dims(100vw, 100vh) background: $board-color .nano-pane display: block !important .board-header position: relative text-align: center .posts position: relative pre width: 100% display: inline-block text-overflow: ellipsis text-align: left !important overflow: hidden
Fix styles for pre tag on board to cut off and align properly
Fix styles for pre tag on board to cut off and align properly
Sass
mit
AdamSalma/Lurka,AdamSalma/Lurka
sass
## Code Before: position: relative +set-dims(100vw, 100vh) background: $board-color .nano-pane display: block !important .board-header position: relative text-align: center .posts position: relative ## Instruction: Fix styles for pre tag on board to cut off and align properly ## Code After: position: relative +set-dims(100vw, 100vh) background: $board-color .nano-pane display: block !important .board-header position: relative text-align: center .posts position: relative pre width: 100% display: inline-block text-overflow: ellipsis text-align: left !important overflow: hidden
position: relative +set-dims(100vw, 100vh) background: $board-color .nano-pane display: block !important .board-header position: relative text-align: center .posts position: relative + + pre + width: 100% + display: inline-block + text-overflow: ellipsis + text-align: left !important + overflow: hidden
diff_length: 7 | relative_diff_length: 0.538462 | n_lines_added: 7 | n_lines_deleted: 0
5a4b657a3b5d48aee7ecf41854128cd8b294a66c
old_file: dashboard/app/controllers/admin/dashboard_controller.rb | new_file: dashboard/app/controllers/admin/dashboard_controller.rb
class Admin::DashboardController < Admin::BaseController def index @recent_activity = [] Refinery::Plugins.active.each do |plugin| begin plugin.activity.each do |activity| @recent_activity << activity.class.find(:all, :conditions => activity.conditions, :order => activity.order, :limit => activity.limit ) end rescue logger.warn "#{$!.class.name} raised while getting recent activity for dashboard." logger.warn $!.message logger.warn $!.backtrace.collect { |b| " > #{b}" }.join("\n") end end @recent_activity = @recent_activity.flatten.compact.sort { |x,y| y.updated_at <=> x.updated_at }.first(activity_show_limit=RefinerySetting.find_or_set(:activity_show_limit, 7)) @recent_inquiries = defined?(Inquiry) ? Inquiry.latest(activity_show_limit) : [] end def disable_upgrade_message RefinerySetting.update_all({ :value => false }, { :name => 'show_internet_explorer_upgrade_message', :scoping => 'refinery' }) render :nothing => true end end
class Admin::DashboardController < Admin::BaseController def index @recent_activity = [] Refinery::Plugins.active.each do |plugin| begin plugin.activity.each do |activity| @recent_activity << activity.class.where(activity.conditions). order(activity.order). limit(activity.limit). all end rescue logger.warn "#{$!.class.name} raised while getting recent activity for dashboard." logger.warn $!.message logger.warn $!.backtrace.collect { |b| " > #{b}" }.join("\n") end end @recent_activity = @recent_activity.flatten.compact.sort { |x,y| y.updated_at <=> x.updated_at }.first(activity_show_limit=RefinerySetting.find_or_set(:activity_show_limit, 7)) @recent_inquiries = defined?(Inquiry) ? Inquiry.latest(activity_show_limit) : [] end def disable_upgrade_message RefinerySetting.update_all({ :value => false }, { :name => 'show_internet_explorer_upgrade_message', :scoping => 'refinery' }) render :nothing => true end end
Use new Active Record syntax
Use new Active Record syntax
Ruby
mit
anitagraham/refinerycms,stefanspicer/refinerycms,refinery/refinerycms,johanb/refinerycms,simi/refinerycms,trevornez/refinerycms,stefanspicer/refinerycms,SmartMedia/refinerycms-with-custom-icons,mkaplan9/refinerycms,chrise86/refinerycms,chrise86/refinerycms,stefanspicer/refinerycms,Retimont/refinerycms,KingLemuel/refinerycms,sideci-sample/sideci-sample-refinerycms,anitagraham/refinerycms,kappiah/refinerycms,mlinfoot/refinerycms,aguzubiaga/refinerycms,Eric-Guo/refinerycms,mojarra/myrefinerycms,kelkoo-services/refinerycms,KingLemuel/refinerycms,bryanmtl/g-refinerycms,trevornez/refinerycms,pcantrell/refinerycms,louim/refinerycms,anitagraham/refinerycms,aguzubiaga/refinerycms,mobilityhouse/refinerycms,Retimont/refinerycms,gwagener/refinerycms,mabras/refinerycms,bricesanchez/refinerycms,hoopla-software/refinerycms,mlinfoot/refinerycms,trevornez/refinerycms,bricesanchez/refinerycms,stakes/refinerycms,chrise86/refinerycms,bryanmtl/g-refinerycms,mkaplan9/refinerycms,hoopla-software/refinerycms,gwagener/refinerycms,koa/refinerycms,LytayTOUCH/refinerycms,mlinfoot/refinerycms,hoopla-software/refinerycms,LytayTOUCH/refinerycms,mabras/refinerycms,pcantrell/refinerycms,kappiah/refinerycms,kappiah/refinerycms,stakes/refinerycms,louim/refinerycms,mkaplan9/refinerycms,simi/refinerycms,mojarra/myrefinerycms,johanb/refinerycms,aguzubiaga/refinerycms,sideci-sample/sideci-sample-refinerycms,simi/refinerycms,Eric-Guo/refinerycms,johanb/refinerycms,mobilityhouse/refinerycms,LytayTOUCH/refinerycms,refinery/refinerycms,donabrams/refinerycms,gwagener/refinerycms,simi/refinerycms,KingLemuel/refinerycms,Eric-Guo/refinerycms,kelkoo-services/refinerycms,donabrams/refinerycms,mabras/refinerycms,koa/refinerycms,Retimont/refinerycms,refinery/refinerycms,SmartMedia/refinerycms-with-custom-icons
ruby
## Code Before: class Admin::DashboardController < Admin::BaseController def index @recent_activity = [] Refinery::Plugins.active.each do |plugin| begin plugin.activity.each do |activity| @recent_activity << activity.class.find(:all, :conditions => activity.conditions, :order => activity.order, :limit => activity.limit ) end rescue logger.warn "#{$!.class.name} raised while getting recent activity for dashboard." logger.warn $!.message logger.warn $!.backtrace.collect { |b| " > #{b}" }.join("\n") end end @recent_activity = @recent_activity.flatten.compact.sort { |x,y| y.updated_at <=> x.updated_at }.first(activity_show_limit=RefinerySetting.find_or_set(:activity_show_limit, 7)) @recent_inquiries = defined?(Inquiry) ? Inquiry.latest(activity_show_limit) : [] end def disable_upgrade_message RefinerySetting.update_all({ :value => false }, { :name => 'show_internet_explorer_upgrade_message', :scoping => 'refinery' }) render :nothing => true end end ## Instruction: Use new Active Record syntax ## Code After: class Admin::DashboardController < Admin::BaseController def index @recent_activity = [] Refinery::Plugins.active.each do |plugin| begin plugin.activity.each do |activity| @recent_activity << activity.class.where(activity.conditions). order(activity.order). limit(activity.limit). all end rescue logger.warn "#{$!.class.name} raised while getting recent activity for dashboard." logger.warn $!.message logger.warn $!.backtrace.collect { |b| " > #{b}" }.join("\n") end end @recent_activity = @recent_activity.flatten.compact.sort { |x,y| y.updated_at <=> x.updated_at }.first(activity_show_limit=RefinerySetting.find_or_set(:activity_show_limit, 7)) @recent_inquiries = defined?(Inquiry) ? Inquiry.latest(activity_show_limit) : [] end def disable_upgrade_message RefinerySetting.update_all({ :value => false }, { :name => 'show_internet_explorer_upgrade_message', :scoping => 'refinery' }) render :nothing => true end end
class Admin::DashboardController < Admin::BaseController def index @recent_activity = [] Refinery::Plugins.active.each do |plugin| begin plugin.activity.each do |activity| - @recent_activity << activity.class.find(:all, ? ^ ^^^^^^ + @recent_activity << activity.class.where(activity.conditions). ? ^^^^^^^^^ +++++++ ^^^^^^^^ + order(activity.order). + limit(activity.limit). + all - :conditions => activity.conditions, - :order => activity.order, - :limit => activity.limit - ) end rescue logger.warn "#{$!.class.name} raised while getting recent activity for dashboard." logger.warn $!.message logger.warn $!.backtrace.collect { |b| " > #{b}" }.join("\n") end end @recent_activity = @recent_activity.flatten.compact.sort { |x,y| y.updated_at <=> x.updated_at }.first(activity_show_limit=RefinerySetting.find_or_set(:activity_show_limit, 7)) @recent_inquiries = defined?(Inquiry) ? Inquiry.latest(activity_show_limit) : [] end def disable_upgrade_message RefinerySetting.update_all({ :value => false }, { :name => 'show_internet_explorer_upgrade_message', :scoping => 'refinery' }) render :nothing => true end end
diff_length: 9 | relative_diff_length: 0.243243 | n_lines_added: 4 | n_lines_deleted: 5
f9cb778aee192923d927d8f1cd2921e235485128
old_file: packages/idyll-components/src/map.js | new_file: packages/idyll-components/src/map.js
const React = require('react'); const { mapChildren } = require('idyll-component-children'); import TextContainer from './text-container'; class Map extends React.Component { render() { const { idyll, hasError, updateProps, children, value, currentValue } = this.props; if (children) { return mapChildren(children, child => { return value.map(val => { let newProps = Object.assign({}, child.props); newProps = Object.keys(child.props).reduce((props, elm) => { if (props[elm] === currentValue) { props[elm] = val; return props; } return props; }, newProps); return ( <TextContainer> <div> {React.cloneElement(child, { ...newProps })} </div> </TextContainer> ); }); }); } return null; } } Map._idyll = { name: 'Map', tagType: 'open', children: ['Some text'], props: [ { name: 'value', type: 'array', example: "['one', 'two', 'three']", description: 'Array of values to map.' }, { name: 'currentValue', type: 'string', example: 'iterator', description: 'Value of the current element being processed from the array.' } ] }; module.exports = Map;
const React = require('react'); const { mapChildren } = require('idyll-component-children'); import TextContainer from './text-container'; class Map extends React.Component { render() { const { children, value, currentValue } = this.props; if (children) { return mapChildren(children, child => { return value.map(val => { let newProps = Object.assign({}, child.props); newProps = Object.keys(child.props).reduce((props, elm) => { if (props[elm] === currentValue) { props[elm] = val; return props; } return props; }, newProps); return React.cloneElement(child, { ...newProps }); }); }); } return null; } } Map._idyll = { name: 'Map', tagType: 'open', children: ['Some text'], props: [ { name: 'value', type: 'array', example: "['one', 'two', 'three']", description: 'Array of values to map.' }, { name: 'currentValue', type: 'string', example: 'iterator', description: 'Value of the current element being processed from the array.' } ] }; module.exports = Map;
Update Map Component: remove TextContainer
Update Map Component: remove TextContainer
JavaScript
mit
idyll-lang/idyll,idyll-lang/idyll
javascript
## Code Before: const React = require('react'); const { mapChildren } = require('idyll-component-children'); import TextContainer from './text-container'; class Map extends React.Component { render() { const { idyll, hasError, updateProps, children, value, currentValue } = this.props; if (children) { return mapChildren(children, child => { return value.map(val => { let newProps = Object.assign({}, child.props); newProps = Object.keys(child.props).reduce((props, elm) => { if (props[elm] === currentValue) { props[elm] = val; return props; } return props; }, newProps); return ( <TextContainer> <div> {React.cloneElement(child, { ...newProps })} </div> </TextContainer> ); }); }); } return null; } } Map._idyll = { name: 'Map', tagType: 'open', children: ['Some text'], props: [ { name: 'value', type: 'array', example: "['one', 'two', 'three']", description: 'Array of values to map.' }, { name: 'currentValue', type: 'string', example: 'iterator', description: 'Value of the current element being processed from the array.' } ] }; module.exports = Map; ## Instruction: Update Map Component: remove TextContainer ## Code After: const React = require('react'); const { mapChildren } = require('idyll-component-children'); import TextContainer from './text-container'; class Map extends React.Component { render() { const { children, value, currentValue } = this.props; if (children) { return mapChildren(children, child => { return value.map(val => { let newProps = Object.assign({}, child.props); newProps = Object.keys(child.props).reduce((props, elm) => { if (props[elm] === currentValue) { props[elm] = val; return props; } return props; }, newProps); return React.cloneElement(child, { ...newProps }); }); }); } return null; } } Map._idyll = { name: 'Map', tagType: 'open', children: ['Some text'], props: [ { name: 'value', type: 'array', example: "['one', 'two', 'three']", description: 'Array of values to map.' }, { name: 'currentValue', type: 'string', example: 'iterator', description: 'Value of the current element being processed from the array.' } ] }; module.exports = Map;
const React = require('react'); const { mapChildren } = require('idyll-component-children'); import TextContainer from './text-container'; class Map extends React.Component { render() { + const { children, value, currentValue } = this.props; - const { - idyll, - hasError, - updateProps, - children, - value, - currentValue - } = this.props; if (children) { return mapChildren(children, child => { return value.map(val => { let newProps = Object.assign({}, child.props); newProps = Object.keys(child.props).reduce((props, elm) => { if (props[elm] === currentValue) { props[elm] = val; return props; } return props; }, newProps); + return React.cloneElement(child, { ...newProps }); - return ( - <TextContainer> - <div> - {React.cloneElement(child, { - ...newProps - })} - </div> - </TextContainer> - ); }); }); } return null; } } Map._idyll = { name: 'Map', tagType: 'open', children: ['Some text'], props: [ { name: 'value', type: 'array', example: "['one', 'two', 'three']", description: 'Array of values to map.' }, { name: 'currentValue', type: 'string', example: 'iterator', description: 'Value of the current element being processed from the array.' } ] }; module.exports = Map;
diff_length: 19 | relative_diff_length: 0.301587 | n_lines_added: 2 | n_lines_deleted: 17
51ce83add32390b8cb4db01c9c0b17c6ae98dae3
old_file: static/css/_toolbox.scss | new_file: static/css/_toolbox.scss
position: absolute; z-index: 100; right: 0; width: 4em; text-align: center; padding: 10px; .toolbox-button { margin-bottom: 5px; background-color: $brand-gray; border: 1px solid $toolbox-btn-active; padding: 2.5px 6px; color: $black; } .toolbox-button:hover { background-color: $toolbox-btn-active; } .toolbox-button:focus { outline: none; } .box.is-dragover { background-color: grey; } }
position: absolute; z-index: 100; right: 0; width: 4em; text-align: center; padding: 10px; .toolbox-button { float: right; margin-bottom: 5px; background-color: $brand-gray; border: 1px solid $toolbox-btn-active; padding: 2.5px 6px; color: $black; } .toolbox-button:hover { background-color: $toolbox-btn-active; } .toolbox-button:focus { outline: none; } .box.is-dragover { background-color: grey; } }
Fix the toolbox buttons alignment
Fix the toolbox buttons alignment
SCSS
mit
aminmarashi/binary-bot,aminmarashi/binary-bot,binary-com/binary-bot,binary-com/binary-bot
scss
## Code Before: position: absolute; z-index: 100; right: 0; width: 4em; text-align: center; padding: 10px; .toolbox-button { margin-bottom: 5px; background-color: $brand-gray; border: 1px solid $toolbox-btn-active; padding: 2.5px 6px; color: $black; } .toolbox-button:hover { background-color: $toolbox-btn-active; } .toolbox-button:focus { outline: none; } .box.is-dragover { background-color: grey; } } ## Instruction: Fix the toolbox buttons alignment ## Code After: position: absolute; z-index: 100; right: 0; width: 4em; text-align: center; padding: 10px; .toolbox-button { float: right; margin-bottom: 5px; background-color: $brand-gray; border: 1px solid $toolbox-btn-active; padding: 2.5px 6px; color: $black; } .toolbox-button:hover { background-color: $toolbox-btn-active; } .toolbox-button:focus { outline: none; } .box.is-dragover { background-color: grey; } }
position: absolute; z-index: 100; right: 0; width: 4em; text-align: center; padding: 10px; .toolbox-button { + float: right; margin-bottom: 5px; background-color: $brand-gray; border: 1px solid $toolbox-btn-active; padding: 2.5px 6px; color: $black; } .toolbox-button:hover { background-color: $toolbox-btn-active; } .toolbox-button:focus { outline: none; } .box.is-dragover { background-color: grey; } }
diff_length: 1 | relative_diff_length: 0.037037 | n_lines_added: 1 | n_lines_deleted: 0
59151042ea180c539dc9353e16eda432524b181f
old_file: ops/ansible/roles/legislation_explorer/defaults/main.yml | new_file: ops/ansible/roles/legislation_explorer/defaults/main.yml
api_url: http://localhost:8000 base_url_path: /legislation branch_name: master changelog_url: https://github.com/openfisca/openfisca-france/blob/master/CHANGELOG.md host: 127.0.0.1 http_port: 8001 repo_url: https://github.com/openfisca/legislation-explorer.git ui_strings: | { "en": { "countryName": "France", "search_placeholder": "smic, salaire net" }, "fr": { "countryName": "France", "search_placeholder": "smic, salaire net" } } systemd_service_file_path: /etc/systemd/system/legislation-explorer.service unix_group_name: openfisca unix_user_name: legislation-explorer # Nginx host_name: legislation-explorer.example # Optional: SSL certificate # An SSL certificate is issued from Let's Encrypt if `enable_ssl` and `letsencrypt_email` are both defined. enable_ssl: no letsencrypt_email: null # Start by testing with staging environment, then switch to production once it works, to avoid reaching Let's Encrypt limits. letsencrypt_environment: "staging" # Optional: Matomo tracker matomo_url: null matomo_site_id: null
api_url: http://localhost:8000 base_url_path: /legislation branch_name: master changelog_url: https://github.com/openfisca/openfisca-france/blob/master/CHANGELOG.md host: 127.0.0.1 http_port: 8001 repo_url: https://github.com/openfisca/legislation-explorer.git ui_strings: | { "en": { "countryName": "France", "search_placeholder": "smic, salaire net" }, "fr": { "countryName": "France", "search_placeholder": "smic, salaire net" } } systemd_service_file_path: /etc/systemd/system/legislation-explorer.service unix_group_name: openfisca unix_user_name: legislation-explorer # Nginx host_name: localhost # Optional: SSL certificate # An SSL certificate is issued from Let's Encrypt if `enable_ssl` and `letsencrypt_email` are both defined. enable_ssl: no letsencrypt_email: null # Start by testing with staging environment, then switch to production once it works, to avoid reaching Let's Encrypt limits. letsencrypt_environment: "staging" # Optional: Matomo tracker matomo_url: null matomo_site_id: null
Use "localhost" as default host name for Nginx
Use "localhost" as default host name for Nginx
YAML
agpl-3.0
openfisca/legislation-explorer
yaml
## Code Before: api_url: http://localhost:8000 base_url_path: /legislation branch_name: master changelog_url: https://github.com/openfisca/openfisca-france/blob/master/CHANGELOG.md host: 127.0.0.1 http_port: 8001 repo_url: https://github.com/openfisca/legislation-explorer.git ui_strings: | { "en": { "countryName": "France", "search_placeholder": "smic, salaire net" }, "fr": { "countryName": "France", "search_placeholder": "smic, salaire net" } } systemd_service_file_path: /etc/systemd/system/legislation-explorer.service unix_group_name: openfisca unix_user_name: legislation-explorer # Nginx host_name: legislation-explorer.example # Optional: SSL certificate # An SSL certificate is issued from Let's Encrypt if `enable_ssl` and `letsencrypt_email` are both defined. enable_ssl: no letsencrypt_email: null # Start by testing with staging environment, then switch to production once it works, to avoid reaching Let's Encrypt limits. letsencrypt_environment: "staging" # Optional: Matomo tracker matomo_url: null matomo_site_id: null ## Instruction: Use "localhost" as default host name for Nginx ## Code After: api_url: http://localhost:8000 base_url_path: /legislation branch_name: master changelog_url: https://github.com/openfisca/openfisca-france/blob/master/CHANGELOG.md host: 127.0.0.1 http_port: 8001 repo_url: https://github.com/openfisca/legislation-explorer.git ui_strings: | { "en": { "countryName": "France", "search_placeholder": "smic, salaire net" }, "fr": { "countryName": "France", "search_placeholder": "smic, salaire net" } } systemd_service_file_path: /etc/systemd/system/legislation-explorer.service unix_group_name: openfisca unix_user_name: legislation-explorer # Nginx host_name: localhost # Optional: SSL certificate # An SSL certificate is issued from Let's Encrypt if `enable_ssl` and `letsencrypt_email` are both defined. enable_ssl: no letsencrypt_email: null # Start by testing with staging environment, then switch to production once it works, to avoid reaching Let's Encrypt limits. letsencrypt_environment: "staging" # Optional: Matomo tracker matomo_url: null matomo_site_id: null
api_url: http://localhost:8000 base_url_path: /legislation branch_name: master changelog_url: https://github.com/openfisca/openfisca-france/blob/master/CHANGELOG.md host: 127.0.0.1 http_port: 8001 repo_url: https://github.com/openfisca/legislation-explorer.git ui_strings: | { "en": { "countryName": "France", "search_placeholder": "smic, salaire net" }, "fr": { "countryName": "France", "search_placeholder": "smic, salaire net" } } systemd_service_file_path: /etc/systemd/system/legislation-explorer.service unix_group_name: openfisca unix_user_name: legislation-explorer # Nginx - host_name: legislation-explorer.example + host_name: localhost # Optional: SSL certificate # An SSL certificate is issued from Let's Encrypt if `enable_ssl` and `letsencrypt_email` are both defined. enable_ssl: no letsencrypt_email: null # Start by testing with staging environment, then switch to production once it works, to avoid reaching Let's Encrypt limits. letsencrypt_environment: "staging" # Optional: Matomo tracker matomo_url: null matomo_site_id: null
diff_length: 2 | relative_diff_length: 0.060606 | n_lines_added: 1 | n_lines_deleted: 1
7a6fc91b8eafe0cc88d892443ad25b24a94a3ace
old_file: cross_service_tempest_plugin/tests/scenario/test_cross_service.py | new_file: cross_service_tempest_plugin/tests/scenario/test_cross_service.py
from tempest import config from tempest import test CONF = config.CONF class HeatDriverNeutronDNSIntegration(test.BaseTestCase): @classmethod def skip_checks(cls): super(HeatDriverNeutronDNSIntegration, cls).skip_checks() if not getattr(CONF.service_available, 'dns', False): raise cls.skipException('Designate support is required') if not getattr(CONF.service_available, 'orchestration', False): raise cls.skipException('Heat support is required') def test_port_on_extenal_net_to_dns(self): pass def test_floating_ip_with_name_from_port_to_dns(self): pass def test_floating_ip_with_own_name_to_dns(self): pass
from tempest import config from tempest import test CONF = config.CONF class HeatDriverNeutronDNSIntegration(test.BaseTestCase): @classmethod def skip_checks(cls): super(HeatDriverNeutronDNSIntegration, cls).skip_checks() if not getattr(CONF.service_available, 'designate', False): raise cls.skipException('Designate support is required') if not getattr(CONF.service_available, 'heat_plugin', False): raise cls.skipException('Heat support is required') def test_port_on_extenal_net_to_dns(self): pass def test_floating_ip_with_name_from_port_to_dns(self): pass def test_floating_ip_with_own_name_to_dns(self): pass
Fix the skip to match plugins
Fix the skip to match plugins
Python
apache-2.0
afrittoli/cross_service_tempest_plugins,afrittoli/cross_service_tempest_plugins
python
## Code Before: from tempest import config from tempest import test CONF = config.CONF class HeatDriverNeutronDNSIntegration(test.BaseTestCase): @classmethod def skip_checks(cls): super(HeatDriverNeutronDNSIntegration, cls).skip_checks() if not getattr(CONF.service_available, 'dns', False): raise cls.skipException('Designate support is required') if not getattr(CONF.service_available, 'orchestration', False): raise cls.skipException('Heat support is required') def test_port_on_extenal_net_to_dns(self): pass def test_floating_ip_with_name_from_port_to_dns(self): pass def test_floating_ip_with_own_name_to_dns(self): pass ## Instruction: Fix the skip to match plugins ## Code After: from tempest import config from tempest import test CONF = config.CONF class HeatDriverNeutronDNSIntegration(test.BaseTestCase): @classmethod def skip_checks(cls): super(HeatDriverNeutronDNSIntegration, cls).skip_checks() if not getattr(CONF.service_available, 'designate', False): raise cls.skipException('Designate support is required') if not getattr(CONF.service_available, 'heat_plugin', False): raise cls.skipException('Heat support is required') def test_port_on_extenal_net_to_dns(self): pass def test_floating_ip_with_name_from_port_to_dns(self): pass def test_floating_ip_with_own_name_to_dns(self): pass
from tempest import config from tempest import test CONF = config.CONF class HeatDriverNeutronDNSIntegration(test.BaseTestCase): @classmethod def skip_checks(cls): super(HeatDriverNeutronDNSIntegration, cls).skip_checks() - if not getattr(CONF.service_available, 'dns', False): ? ^ + if not getattr(CONF.service_available, 'designate', False): ? ++++ ^^^ raise cls.skipException('Designate support is required') - if not getattr(CONF.service_available, 'orchestration', False): ? --- --- - + if not getattr(CONF.service_available, 'heat_plugin', False): ? +++++ raise cls.skipException('Heat support is required') def test_port_on_extenal_net_to_dns(self): pass def test_floating_ip_with_name_from_port_to_dns(self): pass def test_floating_ip_with_own_name_to_dns(self): pass
diff_length: 4 | relative_diff_length: 0.153846 | n_lines_added: 2 | n_lines_deleted: 2
9ea8387474e3137caecddf7a47db2918a8cd1663
old_file: test/run_test.sh | new_file: test/run_test.sh
printf_new() { str=$1 num=$2 v=$(printf "%-${num}s" "$str") printf "${v// / }" } RED='\033[0;31m' GREEN='\033[0;32m' NC='\033[0m' # No Color COMMAND=$1 CHRLEN=${#COMMAND} NBSP=`echo 32-${CHRLEN}|bc` printf "Run $1" printf_new " " $NBSP $1 $2 $3 $4 $5 $6 $7 $8 $9 1>$1.log 2>&1 if [ $? -ne 0 ] then printf "${RED}FAIL${NC}\n" else printf "${GREEN}SUCCESS${NC}\n" fi
printf_new() { str=$1 num=$2 v=$(printf "%-${num}s" "$str") printf "${v// / }" } RED='\033[0;31m' GREEN='\033[0;32m' NC='\033[0m' # No Color COMMAND=$1 CHRLEN=${#COMMAND} NBSP=$((32-$CHRLEN)) printf "Run $1" printf_new " " $NBSP $1 $2 $3 $4 $5 $6 $7 $8 $9 1>$1.log 2>&1 if [ $? -ne 0 ] then printf "${RED}FAIL${NC}\n" else printf "${GREEN}SUCCESS${NC}\n" fi
Use bash calculator instead of bc
Use bash calculator instead of bc
Shell
lgpl-2.1
babelouest/ulfius,babelouest/ulfius,babelouest/ulfius
shell
## Code Before: printf_new() { str=$1 num=$2 v=$(printf "%-${num}s" "$str") printf "${v// / }" } RED='\033[0;31m' GREEN='\033[0;32m' NC='\033[0m' # No Color COMMAND=$1 CHRLEN=${#COMMAND} NBSP=`echo 32-${CHRLEN}|bc` printf "Run $1" printf_new " " $NBSP $1 $2 $3 $4 $5 $6 $7 $8 $9 1>$1.log 2>&1 if [ $? -ne 0 ] then printf "${RED}FAIL${NC}\n" else printf "${GREEN}SUCCESS${NC}\n" fi ## Instruction: Use bash calculator instead of bc ## Code After: printf_new() { str=$1 num=$2 v=$(printf "%-${num}s" "$str") printf "${v// / }" } RED='\033[0;31m' GREEN='\033[0;32m' NC='\033[0m' # No Color COMMAND=$1 CHRLEN=${#COMMAND} NBSP=$((32-$CHRLEN)) printf "Run $1" printf_new " " $NBSP $1 $2 $3 $4 $5 $6 $7 $8 $9 1>$1.log 2>&1 if [ $? -ne 0 ] then printf "${RED}FAIL${NC}\n" else printf "${GREEN}SUCCESS${NC}\n" fi
printf_new() { str=$1 num=$2 v=$(printf "%-${num}s" "$str") printf "${v// / }" } RED='\033[0;31m' GREEN='\033[0;32m' NC='\033[0m' # No Color COMMAND=$1 CHRLEN=${#COMMAND} - NBSP=`echo 32-${CHRLEN}|bc` + NBSP=$((32-$CHRLEN)) printf "Run $1" printf_new " " $NBSP $1 $2 $3 $4 $5 $6 $7 $8 $9 1>$1.log 2>&1 if [ $? -ne 0 ] then printf "${RED}FAIL${NC}\n" else printf "${GREEN}SUCCESS${NC}\n" fi
diff_length: 2 | relative_diff_length: 0.076923 | n_lines_added: 1 | n_lines_deleted: 1
e625a54500c04480de9f0eb059f28b6a28d91d7d
old_file: src/Languages/Editor/Impl/project.json | new_file: src/Languages/Editor/Impl/project.json
{ "dependencies": { "MicroBuild.Core": "0.2.0", "Microsoft.VisualStudio.Editor": "14.2.25123", "Microsoft.VisualStudio.Imaging.Interop.14.0.DesignTime": "14.2.25123", "Microsoft.VisualStudio.Language.Intellisense": "14.2.25123", "Microsoft.VisualStudio.OLE.Interop": "7.10.6070", "Microsoft.VisualStudio.SDK.EmbedInteropTypes": "14.1.2", "Microsoft.VisualStudio.Shell.Interop": "7.10.6071", "Microsoft.VisualStudio.Text.Data": "14.2.25123", "Microsoft.VisualStudio.Text.Logic": "14.2.25123", "Microsoft.VisualStudio.Text.UI": "14.2.25123", "Microsoft.VisualStudio.Text.UI.Wpf": "14.2.25123", "Microsoft.VisualStudio.TextManager.Interop": "7.10.6070", "Microsoft.VisualStudio.TextManager.Interop.8.0": "8.0.50727" }, "frameworks": { "net46": {} }, "runtimes": { "win": {} } }
{ "dependencies": { "MicroBuild.Core": "0.2.0", "Microsoft.VisualStudio.CoreUtility": "14.2.25123", "Microsoft.VisualStudio.Editor": "14.2.25123", "Microsoft.VisualStudio.Imaging.Interop.14.0.DesignTime": "14.2.25123", "Microsoft.VisualStudio.Language.Intellisense": "14.2.25123", "Microsoft.VisualStudio.OLE.Interop": "7.10.6070", "Microsoft.VisualStudio.SDK.EmbedInteropTypes": "14.1.2", "Microsoft.VisualStudio.Shell.Interop": "7.10.6071", "Microsoft.VisualStudio.Text.Data": "14.2.25123", "Microsoft.VisualStudio.Text.Logic": "14.2.25123", "Microsoft.VisualStudio.Text.UI": "14.2.25123", "Microsoft.VisualStudio.Text.UI.Wpf": "14.2.25123", "Microsoft.VisualStudio.TextManager.Interop": "7.10.6070", "Microsoft.VisualStudio.TextManager.Interop.8.0": "8.0.50727" }, "frameworks": { "net46": {} }, "runtimes": { "win": {} } }
Add "Microsoft.VisualStudio.CoreUtility": "14.2.25123" to Microsoft.Languages.Editor
Add "Microsoft.VisualStudio.CoreUtility": "14.2.25123" to Microsoft.Languages.Editor
JSON
mit
AlexanderSher/RTVS,MikhailArkhipov/RTVS,karthiknadig/RTVS,karthiknadig/RTVS,karthiknadig/RTVS,AlexanderSher/RTVS,AlexanderSher/RTVS,AlexanderSher/RTVS,MikhailArkhipov/RTVS,karthiknadig/RTVS,MikhailArkhipov/RTVS,karthiknadig/RTVS,MikhailArkhipov/RTVS,karthiknadig/RTVS,MikhailArkhipov/RTVS,AlexanderSher/RTVS,karthiknadig/RTVS,AlexanderSher/RTVS,MikhailArkhipov/RTVS,AlexanderSher/RTVS,MikhailArkhipov/RTVS
json
## Code Before: { "dependencies": { "MicroBuild.Core": "0.2.0", "Microsoft.VisualStudio.Editor": "14.2.25123", "Microsoft.VisualStudio.Imaging.Interop.14.0.DesignTime": "14.2.25123", "Microsoft.VisualStudio.Language.Intellisense": "14.2.25123", "Microsoft.VisualStudio.OLE.Interop": "7.10.6070", "Microsoft.VisualStudio.SDK.EmbedInteropTypes": "14.1.2", "Microsoft.VisualStudio.Shell.Interop": "7.10.6071", "Microsoft.VisualStudio.Text.Data": "14.2.25123", "Microsoft.VisualStudio.Text.Logic": "14.2.25123", "Microsoft.VisualStudio.Text.UI": "14.2.25123", "Microsoft.VisualStudio.Text.UI.Wpf": "14.2.25123", "Microsoft.VisualStudio.TextManager.Interop": "7.10.6070", "Microsoft.VisualStudio.TextManager.Interop.8.0": "8.0.50727" }, "frameworks": { "net46": {} }, "runtimes": { "win": {} } } ## Instruction: Add "Microsoft.VisualStudio.CoreUtility": "14.2.25123" to Microsoft.Languages.Editor ## Code After: { "dependencies": { "MicroBuild.Core": "0.2.0", "Microsoft.VisualStudio.CoreUtility": "14.2.25123", "Microsoft.VisualStudio.Editor": "14.2.25123", "Microsoft.VisualStudio.Imaging.Interop.14.0.DesignTime": "14.2.25123", "Microsoft.VisualStudio.Language.Intellisense": "14.2.25123", "Microsoft.VisualStudio.OLE.Interop": "7.10.6070", "Microsoft.VisualStudio.SDK.EmbedInteropTypes": "14.1.2", "Microsoft.VisualStudio.Shell.Interop": "7.10.6071", "Microsoft.VisualStudio.Text.Data": "14.2.25123", "Microsoft.VisualStudio.Text.Logic": "14.2.25123", "Microsoft.VisualStudio.Text.UI": "14.2.25123", "Microsoft.VisualStudio.Text.UI.Wpf": "14.2.25123", "Microsoft.VisualStudio.TextManager.Interop": "7.10.6070", "Microsoft.VisualStudio.TextManager.Interop.8.0": "8.0.50727" }, "frameworks": { "net46": {} }, "runtimes": { "win": {} } }
{ "dependencies": { "MicroBuild.Core": "0.2.0", + "Microsoft.VisualStudio.CoreUtility": "14.2.25123", "Microsoft.VisualStudio.Editor": "14.2.25123", "Microsoft.VisualStudio.Imaging.Interop.14.0.DesignTime": "14.2.25123", "Microsoft.VisualStudio.Language.Intellisense": "14.2.25123", "Microsoft.VisualStudio.OLE.Interop": "7.10.6070", "Microsoft.VisualStudio.SDK.EmbedInteropTypes": "14.1.2", "Microsoft.VisualStudio.Shell.Interop": "7.10.6071", "Microsoft.VisualStudio.Text.Data": "14.2.25123", "Microsoft.VisualStudio.Text.Logic": "14.2.25123", "Microsoft.VisualStudio.Text.UI": "14.2.25123", "Microsoft.VisualStudio.Text.UI.Wpf": "14.2.25123", "Microsoft.VisualStudio.TextManager.Interop": "7.10.6070", "Microsoft.VisualStudio.TextManager.Interop.8.0": "8.0.50727" }, "frameworks": { "net46": {} }, "runtimes": { "win": {} } }
diff_length: 1 | relative_diff_length: 0.043478 | n_lines_added: 1 | n_lines_deleted: 0
fa3954495a077999121763ae55bb925aa27ea044
old_file: packages/apollo-client/src/util/Observable.ts | new_file: packages/apollo-client/src/util/Observable.ts
// This simplified polyfill attempts to follow the ECMAScript Observable proposal. // See https://github.com/zenparsing/es-observable import { Observable as LinkObservable } from 'apollo-link'; export type Subscription = ZenObservable.Subscription; export type Observer<T> = ZenObservable.Observer<T>; import $$observable from 'symbol-observable'; // rxjs interopt export class Observable<T> extends LinkObservable<T> { public [$$observable]() { return this; } }
// This simplified polyfill attempts to follow the ECMAScript Observable proposal. // See https://github.com/zenparsing/es-observable import { Observable as LinkObservable } from 'apollo-link'; export type Subscription = ZenObservable.Subscription; export type Observer<T> = ZenObservable.Observer<T>; import $$observable from 'symbol-observable'; // rxjs interopt export class Observable<T> extends LinkObservable<T> { public [$$observable]() { return this; } public ['@@observable']() { return this; } }
Use @@observable in case rxjs was loaded before apollo
Use @@observable in case rxjs was loaded before apollo
TypeScript
mit
apollographql/apollo-client,apollostack/apollo-client,apollostack/apollo-client,apollostack/apollo-client,apollographql/apollo-client
typescript
## Code Before: // This simplified polyfill attempts to follow the ECMAScript Observable proposal. // See https://github.com/zenparsing/es-observable import { Observable as LinkObservable } from 'apollo-link'; export type Subscription = ZenObservable.Subscription; export type Observer<T> = ZenObservable.Observer<T>; import $$observable from 'symbol-observable'; // rxjs interopt export class Observable<T> extends LinkObservable<T> { public [$$observable]() { return this; } } ## Instruction: Use @@observable in case rxjs was loaded before apollo ## Code After: // This simplified polyfill attempts to follow the ECMAScript Observable proposal. // See https://github.com/zenparsing/es-observable import { Observable as LinkObservable } from 'apollo-link'; export type Subscription = ZenObservable.Subscription; export type Observer<T> = ZenObservable.Observer<T>; import $$observable from 'symbol-observable'; // rxjs interopt export class Observable<T> extends LinkObservable<T> { public [$$observable]() { return this; } public ['@@observable']() { return this; } }
// This simplified polyfill attempts to follow the ECMAScript Observable proposal. // See https://github.com/zenparsing/es-observable import { Observable as LinkObservable } from 'apollo-link'; export type Subscription = ZenObservable.Subscription; export type Observer<T> = ZenObservable.Observer<T>; import $$observable from 'symbol-observable'; // rxjs interopt export class Observable<T> extends LinkObservable<T> { public [$$observable]() { return this; } + + public ['@@observable']() { + return this; + } }
diff_length: 4 | relative_diff_length: 0.266667 | n_lines_added: 4 | n_lines_deleted: 0
63b05af7352e875c36a3a1d42c7a8dc4404c2e78
old_file: app/views/islay/admin/users/_form.html.haml | new_file: app/views/islay/admin/users/_form.html.haml
= resource_form(@user) do |f| = record_name(@user.name, 'user', :when_blank => 'New User') = form_errors(@user) = content do = f.fieldset(:class => 'primary') do = f.input(:name, :col => 6) = f.input(:email, :col => 6) = f.fieldset('Password') do = f.input(:password) - unless @user == current_user - if @user.destroyable? = form_notice(:delete) do %p This account may be deleted as the user associated with it has not added or modified any records. Deletion is permanent and once gone the account cannot be recovered. = link_to('Delete User', path(:delete, @user), :class => 'button delete') - else = form_notice(:disable) do %p This account may be disabled. The user associated with this account will not be able to log in. After being disabled, the account may be re-enabled later. = button_tag('Disable User', :name => 'user[disabled]', :value => 1) = footer do = save_button
= resource_form(@user) do |f| = record_name(@user.name, 'user', :when_blank => 'New User') = form_errors(@user) = content do = f.fieldset(:class => 'primary') do = f.input(:name, :col => 6) = f.input(:email, :col => 6) = f.fieldset('Password') do = f.input(:password) - if @user == current_user = form_notice(:general) do %p You cannot delete or disable your own account. If you need this account deleted or disabled another user must do it for you. - else - if @user.destroyable? = form_notice(:delete) do %p This account may be deleted as the user associated with it has not added or modified any records. Deletion is permanent and once gone the account cannot be recovered. = link_to('Delete User', path(:delete, @user), :class => 'button delete') - else = form_notice(:disable) do %p This account may be disabled. The user associated with this account will not be able to log in. After being disabled, the account may be re-enabled later. = button_tag('Disable User', :name => 'user[disabled]', :value => 1) = footer do = save_button
Add a notice about why a user cannot delete or disable their own account.
Add a notice about why a user cannot delete or disable their own account.
Haml
mit
spookandpuff/islay,spookandpuff/islay,spookandpuff/islay
haml
## Code Before: = resource_form(@user) do |f| = record_name(@user.name, 'user', :when_blank => 'New User') = form_errors(@user) = content do = f.fieldset(:class => 'primary') do = f.input(:name, :col => 6) = f.input(:email, :col => 6) = f.fieldset('Password') do = f.input(:password) - unless @user == current_user - if @user.destroyable? = form_notice(:delete) do %p This account may be deleted as the user associated with it has not added or modified any records. Deletion is permanent and once gone the account cannot be recovered. = link_to('Delete User', path(:delete, @user), :class => 'button delete') - else = form_notice(:disable) do %p This account may be disabled. The user associated with this account will not be able to log in. After being disabled, the account may be re-enabled later. = button_tag('Disable User', :name => 'user[disabled]', :value => 1) = footer do = save_button ## Instruction: Add a notice about why a user cannot delete or disable their own account. ## Code After: = resource_form(@user) do |f| = record_name(@user.name, 'user', :when_blank => 'New User') = form_errors(@user) = content do = f.fieldset(:class => 'primary') do = f.input(:name, :col => 6) = f.input(:email, :col => 6) = f.fieldset('Password') do = f.input(:password) - if @user == current_user = form_notice(:general) do %p You cannot delete or disable your own account. If you need this account deleted or disabled another user must do it for you. - else - if @user.destroyable? = form_notice(:delete) do %p This account may be deleted as the user associated with it has not added or modified any records. Deletion is permanent and once gone the account cannot be recovered. = link_to('Delete User', path(:delete, @user), :class => 'button delete') - else = form_notice(:disable) do %p This account may be disabled. The user associated with this account will not be able to log in. After being disabled, the account may be re-enabled later. = button_tag('Disable User', :name => 'user[disabled]', :value => 1) = footer do = save_button
= resource_form(@user) do |f| = record_name(@user.name, 'user', :when_blank => 'New User') = form_errors(@user) = content do = f.fieldset(:class => 'primary') do = f.input(:name, :col => 6) = f.input(:email, :col => 6) = f.fieldset('Password') do = f.input(:password) - - unless @user == current_user ? ^^^^^^ + - if @user == current_user ? ^^ + = form_notice(:general) do + %p + You cannot delete or disable your own account. If you need this + account deleted or disabled another user must do it for you. + + - else - if @user.destroyable? = form_notice(:delete) do %p This account may be deleted as the user associated with it has not added or modified any records. Deletion is permanent and once gone the account cannot be recovered. = link_to('Delete User', path(:delete, @user), :class => 'button delete') - else = form_notice(:disable) do %p This account may be disabled. The user associated with this account will not be able to log in. After being disabled, the account may be re-enabled later. = button_tag('Disable User', :name => 'user[disabled]', :value => 1) = footer do = save_button
8
0.25
7
1
89bdb604a1a13893bdcb7719f32c9e0df8c140fb
app/scripts-browserify/cookie-consent.js
app/scripts-browserify/cookie-consent.js
require('../../node_modules/cookieconsent/build/cookieconsent.min.js'); // We're using http://cookieconsent.wpengine.com/documentation/javascript-api/ window.cookieconsent.initialise({ cookie: { name: config.features.cookieName }, content: { header: 'Cookies used on the website!', message: 'Vi bruger cookies for at give dig en bedre brugeroplevelse.', dismiss: 'OK', link: 'Læs mere', href: '/cookies', } });
require('cookieconsent/build/cookieconsent.min.js'); // We're using http://cookieconsent.wpengine.com/documentation/javascript-api/ window.cookieconsent.initialise({ cookie: { name: config.features.cookieName }, content: { header: 'Cookies used on the website!', message: 'Vi bruger cookies for at give dig en bedre brugeroplevelse.', dismiss: 'OK', link: 'Læs mere', href: '/cookies', } });
Fix attempt browserify require node module path
Fix attempt browserify require node module path
JavaScript
mit
CopenhagenCityArchives/kbh-billeder,CopenhagenCityArchives/kbh-billeder,CopenhagenCityArchives/kbh-billeder
javascript
## Code Before: require('../../node_modules/cookieconsent/build/cookieconsent.min.js'); // We're using http://cookieconsent.wpengine.com/documentation/javascript-api/ window.cookieconsent.initialise({ cookie: { name: config.features.cookieName }, content: { header: 'Cookies used on the website!', message: 'Vi bruger cookies for at give dig en bedre brugeroplevelse.', dismiss: 'OK', link: 'Læs mere', href: '/cookies', } }); ## Instruction: Fix attempt browserify require node module path ## Code After: require('cookieconsent/build/cookieconsent.min.js'); // We're using http://cookieconsent.wpengine.com/documentation/javascript-api/ window.cookieconsent.initialise({ cookie: { name: config.features.cookieName }, content: { header: 'Cookies used on the website!', message: 'Vi bruger cookies for at give dig en bedre brugeroplevelse.', dismiss: 'OK', link: 'Læs mere', href: '/cookies', } });
- require('../../node_modules/cookieconsent/build/cookieconsent.min.js'); ? ------------------- + require('cookieconsent/build/cookieconsent.min.js'); // We're using http://cookieconsent.wpengine.com/documentation/javascript-api/ window.cookieconsent.initialise({ cookie: { name: config.features.cookieName }, content: { header: 'Cookies used on the website!', message: 'Vi bruger cookies for at give dig en bedre brugeroplevelse.', dismiss: 'OK', link: 'Læs mere', href: '/cookies', } });
2
0.125
1
1
134d620f82f2fc4584311acf8bcaae4df59bb9e0
.travis.yml
.travis.yml
language: haskell ghc: 7.8 env: - LTS_VER=2.5 sudo: false branches: only: - master - travis addons: apt: packages: - libwebkitgtk-dev before_install: - cabal update - cabal install stackage-cli gtk2hs-buildtools - stackage-sandbox init lts-$LTS_VER - git clone https://github.com/pavelkogan/reactive-banana-gtk.git - cabal sandbox add-source ./reactive-banana-gtk/ cache: directories: - $HOME/.stackage/sandboxes
language: haskell ghc: 7.8 env: - LTS_VER=2.5 sudo: false branches: only: - master - travis addons: apt: packages: - libwebkitgtk-dev before_install: - cabal update - cabal install stackage-cli - stackage-sandbox init lts-$LTS_VER - export PATH=$HOME/.stackage/sandboxes/ghc-$(ghc_find 7.8)/lts-$LTS_VER/bin/:$PATH - cabal install gtk2hs-buildtools - git clone https://github.com/pavelkogan/reactive-banana-gtk.git - cabal sandbox add-source ./reactive-banana-gtk/ cache: directories: - $HOME/.stackage/sandboxes - $HOME/.ghc - $HOME/.cabal before_cache: - rm -f $HOME/.cabal/packages/hackage.haskell.org/00-index.*
Fix buildtools version and cache more
Fix buildtools version and cache more
YAML
bsd-3-clause
pavelkogan/NeuroSpider
yaml
## Code Before: language: haskell ghc: 7.8 env: - LTS_VER=2.5 sudo: false branches: only: - master - travis addons: apt: packages: - libwebkitgtk-dev before_install: - cabal update - cabal install stackage-cli gtk2hs-buildtools - stackage-sandbox init lts-$LTS_VER - git clone https://github.com/pavelkogan/reactive-banana-gtk.git - cabal sandbox add-source ./reactive-banana-gtk/ cache: directories: - $HOME/.stackage/sandboxes ## Instruction: Fix buildtools version and cache more ## Code After: language: haskell ghc: 7.8 env: - LTS_VER=2.5 sudo: false branches: only: - master - travis addons: apt: packages: - libwebkitgtk-dev before_install: - cabal update - cabal install stackage-cli - stackage-sandbox init lts-$LTS_VER - export PATH=$HOME/.stackage/sandboxes/ghc-$(ghc_find 7.8)/lts-$LTS_VER/bin/:$PATH - cabal install gtk2hs-buildtools - git clone https://github.com/pavelkogan/reactive-banana-gtk.git - cabal sandbox add-source ./reactive-banana-gtk/ cache: directories: - $HOME/.stackage/sandboxes - $HOME/.ghc - $HOME/.cabal before_cache: - rm -f $HOME/.cabal/packages/hackage.haskell.org/00-index.*
language: haskell ghc: 7.8 env: - LTS_VER=2.5 sudo: false branches: only: - master - travis addons: apt: packages: - libwebkitgtk-dev before_install: - cabal update - - cabal install stackage-cli gtk2hs-buildtools ? ------------------ + - cabal install stackage-cli - stackage-sandbox init lts-$LTS_VER + - export PATH=$HOME/.stackage/sandboxes/ghc-$(ghc_find 7.8)/lts-$LTS_VER/bin/:$PATH + - cabal install gtk2hs-buildtools - git clone https://github.com/pavelkogan/reactive-banana-gtk.git - cabal sandbox add-source ./reactive-banana-gtk/ cache: directories: - $HOME/.stackage/sandboxes + - $HOME/.ghc + - $HOME/.cabal + before_cache: + - rm -f $HOME/.cabal/packages/hackage.haskell.org/00-index.*
8
0.333333
7
1
4a00935290162ebc1c205480b777f39f6c4a30e2
packages/gi/github-webhook-handler-snap.yaml
packages/gi/github-webhook-handler-snap.yaml
homepage: '' changelog-type: '' hash: 5742b5299b7bfb21630b9c53355cbe479edfc30a2421a93ff42fe845b23c6787 test-bench-deps: {} maintainer: [email protected] synopsis: GitHub WebHook Handler implementation for Snap changelog: '' basic-deps: bytestring: -any case-insensitive: -any base: ! '>=4 && <4.9' uuid: -any github-types: ! '>=0.2 && <1' github-webhook-handler: ! '>=0.0.4 && <1' snap-core: -any all-versions: - '0.0.1' - '0.0.2' - '0.0.3' - '0.0.4' author: Tomas Carnecky latest: '0.0.4' description-type: haddock description: '...' license-name: MIT
homepage: '' changelog-type: '' hash: 9a86b84cf5e3919199b016612231e24147c4d9d21cbaa4ba552b64d7bd70df8c test-bench-deps: {} maintainer: [email protected] synopsis: GitHub WebHook Handler implementation for Snap changelog: '' basic-deps: bytestring: -any case-insensitive: -any base: ! '>=4 && <4.9' uuid: -any github-types: ! '>=0.2 && <1' github-webhook-handler: ! '>=0.0.5 && <1' snap-core: -any all-versions: - '0.0.1' - '0.0.2' - '0.0.3' - '0.0.4' - '0.0.5' author: Tomas Carnecky latest: '0.0.5' description-type: haddock description: '...' license-name: MIT
Update from Hackage at 2015-10-19T22:17:13+0000
Update from Hackage at 2015-10-19T22:17:13+0000
YAML
mit
commercialhaskell/all-cabal-metadata
yaml
## Code Before: homepage: '' changelog-type: '' hash: 5742b5299b7bfb21630b9c53355cbe479edfc30a2421a93ff42fe845b23c6787 test-bench-deps: {} maintainer: [email protected] synopsis: GitHub WebHook Handler implementation for Snap changelog: '' basic-deps: bytestring: -any case-insensitive: -any base: ! '>=4 && <4.9' uuid: -any github-types: ! '>=0.2 && <1' github-webhook-handler: ! '>=0.0.4 && <1' snap-core: -any all-versions: - '0.0.1' - '0.0.2' - '0.0.3' - '0.0.4' author: Tomas Carnecky latest: '0.0.4' description-type: haddock description: '...' license-name: MIT ## Instruction: Update from Hackage at 2015-10-19T22:17:13+0000 ## Code After: homepage: '' changelog-type: '' hash: 9a86b84cf5e3919199b016612231e24147c4d9d21cbaa4ba552b64d7bd70df8c test-bench-deps: {} maintainer: [email protected] synopsis: GitHub WebHook Handler implementation for Snap changelog: '' basic-deps: bytestring: -any case-insensitive: -any base: ! '>=4 && <4.9' uuid: -any github-types: ! '>=0.2 && <1' github-webhook-handler: ! '>=0.0.5 && <1' snap-core: -any all-versions: - '0.0.1' - '0.0.2' - '0.0.3' - '0.0.4' - '0.0.5' author: Tomas Carnecky latest: '0.0.5' description-type: haddock description: '...' license-name: MIT
homepage: '' changelog-type: '' - hash: 5742b5299b7bfb21630b9c53355cbe479edfc30a2421a93ff42fe845b23c6787 + hash: 9a86b84cf5e3919199b016612231e24147c4d9d21cbaa4ba552b64d7bd70df8c test-bench-deps: {} maintainer: [email protected] synopsis: GitHub WebHook Handler implementation for Snap changelog: '' basic-deps: bytestring: -any case-insensitive: -any base: ! '>=4 && <4.9' uuid: -any github-types: ! '>=0.2 && <1' - github-webhook-handler: ! '>=0.0.4 && <1' ? ^ + github-webhook-handler: ! '>=0.0.5 && <1' ? ^ snap-core: -any all-versions: - '0.0.1' - '0.0.2' - '0.0.3' - '0.0.4' + - '0.0.5' author: Tomas Carnecky - latest: '0.0.4' ? ^ + latest: '0.0.5' ? ^ description-type: haddock description: '...' license-name: MIT
7
0.28
4
3
06134021977e7e110b5312960a731f86de613b32
spec/components/pass_component_spec.rb
spec/components/pass_component_spec.rb
require 'rails_helper' RSpec.describe PassComponent, type: :component do subject { page } let(:component) { described_class.new(plan: :personal, site: site) } let(:site) do create( :site, ticket_personal_price: 1000 ) end before do with_request_url('/') { render_inline(component) } end it { is_expected.to have_text('一般票') } it { is_expected.to have_text('NTD $1000') } context 'when in early bird period' do let(:site) do create( :site, ticket_personal_price: 1000, ticket_early_personal_price: 800, ticket_early_bird_due_to: Time.zone.parse('2022-08-31') ) end before { travel_to Time.zone.parse('2022-08-25') } it { is_expected.to have_text('早鳥票') } it { is_expected.to have_text('NTD $800') } it { is_expected.to have_text('早鳥票優惠至 2022年08月31日 截止') } end end
require 'rails_helper' RSpec.describe PassComponent, type: :component do subject { page } let(:component) { described_class.new(plan: :personal, site: site) } let(:site) do create( :site, ticket_personal_price: 1000 ) end before do with_request_url('/') { render_inline(component) } end it { is_expected.to have_text('一般票') } it { is_expected.to have_text('NTD $1000') } context 'when in early bird period' do let(:site) do create( :site, ticket_personal_price: 1000, ticket_early_personal_price: 800, ticket_early_bird_due_to: Time.zone.parse('2022-08-31') ) end around { |example| travel_to(Time.zone.parse('2022-08-25')) { example.run } } it { is_expected.to have_text('早鳥票') } it { is_expected.to have_text('NTD $800') } it { is_expected.to have_text('早鳥票優惠至 2022年08月31日 截止') } end end
Fix PassComponent not freeze in correct time
Fix PassComponent not freeze in correct time
Ruby
apache-2.0
TGDF/official-site,TGDF/official-site,TGDF/official-site,TGDF/official-site
ruby
## Code Before: require 'rails_helper' RSpec.describe PassComponent, type: :component do subject { page } let(:component) { described_class.new(plan: :personal, site: site) } let(:site) do create( :site, ticket_personal_price: 1000 ) end before do with_request_url('/') { render_inline(component) } end it { is_expected.to have_text('一般票') } it { is_expected.to have_text('NTD $1000') } context 'when in early bird period' do let(:site) do create( :site, ticket_personal_price: 1000, ticket_early_personal_price: 800, ticket_early_bird_due_to: Time.zone.parse('2022-08-31') ) end before { travel_to Time.zone.parse('2022-08-25') } it { is_expected.to have_text('早鳥票') } it { is_expected.to have_text('NTD $800') } it { is_expected.to have_text('早鳥票優惠至 2022年08月31日 截止') } end end ## Instruction: Fix PassComponent not freeze in correct time ## Code After: require 'rails_helper' RSpec.describe PassComponent, type: :component do subject { page } let(:component) { described_class.new(plan: :personal, site: site) } let(:site) do create( :site, ticket_personal_price: 1000 ) end before do with_request_url('/') { render_inline(component) } end it { is_expected.to have_text('一般票') } it { is_expected.to have_text('NTD $1000') } context 'when in early bird period' do let(:site) do create( :site, ticket_personal_price: 1000, ticket_early_personal_price: 800, ticket_early_bird_due_to: Time.zone.parse('2022-08-31') ) end around { |example| travel_to(Time.zone.parse('2022-08-25')) { example.run } } it { is_expected.to have_text('早鳥票') } it { is_expected.to have_text('NTD $800') } it { is_expected.to have_text('早鳥票優惠至 2022年08月31日 截止') } end end
require 'rails_helper' RSpec.describe PassComponent, type: :component do subject { page } let(:component) { described_class.new(plan: :personal, site: site) } let(:site) do create( :site, ticket_personal_price: 1000 ) end before do with_request_url('/') { render_inline(component) } end it { is_expected.to have_text('一般票') } it { is_expected.to have_text('NTD $1000') } context 'when in early bird period' do let(:site) do create( :site, ticket_personal_price: 1000, ticket_early_personal_price: 800, ticket_early_bird_due_to: Time.zone.parse('2022-08-31') ) end - before { travel_to Time.zone.parse('2022-08-25') } + around { |example| travel_to(Time.zone.parse('2022-08-25')) { example.run } } it { is_expected.to have_text('早鳥票') } it { is_expected.to have_text('NTD $800') } it { is_expected.to have_text('早鳥票優惠至 2022年08月31日 截止') } end end
2
0.052632
1
1
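In the record above, a setup-only `before { travel_to ... }` is replaced by an `around` hook, so the manipulated clock wraps everything the example runs and is unwound afterwards. The same wrap-versus-setup distinction can be shown outside RSpec; the sketch below is a purely illustrative Python example (standard library only; helper names such as `frozen_time` and `example` are invented for the illustration and are not part of the record):

```python
import datetime
from contextlib import contextmanager
from unittest import mock

@contextmanager
def frozen_time(fake_now):
    """Make datetime.datetime.now() return fake_now inside the block; the
    patch is removed when the block exits, even if the body raises."""
    class _Frozen(datetime.datetime):
        @classmethod
        def now(cls, tz=None):
            return fake_now

    with mock.patch("datetime.datetime", _Frozen):
        yield

def example():
    # Everything the example body does sees the frozen clock.
    assert datetime.datetime.now() == datetime.datetime(2022, 8, 25)

# Wrap style (analogous to an `around` hook): freeze, run, always restore.
with frozen_time(datetime.datetime(2022, 8, 25)):
    example()

print("clock frozen inside the block, real clock restored afterwards")
```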
49b0dfc3d86755ff8c7522dacde48d25906187e8
features/convert_number_to_words.feature
features/convert_number_to_words.feature
Feature: Convert number to words Write some code that will accept an amount and convert it to the appropriate string representation. Example: Convert 2523.04 to "Two thousand five hundred twenty-three and 04/100 dollars" Scenario: I need help writing amounts on my checks When I run `convert 2523.04` Then the output should contain exactly: """ Two thousand five hundred twenty-three and 04/100 dollars """
Feature: Convert number to words Write some code that will accept an amount and convert it to the appropriate string representation. Example: Convert 2523.04 to "Two thousand five hundred twenty-three and 04/100 dollars" Scenario Outline: I need help writing amounts on my checks When I run `convert <Amount>` Then the output should contain exactly "<String Representation>" Examples: | Amount | String Representation | | 2523.04 | Two thousand five hundred twenty-three and 04/100 dollars |
Refactor UAT while green. This sets me up for triangulation.
Refactor UAT while green. This sets me up for triangulation.
Cucumber
mit
gtempus/converter
cucumber
## Code Before: Feature: Convert number to words Write some code that will accept an amount and convert it to the appropriate string representation. Example: Convert 2523.04 to "Two thousand five hundred twenty-three and 04/100 dollars" Scenario: I need help writing amounts on my checks When I run `convert 2523.04` Then the output should contain exactly: """ Two thousand five hundred twenty-three and 04/100 dollars """ ## Instruction: Refactor UAT while green. This sets me up for triangulation. ## Code After: Feature: Convert number to words Write some code that will accept an amount and convert it to the appropriate string representation. Example: Convert 2523.04 to "Two thousand five hundred twenty-three and 04/100 dollars" Scenario Outline: I need help writing amounts on my checks When I run `convert <Amount>` Then the output should contain exactly "<String Representation>" Examples: | Amount | String Representation | | 2523.04 | Two thousand five hundred twenty-three and 04/100 dollars |
Feature: Convert number to words Write some code that will accept an amount and convert it to the appropriate string representation. Example: Convert 2523.04 to "Two thousand five hundred twenty-three and 04/100 dollars" - Scenario: I need help writing amounts on my checks + Scenario Outline: I need help writing amounts on my checks ? ++++++++ - When I run `convert 2523.04` ? ^^^^^^^ + When I run `convert <Amount>` ? ^^^^^^^^ - Then the output should contain exactly: ? ^ + Then the output should contain exactly "<String Representation>" ? ^^^^^^^^^^^^^^^^^^^^^^^^^^ - """ + + Examples: + | Amount | String Representation | - Two thousand five hundred twenty-three and 04/100 dollars + | 2523.04 | Two thousand five hundred twenty-three and 04/100 dollars | ? ++++++++++++ ++ - """ +
14
1.272727
8
6
59c7dae67979be47225c698ec6e5facec4d122b5
README.md
README.md
lighthouse-client ================= UI assets and code for Lighthouse. ## Development Be sure the `lighthouse` and `lighthouse-client` repositories are at the same level of the filesystem. Then, install our node dependencies, do a dev build, and optionally start the js watcher (which will automatically do a dev build when files change). * `bower install` * `npm install -g gulp` * `npm install` * `gulp dev build` * `gulp watch` (optional) * `gulp clean` removes built assets from the server static dir (see Gulpfile.js) * `gulp prod build` will do the same as `dev`, but with minified js * fire up a webserver from `lighthouse/static` and navigate to `localhost` at the port specified by the server New angular modules should follow the same pattern as `instances`. Generally speaking, each major piece of functionality should be pulled out into it's own module and given a unique name to link into the root module. (see `app/js/app.js`) ### Running unit tests * `npm install -g jasmine-node` * `npm install -g karma-cli` * (from root) `karma start` Happy coding!
lighthouse-client ================= UI assets and code for Lighthouse. ## Requirements * [node](http://nodejs.org/) * [lighthouse](https://github.com/lighthouse/lighthouse) ## Development Then, install our node dependencies, do a dev build, and optionally start the js watcher (which will automatically do a dev build when files change). * `npm install -g bower` * `bower install` * `npm install -g gulp` * `npm install` * `gulp dev build` * `gulp watch` (optional) * `gulp clean` removes built assets from the server static dir (see Gulpfile.js) * `gulp prod build` will do the same as `dev`, but with minified js * fire up a webserver from `lighthouse/static` and navigate to `localhost` at the port specified by the server New angular modules should follow the same pattern as `instances`. Generally speaking, each major piece of functionality should be pulled out into it's own module and given a unique name to link into the root module. (see `app/js/app.js`) ### Running unit tests * `npm install -g jasmine-node` * `npm install -g karma-cli` * (from root) `karma start` Happy coding!
Update read me for new client build process
Update read me for new client build process
Markdown
apache-2.0
lighthouse/harbor,lighthouse/harbor
markdown
## Code Before: lighthouse-client ================= UI assets and code for Lighthouse. ## Development Be sure the `lighthouse` and `lighthouse-client` repositories are at the same level of the filesystem. Then, install our node dependencies, do a dev build, and optionally start the js watcher (which will automatically do a dev build when files change). * `bower install` * `npm install -g gulp` * `npm install` * `gulp dev build` * `gulp watch` (optional) * `gulp clean` removes built assets from the server static dir (see Gulpfile.js) * `gulp prod build` will do the same as `dev`, but with minified js * fire up a webserver from `lighthouse/static` and navigate to `localhost` at the port specified by the server New angular modules should follow the same pattern as `instances`. Generally speaking, each major piece of functionality should be pulled out into it's own module and given a unique name to link into the root module. (see `app/js/app.js`) ### Running unit tests * `npm install -g jasmine-node` * `npm install -g karma-cli` * (from root) `karma start` Happy coding! ## Instruction: Update read me for new client build process ## Code After: lighthouse-client ================= UI assets and code for Lighthouse. ## Requirements * [node](http://nodejs.org/) * [lighthouse](https://github.com/lighthouse/lighthouse) ## Development Then, install our node dependencies, do a dev build, and optionally start the js watcher (which will automatically do a dev build when files change). * `npm install -g bower` * `bower install` * `npm install -g gulp` * `npm install` * `gulp dev build` * `gulp watch` (optional) * `gulp clean` removes built assets from the server static dir (see Gulpfile.js) * `gulp prod build` will do the same as `dev`, but with minified js * fire up a webserver from `lighthouse/static` and navigate to `localhost` at the port specified by the server New angular modules should follow the same pattern as `instances`. Generally speaking, each major piece of functionality should be pulled out into it's own module and given a unique name to link into the root module. (see `app/js/app.js`) ### Running unit tests * `npm install -g jasmine-node` * `npm install -g karma-cli` * (from root) `karma start` Happy coding!
lighthouse-client ================= UI assets and code for Lighthouse. + ## Requirements + + * [node](http://nodejs.org/) + * [lighthouse](https://github.com/lighthouse/lighthouse) + ## Development - Be sure the `lighthouse` and `lighthouse-client` repositories are at the same level of the filesystem. Then, install our node dependencies, do a dev build, and optionally start the js watcher (which will automatically do a dev build when files change). + * `npm install -g bower` * `bower install` * `npm install -g gulp` * `npm install` * `gulp dev build` * `gulp watch` (optional) * `gulp clean` removes built assets from the server static dir (see Gulpfile.js) * `gulp prod build` will do the same as `dev`, but with minified js * fire up a webserver from `lighthouse/static` and navigate to `localhost` at the port specified by the server New angular modules should follow the same pattern as `instances`. Generally speaking, each major piece of functionality should be pulled out into it's own module and given a unique name to link into the root module. (see `app/js/app.js`) ### Running unit tests * `npm install -g jasmine-node` * `npm install -g karma-cli` * (from root) `karma start` Happy coding!
7
0.25
6
1
4c0f0c504286ee4ec5a0977fe50b2b204e3d26d6
.travis.yml
.travis.yml
language: java
language: java notifications: email: recipients: - [email protected] - [email protected] on_success: change on_failure: change
Add @statuswoe as a build status change receiver
Add @statuswoe as a build status change receiver
YAML
mit
markmckenna/robocalypse
yaml
## Code Before: language: java ## Instruction: Add @statuswoe as a build status change receiver ## Code After: language: java notifications: email: recipients: - [email protected] - [email protected] on_success: change on_failure: change
language: java + + notifications: + email: + recipients: + - [email protected] + - [email protected] + on_success: change + on_failure: change
8
8
8
0
f65c00bdc0b8c540d8028aa1515375e97f3ae1b2
README.md
README.md
![screenshot of the graphs that make up the app]() ###Install Install through the [Spiceworks App Center]() ###Contributing Sure! ####Main Tech &amp; Tools - D3.js - Yeoman generator - RSVP - jQuery - Bootstrap - HTML5 + (Sass => CSS3) - Grunt - Bower
![screenshot of the graphs that make up the app]() ###Install Install through the [Spiceworks App Center]() ###Contributing Sure! ####Main Tech &amp; Tools - D3.js - Yeoman generator - RSVP - jQuery - Bootstrap - HTML5 + (Sass => CSS3) - Grunt - Bower ####[On Heroku](https://obscure-mountain-1975.herokuapp.com/)
Add Heroku URL to Readme.
Add Heroku URL to Readme.
Markdown
mit
chrisbodhi/ticket-graph,chrisbodhi/ticket-graph
markdown
## Code Before: ![screenshot of the graphs that make up the app]() ###Install Install through the [Spiceworks App Center]() ###Contributing Sure! ####Main Tech &amp; Tools - D3.js - Yeoman generator - RSVP - jQuery - Bootstrap - HTML5 + (Sass => CSS3) - Grunt - Bower ## Instruction: Add Heroku URL to Readme. ## Code After: ![screenshot of the graphs that make up the app]() ###Install Install through the [Spiceworks App Center]() ###Contributing Sure! ####Main Tech &amp; Tools - D3.js - Yeoman generator - RSVP - jQuery - Bootstrap - HTML5 + (Sass => CSS3) - Grunt - Bower ####[On Heroku](https://obscure-mountain-1975.herokuapp.com/)
![screenshot of the graphs that make up the app]() ###Install Install through the [Spiceworks App Center]() ###Contributing Sure! ####Main Tech &amp; Tools - D3.js - Yeoman generator - RSVP - jQuery - Bootstrap - HTML5 + (Sass => CSS3) - Grunt - Bower + + ####[On Heroku](https://obscure-mountain-1975.herokuapp.com/) +
3
0.157895
3
0
4918a336753c8650f87dc1ce98080fffef782ec7
web/static/scss/_variables.scss
web/static/scss/_variables.scss
// Variables not seen here can be found in materialize/components/_variables.scss $off-white: color("grey", "lighten-5"); $primary-color-light: #B2EBF2; $primary-text: rgba(color("grey", "darken-4"), 0.77); $secondary-text: color("grey", "darken-1"); $divider-color: color("grey", "lighten-1"); $faded-text: rgba($primary-text, 0.54); $success: rgb(105, 212, 119); $error: $error-color; $alternate-font: 'Source Code Pro';
// Variables not seen here can be found in materialize/components/_variables.scss $off-white: color("grey", "lighten-5"); $primary-color-light: lighten($primary-color, 40%); $primary-text: rgba(color("grey", "darken-4"), 0.77); $secondary-text: color("grey", "darken-1"); $divider-color: color("grey", "lighten-1"); $light-grey: color("grey", "lighten-4"); $faded-text: rgba($primary-text, 0.54); $success: rgb(105, 212, 119); $error: $error-color; $alternate-font: 'Source Code Pro';
Use Sass built-in methods of `lighten` and `darken`
Use Sass built-in methods of `lighten` and `darken`
SCSS
mit
az-webdevs/azwebdevs.org,az-webdevs/azwebdevs.org
scss
## Code Before: // Variables not seen here can be found in materialize/components/_variables.scss $off-white: color("grey", "lighten-5"); $primary-color-light: #B2EBF2; $primary-text: rgba(color("grey", "darken-4"), 0.77); $secondary-text: color("grey", "darken-1"); $divider-color: color("grey", "lighten-1"); $faded-text: rgba($primary-text, 0.54); $success: rgb(105, 212, 119); $error: $error-color; $alternate-font: 'Source Code Pro'; ## Instruction: Use Sass built-in methods of `lighten` and `darken` ## Code After: // Variables not seen here can be found in materialize/components/_variables.scss $off-white: color("grey", "lighten-5"); $primary-color-light: lighten($primary-color, 40%); $primary-text: rgba(color("grey", "darken-4"), 0.77); $secondary-text: color("grey", "darken-1"); $divider-color: color("grey", "lighten-1"); $light-grey: color("grey", "lighten-4"); $faded-text: rgba($primary-text, 0.54); $success: rgb(105, 212, 119); $error: $error-color; $alternate-font: 'Source Code Pro';
// Variables not seen here can be found in materialize/components/_variables.scss $off-white: color("grey", "lighten-5"); - $primary-color-light: #B2EBF2; + $primary-color-light: lighten($primary-color, 40%); $primary-text: rgba(color("grey", "darken-4"), 0.77); $secondary-text: color("grey", "darken-1"); $divider-color: color("grey", "lighten-1"); + $light-grey: color("grey", "lighten-4"); $faded-text: rgba($primary-text, 0.54); $success: rgb(105, 212, 119); $error: $error-color; $alternate-font: 'Source Code Pro';
3
0.214286
2
1
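The change above relies on Sass's built-in `lighten($primary-color, 40%)`, which raises the colour's HSL lightness by 40 percentage points instead of hard-coding the lighter hex value. The operation can be approximated with the Python standard library; the snippet below is only an illustration (the input colour is an arbitrary example, not a value taken from the project):

```python
import colorsys

def lighten(hex_color, amount):
    """Rough equivalent of Sass lighten(): add `amount` (0..1) to the
    HSL lightness of a hex colour, clamped at 1.0."""
    hex_color = hex_color.lstrip("#")
    r, g, b = (int(hex_color[i:i + 2], 16) / 255.0 for i in (0, 2, 4))
    h, l, s = colorsys.rgb_to_hls(r, g, b)   # colorsys uses H, L, S order
    r, g, b = colorsys.hls_to_rgb(h, min(1.0, l + amount), s)
    return "#{:02X}{:02X}{:02X}".format(round(r * 255), round(g * 255), round(b * 255))

print(lighten("#00BCD4", 0.40))  # lighten an example cyan by 40 percentage points
```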
5b424809b2d88dd59d104316f73f8417eb1e0e1e
.vscode/settings.json
.vscode/settings.json
// Place your settings in this file to overwrite default and user settings. { "files.exclude": { "**/.git": true, "**/.DS_Store": true, "src/**/*.js": true, "test/**/*.js": true, "**/es": true, "**/lib": true, "**/dts": true, "**/coverage": true, "**/dist": true, "**/docs": true, "type_definitions/**/*.js": true } }
// Place your settings in this file to overwrite default and user settings. { "files.exclude": { "**/.git": true, "**/.DS_Store": true, "src/**/*.js": true, "test/**/*.js": true, "**/es": true, "**/lib": true, "**/amd": true, "**/dts": true, "**/coverage": true, "**/dist": true, "**/docs": true, "type_definitions/**/*.js": true } }
Hide amd from project explorer in vscode
Hide amd from project explorer in vscode
JSON
mit
inversify/inversify-binding-decorators,inversify/inversify-binding-decorators
json
## Code Before: // Place your settings in this file to overwrite default and user settings. { "files.exclude": { "**/.git": true, "**/.DS_Store": true, "src/**/*.js": true, "test/**/*.js": true, "**/es": true, "**/lib": true, "**/dts": true, "**/coverage": true, "**/dist": true, "**/docs": true, "type_definitions/**/*.js": true } } ## Instruction: Hide amd from project explorer in vscode ## Code After: // Place your settings in this file to overwrite default and user settings. { "files.exclude": { "**/.git": true, "**/.DS_Store": true, "src/**/*.js": true, "test/**/*.js": true, "**/es": true, "**/lib": true, "**/amd": true, "**/dts": true, "**/coverage": true, "**/dist": true, "**/docs": true, "type_definitions/**/*.js": true } }
// Place your settings in this file to overwrite default and user settings. { "files.exclude": { "**/.git": true, "**/.DS_Store": true, "src/**/*.js": true, "test/**/*.js": true, "**/es": true, "**/lib": true, + "**/amd": true, "**/dts": true, "**/coverage": true, "**/dist": true, "**/docs": true, "type_definitions/**/*.js": true } }
1
0.0625
1
0
e693352b2c9787748cb1dcf2bfd6e134292bfa6a
setup.py
setup.py
import sys, os from setuptools import setup, Extension, find_packages import numpy kws = {} if not int(os.getenv( 'DISABLE_INSTALL_REQUIRES','0' )): kws['install_requires'] = [ 'numpy>=1.0.4', ] setup(name="motmot.imops", description="image format conversion (e.g. between MONO8, YUV422, and RGB)", long_description = """ This is a subpackage of the motmot family of digital image utilities. """, version="0.5.2", license="BSD", maintainer="Andrew Straw", maintainer_email="[email protected]", url="http://code.astraw.com/projects/motmot/imops.html", packages = find_packages(), namespace_packages = ['motmot'], ext_modules=[Extension(name="motmot.imops.imops", sources=['src/imops.pyx','src/color_convert.c',], include_dirs=[numpy.get_include()], ), ], zip_safe = True, **kws)
import sys, os from setuptools import setup, Extension, find_packages import numpy # Note when building sdist package: # Make sure to generate src/imops.c with "pyrexc src/imops.pyx". kws = {} if not int(os.getenv( 'DISABLE_INSTALL_REQUIRES','0' )): kws['install_requires'] = [ 'numpy>=1.0.4', ] setup(name="motmot.imops", description="image format conversion (e.g. between MONO8, YUV422, and RGB)", long_description = """ This is a subpackage of the motmot family of digital image utilities. """, version="0.5.2", license="BSD", maintainer="Andrew Straw", maintainer_email="[email protected]", url="http://code.astraw.com/projects/motmot/imops.html", packages = find_packages(), namespace_packages = ['motmot'], ext_modules=[Extension(name="motmot.imops.imops", sources=['src/imops.c','src/color_convert.c',], include_dirs=[numpy.get_include()], ), ], zip_safe = True, **kws)
Add note about building sdist
Add note about building sdist
Python
bsd-3-clause
motmot/imops,motmot/imops
python
## Code Before: import sys, os from setuptools import setup, Extension, find_packages import numpy kws = {} if not int(os.getenv( 'DISABLE_INSTALL_REQUIRES','0' )): kws['install_requires'] = [ 'numpy>=1.0.4', ] setup(name="motmot.imops", description="image format conversion (e.g. between MONO8, YUV422, and RGB)", long_description = """ This is a subpackage of the motmot family of digital image utilities. """, version="0.5.2", license="BSD", maintainer="Andrew Straw", maintainer_email="[email protected]", url="http://code.astraw.com/projects/motmot/imops.html", packages = find_packages(), namespace_packages = ['motmot'], ext_modules=[Extension(name="motmot.imops.imops", sources=['src/imops.pyx','src/color_convert.c',], include_dirs=[numpy.get_include()], ), ], zip_safe = True, **kws) ## Instruction: Add note about building sdist ## Code After: import sys, os from setuptools import setup, Extension, find_packages import numpy # Note when building sdist package: # Make sure to generate src/imops.c with "pyrexc src/imops.pyx". kws = {} if not int(os.getenv( 'DISABLE_INSTALL_REQUIRES','0' )): kws['install_requires'] = [ 'numpy>=1.0.4', ] setup(name="motmot.imops", description="image format conversion (e.g. between MONO8, YUV422, and RGB)", long_description = """ This is a subpackage of the motmot family of digital image utilities. """, version="0.5.2", license="BSD", maintainer="Andrew Straw", maintainer_email="[email protected]", url="http://code.astraw.com/projects/motmot/imops.html", packages = find_packages(), namespace_packages = ['motmot'], ext_modules=[Extension(name="motmot.imops.imops", sources=['src/imops.c','src/color_convert.c',], include_dirs=[numpy.get_include()], ), ], zip_safe = True, **kws)
import sys, os from setuptools import setup, Extension, find_packages import numpy + + # Note when building sdist package: + # Make sure to generate src/imops.c with "pyrexc src/imops.pyx". kws = {} if not int(os.getenv( 'DISABLE_INSTALL_REQUIRES','0' )): kws['install_requires'] = [ 'numpy>=1.0.4', ] setup(name="motmot.imops", description="image format conversion (e.g. between MONO8, YUV422, and RGB)", long_description = """ This is a subpackage of the motmot family of digital image utilities. """, version="0.5.2", license="BSD", maintainer="Andrew Straw", maintainer_email="[email protected]", url="http://code.astraw.com/projects/motmot/imops.html", packages = find_packages(), namespace_packages = ['motmot'], ext_modules=[Extension(name="motmot.imops.imops", - sources=['src/imops.pyx','src/color_convert.c',], ? ^^^ + sources=['src/imops.c','src/color_convert.c',], ? ^ include_dirs=[numpy.get_include()], ), ], zip_safe = True, **kws)
5
0.166667
4
1
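The record above swaps the Extension source from `src/imops.pyx` to the generated `src/imops.c` and notes that the C file must be regenerated with `pyrexc` before building an sdist, so users installing the package do not need Pyrex. A common way to express the same intent is sketched below; this is only a hedged illustration, not the project's actual setup (it omits real build details such as the NumPy include directories and assumes Cython rather than Pyrex):

```python
from setuptools import setup, Extension

# Use Cython when it is installed; otherwise build from the pre-generated
# C file that was shipped inside the sdist.
try:
    from Cython.Build import cythonize
    have_cython = True
except ImportError:
    have_cython = False

source = "src/imops.pyx" if have_cython else "src/imops.c"
extensions = [Extension("motmot.imops.imops",
                        sources=[source, "src/color_convert.c"])]

setup(
    name="motmot.imops",
    ext_modules=cythonize(extensions) if have_cython else extensions,
)
```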
f7c813f08df006fb2bf11d15c04000d6692ce773
config/templates/bisulfite_seq.yaml
config/templates/bisulfite_seq.yaml
details: - analysis: wgbs-seq algorithm: aligner: bsmap # bismark ; this option will set up the same tool for calling and alignment description: test genome_build: hg19 # files: [../../raw/test_R1.fastq, ../../raw/test_R2.fastq]
details: - analysis: wgbs-seq algorithm: aligner: bismark # this option will set up the same tool for calling and alignment description: test genome_build: hg19 # files: [../../raw/test_R1.fastq, ../../raw/test_R2.fastq]
Set bismark as default aligner in bisufile_seq config template
Set bismark as default aligner in bisufile_seq config template
YAML
mit
chapmanb/bcbio-nextgen,vladsaveliev/bcbio-nextgen,a113n/bcbio-nextgen,a113n/bcbio-nextgen,lbeltrame/bcbio-nextgen,vladsaveliev/bcbio-nextgen,vladsaveliev/bcbio-nextgen,a113n/bcbio-nextgen,lbeltrame/bcbio-nextgen,chapmanb/bcbio-nextgen,chapmanb/bcbio-nextgen,lbeltrame/bcbio-nextgen
yaml
## Code Before: details: - analysis: wgbs-seq algorithm: aligner: bsmap # bismark ; this option will set up the same tool for calling and alignment description: test genome_build: hg19 # files: [../../raw/test_R1.fastq, ../../raw/test_R2.fastq] ## Instruction: Set bismark as default aligner in bisufile_seq config template ## Code After: details: - analysis: wgbs-seq algorithm: aligner: bismark # this option will set up the same tool for calling and alignment description: test genome_build: hg19 # files: [../../raw/test_R1.fastq, ../../raw/test_R2.fastq]
details: - analysis: wgbs-seq algorithm: - aligner: bsmap # bismark ; this option will set up the same tool for calling and alignment ? -------- ^ + aligner: bismark # this option will set up the same tool for calling and alignment ? ^^ description: test genome_build: hg19 # files: [../../raw/test_R1.fastq, ../../raw/test_R2.fastq]
2
0.285714
1
1
e631c8aaae672499a3689d1d31d2d89c50944808
setup.cfg
setup.cfg
[metadata] name = oca-maintainers-tools author = OCA - Odoo Community Associatin summary = Set of tools for the management of the Odoo Community projects license = APGL3 description-file = README.md requires-python = >=2.7 classifier = Development Status :: 4 - Beta Environment :: Console Intended Audience :: Developers Intended Audience :: Information Technology License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+) Operating System :: POSIX :: Linux Programming Language :: Python [files] packages = tools [entry_points] console_scripts = oca-github-login = tools.github_login:main oca-copy-maintainers = tools.copy_maintainers:main oca-copy-branches = tools.copy_branches:main oca-clone-everything = tools.clone_everything:main oca-set-repo-labels = tools.set_repo_labels:main oca-odoo-login = tools.odoo_login:main oca-sync-users = tools.oca_sync_users:main oca-autopep8 = tools.autopep8_extended:main oca-tx-pull = tools.tx_pull:main oca-gen-addons-table = tools.gen_addons_table:main oca-migrate-branch = tools.migrate_branch:main
[metadata] name = oca-maintainers-tools author = Odoo Community Association (OCA) summary = Set of tools for the management of the Odoo Community projects license = APGL3 description-file = README.md requires-python = >=2.7 classifier = Development Status :: 4 - Beta Environment :: Console Intended Audience :: Developers Intended Audience :: Information Technology License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+) Operating System :: POSIX :: Linux Programming Language :: Python [files] packages = tools [entry_points] console_scripts = oca-github-login = tools.github_login:main oca-copy-maintainers = tools.copy_maintainers:main oca-copy-branches = tools.copy_branches:main oca-clone-everything = tools.clone_everything:main oca-set-repo-labels = tools.set_repo_labels:main oca-odoo-login = tools.odoo_login:main oca-sync-users = tools.oca_sync_users:main oca-autopep8 = tools.autopep8_extended:main oca-tx-pull = tools.tx_pull:main oca-gen-addons-table = tools.gen_addons_table:main oca-migrate-branch = tools.migrate_branch:main
Fix typo in OCA name and align with name used elsewhere
Fix typo in OCA name and align with name used elsewhere
INI
agpl-3.0
acsone/maintainer-tools,Yajo/maintainer-tools,dreispt/maintainer-tools,Yajo/maintainer-tools,acsone/maintainers-tools,dreispt/maintainer-tools,Endika/maintainer-tools,acsone/maintainers-tools,OCA/maintainer-tools,Endika/maintainer-tools,OCA/maintainer-tools,gurneyalex/maintainers-tools,acsone/maintainers-tools,acsone/maintainer-tools,acsone/maintainer-tools,OCA/maintainer-tools,vauxoo-dev/maintainer-tools,acsone/maintainer-tools,vauxoo-dev/maintainer-tools,dreispt/maintainer-tools,OCA/maintainer-tools,Yajo/maintainer-tools,vauxoo-dev/maintainer-tools,Yajo/maintainer-tools,Endika/maintainer-tools
ini
## Code Before: [metadata] name = oca-maintainers-tools author = OCA - Odoo Community Associatin summary = Set of tools for the management of the Odoo Community projects license = APGL3 description-file = README.md requires-python = >=2.7 classifier = Development Status :: 4 - Beta Environment :: Console Intended Audience :: Developers Intended Audience :: Information Technology License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+) Operating System :: POSIX :: Linux Programming Language :: Python [files] packages = tools [entry_points] console_scripts = oca-github-login = tools.github_login:main oca-copy-maintainers = tools.copy_maintainers:main oca-copy-branches = tools.copy_branches:main oca-clone-everything = tools.clone_everything:main oca-set-repo-labels = tools.set_repo_labels:main oca-odoo-login = tools.odoo_login:main oca-sync-users = tools.oca_sync_users:main oca-autopep8 = tools.autopep8_extended:main oca-tx-pull = tools.tx_pull:main oca-gen-addons-table = tools.gen_addons_table:main oca-migrate-branch = tools.migrate_branch:main ## Instruction: Fix typo in OCA name and align with name used elsewhere ## Code After: [metadata] name = oca-maintainers-tools author = Odoo Community Association (OCA) summary = Set of tools for the management of the Odoo Community projects license = APGL3 description-file = README.md requires-python = >=2.7 classifier = Development Status :: 4 - Beta Environment :: Console Intended Audience :: Developers Intended Audience :: Information Technology License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+) Operating System :: POSIX :: Linux Programming Language :: Python [files] packages = tools [entry_points] console_scripts = oca-github-login = tools.github_login:main oca-copy-maintainers = tools.copy_maintainers:main oca-copy-branches = tools.copy_branches:main oca-clone-everything = tools.clone_everything:main oca-set-repo-labels = tools.set_repo_labels:main oca-odoo-login = tools.odoo_login:main oca-sync-users = tools.oca_sync_users:main oca-autopep8 = tools.autopep8_extended:main oca-tx-pull = tools.tx_pull:main oca-gen-addons-table = tools.gen_addons_table:main oca-migrate-branch = tools.migrate_branch:main
[metadata] name = oca-maintainers-tools - author = OCA - Odoo Community Associatin ? ------ + author = Odoo Community Association (OCA) ? + ++++++ summary = Set of tools for the management of the Odoo Community projects license = APGL3 description-file = README.md requires-python = >=2.7 classifier = Development Status :: 4 - Beta Environment :: Console Intended Audience :: Developers Intended Audience :: Information Technology License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+) Operating System :: POSIX :: Linux Programming Language :: Python [files] packages = tools [entry_points] console_scripts = oca-github-login = tools.github_login:main oca-copy-maintainers = tools.copy_maintainers:main oca-copy-branches = tools.copy_branches:main oca-clone-everything = tools.clone_everything:main oca-set-repo-labels = tools.set_repo_labels:main oca-odoo-login = tools.odoo_login:main oca-sync-users = tools.oca_sync_users:main oca-autopep8 = tools.autopep8_extended:main oca-tx-pull = tools.tx_pull:main oca-gen-addons-table = tools.gen_addons_table:main oca-migrate-branch = tools.migrate_branch:main
2
0.064516
1
1
6322839fd05f5620cdad41e702cb8d3cf71a7683
testing/env_python_3_with_flink_110.yml
testing/env_python_3_with_flink_110.yml
name: python_3_with_flink channels: - conda-forge - defaults dependencies: - pip - pip: - bkzep==0.6.1 - apache-flink==1.10.1 - numpy==1.17.3 - pandas==0.25.0 - scipy==1.3.1 - grpcio==1.19.0 - hvplot==0.5.2 - protobuf==3.10.0 - pandasql==0.7.3 - ipython==7.8.0 - matplotlib==3.0.3 - ipykernel==5.1.2 - jupyter_client==5.3.4 - bokeh==1.3.4 - panel==0.6.0 - holoviews==1.12.3 - pycodestyle==2.5.0
name: python_3_with_flink channels: - conda-forge - defaults dependencies: - pip - pip: - apache-flink==1.10.2 - bkzep==0.6.1 - numpy==1.17.3 - pandas==0.25.0 - scipy==1.3.1 - grpcio==1.34.1 - hvplot==0.5.2 - protobuf==3.10.0 - pandasql==0.7.3 - ipython==7.8.0 - matplotlib==3.0.3 - ipykernel==5.1.2 - jupyter_client==5.3.4 - bokeh==1.3.4 - panel==0.6.0 - holoviews==1.12.3 - pycodestyle==2.5.0
Fix test of flink 1.10
[HotFix] Fix test of flink 1.10
YAML
apache-2.0
jongyoul/incubator-zeppelin,apache/zeppelin,hammertank/zeppelin,hammertank/zeppelin,prabhjyotsingh/zeppelin,prabhjyotsingh/zeppelin,zjffdu/zeppelin,hammertank/zeppelin,fogbeam/zeppelin_mirror,zjffdu/zeppelin,apache/incubator-zeppelin,apache/incubator-zeppelin,zjffdu/zeppelin,fogbeam/zeppelin_mirror,apache/incubator-zeppelin,apache/incubator-zeppelin,hammertank/zeppelin,fogbeam/zeppelin_mirror,prabhjyotsingh/zeppelin,zjffdu/zeppelin,jongyoul/incubator-zeppelin,prabhjyotsingh/zeppelin,zjffdu/zeppelin,fogbeam/zeppelin_mirror,zjffdu/zeppelin,jongyoul/incubator-zeppelin,jongyoul/zeppelin,hammertank/zeppelin,fogbeam/zeppelin_mirror,apache/zeppelin,zjffdu/zeppelin,apache/zeppelin,hammertank/zeppelin,apache/incubator-zeppelin,apache/zeppelin,fogbeam/zeppelin_mirror,jongyoul/zeppelin,prabhjyotsingh/zeppelin,jongyoul/zeppelin,jongyoul/zeppelin,apache/zeppelin,apache/zeppelin,jongyoul/incubator-zeppelin,prabhjyotsingh/zeppelin,prabhjyotsingh/zeppelin,jongyoul/zeppelin,apache/zeppelin,hammertank/zeppelin,jongyoul/incubator-zeppelin,apache/incubator-zeppelin,jongyoul/incubator-zeppelin,jongyoul/zeppelin,jongyoul/zeppelin,fogbeam/zeppelin_mirror
yaml
## Code Before: name: python_3_with_flink channels: - conda-forge - defaults dependencies: - pip - pip: - bkzep==0.6.1 - apache-flink==1.10.1 - numpy==1.17.3 - pandas==0.25.0 - scipy==1.3.1 - grpcio==1.19.0 - hvplot==0.5.2 - protobuf==3.10.0 - pandasql==0.7.3 - ipython==7.8.0 - matplotlib==3.0.3 - ipykernel==5.1.2 - jupyter_client==5.3.4 - bokeh==1.3.4 - panel==0.6.0 - holoviews==1.12.3 - pycodestyle==2.5.0 ## Instruction: [HotFix] Fix test of flink 1.10 ## Code After: name: python_3_with_flink channels: - conda-forge - defaults dependencies: - pip - pip: - apache-flink==1.10.2 - bkzep==0.6.1 - numpy==1.17.3 - pandas==0.25.0 - scipy==1.3.1 - grpcio==1.34.1 - hvplot==0.5.2 - protobuf==3.10.0 - pandasql==0.7.3 - ipython==7.8.0 - matplotlib==3.0.3 - ipykernel==5.1.2 - jupyter_client==5.3.4 - bokeh==1.3.4 - panel==0.6.0 - holoviews==1.12.3 - pycodestyle==2.5.0
name: python_3_with_flink channels: - conda-forge - defaults dependencies: - pip - pip: + - apache-flink==1.10.2 - - bkzep==0.6.1 + - bkzep==0.6.1 ? ++ - - apache-flink==1.10.1 - - numpy==1.17.3 ? -- + - numpy==1.17.3 - - pandas==0.25.0 ? -- + - pandas==0.25.0 - - scipy==1.3.1 ? -- + - scipy==1.3.1 - - grpcio==1.19.0 ? -- --- + - grpcio==1.34.1 ? +++ - - hvplot==0.5.2 ? -- + - hvplot==0.5.2 - - protobuf==3.10.0 ? -- + - protobuf==3.10.0 - - pandasql==0.7.3 ? -- + - pandasql==0.7.3 - - ipython==7.8.0 ? -- + - ipython==7.8.0 - - matplotlib==3.0.3 ? -- + - matplotlib==3.0.3 - - ipykernel==5.1.2 ? -- + - ipykernel==5.1.2 - - jupyter_client==5.3.4 ? -- + - jupyter_client==5.3.4 - - bokeh==1.3.4 ? -- + - bokeh==1.3.4 - - panel==0.6.0 ? -- + - panel==0.6.0 - - holoviews==1.12.3 ? -- + - holoviews==1.12.3 - - pycodestyle==2.5.0 ? -- + - pycodestyle==2.5.0
34
1.416667
17
17
3fd54c1e1e4b06208b3c87f4b228a53f018a9008
src/chrome/content/imgctrl.js
src/chrome/content/imgctrl.js
window.addEventListener("load", function(event) { var menu = document.getElementById("contentAreaContextMenu"); menu.addEventListener("popupshowing", setupMenuItems, false); }, false); function setupMenuItems(event) { var menuItems = ['window', 'tab']; for (var i=0; i < menuItems.length; i++) { var item = document.getElementById('imgctrl-' + menuItems[i]); item.hidden = !gContextMenu.onImage; } } function imgctrl(where) { var url = gContextMenu.imageURL || gContextMenu.mediaURL; switch(where) { case "window": window.open(url); break; default: window.getBrowser().addTab(url); } }
window.addEventListener("load", function(event) { var menu = document.getElementById("contentAreaContextMenu"); if (menu) { menu.addEventListener("popupshowing", setupMenuItems, false); } }, false); function setupMenuItems(event) { var menuItems = ['window', 'tab']; for (var i=0; i < menuItems.length; i++) { var item = document.getElementById('imgctrl-' + menuItems[i]); item.hidden = !gContextMenu.onImage; } } function imgctrl(where) { var url = gContextMenu.imageURL || gContextMenu.mediaURL; switch(where) { case "window": window.open(url); break; default: window.getBrowser().addTab(url); } }
Check if menu exists before attaching events to it.
Check if menu exists before attaching events to it.
JavaScript
bsd-3-clause
djl/imgctrl
javascript
## Code Before: window.addEventListener("load", function(event) { var menu = document.getElementById("contentAreaContextMenu"); menu.addEventListener("popupshowing", setupMenuItems, false); }, false); function setupMenuItems(event) { var menuItems = ['window', 'tab']; for (var i=0; i < menuItems.length; i++) { var item = document.getElementById('imgctrl-' + menuItems[i]); item.hidden = !gContextMenu.onImage; } } function imgctrl(where) { var url = gContextMenu.imageURL || gContextMenu.mediaURL; switch(where) { case "window": window.open(url); break; default: window.getBrowser().addTab(url); } } ## Instruction: Check if menu exists before attaching events to it. ## Code After: window.addEventListener("load", function(event) { var menu = document.getElementById("contentAreaContextMenu"); if (menu) { menu.addEventListener("popupshowing", setupMenuItems, false); } }, false); function setupMenuItems(event) { var menuItems = ['window', 'tab']; for (var i=0; i < menuItems.length; i++) { var item = document.getElementById('imgctrl-' + menuItems[i]); item.hidden = !gContextMenu.onImage; } } function imgctrl(where) { var url = gContextMenu.imageURL || gContextMenu.mediaURL; switch(where) { case "window": window.open(url); break; default: window.getBrowser().addTab(url); } }
window.addEventListener("load", function(event) { var menu = document.getElementById("contentAreaContextMenu"); + if (menu) { - menu.addEventListener("popupshowing", setupMenuItems, false); + menu.addEventListener("popupshowing", setupMenuItems, false); ? ++++ + } }, false); function setupMenuItems(event) { var menuItems = ['window', 'tab']; for (var i=0; i < menuItems.length; i++) { var item = document.getElementById('imgctrl-' + menuItems[i]); item.hidden = !gContextMenu.onImage; } } function imgctrl(where) { var url = gContextMenu.imageURL || gContextMenu.mediaURL; switch(where) { case "window": window.open(url); break; default: window.getBrowser().addTab(url); } }
4
0.173913
3
1
d338347f7d5fd6e36a91f732d7bc495daccb186c
settings/ctrlp.vim
settings/ctrlp.vim
nnoremap <Leader>t :CtrlP<cr> " Mimic Command-T behaviour let g:ctrlp_working_path_mode = 0 let g:ctrlp_match_window_reversed = 0 let g:ctrlp_clear_cache_on_exit=0 if executable('ag') " Use ag in CtrlP for listing files. Lightning fast and respects .gitignore let g:ctrlp_user_command = 'ag %s -l --nocolor -g ""' endif
nnoremap <Leader>t :CtrlP<cr> nnoremap <Leader>r :CtrlPMRUFiles<cr> " Mimic Command-T behaviour let g:ctrlp_working_path_mode = 0 let g:ctrlp_match_window_reversed = 0 let g:ctrlp_clear_cache_on_exit=0 if executable('ag') " Use ag in CtrlP for listing files. Lightning fast and respects .gitignore let g:ctrlp_user_command = 'ag %s -l --nocolor -g ""' endif
Add most recently opened files quicksearch shortcut
Add most recently opened files quicksearch shortcut
VimL
mit
erikfercak/dotvim
viml
## Code Before: nnoremap <Leader>t :CtrlP<cr> " Mimic Command-T behaviour let g:ctrlp_working_path_mode = 0 let g:ctrlp_match_window_reversed = 0 let g:ctrlp_clear_cache_on_exit=0 if executable('ag') " Use ag in CtrlP for listing files. Lightning fast and respects .gitignore let g:ctrlp_user_command = 'ag %s -l --nocolor -g ""' endif ## Instruction: Add most recently opened files quicksearch shortcut ## Code After: nnoremap <Leader>t :CtrlP<cr> nnoremap <Leader>r :CtrlPMRUFiles<cr> " Mimic Command-T behaviour let g:ctrlp_working_path_mode = 0 let g:ctrlp_match_window_reversed = 0 let g:ctrlp_clear_cache_on_exit=0 if executable('ag') " Use ag in CtrlP for listing files. Lightning fast and respects .gitignore let g:ctrlp_user_command = 'ag %s -l --nocolor -g ""' endif
nnoremap <Leader>t :CtrlP<cr> + nnoremap <Leader>r :CtrlPMRUFiles<cr> " Mimic Command-T behaviour let g:ctrlp_working_path_mode = 0 let g:ctrlp_match_window_reversed = 0 let g:ctrlp_clear_cache_on_exit=0 if executable('ag') " Use ag in CtrlP for listing files. Lightning fast and respects .gitignore let g:ctrlp_user_command = 'ag %s -l --nocolor -g ""' endif
1
0.090909
1
0
b635eddbe3ad344b02ecae47333a4ddf4b17cd18
bin/remotePush.py
bin/remotePush.py
import json,httplib config_file = open('conf/net/ext_service/parse.json') silent_push_msg = { "where": { "deviceType": "ios" }, "data": { # "alert": "The Mets scored! The game is now tied 1-1.", "content-available": 1, "sound": "", } } parse_headers = { "X-Parse-Application-Id": config_file["emission_id"], "X-Parse-REST-API-Key": config_file["emission_key"], "Content-Type": "application/json" } connection = httplib.HTTPSConnection('api.parse.com', 443) connection.connect() connection.request('POST', '/1/push', json.dumps(silent_push_msg), parse_headers) result = json.loads(connection.getresponse().read()) print result
import json,httplib config_data = json.load(open('conf/net/ext_service/parse.json')) silent_push_msg = { "where": { "deviceType": "ios" }, "data": { # "alert": "The Mets scored! The game is now tied 1-1.", "content-available": 1, "sound": "", } } parse_headers = { "X-Parse-Application-Id": config_data["emission_id"], "X-Parse-REST-API-Key": config_data["emission_key"], "Content-Type": "application/json" } connection = httplib.HTTPSConnection('api.parse.com', 443) connection.connect() connection.request('POST', '/1/push', json.dumps(silent_push_msg), parse_headers) result = json.loads(connection.getresponse().read()) print result
Fix minor issue in remote push
Fix minor issue in remote push We need to open the file and then parse it as json
Python
bsd-3-clause
joshzarrabi/e-mission-server,sunil07t/e-mission-server,joshzarrabi/e-mission-server,yw374cornell/e-mission-server,e-mission/e-mission-server,joshzarrabi/e-mission-server,joshzarrabi/e-mission-server,shankari/e-mission-server,yw374cornell/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,yw374cornell/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server,shankari/e-mission-server,shankari/e-mission-server
python
## Code Before: import json,httplib config_file = open('conf/net/ext_service/parse.json') silent_push_msg = { "where": { "deviceType": "ios" }, "data": { # "alert": "The Mets scored! The game is now tied 1-1.", "content-available": 1, "sound": "", } } parse_headers = { "X-Parse-Application-Id": config_file["emission_id"], "X-Parse-REST-API-Key": config_file["emission_key"], "Content-Type": "application/json" } connection = httplib.HTTPSConnection('api.parse.com', 443) connection.connect() connection.request('POST', '/1/push', json.dumps(silent_push_msg), parse_headers) result = json.loads(connection.getresponse().read()) print result ## Instruction: Fix minor issue in remote push We need to open the file and then parse it as json ## Code After: import json,httplib config_data = json.load(open('conf/net/ext_service/parse.json')) silent_push_msg = { "where": { "deviceType": "ios" }, "data": { # "alert": "The Mets scored! The game is now tied 1-1.", "content-available": 1, "sound": "", } } parse_headers = { "X-Parse-Application-Id": config_data["emission_id"], "X-Parse-REST-API-Key": config_data["emission_key"], "Content-Type": "application/json" } connection = httplib.HTTPSConnection('api.parse.com', 443) connection.connect() connection.request('POST', '/1/push', json.dumps(silent_push_msg), parse_headers) result = json.loads(connection.getresponse().read()) print result
import json,httplib - config_file = open('conf/net/ext_service/parse.json') ? ^^^^ + config_data = json.load(open('conf/net/ext_service/parse.json')) ? ^^^^ ++++++++++ + silent_push_msg = { "where": { "deviceType": "ios" }, "data": { # "alert": "The Mets scored! The game is now tied 1-1.", "content-available": 1, "sound": "", } } parse_headers = { - "X-Parse-Application-Id": config_file["emission_id"], ? ^^^^ + "X-Parse-Application-Id": config_data["emission_id"], ? ^^^^ - "X-Parse-REST-API-Key": config_file["emission_key"], ? ^^^^ + "X-Parse-REST-API-Key": config_data["emission_key"], ? ^^^^ "Content-Type": "application/json" } connection = httplib.HTTPSConnection('api.parse.com', 443) connection.connect() connection.request('POST', '/1/push', json.dumps(silent_push_msg), parse_headers) result = json.loads(connection.getresponse().read()) print result
6
0.206897
3
3
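The fix in the record above works because `open()` only returns a file object, which cannot be indexed with a string key; the configuration has to be parsed with `json.load()` before `config_data["emission_id"]` can succeed. A self-contained illustration follows (the file name below is invented for the example):

```python
import json

# Create a tiny config file so the example stands alone.
with open("parse_example.json", "w") as f:
    json.dump({"emission_id": "app-id", "emission_key": "rest-key"}, f)

config_file = open("parse_example.json")   # a file object, not a dict
try:
    config_file["emission_id"]             # what the old code attempted
except TypeError as exc:
    print("indexing the file object fails:", exc)
finally:
    config_file.close()

with open("parse_example.json") as f:
    config_data = json.load(f)             # parsed into a dict
print("parsed value:", config_data["emission_id"])
```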
201e828ad4d1c34d4106acd28c99e5dcad3bd3df
README.md
README.md
Append security headers to response for laravel framework. ## Install Install using composer: ```sh composer require bepsvpt/laravel-security-header ``` Add the service provider in `config/app.php` ```php Bepsvpt\LaravelSecurityHeader\SecurityHeaderServiceProvider::class, ``` Publish config file ```sh php artisan vendor:publish --provider="Bepsvpt\LaravelSecurityHeader\SecurityHeaderServiceProvider" ``` Add the global middleware in `app/Http/Kernel.php` ```php \Illuminate\Foundation\Http\Middleware\CheckForMaintenanceMode::class, ``` ## License Laravel Security Header is licensed under [The MIT License (MIT)](LICENSE).
Append security headers to response for laravel framework. ## Install Install using composer: ```sh composer require bepsvpt/laravel-security-header ``` Add the service provider in `config/app.php` ```php Bepsvpt\LaravelSecurityHeader\SecurityHeaderServiceProvider::class, ``` Publish config file ```sh php artisan vendor:publish --provider="Bepsvpt\LaravelSecurityHeader\SecurityHeaderServiceProvider" ``` Add the global middleware in `app/Http/Kernel.php` ```php \Bepsvpt\LaravelSecurityHeader\SecurityHeaderMiddleware::class, ``` ## License Laravel Security Header is licensed under [The MIT License (MIT)](LICENSE).
Fix incorrect middleware class name
Fix incorrect middleware class name
Markdown
mit
BePsvPT/laravel-security-header,BePsvPT/secure-headers
markdown
## Code Before: Append security headers to response for laravel framework. ## Install Install using composer: ```sh composer require bepsvpt/laravel-security-header ``` Add the service provider in `config/app.php` ```php Bepsvpt\LaravelSecurityHeader\SecurityHeaderServiceProvider::class, ``` Publish config file ```sh php artisan vendor:publish --provider="Bepsvpt\LaravelSecurityHeader\SecurityHeaderServiceProvider" ``` Add the global middleware in `app/Http/Kernel.php` ```php \Illuminate\Foundation\Http\Middleware\CheckForMaintenanceMode::class, ``` ## License Laravel Security Header is licensed under [The MIT License (MIT)](LICENSE). ## Instruction: Fix incorrect middleware class name ## Code After: Append security headers to response for laravel framework. ## Install Install using composer: ```sh composer require bepsvpt/laravel-security-header ``` Add the service provider in `config/app.php` ```php Bepsvpt\LaravelSecurityHeader\SecurityHeaderServiceProvider::class, ``` Publish config file ```sh php artisan vendor:publish --provider="Bepsvpt\LaravelSecurityHeader\SecurityHeaderServiceProvider" ``` Add the global middleware in `app/Http/Kernel.php` ```php \Bepsvpt\LaravelSecurityHeader\SecurityHeaderMiddleware::class, ``` ## License Laravel Security Header is licensed under [The MIT License (MIT)](LICENSE).
Append security headers to response for laravel framework. ## Install Install using composer: ```sh composer require bepsvpt/laravel-security-header ``` Add the service provider in `config/app.php` ```php Bepsvpt\LaravelSecurityHeader\SecurityHeaderServiceProvider::class, ``` Publish config file ```sh php artisan vendor:publish --provider="Bepsvpt\LaravelSecurityHeader\SecurityHeaderServiceProvider" ``` Add the global middleware in `app/Http/Kernel.php` ```php - \Illuminate\Foundation\Http\Middleware\CheckForMaintenanceMode::class, + \Bepsvpt\LaravelSecurityHeader\SecurityHeaderMiddleware::class, ``` ## License Laravel Security Header is licensed under [The MIT License (MIT)](LICENSE).
2
0.0625
1
1
dce52137b5b48a6f2997764889af3a8074ef9930
content/apps/my-ghost-tommy/assets/js/tommy.js
content/apps/my-ghost-tommy/assets/js/tommy.js
(function () { var els = document.querySelectorAll('[data-role="tommy-expander"]') els.forEach(function (el) { el.classList.remove('is-hidden') el.addEventListener('click', onClick) }); function onClick(e) { var target = e.currentTarget; var wrapper = target.parentNode; toggle(wrapper) } function toggle(el) { if (el.classList.contains('is-expanded')) { restore(el); } else { expand(el); } } function expand(el) { el.classList.add('is-expanded'); } function restore(el) { el.classList.remove('is-expanded'); } })();
(function () { var els = document.querySelectorAll('[data-role="tommy-expander"]') els.forEach(function (el) { el.classList.remove('is-hidden') el.addEventListener('click', onClick) }); function onClick(e) { var target = e.currentTarget; var wrapper = target.parentNode; toggle(wrapper) } function createOnKeyUp(el) { return function (e) { var keyCode = e.keyCode; if (keyCode === 27) { restore(el); } } } function toggle(el) { if (el.classList.contains('is-expanded')) { restore(el); } else { expand(el); } } function expand(el) { teardownKeyUpListener(el); setupKeyUpListener(el); el.classList.add('is-expanded'); } function restore(el) { teardownKeyUpListener(el); el.classList.remove('is-expanded'); } function setupKeyUpListener(el) { var onKeyUp = createOnKeyUp(el); el.onKeyUpHandler = onKeyUp; document.addEventListener('keyup', onKeyUp); } function teardownKeyUpListener(el) { var onKeyUp = el.onKeyUpHandler; if (onKeyUp) { document.removeEventListener('keyup', onKeyUp); } delete el.onKeyUpHandler } })();
Handle ESC key to discard fullscreen
Handle ESC key to discard fullscreen
JavaScript
mit
ertrzyiks/blog.ertrzyiks.pl,ertrzyiks/blog.ertrzyiks.pl,ertrzyiks/blog.ertrzyiks.pl
javascript
## Code Before: (function () { var els = document.querySelectorAll('[data-role="tommy-expander"]') els.forEach(function (el) { el.classList.remove('is-hidden') el.addEventListener('click', onClick) }); function onClick(e) { var target = e.currentTarget; var wrapper = target.parentNode; toggle(wrapper) } function toggle(el) { if (el.classList.contains('is-expanded')) { restore(el); } else { expand(el); } } function expand(el) { el.classList.add('is-expanded'); } function restore(el) { el.classList.remove('is-expanded'); } })(); ## Instruction: Handle ESC key to discard fullscreen ## Code After: (function () { var els = document.querySelectorAll('[data-role="tommy-expander"]') els.forEach(function (el) { el.classList.remove('is-hidden') el.addEventListener('click', onClick) }); function onClick(e) { var target = e.currentTarget; var wrapper = target.parentNode; toggle(wrapper) } function createOnKeyUp(el) { return function (e) { var keyCode = e.keyCode; if (keyCode === 27) { restore(el); } } } function toggle(el) { if (el.classList.contains('is-expanded')) { restore(el); } else { expand(el); } } function expand(el) { teardownKeyUpListener(el); setupKeyUpListener(el); el.classList.add('is-expanded'); } function restore(el) { teardownKeyUpListener(el); el.classList.remove('is-expanded'); } function setupKeyUpListener(el) { var onKeyUp = createOnKeyUp(el); el.onKeyUpHandler = onKeyUp; document.addEventListener('keyup', onKeyUp); } function teardownKeyUpListener(el) { var onKeyUp = el.onKeyUpHandler; if (onKeyUp) { document.removeEventListener('keyup', onKeyUp); } delete el.onKeyUpHandler } })();
(function () { var els = document.querySelectorAll('[data-role="tommy-expander"]') els.forEach(function (el) { el.classList.remove('is-hidden') el.addEventListener('click', onClick) }); function onClick(e) { var target = e.currentTarget; var wrapper = target.parentNode; toggle(wrapper) } + function createOnKeyUp(el) { + return function (e) { + var keyCode = e.keyCode; + + if (keyCode === 27) { + restore(el); + } + } + } + function toggle(el) { if (el.classList.contains('is-expanded')) { restore(el); } else { expand(el); } } function expand(el) { + teardownKeyUpListener(el); + setupKeyUpListener(el); el.classList.add('is-expanded'); } function restore(el) { + teardownKeyUpListener(el); el.classList.remove('is-expanded'); } + + function setupKeyUpListener(el) { + var onKeyUp = createOnKeyUp(el); + el.onKeyUpHandler = onKeyUp; + document.addEventListener('keyup', onKeyUp); + } + + function teardownKeyUpListener(el) { + var onKeyUp = el.onKeyUpHandler; + + if (onKeyUp) { + document.removeEventListener('keyup', onKeyUp); + } + + delete el.onKeyUpHandler + } })();
29
0.966667
29
0
9b4f312266c47846b6312bfad4e05285ecd23ce8
examples/random_number_generator.rb
examples/random_number_generator.rb
require_relative '../lib/webhooks' class RandomNumberGenerator include Webhooks def webhooks %w{http://localhost:9000} end def generate random_number = SecureRandom.random_number(1000) send_webhooks(:random_number, :generate, random_number) return random_number end end
require_relative '../lib/webhooks' class RandomNumberGenerator include Webhooks def initialize self.webhooks = %w{http://localhost:9000} end def generate random_number = SecureRandom.random_number(1000) send_webhooks(:random_number, :generate, random_number) return random_number end end
Set webhooks in initializer for random number generator
Set webhooks in initializer for random number generator
Ruby
mit
kyletolle/webhooks.rb
ruby
## Code Before: require_relative '../lib/webhooks' class RandomNumberGenerator include Webhooks def webhooks %w{http://localhost:9000} end def generate random_number = SecureRandom.random_number(1000) send_webhooks(:random_number, :generate, random_number) return random_number end end ## Instruction: Set webhooks in initializer for random number generator ## Code After: require_relative '../lib/webhooks' class RandomNumberGenerator include Webhooks def initialize self.webhooks = %w{http://localhost:9000} end def generate random_number = SecureRandom.random_number(1000) send_webhooks(:random_number, :generate, random_number) return random_number end end
require_relative '../lib/webhooks' class RandomNumberGenerator include Webhooks - def webhooks + def initialize - %w{http://localhost:9000} + self.webhooks = %w{http://localhost:9000} ? ++++++++++++++++ end def generate random_number = SecureRandom.random_number(1000) send_webhooks(:random_number, :generate, random_number) return random_number end end
4
0.222222
2
2
2438403f6b732aa368fd913af15cde9180ef2c28
css/CalendarMonthGrid.scss
css/CalendarMonthGrid.scss
$react-dates-width-day-picker: 300px; .CalendarMonthGrid { background: $react-dates-color-white; z-index: 0; text-align: left; } .CalendarMonthGrid--animating { -webkit-transition: -webkit-transform 0.2s ease-in-out; -moz-transition: -moz-transform 0.2s ease-in-out; transition: transform 0.2s ease-in-out; z-index: 1; } .CalendarMonthGrid--horizontal { position: absolute; left: 9px; width: 4 * $react-dates-width-day-picker; } .CalendarMonthGrid--vertical { width: $react-dates-width-day-picker; margin: 0 auto; } .CalendarMonthGrid--vertical-scrollable { width: $react-dates-width-day-picker; margin: 0 auto; overflow-y: scroll; }
@import 'variables'; .CalendarMonthGrid { background: $react-dates-color-white; z-index: 0; text-align: left; } .CalendarMonthGrid--animating { -webkit-transition: -webkit-transform 0.2s ease-in-out; -moz-transition: -moz-transform 0.2s ease-in-out; transition: transform 0.2s ease-in-out; z-index: 1; } .CalendarMonthGrid--horizontal { position: absolute; left: 9px; width: 4 * $react-dates-width-day-picker; } .CalendarMonthGrid--vertical { width: $react-dates-width-day-picker; margin: 0 auto; } .CalendarMonthGrid--vertical-scrollable { width: $react-dates-width-day-picker; margin: 0 auto; overflow-y: scroll; }
Remove override of day picker width
Remove override of day picker width
SCSS
mit
airbnb/react-dates,acp31/react-dates,acp31/react-dates,intwarehq/react-dates,tombatossals/react-dates,tombatossals/react-dates,intwarehq/react-dates,intwarehq/react-dates
scss
## Code Before: $react-dates-width-day-picker: 300px; .CalendarMonthGrid { background: $react-dates-color-white; z-index: 0; text-align: left; } .CalendarMonthGrid--animating { -webkit-transition: -webkit-transform 0.2s ease-in-out; -moz-transition: -moz-transform 0.2s ease-in-out; transition: transform 0.2s ease-in-out; z-index: 1; } .CalendarMonthGrid--horizontal { position: absolute; left: 9px; width: 4 * $react-dates-width-day-picker; } .CalendarMonthGrid--vertical { width: $react-dates-width-day-picker; margin: 0 auto; } .CalendarMonthGrid--vertical-scrollable { width: $react-dates-width-day-picker; margin: 0 auto; overflow-y: scroll; } ## Instruction: Remove override of day picker width ## Code After: @import 'variables'; .CalendarMonthGrid { background: $react-dates-color-white; z-index: 0; text-align: left; } .CalendarMonthGrid--animating { -webkit-transition: -webkit-transform 0.2s ease-in-out; -moz-transition: -moz-transform 0.2s ease-in-out; transition: transform 0.2s ease-in-out; z-index: 1; } .CalendarMonthGrid--horizontal { position: absolute; left: 9px; width: 4 * $react-dates-width-day-picker; } .CalendarMonthGrid--vertical { width: $react-dates-width-day-picker; margin: 0 auto; } .CalendarMonthGrid--vertical-scrollable { width: $react-dates-width-day-picker; margin: 0 auto; overflow-y: scroll; }
- $react-dates-width-day-picker: 300px; + @import 'variables'; .CalendarMonthGrid { background: $react-dates-color-white; z-index: 0; text-align: left; } .CalendarMonthGrid--animating { -webkit-transition: -webkit-transform 0.2s ease-in-out; -moz-transition: -moz-transform 0.2s ease-in-out; transition: transform 0.2s ease-in-out; z-index: 1; } .CalendarMonthGrid--horizontal { position: absolute; left: 9px; width: 4 * $react-dates-width-day-picker; } .CalendarMonthGrid--vertical { width: $react-dates-width-day-picker; margin: 0 auto; } .CalendarMonthGrid--vertical-scrollable { width: $react-dates-width-day-picker; margin: 0 auto; overflow-y: scroll; }
2
0.064516
1
1
34545d2542bfd62a2ac1a69d5a0cad605b4f4c26
packages/Search/examples/viz/CMakeLists.txt
packages/Search/examples/viz/CMakeLists.txt
TRIBITS_ADD_EXECUTABLE_AND_TEST( example_viz SOURCES tree_visualization.cpp NUM_MPI_PROCS 1 FAIL_REGULAR_EXPRESSION "data race;leak;runtime error" ) TRIBITS_COPY_FILES_TO_BINARY_DIR( oak_leaf SOURCE_FILES leaf_cloud.txt SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../point_clouds DEST_DIR ${CMAKE_CURRENT_BINARY_DIR} EXEDEPS example_viz )
if(Kokkos_ENABLE_Serial) TRIBITS_ADD_EXECUTABLE_AND_TEST( example_viz SOURCES tree_visualization.cpp NUM_MPI_PROCS 1 FAIL_REGULAR_EXPRESSION "data race;leak;runtime error" ) TRIBITS_COPY_FILES_TO_BINARY_DIR( oak_leaf SOURCE_FILES leaf_cloud.txt SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../point_clouds DEST_DIR ${CMAKE_CURRENT_BINARY_DIR} EXEDEPS example_viz ) endif()
Enable viz example only if Kokkos_ENABLE_Serial is ON
Enable viz example only if Kokkos_ENABLE_Serial is ON
Text
bsd-3-clause
ORNL-CEES/DataTransferKit,ORNL-CEES/DataTransferKit,ORNL-CEES/DataTransferKit,Rombur/DataTransferKit,ORNL-CEES/DataTransferKit,Rombur/DataTransferKit,Rombur/DataTransferKit,dalg24/DataTransferKit,dalg24/DataTransferKit,Rombur/DataTransferKit,dalg24/DataTransferKit,dalg24/DataTransferKit
text
## Code Before: TRIBITS_ADD_EXECUTABLE_AND_TEST( example_viz SOURCES tree_visualization.cpp NUM_MPI_PROCS 1 FAIL_REGULAR_EXPRESSION "data race;leak;runtime error" ) TRIBITS_COPY_FILES_TO_BINARY_DIR( oak_leaf SOURCE_FILES leaf_cloud.txt SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../point_clouds DEST_DIR ${CMAKE_CURRENT_BINARY_DIR} EXEDEPS example_viz ) ## Instruction: Enable viz example only if Kokkos_ENABLE_Serial is ON ## Code After: if(Kokkos_ENABLE_Serial) TRIBITS_ADD_EXECUTABLE_AND_TEST( example_viz SOURCES tree_visualization.cpp NUM_MPI_PROCS 1 FAIL_REGULAR_EXPRESSION "data race;leak;runtime error" ) TRIBITS_COPY_FILES_TO_BINARY_DIR( oak_leaf SOURCE_FILES leaf_cloud.txt SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../point_clouds DEST_DIR ${CMAKE_CURRENT_BINARY_DIR} EXEDEPS example_viz ) endif()
+ if(Kokkos_ENABLE_Serial) TRIBITS_ADD_EXECUTABLE_AND_TEST( example_viz SOURCES tree_visualization.cpp NUM_MPI_PROCS 1 FAIL_REGULAR_EXPRESSION "data race;leak;runtime error" ) TRIBITS_COPY_FILES_TO_BINARY_DIR( oak_leaf SOURCE_FILES leaf_cloud.txt SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../point_clouds DEST_DIR ${CMAKE_CURRENT_BINARY_DIR} EXEDEPS example_viz ) + endif()
2
0.153846
2
0
b18b3e261a117a76a0e07cd8a3acd48c915fb70c
.travis.yml
.travis.yml
sudo: false language: node_js node_js: - node env: global: - secure: du27W3wTgZ3G183axW7w0I01lOIurx8kilMH9p45VMfNXCu8lo6FLtLIQZxJ1FYMoJLQ1yfJTu2G0rq39SotDfJumsk6tF7BjTY/HKCocZaHqCMgw0W2bcylb5kMAdLhBNPlzejpPoWa1x1axbAHNFOLQNVosG/Bavu3/kuIIps= - secure: Ax/5aekM40o67NuTkvQqx1DhfP86ZlHTtKbv5yI+WFmbjD3FQM8b8G1J/o7doaBDev7Mp+1zDJOK2pFGtt+JGRl0lM2JUmLh6yh/b28obXyei5iuUkqzKJLfKZHMbY5QW/1i4DUM+zSXe6Kava0qnqYg5wBBnrF6gLdsVsCGNQk=
sudo: false language: node_js node_js: - 0.10 - 0.12 - 4 - node env: global: - secure: du27W3wTgZ3G183axW7w0I01lOIurx8kilMH9p45VMfNXCu8lo6FLtLIQZxJ1FYMoJLQ1yfJTu2G0rq39SotDfJumsk6tF7BjTY/HKCocZaHqCMgw0W2bcylb5kMAdLhBNPlzejpPoWa1x1axbAHNFOLQNVosG/Bavu3/kuIIps= - secure: Ax/5aekM40o67NuTkvQqx1DhfP86ZlHTtKbv5yI+WFmbjD3FQM8b8G1J/o7doaBDev7Mp+1zDJOK2pFGtt+JGRl0lM2JUmLh6yh/b28obXyei5iuUkqzKJLfKZHMbY5QW/1i4DUM+zSXe6Kava0qnqYg5wBBnrF6gLdsVsCGNQk=
Test node 0.10, 0.12, 4, 6
Test node 0.10, 0.12, 4, 6
YAML
mit
feross/is-buffer
yaml
## Code Before: sudo: false language: node_js node_js: - node env: global: - secure: du27W3wTgZ3G183axW7w0I01lOIurx8kilMH9p45VMfNXCu8lo6FLtLIQZxJ1FYMoJLQ1yfJTu2G0rq39SotDfJumsk6tF7BjTY/HKCocZaHqCMgw0W2bcylb5kMAdLhBNPlzejpPoWa1x1axbAHNFOLQNVosG/Bavu3/kuIIps= - secure: Ax/5aekM40o67NuTkvQqx1DhfP86ZlHTtKbv5yI+WFmbjD3FQM8b8G1J/o7doaBDev7Mp+1zDJOK2pFGtt+JGRl0lM2JUmLh6yh/b28obXyei5iuUkqzKJLfKZHMbY5QW/1i4DUM+zSXe6Kava0qnqYg5wBBnrF6gLdsVsCGNQk= ## Instruction: Test node 0.10, 0.12, 4, 6 ## Code After: sudo: false language: node_js node_js: - 0.10 - 0.12 - 4 - node env: global: - secure: du27W3wTgZ3G183axW7w0I01lOIurx8kilMH9p45VMfNXCu8lo6FLtLIQZxJ1FYMoJLQ1yfJTu2G0rq39SotDfJumsk6tF7BjTY/HKCocZaHqCMgw0W2bcylb5kMAdLhBNPlzejpPoWa1x1axbAHNFOLQNVosG/Bavu3/kuIIps= - secure: Ax/5aekM40o67NuTkvQqx1DhfP86ZlHTtKbv5yI+WFmbjD3FQM8b8G1J/o7doaBDev7Mp+1zDJOK2pFGtt+JGRl0lM2JUmLh6yh/b28obXyei5iuUkqzKJLfKZHMbY5QW/1i4DUM+zSXe6Kava0qnqYg5wBBnrF6gLdsVsCGNQk=
sudo: false language: node_js node_js: + - 0.10 + - 0.12 + - 4 - node env: global: - secure: du27W3wTgZ3G183axW7w0I01lOIurx8kilMH9p45VMfNXCu8lo6FLtLIQZxJ1FYMoJLQ1yfJTu2G0rq39SotDfJumsk6tF7BjTY/HKCocZaHqCMgw0W2bcylb5kMAdLhBNPlzejpPoWa1x1axbAHNFOLQNVosG/Bavu3/kuIIps= - secure: Ax/5aekM40o67NuTkvQqx1DhfP86ZlHTtKbv5yI+WFmbjD3FQM8b8G1J/o7doaBDev7Mp+1zDJOK2pFGtt+JGRl0lM2JUmLh6yh/b28obXyei5iuUkqzKJLfKZHMbY5QW/1i4DUM+zSXe6Kava0qnqYg5wBBnrF6gLdsVsCGNQk=
3
0.375
3
0
38e4583de4d3bb6931319b58d417e0420f3ade08
public/app/project-setup/theme/components/_icon-bar.scss
public/app/project-setup/theme/components/_icon-bar.scss
// ICON BAR $icon-bar-bg: transparent; $icon-bar-active-color: $light-gray; $icon-bar-icon-color: $passive-gray; $icon-bar-icon-color-hover: $active-gray; $icon-bar-font-color: $passive-gray; $icon-bar-font-color-hover: $active-gray; $icon-bar-hover-color: $hint-gray; $icon-bar-font-size: $h2-font-size; @import "foundation/components/icon-bar";
// ICON BAR $icon-bar-bg: transparent; $icon-bar-active-color: transparent; $icon-bar-icon-color: $passive-gray; $icon-bar-icon-color-hover: $active-gray; $icon-bar-font-color: $passive-gray; $icon-bar-font-color-hover: $active-gray; $icon-bar-hover-color: $hint-gray; $icon-bar-font-size: $h2-font-size; @import "foundation/components/icon-bar";
Set selected nav item color to transparent
Set selected nav item color to transparent
SCSS
mit
yetu/controlcenter,yetu/controlcenter,yetu/controlcenter
scss
## Code Before: // ICON BAR $icon-bar-bg: transparent; $icon-bar-active-color: $light-gray; $icon-bar-icon-color: $passive-gray; $icon-bar-icon-color-hover: $active-gray; $icon-bar-font-color: $passive-gray; $icon-bar-font-color-hover: $active-gray; $icon-bar-hover-color: $hint-gray; $icon-bar-font-size: $h2-font-size; @import "foundation/components/icon-bar"; ## Instruction: Set selected nav item color to transparent ## Code After: // ICON BAR $icon-bar-bg: transparent; $icon-bar-active-color: transparent; $icon-bar-icon-color: $passive-gray; $icon-bar-icon-color-hover: $active-gray; $icon-bar-font-color: $passive-gray; $icon-bar-font-color-hover: $active-gray; $icon-bar-hover-color: $hint-gray; $icon-bar-font-size: $h2-font-size; @import "foundation/components/icon-bar";
// ICON BAR $icon-bar-bg: transparent; - $icon-bar-active-color: $light-gray; ? ----- -- ^ + $icon-bar-active-color: transparent; ? ^^^^^^^^ $icon-bar-icon-color: $passive-gray; $icon-bar-icon-color-hover: $active-gray; $icon-bar-font-color: $passive-gray; $icon-bar-font-color-hover: $active-gray; $icon-bar-hover-color: $hint-gray; $icon-bar-font-size: $h2-font-size; @import "foundation/components/icon-bar";
2
0.181818
1
1
eff2687d1c38121fbde590fa003948c4c7b8773f
services/prep-scripts/drill_issues.json
services/prep-scripts/drill_issues.json
{ "issues": [ { "id": 1, "name": "Setup Cloud Services", "template_file": "setup_cloud_services.json" }, { "id": 2, "name": "setup website", "template_file": "setup_website.json" }, { "id": 3, "name": "Update Hack Data", "template_file": "update_hack_data.json" }, { "id": 4, "name": "Check agenda", "template_file": "check_agenda.json" }, { "id": 4, "name": "Check Warmups", "template_file": "check_warmups.json" } ] }
{ "issues": [ { "id": 1, "name": "Setup Cloud Services", "template_file": "setup_cloud_services.json" }, { "id": 2, "name": "setup website", "template_file": "setup_website.json" }, { "id": 3, "name": "Update Hack Data", "template_file": "update_hack_data.json" }, { "id": 4, "name": "Update Hack Team", "template_file": "update_hack_team.json" }, { "id": 5, "name": "Check agenda", "template_file": "check_agenda.json" }, { "id": 6, "name": "Check Warmups", "template_file": "check_warmups.json" } ] }
Add issue for setting the team data
Add issue for setting the team data
JSON
apache-2.0
AAROC/e-Research-Hackfest-prep,AAROC/e-Research-Hackfest-prep,AAROC/e-Research-Hackfest-prep,AAROC/e-Research-Hackfest-prep
json
## Code Before: { "issues": [ { "id": 1, "name": "Setup Cloud Services", "template_file": "setup_cloud_services.json" }, { "id": 2, "name": "setup website", "template_file": "setup_website.json" }, { "id": 3, "name": "Update Hack Data", "template_file": "update_hack_data.json" }, { "id": 4, "name": "Check agenda", "template_file": "check_agenda.json" }, { "id": 4, "name": "Check Warmups", "template_file": "check_warmups.json" } ] } ## Instruction: Add issue for setting the team data ## Code After: { "issues": [ { "id": 1, "name": "Setup Cloud Services", "template_file": "setup_cloud_services.json" }, { "id": 2, "name": "setup website", "template_file": "setup_website.json" }, { "id": 3, "name": "Update Hack Data", "template_file": "update_hack_data.json" }, { "id": 4, "name": "Update Hack Team", "template_file": "update_hack_team.json" }, { "id": 5, "name": "Check agenda", "template_file": "check_agenda.json" }, { "id": 6, "name": "Check Warmups", "template_file": "check_warmups.json" } ] }
{ "issues": [ { "id": 1, "name": "Setup Cloud Services", "template_file": "setup_cloud_services.json" }, { "id": 2, "name": "setup website", "template_file": "setup_website.json" }, { "id": 3, "name": "Update Hack Data", "template_file": "update_hack_data.json" }, { "id": 4, + "name": "Update Hack Team", + "template_file": "update_hack_team.json" + }, + { + "id": 5, "name": "Check agenda", "template_file": "check_agenda.json" }, { - "id": 4, ? ^ + "id": 6, ? ^ "name": "Check Warmups", "template_file": "check_warmups.json" } ] }
7
0.28
6
1
fc70feec85f0b22ebef05b0fa1316214a48a465a
background/config/prod.py
background/config/prod.py
from decouple import config from .base import BaseCeleryConfig class CeleryProduction(BaseCeleryConfig): enable_utc = config('CELERY_ENABLE_UTC', default=True, cast=bool) broker_url = config('CELERY_BROKER_URL') result_backend = config('CELERY_RESULT_BACKEND')
from decouple import config from .base import BaseCeleryConfig REDIS_URL = config('REDIS_URL') class CeleryProduction(BaseCeleryConfig): enable_utc = config('CELERY_ENABLE_UTC', default=True, cast=bool) broker_url = config('CELERY_BROKER_URL', default=REDIS_URL) result_backend = config('CELERY_RESULT_BACKEND', default=REDIS_URL)
Use REDIS_URL by default for Celery
Use REDIS_URL by default for Celery
Python
mit
RaitoBezarius/ryuzu-fb-bot
python
## Code Before: from decouple import config from .base import BaseCeleryConfig class CeleryProduction(BaseCeleryConfig): enable_utc = config('CELERY_ENABLE_UTC', default=True, cast=bool) broker_url = config('CELERY_BROKER_URL') result_backend = config('CELERY_RESULT_BACKEND') ## Instruction: Use REDIS_URL by default for Celery ## Code After: from decouple import config from .base import BaseCeleryConfig REDIS_URL = config('REDIS_URL') class CeleryProduction(BaseCeleryConfig): enable_utc = config('CELERY_ENABLE_UTC', default=True, cast=bool) broker_url = config('CELERY_BROKER_URL', default=REDIS_URL) result_backend = config('CELERY_RESULT_BACKEND', default=REDIS_URL)
from decouple import config from .base import BaseCeleryConfig + REDIS_URL = config('REDIS_URL') + class CeleryProduction(BaseCeleryConfig): enable_utc = config('CELERY_ENABLE_UTC', default=True, cast=bool) - broker_url = config('CELERY_BROKER_URL') ? ^ + broker_url = config('CELERY_BROKER_URL', ? ^ + default=REDIS_URL) - result_backend = config('CELERY_RESULT_BACKEND') ? ^ + result_backend = config('CELERY_RESULT_BACKEND', ? ^ + default=REDIS_URL)
8
0.888889
6
2
d803647eeb1644985f1788d232a72587ff156bb4
setup.py
setup.py
from __future__ import unicode_literals import re from setuptools import setup def get_version(filename): content = open(filename).read() metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content)) return metadata['version'] setup( name='Mopidy-Scrobbler', version=get_version('mopidy_scrobbler/__init__.py'), url='https://github.com/mopidy/mopidy-scrobbler', license='Apache License, Version 2.0', author='Stein Magnus Jodal', author_email='[email protected]', description='Mopidy extension for scrobbling played tracks to Last.fm', long_description=open('README.rst').read(), packages=['mopidy_scrobbler'], zip_safe=False, include_package_data=True, install_requires=[ 'setuptools', 'Mopidy', 'pylast >= 0.5.7', ], entry_points={ 'mopidy.ext': [ 'scrobbler = mopidy_scrobbler:Extension', ], }, classifiers=[ 'Environment :: No Input/Output (Daemon)', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Topic :: Multimedia :: Sound/Audio :: Players', ], )
from __future__ import unicode_literals import re from setuptools import setup def get_version(filename): content = open(filename).read() metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content)) return metadata['version'] setup( name='Mopidy-Scrobbler', version=get_version('mopidy_scrobbler/__init__.py'), url='https://github.com/mopidy/mopidy-scrobbler', license='Apache License, Version 2.0', author='Stein Magnus Jodal', author_email='[email protected]', description='Mopidy extension for scrobbling played tracks to Last.fm', long_description=open('README.rst').read(), packages=['mopidy_scrobbler'], zip_safe=False, include_package_data=True, install_requires=[ 'setuptools', 'Mopidy', 'Pykka >= 1.1', 'pylast >= 0.5.7', ], entry_points={ 'mopidy.ext': [ 'scrobbler = mopidy_scrobbler:Extension', ], }, classifiers=[ 'Environment :: No Input/Output (Daemon)', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Topic :: Multimedia :: Sound/Audio :: Players', ], )
Add Pykka as a dependency
Add Pykka as a dependency
Python
apache-2.0
mopidy/mopidy-scrobbler,mthssdrbrg/mopidy-scrobbler
python
## Code Before: from __future__ import unicode_literals import re from setuptools import setup def get_version(filename): content = open(filename).read() metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content)) return metadata['version'] setup( name='Mopidy-Scrobbler', version=get_version('mopidy_scrobbler/__init__.py'), url='https://github.com/mopidy/mopidy-scrobbler', license='Apache License, Version 2.0', author='Stein Magnus Jodal', author_email='[email protected]', description='Mopidy extension for scrobbling played tracks to Last.fm', long_description=open('README.rst').read(), packages=['mopidy_scrobbler'], zip_safe=False, include_package_data=True, install_requires=[ 'setuptools', 'Mopidy', 'pylast >= 0.5.7', ], entry_points={ 'mopidy.ext': [ 'scrobbler = mopidy_scrobbler:Extension', ], }, classifiers=[ 'Environment :: No Input/Output (Daemon)', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Topic :: Multimedia :: Sound/Audio :: Players', ], ) ## Instruction: Add Pykka as a dependency ## Code After: from __future__ import unicode_literals import re from setuptools import setup def get_version(filename): content = open(filename).read() metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content)) return metadata['version'] setup( name='Mopidy-Scrobbler', version=get_version('mopidy_scrobbler/__init__.py'), url='https://github.com/mopidy/mopidy-scrobbler', license='Apache License, Version 2.0', author='Stein Magnus Jodal', author_email='[email protected]', description='Mopidy extension for scrobbling played tracks to Last.fm', long_description=open('README.rst').read(), packages=['mopidy_scrobbler'], zip_safe=False, include_package_data=True, install_requires=[ 'setuptools', 'Mopidy', 'Pykka >= 1.1', 'pylast >= 0.5.7', ], entry_points={ 'mopidy.ext': [ 'scrobbler = mopidy_scrobbler:Extension', ], }, classifiers=[ 'Environment :: No Input/Output (Daemon)', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Topic :: Multimedia :: Sound/Audio :: Players', ], )
from __future__ import unicode_literals import re from setuptools import setup def get_version(filename): content = open(filename).read() metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content)) return metadata['version'] setup( name='Mopidy-Scrobbler', version=get_version('mopidy_scrobbler/__init__.py'), url='https://github.com/mopidy/mopidy-scrobbler', license='Apache License, Version 2.0', author='Stein Magnus Jodal', author_email='[email protected]', description='Mopidy extension for scrobbling played tracks to Last.fm', long_description=open('README.rst').read(), packages=['mopidy_scrobbler'], zip_safe=False, include_package_data=True, install_requires=[ 'setuptools', 'Mopidy', + 'Pykka >= 1.1', 'pylast >= 0.5.7', ], entry_points={ 'mopidy.ext': [ 'scrobbler = mopidy_scrobbler:Extension', ], }, classifiers=[ 'Environment :: No Input/Output (Daemon)', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Topic :: Multimedia :: Sound/Audio :: Players', ], )
1
0.023256
1
0
735dd003e6f7b12715fd016e447c34a836469377
ci/azure-cross-compile.yml
ci/azure-cross-compile.yml
parameters: vmImage: ubuntu-16.04 jobs: - job: ${{ parameters.name }} displayName: Cross strategy: matrix: iOS_64: vmImage: macOS-10.13 target: x86_64-apple-ios iOS_32: vmImage: macOS-10.13 target: i386-apple-ios iOS_ARM: vmImage: macOS-10.13 target: armv7s-apple-ios Android: vmImage: ubuntu-16.04 target: arm-linux-androideabi Android_ARM64: vmImage: ubuntu-16.04 target: aarch64-linux-android Android_32: vmImage: ubuntu-16.04 target: i686-unknown-linux-gnu NetBSD: vmImage: ubuntu-16.04 target: x86_64-unknown-netbsd Solaris: vmImage: ubuntu-16.04 target: x86_64-sun-solaris pool: vmImage: $(vmImage) steps: - template: azure-install-rust.yml parameters: rust_version: stable - script: rustup target add $(target) displayName: "Add target" - script: cargo check --target $(target) displayName: Check source - script: cargo check --tests --target $(target) --all-features displayName: Check tests - script: cargo check --examples --target $(target) --all-features displayName: Check examples
parameters: vmImage: ubuntu-16.04 jobs: - job: ${{ parameters.name }} displayName: Cross strategy: matrix: iOS_64: vmImage: macOS-10.13 target: x86_64-apple-ios iOS_ARM64: vmImage: macOS-10.13 target: aarch64-apple-ios Android_ARM: vmImage: ubuntu-16.04 target: arm-linux-androideabi Android_ARM64: vmImage: ubuntu-16.04 target: aarch64-linux-android Android_32: vmImage: ubuntu-16.04 target: i686-unknown-linux-gnu NetBSD: vmImage: ubuntu-16.04 target: x86_64-unknown-netbsd Solaris: vmImage: ubuntu-16.04 target: x86_64-sun-solaris pool: vmImage: $(vmImage) steps: - template: azure-install-rust.yml parameters: rust_version: stable - script: rustup target add $(target) displayName: "Add target" - script: cargo check --target $(target) displayName: Check source - script: cargo check --tests --target $(target) --all-features displayName: Check tests - script: cargo check --examples --target $(target) --all-features displayName: Check examples
Remove 32bit iOS and replace ARM with ARM 64 bit iOS in CI
Remove 32bit iOS and replace ARM with ARM 64 bit iOS in CI

Both i386 and armv7s iOS targets are no longer supported by Rust (or Apple for the matter), so remove them both from the CI. And use the aarch64 (ARM 64 bit) iOS target instead.
YAML
mit
carllerche/mio,carllerche/mio
yaml
## Code Before: parameters: vmImage: ubuntu-16.04 jobs: - job: ${{ parameters.name }} displayName: Cross strategy: matrix: iOS_64: vmImage: macOS-10.13 target: x86_64-apple-ios iOS_32: vmImage: macOS-10.13 target: i386-apple-ios iOS_ARM: vmImage: macOS-10.13 target: armv7s-apple-ios Android: vmImage: ubuntu-16.04 target: arm-linux-androideabi Android_ARM64: vmImage: ubuntu-16.04 target: aarch64-linux-android Android_32: vmImage: ubuntu-16.04 target: i686-unknown-linux-gnu NetBSD: vmImage: ubuntu-16.04 target: x86_64-unknown-netbsd Solaris: vmImage: ubuntu-16.04 target: x86_64-sun-solaris pool: vmImage: $(vmImage) steps: - template: azure-install-rust.yml parameters: rust_version: stable - script: rustup target add $(target) displayName: "Add target" - script: cargo check --target $(target) displayName: Check source - script: cargo check --tests --target $(target) --all-features displayName: Check tests - script: cargo check --examples --target $(target) --all-features displayName: Check examples ## Instruction: Remove 32bit iOS and replace ARM with ARM 64 bit iOS in CI Both i386 and armv7s iOS targets are no longer supported by Rust (or Apple for the matter), so remove them both from the CI. And use the aarch64 (ARM 64 bit) iOS target instead. ## Code After: parameters: vmImage: ubuntu-16.04 jobs: - job: ${{ parameters.name }} displayName: Cross strategy: matrix: iOS_64: vmImage: macOS-10.13 target: x86_64-apple-ios iOS_ARM64: vmImage: macOS-10.13 target: aarch64-apple-ios Android_ARM: vmImage: ubuntu-16.04 target: arm-linux-androideabi Android_ARM64: vmImage: ubuntu-16.04 target: aarch64-linux-android Android_32: vmImage: ubuntu-16.04 target: i686-unknown-linux-gnu NetBSD: vmImage: ubuntu-16.04 target: x86_64-unknown-netbsd Solaris: vmImage: ubuntu-16.04 target: x86_64-sun-solaris pool: vmImage: $(vmImage) steps: - template: azure-install-rust.yml parameters: rust_version: stable - script: rustup target add $(target) displayName: "Add target" - script: cargo check --target $(target) displayName: Check source - script: cargo check --tests --target $(target) --all-features displayName: Check tests - script: cargo check --examples --target $(target) --all-features displayName: Check examples
parameters: vmImage: ubuntu-16.04 jobs: - job: ${{ parameters.name }} displayName: Cross strategy: matrix: iOS_64: vmImage: macOS-10.13 target: x86_64-apple-ios - iOS_32: ? ^^ + iOS_ARM64: ? ^^^^^ vmImage: macOS-10.13 - target: i386-apple-ios ? ^^^ + target: aarch64-apple-ios ? ^^^^^ + - iOS_ARM: - vmImage: macOS-10.13 - target: armv7s-apple-ios - - Android: + Android_ARM: ? ++++ vmImage: ubuntu-16.04 target: arm-linux-androideabi Android_ARM64: vmImage: ubuntu-16.04 target: aarch64-linux-android Android_32: vmImage: ubuntu-16.04 target: i686-unknown-linux-gnu NetBSD: vmImage: ubuntu-16.04 target: x86_64-unknown-netbsd Solaris: vmImage: ubuntu-16.04 target: x86_64-sun-solaris pool: vmImage: $(vmImage) steps: - template: azure-install-rust.yml parameters: rust_version: stable - script: rustup target add $(target) displayName: "Add target" - script: cargo check --target $(target) displayName: Check source - script: cargo check --tests --target $(target) --all-features displayName: Check tests - script: cargo check --examples --target $(target) --all-features displayName: Check examples
10
0.169492
3
7
0a8cd757df8c4649842e91bafdfe8fa24a53fff9
package.json
package.json
{ "name": "hexo-pagination", "version": "0.1.0", "description": "Pagination utilities for Hexo generator plugins.", "main": "lib/pagination", "scripts": { "eslint": "eslint .", "test": "mocha test/index.js", "test-cov": "istanbul cover --print both _mocha -- test/index.js" }, "directories": { "lib": "./lib" }, "repository": "hexojs/hexo-pagination", "homepage": "http://hexo.io/", "keywords": [ "hexo", "pagination", "util" ], "author": "Tommy Chen <[email protected]> (http://zespia.tw)", "license": "MIT", "devDependencies": { "chai": "^4.1.2", "eslint": "^5.6.0", "eslint-config-hexo": "^3.0.0", "istanbul": "^0.4.5", "mocha": "^5.2.0" }, "dependencies": { "object-assign": "^4.1.0", "babel-eslint": "^7.2.1" } }
{ "name": "hexo-pagination", "version": "0.1.0", "description": "Pagination utilities for Hexo generator plugins.", "main": "lib/pagination", "scripts": { "eslint": "eslint .", "test": "mocha test/index.js", "test-cov": "istanbul cover --print both _mocha -- test/index.js" }, "directories": { "lib": "./lib" }, "repository": "hexojs/hexo-pagination", "homepage": "http://hexo.io/", "keywords": [ "hexo", "pagination", "util" ], "author": "Tommy Chen <[email protected]> (http://zespia.tw)", "license": "MIT", "devDependencies": { "chai": "^4.1.2", "eslint": "^5.6.0", "eslint-config-hexo": "^3.0.0", "istanbul": "^0.4.5", "mocha": "^5.2.0" }, "dependencies": { "object-assign": "^4.1.0", "babel-eslint": "^7.2.1" }, "engines": { "node": ">=6.9.0" } }
Set node engines 6.9.0 later
Set node engines 6.9.0 later
JSON
mit
hexojs/hexo-pagination
json
## Code Before: { "name": "hexo-pagination", "version": "0.1.0", "description": "Pagination utilities for Hexo generator plugins.", "main": "lib/pagination", "scripts": { "eslint": "eslint .", "test": "mocha test/index.js", "test-cov": "istanbul cover --print both _mocha -- test/index.js" }, "directories": { "lib": "./lib" }, "repository": "hexojs/hexo-pagination", "homepage": "http://hexo.io/", "keywords": [ "hexo", "pagination", "util" ], "author": "Tommy Chen <[email protected]> (http://zespia.tw)", "license": "MIT", "devDependencies": { "chai": "^4.1.2", "eslint": "^5.6.0", "eslint-config-hexo": "^3.0.0", "istanbul": "^0.4.5", "mocha": "^5.2.0" }, "dependencies": { "object-assign": "^4.1.0", "babel-eslint": "^7.2.1" } } ## Instruction: Set node engines 6.9.0 later ## Code After: { "name": "hexo-pagination", "version": "0.1.0", "description": "Pagination utilities for Hexo generator plugins.", "main": "lib/pagination", "scripts": { "eslint": "eslint .", "test": "mocha test/index.js", "test-cov": "istanbul cover --print both _mocha -- test/index.js" }, "directories": { "lib": "./lib" }, "repository": "hexojs/hexo-pagination", "homepage": "http://hexo.io/", "keywords": [ "hexo", "pagination", "util" ], "author": "Tommy Chen <[email protected]> (http://zespia.tw)", "license": "MIT", "devDependencies": { "chai": "^4.1.2", "eslint": "^5.6.0", "eslint-config-hexo": "^3.0.0", "istanbul": "^0.4.5", "mocha": "^5.2.0" }, "dependencies": { "object-assign": "^4.1.0", "babel-eslint": "^7.2.1" }, "engines": { "node": ">=6.9.0" } }
{ "name": "hexo-pagination", "version": "0.1.0", "description": "Pagination utilities for Hexo generator plugins.", "main": "lib/pagination", "scripts": { "eslint": "eslint .", "test": "mocha test/index.js", "test-cov": "istanbul cover --print both _mocha -- test/index.js" }, "directories": { "lib": "./lib" }, "repository": "hexojs/hexo-pagination", "homepage": "http://hexo.io/", "keywords": [ "hexo", "pagination", "util" ], "author": "Tommy Chen <[email protected]> (http://zespia.tw)", "license": "MIT", "devDependencies": { "chai": "^4.1.2", "eslint": "^5.6.0", "eslint-config-hexo": "^3.0.0", "istanbul": "^0.4.5", "mocha": "^5.2.0" }, "dependencies": { "object-assign": "^4.1.0", "babel-eslint": "^7.2.1" + }, + "engines": { + "node": ">=6.9.0" } }
3
0.088235
3
0
70a4acde8a1975cfbfca62aea886543922f2a55d
src/client/app/suche/basic-search/basic-search.component.ts
src/client/app/suche/basic-search/basic-search.component.ts
import { Component } from '@angular/core'; import { Router } from '@angular/router'; @Component({ moduleId: module.id, selector: 'rae-basic-search', templateUrl: './basic-search.component.html', styleUrls: ['./basic-search.component.css'] }) export class BasicSearchComponent { hideSearchfield: boolean = true; placeholder = 'Suche...'; constructor(private router: Router) { } sendRequest(values: any) { this.router.navigateByUrl('/suche/' + encodeURIComponent(values),); } }
import { Component } from '@angular/core'; import { Router } from '@angular/router'; @Component({ moduleId: module.id, selector: 'rae-basic-search', templateUrl: './basic-search.component.html', styleUrls: ['./basic-search.component.css'] }) export class BasicSearchComponent { hideSearchfield: boolean = true; placeholder = 'Suche...'; constructor(private router: Router) { router.events.subscribe(changes => { this.hideSearchfield = true; this.placeholder = 'Suche...'; }); } sendRequest(values: any) { this.router.navigateByUrl('/suche/' + encodeURIComponent(values),); } }
Delete search string and hide input field if route is changed (i.e. a new "page" is loaded)
Delete search string and hide input field if route is changed (i.e. a new "page" is loaded)
TypeScript
mit
nie-ine/raeber-website,nie-ine/raeber-website,nie-ine/raeber-website,nie-ine/raeber-website
typescript
## Code Before: import { Component } from '@angular/core'; import { Router } from '@angular/router'; @Component({ moduleId: module.id, selector: 'rae-basic-search', templateUrl: './basic-search.component.html', styleUrls: ['./basic-search.component.css'] }) export class BasicSearchComponent { hideSearchfield: boolean = true; placeholder = 'Suche...'; constructor(private router: Router) { } sendRequest(values: any) { this.router.navigateByUrl('/suche/' + encodeURIComponent(values),); } } ## Instruction: Delete search string and hide input field if route is changed (i.e. a new "page" is loaded) ## Code After: import { Component } from '@angular/core'; import { Router } from '@angular/router'; @Component({ moduleId: module.id, selector: 'rae-basic-search', templateUrl: './basic-search.component.html', styleUrls: ['./basic-search.component.css'] }) export class BasicSearchComponent { hideSearchfield: boolean = true; placeholder = 'Suche...'; constructor(private router: Router) { router.events.subscribe(changes => { this.hideSearchfield = true; this.placeholder = 'Suche...'; }); } sendRequest(values: any) { this.router.navigateByUrl('/suche/' + encodeURIComponent(values),); } }
import { Component } from '@angular/core'; import { Router } from '@angular/router'; @Component({ moduleId: module.id, selector: 'rae-basic-search', templateUrl: './basic-search.component.html', styleUrls: ['./basic-search.component.css'] }) export class BasicSearchComponent { hideSearchfield: boolean = true; placeholder = 'Suche...'; constructor(private router: Router) { + router.events.subscribe(changes => { + this.hideSearchfield = true; + this.placeholder = 'Suche...'; + }); } sendRequest(values: any) { this.router.navigateByUrl('/suche/' + encodeURIComponent(values),); } }
4
0.173913
4
0
c43e6a69fa1391b5fd00a43628111f8d52ec8792
pct_vs_time.py
pct_vs_time.py
from deuces.deuces import Card, Deck from convenience import who_wins p1 = [Card.new('As'), Card.new('Ac')] p2 = [Card.new('Ad'), Card.new('Kd')] win_record = [] for i in range(100000): deck = Deck() b = [] while len(b) < 5: c = deck.draw() if c in p1 or c in p2: continue b.append(c) win_record.append(who_wins(b, p1, p2, printout = False)) Card.print_pretty_cards(p1) print win_record.count(1) / float(len(win_record)) Card.print_pretty_cards(p2) print win_record.count(2) / float(len(win_record))
from deuces.deuces import Card, Deck from convenience import who_wins, pr from copy import deepcopy p1 = [Card.new('As'), Card.new('Ac')] p2 = [Card.new('Ad'), Card.new('Kd')] def find_pcts(p1, p2, start_b = [], iter = 10000): win_record = [] for i in range(iter): deck = Deck() b = deepcopy(start_b) while len(b) < 5: c = deck.draw() if c in p1 + p2 + b: continue b.append(c) win_record.append(who_wins(b, p1, p2, printout = False)) return [win_record.count(1) / float(len(win_record)), win_record.count(2) / float(len(win_record)) ] Card.print_pretty_cards(p1) Card.print_pretty_cards(p2) print find_pcts(p1, p2)
Make find_pcts() function that can repetitively run.
Make find_pcts() function that can repetitively run.
Python
mit
zimolzak/poker-experiments,zimolzak/poker-experiments,zimolzak/poker-experiments
python
## Code Before: from deuces.deuces import Card, Deck from convenience import who_wins p1 = [Card.new('As'), Card.new('Ac')] p2 = [Card.new('Ad'), Card.new('Kd')] win_record = [] for i in range(100000): deck = Deck() b = [] while len(b) < 5: c = deck.draw() if c in p1 or c in p2: continue b.append(c) win_record.append(who_wins(b, p1, p2, printout = False)) Card.print_pretty_cards(p1) print win_record.count(1) / float(len(win_record)) Card.print_pretty_cards(p2) print win_record.count(2) / float(len(win_record)) ## Instruction: Make find_pcts() function that can repetitively run. ## Code After: from deuces.deuces import Card, Deck from convenience import who_wins, pr from copy import deepcopy p1 = [Card.new('As'), Card.new('Ac')] p2 = [Card.new('Ad'), Card.new('Kd')] def find_pcts(p1, p2, start_b = [], iter = 10000): win_record = [] for i in range(iter): deck = Deck() b = deepcopy(start_b) while len(b) < 5: c = deck.draw() if c in p1 + p2 + b: continue b.append(c) win_record.append(who_wins(b, p1, p2, printout = False)) return [win_record.count(1) / float(len(win_record)), win_record.count(2) / float(len(win_record)) ] Card.print_pretty_cards(p1) Card.print_pretty_cards(p2) print find_pcts(p1, p2)
from deuces.deuces import Card, Deck - from convenience import who_wins + from convenience import who_wins, pr ? ++++ + from copy import deepcopy p1 = [Card.new('As'), Card.new('Ac')] p2 = [Card.new('Ad'), Card.new('Kd')] - win_record = [] - for i in range(100000): + def find_pcts(p1, p2, start_b = [], iter = 10000): + win_record = [] + for i in range(iter): - deck = Deck() + deck = Deck() ? ++++ - b = [] + b = deepcopy(start_b) - while len(b) < 5: + while len(b) < 5: ? ++++ - c = deck.draw() + c = deck.draw() ? ++++ - if c in p1 or c in p2: + if c in p1 + p2 + b: - continue + continue ? ++++ - b.append(c) + b.append(c) ? ++++ - win_record.append(who_wins(b, p1, p2, printout = False)) + win_record.append(who_wins(b, p1, p2, printout = False)) ? ++++ + return [win_record.count(1) / float(len(win_record)), + win_record.count(2) / float(len(win_record)) + ] Card.print_pretty_cards(p1) - print win_record.count(1) / float(len(win_record)) Card.print_pretty_cards(p2) - print win_record.count(2) / float(len(win_record)) + print find_pcts(p1, p2)
30
1.428571
17
13
35f3534e9e624aee0bcde99fa6cefc1f3121bb32
tools/set_permissions.sh
tools/set_permissions.sh
cd `dirname "$0"`/.. find . -type f -exec chmod 0444 {} \; find . -type d -exec chmod 0555 {} \; chmod 0777 assets chmod 0777 protected/runtime chmod 0777 backups
cd `dirname "$0"`/.. find . -type f -exec chmod 0444 {} \; find . -type d -exec chmod 0555 {} \;
Correct of script for permissions setting.
Correct of script for permissions setting.
Shell
mit
thewizardplusplus/wizard-diary,thewizardplusplus/wizard-diary,thewizardplusplus/wizard-diary,thewizardplusplus/wizard-diary,thewizardplusplus/wizard-diary
shell
## Code Before: cd `dirname "$0"`/.. find . -type f -exec chmod 0444 {} \; find . -type d -exec chmod 0555 {} \; chmod 0777 assets chmod 0777 protected/runtime chmod 0777 backups ## Instruction: Correct of script for permissions setting. ## Code After: cd `dirname "$0"`/.. find . -type f -exec chmod 0444 {} \; find . -type d -exec chmod 0555 {} \;
cd `dirname "$0"`/.. find . -type f -exec chmod 0444 {} \; find . -type d -exec chmod 0555 {} \; - - chmod 0777 assets - chmod 0777 protected/runtime - chmod 0777 backups
4
0.444444
0
4
04fcda42222fff1daad780db53190bcfb721d034
polling_stations/apps/data_collection/management/commands/import_mid_sussex.py
polling_stations/apps/data_collection/management/commands/import_mid_sussex.py
import sys from django.contrib.gis.geos import Point, GEOSGeometry from data_collection.management.commands import BaseKamlImporter class Command(BaseKamlImporter): """ Imports the Polling Station data from Mid Sussex """ council_id = 'E07000228' districts_name = 'msdc_3830_pollingdistricts_polygon.kmz' stations_name = 'R3900_pollingstations.csv' def station_record_to_dict(self, record): location = Point(float(record.xcoord), float(record.ycoord), srid=self.srid) address = "\n".join([record.venue, record.street, record.town]) return { 'internal_council_id': record.statnum, 'postcode': record.postcode, 'address': address, 'location': location }
import sys from lxml import etree from django.contrib.gis.geos import Point, GEOSGeometry from data_collection.management.commands import BaseKamlImporter class Command(BaseKamlImporter): """ Imports the Polling Station data from Mid Sussex """ council_id = 'E07000228' districts_name = 'msdc_3830_pollingdistricts_polygon.kmz' stations_name = 'R3900_pollingstations.csv' def extract_msercode_from_description(self, description): html = etree.HTML(str(description).replace('&', '&amp;')) rows = html.xpath("//td") return rows[7].text def district_record_to_dict(self, record): msercode = self.extract_msercode_from_description(record['description']) geojson = self.strip_z_values(record.geom.geojson) poly = self.clean_poly(GEOSGeometry(geojson, srid=self.get_srid('districts'))) return { 'internal_council_id': msercode, 'name' : record['Name'].value, 'area' : poly } def station_record_to_dict(self, record): location = Point(float(record.xcoord), float(record.ycoord), srid=self.srid) address = "\n".join([record.venue, record.street, record.town]) return { 'internal_council_id': record.msercode, 'postcode': record.postcode, 'address': address, 'location': location, 'polling_district_id': record.msercode }
Fix Mid Sussex Import script
Fix Mid Sussex Import script

Set polling_district_id
Use mserid as internal_council_id to avoid importing duplicate points
Python
bsd-3-clause
chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,andylolz/UK-Polling-Stations
python
## Code Before: import sys from django.contrib.gis.geos import Point, GEOSGeometry from data_collection.management.commands import BaseKamlImporter class Command(BaseKamlImporter): """ Imports the Polling Station data from Mid Sussex """ council_id = 'E07000228' districts_name = 'msdc_3830_pollingdistricts_polygon.kmz' stations_name = 'R3900_pollingstations.csv' def station_record_to_dict(self, record): location = Point(float(record.xcoord), float(record.ycoord), srid=self.srid) address = "\n".join([record.venue, record.street, record.town]) return { 'internal_council_id': record.statnum, 'postcode': record.postcode, 'address': address, 'location': location } ## Instruction: Fix Mid Sussex Import script Set polling_district_id Use mserid as internal_council_id to avoid importing duplicate points ## Code After: import sys from lxml import etree from django.contrib.gis.geos import Point, GEOSGeometry from data_collection.management.commands import BaseKamlImporter class Command(BaseKamlImporter): """ Imports the Polling Station data from Mid Sussex """ council_id = 'E07000228' districts_name = 'msdc_3830_pollingdistricts_polygon.kmz' stations_name = 'R3900_pollingstations.csv' def extract_msercode_from_description(self, description): html = etree.HTML(str(description).replace('&', '&amp;')) rows = html.xpath("//td") return rows[7].text def district_record_to_dict(self, record): msercode = self.extract_msercode_from_description(record['description']) geojson = self.strip_z_values(record.geom.geojson) poly = self.clean_poly(GEOSGeometry(geojson, srid=self.get_srid('districts'))) return { 'internal_council_id': msercode, 'name' : record['Name'].value, 'area' : poly } def station_record_to_dict(self, record): location = Point(float(record.xcoord), float(record.ycoord), srid=self.srid) address = "\n".join([record.venue, record.street, record.town]) return { 'internal_council_id': record.msercode, 'postcode': record.postcode, 'address': address, 'location': location, 'polling_district_id': record.msercode }
import sys - + from lxml import etree from django.contrib.gis.geos import Point, GEOSGeometry - from data_collection.management.commands import BaseKamlImporter class Command(BaseKamlImporter): """ Imports the Polling Station data from Mid Sussex """ council_id = 'E07000228' districts_name = 'msdc_3830_pollingdistricts_polygon.kmz' stations_name = 'R3900_pollingstations.csv' + def extract_msercode_from_description(self, description): + html = etree.HTML(str(description).replace('&', '&amp;')) + rows = html.xpath("//td") + return rows[7].text + + def district_record_to_dict(self, record): + msercode = self.extract_msercode_from_description(record['description']) + geojson = self.strip_z_values(record.geom.geojson) + poly = self.clean_poly(GEOSGeometry(geojson, srid=self.get_srid('districts'))) + return { + 'internal_council_id': msercode, + 'name' : record['Name'].value, + 'area' : poly + } + def station_record_to_dict(self, record): location = Point(float(record.xcoord), float(record.ycoord), srid=self.srid) address = "\n".join([record.venue, record.street, record.town]) return { - 'internal_council_id': record.statnum, ? ^^^^^^ + 'internal_council_id': record.msercode, ? + ^^^^^^ 'postcode': record.postcode, 'address': address, - 'location': location + 'location': location, ? + + 'polling_district_id': record.msercode }
23
1
19
4
686aa9b0467a9fe57af86bf2652e290be3f1c3b3
docs/dev_releaseprocess.rst
docs/dev_releaseprocess.rst
================= Release process ================= 1. Checkout master tip. 2. Check to make sure ``setup.py``, requirements files, and ``docs/installation.rst`` have correct version of elasticsearch-py. 3. Update version numbers in ``elasticutils/_version.py``. 1. Set ``__version__`` to something like ``0.4``. 2. Set ``__releasedate__`` to something like ``20120731``. 4. Update ``CONTRIBUTORS``, ``CHANGELOG``, ``MANIFEST.in``. Make sure to set the date for the release in CHANGELOG. Make sure requirements in ``setup.py``, ``docs/installation.rst`` and ``CHANGELOG`` all match. 5. Verify correctness. 1. Run tests. 2. Build docs. 3. Run sample programs in docs. 4. Verify all that works. 6. Tag the release:: $ git tag -a v0.4 7. Push everything:: $ git push --tags official master 8. Update PyPI:: $ rm -rf dist/* $ python setup.py sdist bdist_wheel $ twine upload dist/* 9. Update topic in ``#elasticutils``, blog post, twitter, etc.
================= Release process ================= 1. Checkout master tip. 2. Check to make sure ``setup.py``, requirements files, and ``docs/installation.rst`` have correct version of elasticsearch-py. 3. Update version numbers in ``elasticutils/_version.py``. 1. Set ``__version__`` to something like ``0.4``. 2. Set ``__releasedate__`` to something like ``20120731``. 4. Update ``CONTRIBUTORS``, ``CHANGELOG``, ``MANIFEST.in``. Make sure to set the date for the release in CHANGELOG. Make sure requirements in ``setup.py``, ``docs/installation.rst`` and ``CHANGELOG`` all match. 5. Verify correctness. 1. Run tests. 2. Build docs. 3. Run sample programs in docs. 4. Verify all that works. 6. Tag the release:: $ git tag -a v0.4 Copy the details from ``CHANGELOG`` into the tag comment. 7. Push everything:: $ git push --tags official master 8. Update PyPI:: $ rm -rf dist/* $ python setup.py sdist bdist_wheel $ twine upload dist/* 9. Update topic in ``#elasticutils``, blog post, twitter, etc.
Add note about tag notes to releaseprocess
Add note about tag notes to releaseprocess
reStructuredText
bsd-3-clause
einvalentin/elasticutils,mozilla/elasticutils,einvalentin/elasticutils,einvalentin/elasticutils,mozilla/elasticutils,mozilla/elasticutils
restructuredtext
## Code Before: ================= Release process ================= 1. Checkout master tip. 2. Check to make sure ``setup.py``, requirements files, and ``docs/installation.rst`` have correct version of elasticsearch-py. 3. Update version numbers in ``elasticutils/_version.py``. 1. Set ``__version__`` to something like ``0.4``. 2. Set ``__releasedate__`` to something like ``20120731``. 4. Update ``CONTRIBUTORS``, ``CHANGELOG``, ``MANIFEST.in``. Make sure to set the date for the release in CHANGELOG. Make sure requirements in ``setup.py``, ``docs/installation.rst`` and ``CHANGELOG`` all match. 5. Verify correctness. 1. Run tests. 2. Build docs. 3. Run sample programs in docs. 4. Verify all that works. 6. Tag the release:: $ git tag -a v0.4 7. Push everything:: $ git push --tags official master 8. Update PyPI:: $ rm -rf dist/* $ python setup.py sdist bdist_wheel $ twine upload dist/* 9. Update topic in ``#elasticutils``, blog post, twitter, etc. ## Instruction: Add note about tag notes to releaseprocess ## Code After: ================= Release process ================= 1. Checkout master tip. 2. Check to make sure ``setup.py``, requirements files, and ``docs/installation.rst`` have correct version of elasticsearch-py. 3. Update version numbers in ``elasticutils/_version.py``. 1. Set ``__version__`` to something like ``0.4``. 2. Set ``__releasedate__`` to something like ``20120731``. 4. Update ``CONTRIBUTORS``, ``CHANGELOG``, ``MANIFEST.in``. Make sure to set the date for the release in CHANGELOG. Make sure requirements in ``setup.py``, ``docs/installation.rst`` and ``CHANGELOG`` all match. 5. Verify correctness. 1. Run tests. 2. Build docs. 3. Run sample programs in docs. 4. Verify all that works. 6. Tag the release:: $ git tag -a v0.4 Copy the details from ``CHANGELOG`` into the tag comment. 7. Push everything:: $ git push --tags official master 8. Update PyPI:: $ rm -rf dist/* $ python setup.py sdist bdist_wheel $ twine upload dist/* 9. Update topic in ``#elasticutils``, blog post, twitter, etc.
================= Release process ================= 1. Checkout master tip. 2. Check to make sure ``setup.py``, requirements files, and ``docs/installation.rst`` have correct version of elasticsearch-py. 3. Update version numbers in ``elasticutils/_version.py``. 1. Set ``__version__`` to something like ``0.4``. 2. Set ``__releasedate__`` to something like ``20120731``. 4. Update ``CONTRIBUTORS``, ``CHANGELOG``, ``MANIFEST.in``. Make sure to set the date for the release in CHANGELOG. Make sure requirements in ``setup.py``, ``docs/installation.rst`` and ``CHANGELOG`` all match. 5. Verify correctness. 1. Run tests. 2. Build docs. 3. Run sample programs in docs. 4. Verify all that works. 6. Tag the release:: $ git tag -a v0.4 + Copy the details from ``CHANGELOG`` into the tag comment. + 7. Push everything:: $ git push --tags official master 8. Update PyPI:: $ rm -rf dist/* $ python setup.py sdist bdist_wheel $ twine upload dist/* 9. Update topic in ``#elasticutils``, blog post, twitter, etc.
2
0.045455
2
0
951c5b86e9f23930b0ea0fff8fbdc89c8453e862
pkgs/development/python-modules/moviepy/default.nix
pkgs/development/python-modules/moviepy/default.nix
{ stdenv , buildPythonPackage , fetchPypi , numpy , decorator , imageio , isPy3k , tqdm }: buildPythonPackage rec { pname = "moviepy"; version = "1.0.0"; src = fetchPypi { inherit pname version; sha256 = "16c7ffca23d90c76dd7b163f648c8166dfd589b7c180b8ff75aa327ae0a2fc6d"; }; # No tests doCheck = false; propagatedBuildInputs = [ numpy decorator imageio tqdm ]; meta = with stdenv.lib; { description = "Video editing with Python"; homepage = http://zulko.github.io/moviepy/; license = licenses.mit; }; }
{ stdenv , buildPythonPackage , fetchPypi , numpy , decorator , imageio , imageio-ffmpeg , isPy3k , proglog , requests , tqdm }: buildPythonPackage rec { pname = "moviepy"; version = "1.0.0"; src = fetchPypi { inherit pname version; sha256 = "16c7ffca23d90c76dd7b163f648c8166dfd589b7c180b8ff75aa327ae0a2fc6d"; }; # No tests doCheck = false; propagatedBuildInputs = [ numpy decorator imageio imageio-ffmpeg tqdm requests proglog ]; meta = with stdenv.lib; { description = "Video editing with Python"; homepage = http://zulko.github.io/moviepy/; license = licenses.mit; }; }
Add deps for new version
pythonPackages.moviepy: Add deps for new version Fixes #59857
Nix
mit
NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs
nix
## Code Before: { stdenv , buildPythonPackage , fetchPypi , numpy , decorator , imageio , isPy3k , tqdm }: buildPythonPackage rec { pname = "moviepy"; version = "1.0.0"; src = fetchPypi { inherit pname version; sha256 = "16c7ffca23d90c76dd7b163f648c8166dfd589b7c180b8ff75aa327ae0a2fc6d"; }; # No tests doCheck = false; propagatedBuildInputs = [ numpy decorator imageio tqdm ]; meta = with stdenv.lib; { description = "Video editing with Python"; homepage = http://zulko.github.io/moviepy/; license = licenses.mit; }; } ## Instruction: pythonPackages.moviepy: Add deps for new version Fixes #59857 ## Code After: { stdenv , buildPythonPackage , fetchPypi , numpy , decorator , imageio , imageio-ffmpeg , isPy3k , proglog , requests , tqdm }: buildPythonPackage rec { pname = "moviepy"; version = "1.0.0"; src = fetchPypi { inherit pname version; sha256 = "16c7ffca23d90c76dd7b163f648c8166dfd589b7c180b8ff75aa327ae0a2fc6d"; }; # No tests doCheck = false; propagatedBuildInputs = [ numpy decorator imageio imageio-ffmpeg tqdm requests proglog ]; meta = with stdenv.lib; { description = "Video editing with Python"; homepage = http://zulko.github.io/moviepy/; license = licenses.mit; }; }
{ stdenv , buildPythonPackage , fetchPypi , numpy , decorator , imageio + , imageio-ffmpeg , isPy3k + , proglog + , requests , tqdm }: buildPythonPackage rec { pname = "moviepy"; version = "1.0.0"; src = fetchPypi { inherit pname version; sha256 = "16c7ffca23d90c76dd7b163f648c8166dfd589b7c180b8ff75aa327ae0a2fc6d"; }; # No tests doCheck = false; - propagatedBuildInputs = [ numpy decorator imageio tqdm ]; + propagatedBuildInputs = [ numpy decorator imageio imageio-ffmpeg tqdm requests proglog ]; ? +++++++++++++++ +++++++++++++++++ meta = with stdenv.lib; { description = "Video editing with Python"; homepage = http://zulko.github.io/moviepy/; license = licenses.mit; }; }
5
0.166667
4
1
0101b9b146eae2cc3c8ba891acb536acd6328982
app/models/EventLog.java
app/models/EventLog.java
package models; import java.util.Date; import javax.persistence.Entity; import javax.persistence.EnumType; import javax.persistence.Enumerated; import javax.persistence.JoinColumn; import javax.persistence.OneToOne; import models.Server.Status; import play.db.jpa.Model; @Entity public class EventLog extends Model { @OneToOne @JoinColumn(name = "server_id") public Server server; @Enumerated(EnumType.ORDINAL) public Status status; public String message; public Date created; }
package models; import java.util.Date; import javax.persistence.Entity; import javax.persistence.EnumType; import javax.persistence.Enumerated; import play.db.jpa.Model; @Entity public class EventLog extends Model { public static enum Type { Server, Probe; } // Event creates from server status or probe status @Enumerated(EnumType.ORDINAL) public Type type; // Event instance, server id or probe id public Long instance; // Event status, e.g. 0 for server means fail and 1 for server means ok. public Integer status; // Event message public String message; public Date created; }
Change event model to support probe status.
Change event model to support probe status.
Java
mit
llun/eyes
java
## Code Before: package models; import java.util.Date; import javax.persistence.Entity; import javax.persistence.EnumType; import javax.persistence.Enumerated; import javax.persistence.JoinColumn; import javax.persistence.OneToOne; import models.Server.Status; import play.db.jpa.Model; @Entity public class EventLog extends Model { @OneToOne @JoinColumn(name = "server_id") public Server server; @Enumerated(EnumType.ORDINAL) public Status status; public String message; public Date created; } ## Instruction: Change event model to support probe status. ## Code After: package models; import java.util.Date; import javax.persistence.Entity; import javax.persistence.EnumType; import javax.persistence.Enumerated; import play.db.jpa.Model; @Entity public class EventLog extends Model { public static enum Type { Server, Probe; } // Event creates from server status or probe status @Enumerated(EnumType.ORDINAL) public Type type; // Event instance, server id or probe id public Long instance; // Event status, e.g. 0 for server means fail and 1 for server means ok. public Integer status; // Event message public String message; public Date created; }
package models; import java.util.Date; import javax.persistence.Entity; import javax.persistence.EnumType; import javax.persistence.Enumerated; - import javax.persistence.JoinColumn; - import javax.persistence.OneToOne; - import models.Server.Status; import play.db.jpa.Model; @Entity public class EventLog extends Model { - @OneToOne - @JoinColumn(name = "server_id") - public Server server; + public static enum Type { + Server, Probe; + } + + // Event creates from server status or probe status @Enumerated(EnumType.ORDINAL) + public Type type; + + // Event instance, server id or probe id + public Long instance; + + // Event status, e.g. 0 for server means fail and 1 for server means ok. - public Status status; ? ^ ^^^^ + public Integer status; ? ^^ ^^^^ + + // Event message public String message; public Date created; }
21
0.84
14
7
b03501d7e1528d141fa78eb839a1322aae33d417
README.md
README.md
Chatroom ========= Miniature IRC-style chatroom written in about 300 lines of Python. Stores users and their passwords persistently. Todo: * Make sure a user can only be online in one session. *Block `/nick` changes to that user as well as new login attempts. * Implement a `/changepass` command with optional <username> and <pass> parameters to change a user's password. Current password required of course. * ~~Implement a `/nick` command to change the current connection's username~~ * Look into writing a client program to smooth out the experience and get rid of the usability problems with Telnet * Store some statistics about users, like total number of messages sent * ~~Make the users file an absolute filepath~~ * Implement a `/help` command * Implement a `/me` command * Look into bolding and other terminal control features over Telnet. Think about compatability with a client program. * ~~Add a /quit or /leave command with "reason" parameter.~~ * ~~Store accounts more permanently~~ * ~~Improve username formatting~~ * ~~Broadcast messages when users join and leave~~ * ~~Take another look at the `users` and `connections` dicts - probably some redundancy there~~ * ~~Don't send messages to yourself~~
Chatroom ========= Miniature IRC-style chatroom written in about 300 lines of Python. Stores users and their passwords persistently. Todo: * ~~Make sure a user can only be online in one session.~~ * Implement a `/changepass` command with optional <username> and <pass> parameters to change a user's password. Current password required of course. * ~~Implement a `/nick` command to change the current connection's username~~ * Look into writing a client program to smooth out the experience and get rid of the usability problems with Telnet * Store some statistics about users, like total number of messages sent * ~~Make the users file an absolute filepath~~ * Implement a `/help` command * ~~Implement a `/me` command~~ * Look into bolding and other terminal control features over Telnet. Think about compatability with a client program. * ~~Add a /quit or /leave command with "reason" parameter.~~ * ~~Store accounts more permanently~~ * ~~Improve username formatting~~ * ~~Broadcast messages when users join and leave~~ * ~~Take another look at the `users` and `connections` dicts - probably some redundancy there~~ * ~~Don't send messages to yourself~~
Update TODO list with closed issues
Update TODO list with closed issues * /me command implemented * Multiple users forces all but one to be kicked
Markdown
unlicense
bedekelly/twistchat
markdown
## Code Before: Chatroom ========= Miniature IRC-style chatroom written in about 300 lines of Python. Stores users and their passwords persistently. Todo: * Make sure a user can only be online in one session. *Block `/nick` changes to that user as well as new login attempts. * Implement a `/changepass` command with optional <username> and <pass> parameters to change a user's password. Current password required of course. * ~~Implement a `/nick` command to change the current connection's username~~ * Look into writing a client program to smooth out the experience and get rid of the usability problems with Telnet * Store some statistics about users, like total number of messages sent * ~~Make the users file an absolute filepath~~ * Implement a `/help` command * Implement a `/me` command * Look into bolding and other terminal control features over Telnet. Think about compatability with a client program. * ~~Add a /quit or /leave command with "reason" parameter.~~ * ~~Store accounts more permanently~~ * ~~Improve username formatting~~ * ~~Broadcast messages when users join and leave~~ * ~~Take another look at the `users` and `connections` dicts - probably some redundancy there~~ * ~~Don't send messages to yourself~~ ## Instruction: Update TODO list with closed issues * /me command implemented * Multiple users forces all but one to be kicked ## Code After: Chatroom ========= Miniature IRC-style chatroom written in about 300 lines of Python. Stores users and their passwords persistently. Todo: * ~~Make sure a user can only be online in one session.~~ * Implement a `/changepass` command with optional <username> and <pass> parameters to change a user's password. Current password required of course. * ~~Implement a `/nick` command to change the current connection's username~~ * Look into writing a client program to smooth out the experience and get rid of the usability problems with Telnet * Store some statistics about users, like total number of messages sent * ~~Make the users file an absolute filepath~~ * Implement a `/help` command * ~~Implement a `/me` command~~ * Look into bolding and other terminal control features over Telnet. Think about compatability with a client program. * ~~Add a /quit or /leave command with "reason" parameter.~~ * ~~Store accounts more permanently~~ * ~~Improve username formatting~~ * ~~Broadcast messages when users join and leave~~ * ~~Take another look at the `users` and `connections` dicts - probably some redundancy there~~ * ~~Don't send messages to yourself~~
Chatroom ========= Miniature IRC-style chatroom written in about 300 lines of Python. Stores users and their passwords persistently. Todo: - * Make sure a user can only be online in one session. ? ^ + * ~~Make sure a user can only be online in one session.~~ ? ++ ^^ - *Block `/nick` changes to that user as well as new login attempts. * Implement a `/changepass` command with optional <username> and <pass> parameters to change a user's password. Current password required of course. * ~~Implement a `/nick` command to change the current connection's username~~ * Look into writing a client program to smooth out the experience and get rid of the usability problems with Telnet * Store some statistics about users, like total number of messages sent * ~~Make the users file an absolute filepath~~ * Implement a `/help` command - * Implement a `/me` command + * ~~Implement a `/me` command~~ ? ++ ++ * Look into bolding and other terminal control features over Telnet. Think about compatability with a client program. * ~~Add a /quit or /leave command with "reason" parameter.~~ * ~~Store accounts more permanently~~ * ~~Improve username formatting~~ * ~~Broadcast messages when users join and leave~~ * ~~Take another look at the `users` and `connections` dicts - probably some redundancy there~~ * ~~Don't send messages to yourself~~
5
0.217391
2
3
03877a7bd2c17c857f5c2ac10944bcbca96bc8be
src/Game.coffee
src/Game.coffee
Player = require './Player' TextInput = require './TextInput' class Game constructor: (game) -> @textBox = new TextInput(game) @player = new Player(game) window.Game = @ @socket = io.connect('http://localhost:8080', {secure: true}) @socket.emit 'new player', 'name' @socket.on 'snap', (data) => # update appropriate word for snap @player.addPoints(data.d_score) console.log data.d_score console.log "total points: #{@player.points}" $.notify "Snap! on #{data.word}" $.notify "You have #{@player.points} points", "info" create: -> sendWord: (word) -> console.log word @socket.emit 'new word', word module.exports = Game
Player = require './Player' TextInput = require './TextInput' class Game constructor: (game) -> @textBox = new TextInput(game) @player = new Player(game) window.Game = @ @socket = io.connect('https://snapgame.herokuapp.com', {secure: true}) @socket.emit 'new player', 'name' @socket.on 'snap', (data) => # update appropriate word for snap @player.addPoints(data.d_score) console.log data.d_score console.log "total points: #{@player.points}" $.notify "Snap! on #{data.word}" $.notify "You have #{@player.points} points", "info" create: -> sendWord: (word) -> console.log word @socket.emit 'new word', word module.exports = Game
Use heroku app as backend
Use heroku app as backend
CoffeeScript
mit
CMS611-snap/snap-frontend,CMS611-snap/snap-frontend,CMS611-snap/snap-frontend
coffeescript
## Code Before: Player = require './Player' TextInput = require './TextInput' class Game constructor: (game) -> @textBox = new TextInput(game) @player = new Player(game) window.Game = @ @socket = io.connect('http://localhost:8080', {secure: true}) @socket.emit 'new player', 'name' @socket.on 'snap', (data) => # update appropriate word for snap @player.addPoints(data.d_score) console.log data.d_score console.log "total points: #{@player.points}" $.notify "Snap! on #{data.word}" $.notify "You have #{@player.points} points", "info" create: -> sendWord: (word) -> console.log word @socket.emit 'new word', word module.exports = Game ## Instruction: Use heroku app as backend ## Code After: Player = require './Player' TextInput = require './TextInput' class Game constructor: (game) -> @textBox = new TextInput(game) @player = new Player(game) window.Game = @ @socket = io.connect('https://snapgame.herokuapp.com', {secure: true}) @socket.emit 'new player', 'name' @socket.on 'snap', (data) => # update appropriate word for snap @player.addPoints(data.d_score) console.log data.d_score console.log "total points: #{@player.points}" $.notify "Snap! on #{data.word}" $.notify "You have #{@player.points} points", "info" create: -> sendWord: (word) -> console.log word @socket.emit 'new word', word module.exports = Game
Player = require './Player' TextInput = require './TextInput' class Game constructor: (game) -> @textBox = new TextInput(game) @player = new Player(game) window.Game = @ - @socket = io.connect('http://localhost:8080', {secure: true}) ? ^ --- ^^^^^^^ + @socket = io.connect('https://snapgame.herokuapp.com', {secure: true}) ? + ^^^^^^^^^^^^ ++++++ ^ @socket.emit 'new player', 'name' @socket.on 'snap', (data) => # update appropriate word for snap @player.addPoints(data.d_score) console.log data.d_score console.log "total points: #{@player.points}" $.notify "Snap! on #{data.word}" $.notify "You have #{@player.points} points", "info" create: -> sendWord: (word) -> console.log word @socket.emit 'new word', word module.exports = Game
2
0.071429
1
1
4167de29a21588464e0a11793687b6f68351dbb9
composer.json
composer.json
{ "name": "adyen/module-payment", "description": "Official Magento2 Plugin to connect to Payment Service Provider Adyen.", "type": "magento2-module", "version": "1.0.0.1", "license": [ "OSL-3.0", "AFL-3.0" ], "repositories": [ { "type": "package", "package": { "name": "adyen/adyen-php-api-library", "version": "0.1.0", "type": "package", "source": { "url": "https://github.com/Adyen/adyen-php-api-library", "type": "git", "reference": "master" } } } ], "require": { "php": "~5.5.0|~5.6.0", "magento/magento-composer-installer": "*", "adyen/adyen-php-api-library": "*" }, "autoload": { "psr-4": { "Adyen\\Payment\\": "" }, "files": [ "registration.php" ] } }
{ "name": "adyen/module-payment", "description": "Official Magento2 Plugin to connect to Payment Service Provider Adyen.", "type": "magento2-module", "version": "1.0.0.1", "license": [ "OSL-3.0", "AFL-3.0" ], "repositories": [ { "type": "package", "package": { "name": "adyen/adyen-php-api-library", "version": "0.1.0", "type": "package", "source": { "url": "https://github.com/Adyen/adyen-php-api-library", "type": "git", "reference": "master" } } } ], "require": { "php": "~5.5.0|~5.6.0|~7.0.0", "magento/module-config": "100.0.*", "magento/module-store": "100.0.*", "magento/module-checkout": "100.0.*", "magento/module-catalog": "100.0.*", "magento/module-sales": "100.0.*", "magento/module-customer": "100.0.*", "magento/module-payment": "100.0.*", "magento/module-quote": "100.0.*", "magento/module-backend": "100.0.*", "magento/module-directory": "100.0.*", "magento/module-theme": "100.0.*", "magento/framework": "100.0.*", "magento/magento-composer-installer": "*", "adyen/adyen-php-api-library": "*" }, "autoload": { "files": [ "registration.php" ], "psr-4": { "Adyen\\Payment\\": "" } } }
Extend require with needed modules
Extend require with needed modules
JSON
mit
Adyen/adyen-magento2,Adyen/adyen-magento2,Adyen/adyen-magento2
json
## Code Before: { "name": "adyen/module-payment", "description": "Official Magento2 Plugin to connect to Payment Service Provider Adyen.", "type": "magento2-module", "version": "1.0.0.1", "license": [ "OSL-3.0", "AFL-3.0" ], "repositories": [ { "type": "package", "package": { "name": "adyen/adyen-php-api-library", "version": "0.1.0", "type": "package", "source": { "url": "https://github.com/Adyen/adyen-php-api-library", "type": "git", "reference": "master" } } } ], "require": { "php": "~5.5.0|~5.6.0", "magento/magento-composer-installer": "*", "adyen/adyen-php-api-library": "*" }, "autoload": { "psr-4": { "Adyen\\Payment\\": "" }, "files": [ "registration.php" ] } } ## Instruction: Extend require with needed modules ## Code After: { "name": "adyen/module-payment", "description": "Official Magento2 Plugin to connect to Payment Service Provider Adyen.", "type": "magento2-module", "version": "1.0.0.1", "license": [ "OSL-3.0", "AFL-3.0" ], "repositories": [ { "type": "package", "package": { "name": "adyen/adyen-php-api-library", "version": "0.1.0", "type": "package", "source": { "url": "https://github.com/Adyen/adyen-php-api-library", "type": "git", "reference": "master" } } } ], "require": { "php": "~5.5.0|~5.6.0|~7.0.0", "magento/module-config": "100.0.*", "magento/module-store": "100.0.*", "magento/module-checkout": "100.0.*", "magento/module-catalog": "100.0.*", "magento/module-sales": "100.0.*", "magento/module-customer": "100.0.*", "magento/module-payment": "100.0.*", "magento/module-quote": "100.0.*", "magento/module-backend": "100.0.*", "magento/module-directory": "100.0.*", "magento/module-theme": "100.0.*", "magento/framework": "100.0.*", "magento/magento-composer-installer": "*", "adyen/adyen-php-api-library": "*" }, "autoload": { "files": [ "registration.php" ], "psr-4": { "Adyen\\Payment\\": "" } } }
{ "name": "adyen/module-payment", "description": "Official Magento2 Plugin to connect to Payment Service Provider Adyen.", "type": "magento2-module", "version": "1.0.0.1", "license": [ "OSL-3.0", "AFL-3.0" ], "repositories": [ { "type": "package", "package": { "name": "adyen/adyen-php-api-library", "version": "0.1.0", "type": "package", "source": { "url": "https://github.com/Adyen/adyen-php-api-library", "type": "git", "reference": "master" } } } ], "require": { - "php": "~5.5.0|~5.6.0", + "php": "~5.5.0|~5.6.0|~7.0.0", ? +++++++ + "magento/module-config": "100.0.*", + "magento/module-store": "100.0.*", + "magento/module-checkout": "100.0.*", + "magento/module-catalog": "100.0.*", + "magento/module-sales": "100.0.*", + "magento/module-customer": "100.0.*", + "magento/module-payment": "100.0.*", + "magento/module-quote": "100.0.*", + "magento/module-backend": "100.0.*", + "magento/module-directory": "100.0.*", + "magento/module-theme": "100.0.*", + "magento/framework": "100.0.*", "magento/magento-composer-installer": "*", "adyen/adyen-php-api-library": "*" }, "autoload": { + "files": [ + "registration.php" + ], + "psr-4": { - "psr-4": { "Adyen\\Payment\\": "" }, ? -------- ^ --- + "Adyen\\Payment\\": "" ? ^^ - "files": [ "registration.php" ] + } } }
22
0.647059
19
3
d9325dc913ce1a31d42dc812d1e0b2142de545c6
source/Graphite.TSql/sp_graphitesend.sql
source/Graphite.TSql/sp_graphitesend.sql
USE [master] GO -- enable 'TRUSTWORTHY' -- required for 'external access' of clr code (-> sending tcp packets). ALTER DATABASE [master] SET TRUSTWORTHY ON; GO -- enable clr code EXEC sp_configure 'clr enabled', 1 GO RECONFIGURE GO -- Add Graphite.TSql.dll assembly CREATE ASSEMBLY [Graphite.TSql] AUTHORIZATION [dbo] FROM '<your/path/to/Graphite.TSql.dll>' WITH PERMISSION_SET = EXTERNAL_ACCESS GO -- Create stored procedure 'sp_graphitesend' CREATE PROCEDURE sp_graphitesend ( @host nvarchar(255), @port int, @key nvarchar(255), @value int ) AS EXTERNAL NAME [Graphite.TSql].[Graphite.TSql.GraphiteProcedures].GraphiteSend GO -- -------------------------------------------------------------------------- -- Example usage: -- -- exec sp_graphitesend N'192.168.0.1', 2003, 'stats.events.myserver.test', 1 -- -- --------------------------------------------------------------------------
USE [master] GO -- enable 'TRUSTWORTHY' -- required for 'external access' of clr code (-> sending tcp packets). ALTER DATABASE [master] SET TRUSTWORTHY ON; GO -- enable clr code EXEC sp_configure 'clr enabled', 1 GO RECONFIGURE GO -- Check if procedure and assembly already exist and drop them IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[dbo].[sp_graphitesend]') AND type in (N'P', N'PC')) DROP PROCEDURE [dbo].[sp_graphitesend] GO IF EXISTS (SELECT * FROM sys.assemblies WHERE name = N'Graphite.TSql') DROP ASSEMBLY [Graphite.TSql] GO -- Add Graphite.TSql.dll assembly CREATE ASSEMBLY [Graphite.TSql] AUTHORIZATION [dbo] FROM '<your/path/to/Graphite.TSql.dll>' WITH PERMISSION_SET = EXTERNAL_ACCESS GO -- Create stored procedure 'sp_graphitesend' CREATE PROCEDURE sp_graphitesend ( @host nvarchar(255), @port int, @key nvarchar(255), @value int ) AS EXTERNAL NAME [Graphite.TSql].[Graphite.TSql.GraphiteProcedures].GraphiteSend GO -- -------------------------------------------------------------------------- -- Example usage: -- -- exec sp_graphitesend N'192.168.0.1', 2003, 'stats.events.myserver.test', 1 -- -- --------------------------------------------------------------------------
Drop assembly and sp before create
Drop assembly and sp before create
SQL
mit
peschuster/graphite-client,PeteGoo/graphite-client,MaciejSzczepanski/graphite-client
sql
## Code Before: USE [master] GO -- enable 'TRUSTWORTHY' -- required for 'external access' of clr code (-> sending tcp packets). ALTER DATABASE [master] SET TRUSTWORTHY ON; GO -- enable clr code EXEC sp_configure 'clr enabled', 1 GO RECONFIGURE GO -- Add Graphite.TSql.dll assembly CREATE ASSEMBLY [Graphite.TSql] AUTHORIZATION [dbo] FROM '<your/path/to/Graphite.TSql.dll>' WITH PERMISSION_SET = EXTERNAL_ACCESS GO -- Create stored procedure 'sp_graphitesend' CREATE PROCEDURE sp_graphitesend ( @host nvarchar(255), @port int, @key nvarchar(255), @value int ) AS EXTERNAL NAME [Graphite.TSql].[Graphite.TSql.GraphiteProcedures].GraphiteSend GO -- -------------------------------------------------------------------------- -- Example usage: -- -- exec sp_graphitesend N'192.168.0.1', 2003, 'stats.events.myserver.test', 1 -- -- -------------------------------------------------------------------------- ## Instruction: Drop assembly and sp before create ## Code After: USE [master] GO -- enable 'TRUSTWORTHY' -- required for 'external access' of clr code (-> sending tcp packets). ALTER DATABASE [master] SET TRUSTWORTHY ON; GO -- enable clr code EXEC sp_configure 'clr enabled', 1 GO RECONFIGURE GO -- Check if procedure and assembly already exist and drop them IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[dbo].[sp_graphitesend]') AND type in (N'P', N'PC')) DROP PROCEDURE [dbo].[sp_graphitesend] GO IF EXISTS (SELECT * FROM sys.assemblies WHERE name = N'Graphite.TSql') DROP ASSEMBLY [Graphite.TSql] GO -- Add Graphite.TSql.dll assembly CREATE ASSEMBLY [Graphite.TSql] AUTHORIZATION [dbo] FROM '<your/path/to/Graphite.TSql.dll>' WITH PERMISSION_SET = EXTERNAL_ACCESS GO -- Create stored procedure 'sp_graphitesend' CREATE PROCEDURE sp_graphitesend ( @host nvarchar(255), @port int, @key nvarchar(255), @value int ) AS EXTERNAL NAME [Graphite.TSql].[Graphite.TSql.GraphiteProcedures].GraphiteSend GO -- -------------------------------------------------------------------------- -- Example usage: -- -- exec sp_graphitesend N'192.168.0.1', 2003, 'stats.events.myserver.test', 1 -- -- --------------------------------------------------------------------------
USE [master] GO -- enable 'TRUSTWORTHY' -- required for 'external access' of clr code (-> sending tcp packets). ALTER DATABASE [master] SET TRUSTWORTHY ON; GO -- enable clr code EXEC sp_configure 'clr enabled', 1 GO RECONFIGURE + GO + + -- Check if procedure and assembly already exist and drop them + IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[dbo].[sp_graphitesend]') AND type in (N'P', N'PC')) + DROP PROCEDURE [dbo].[sp_graphitesend] + GO + IF EXISTS (SELECT * FROM sys.assemblies WHERE name = N'Graphite.TSql') + DROP ASSEMBLY [Graphite.TSql] GO -- Add Graphite.TSql.dll assembly CREATE ASSEMBLY [Graphite.TSql] AUTHORIZATION [dbo] FROM '<your/path/to/Graphite.TSql.dll>' WITH PERMISSION_SET = EXTERNAL_ACCESS GO -- Create stored procedure 'sp_graphitesend' CREATE PROCEDURE sp_graphitesend ( @host nvarchar(255), @port int, @key nvarchar(255), @value int ) AS EXTERNAL NAME [Graphite.TSql].[Graphite.TSql.GraphiteProcedures].GraphiteSend GO -- -------------------------------------------------------------------------- -- Example usage: -- -- exec sp_graphitesend N'192.168.0.1', 2003, 'stats.events.myserver.test', 1 -- -- --------------------------------------------------------------------------
8
0.205128
8
0
79f59b93a84ff829c1e73d05a9c17c4ee4b4b540
bin/ci.sh
bin/ci.sh
set -ev if [ "${TRAVIS_BRANCH}" = "master" ]; then packer build -only=amazon-ebs template.json else packer build -only=docker template.json fi
set -ev if [ "${TRAVIS_BRANCH}" = "master" ]; then packer build template.json else packer build -only=docker template.json fi
Fix an error that container does not exist
:pill: Fix an error that container does not exist
Shell
mit
aws-lambda-bins/git,aws-lambda-bins/git
shell
## Code Before: set -ev if [ "${TRAVIS_BRANCH}" = "master" ]; then packer build -only=amazon-ebs template.json else packer build -only=docker template.json fi ## Instruction: :pill: Fix an error that container does not exist ## Code After: set -ev if [ "${TRAVIS_BRANCH}" = "master" ]; then packer build template.json else packer build -only=docker template.json fi
set -ev if [ "${TRAVIS_BRANCH}" = "master" ]; then - packer build -only=amazon-ebs template.json ? ----------------- + packer build template.json else packer build -only=docker template.json fi
2
0.285714
1
1
a9827033fb064dc7edfff9cebee0c40d2d0fcd19
spec/dummy/config/database.yml
spec/dummy/config/database.yml
development: adapter: mysql2 encoding: utf8 reconnect: false database: monologue_development pool: 5 username: david password: davidsql test: adapter: mysql2 encoding: utf8 reconnect: false database: monologue_test pool: 5 username: david password: davidsql
development: adapter: mysql2 encoding: utf8 reconnect: false database: monologue_development pool: 5 username: root password: test: adapter: mysql2 encoding: utf8 reconnect: false database: monologue_test pool: 5 username: root password:
Reset password for db in dummy app
Reset password for db in dummy app
YAML
mit
jaimerson/monologue,caitlingoldman/monologue,thiesa/monologue,kyle-annen/monologue,bertomartin/monologue,jipiboily/monologue,abreckner/monologue,caitlingoldman/monologue,bertomartin/monologue,caitlingoldman/monologue,bertomartin/monologue,abreckner/monologue,jaimerson/monologue,kyle-annen/monologue,tam-vo/monologue,thiesa/monologue,GapIntelligence/monologue,tam-vo/monologue,paresharma/monologue,tam-vo/monologue,paresharma/monologue,jaimerson/monologue,paresharma/monologue,jipiboily/monologue,kyle-annen/monologue,munirent/monologue,GapIntelligence/monologue,GapIntelligence/monologue,abreckner/monologue,thiesa/monologue,munirent/monologue,jipiboily/monologue
yaml
## Code Before: development: adapter: mysql2 encoding: utf8 reconnect: false database: monologue_development pool: 5 username: david password: davidsql test: adapter: mysql2 encoding: utf8 reconnect: false database: monologue_test pool: 5 username: david password: davidsql ## Instruction: Reset password for db in dummy app ## Code After: development: adapter: mysql2 encoding: utf8 reconnect: false database: monologue_development pool: 5 username: root password: test: adapter: mysql2 encoding: utf8 reconnect: false database: monologue_test pool: 5 username: root password:
development: adapter: mysql2 encoding: utf8 reconnect: false database: monologue_development pool: 5 - username: david - password: davidsql + username: root + password: test: adapter: mysql2 encoding: utf8 reconnect: false database: monologue_test pool: 5 - username: david - password: davidsql + username: root + password:
8
0.470588
4
4
582c8926bdeb47844ad844c5adc12f4b8d998b27
modules/core/github_mention.js
modules/core/github_mention.js
// PiscoBot Script var commandDescription = { name: 'GitHub Mention', author: 'Daniel Gallegos [@that_taco_guy]', trigger: '[none]', version: 1.0, description: 'Have the bot reply to new commits on GitHub.', module: 'Core' }; global.botHelp.push(commandDescription); var _ = require('underscore'); global.piscobot.hears(['PiscoBot.*new commits by', 'PiscoBot.*new commit by'], ['bot_message'], function(bot, message) { var emoji = [ 'thinking_face', 'open_mouth', 'face_with_rolling_eyes', 'sweat_smile' ]; bot.api.reactions.add({ timestamp: message.ts, channel: message.channel, name: _.sample(emoji) }, function(err) { if(err) { bot.botkit.log('Failed to add emoji reaction :(', err); } }); } );
// PiscoBot Script var commandDescription = { name: 'GitHub Mention', author: 'Daniel Gallegos [@that_taco_guy]', trigger: '[none]', version: 1.0, description: 'Have the bot reply to new commits on GitHub.', module: 'Core' }; var _ = require('underscore'); global.piscobot.hears(['PiscoBot.*new commits by', 'PiscoBot.*new commit by'], ['bot_message'], function(bot, message) { var emoji = [ 'thinking_face', 'open_mouth', 'face_with_rolling_eyes', 'sweat_smile' ]; bot.api.reactions.add({ timestamp: message.ts, channel: message.channel, name: _.sample(emoji) }, function(err) { if(err) { bot.botkit.log('Failed to add emoji reaction :(', err); } }); } );
Remove GitHub from the help scripts
Remove GitHub from the help scripts
JavaScript
mit
devacademyla/PiscoBot,devacademyla/PiscoBot
javascript
## Code Before: // PiscoBot Script var commandDescription = { name: 'GitHub Mention', author: 'Daniel Gallegos [@that_taco_guy]', trigger: '[none]', version: 1.0, description: 'Have the bot reply to new commits on GitHub.', module: 'Core' }; global.botHelp.push(commandDescription); var _ = require('underscore'); global.piscobot.hears(['PiscoBot.*new commits by', 'PiscoBot.*new commit by'], ['bot_message'], function(bot, message) { var emoji = [ 'thinking_face', 'open_mouth', 'face_with_rolling_eyes', 'sweat_smile' ]; bot.api.reactions.add({ timestamp: message.ts, channel: message.channel, name: _.sample(emoji) }, function(err) { if(err) { bot.botkit.log('Failed to add emoji reaction :(', err); } }); } ); ## Instruction: Remove GitHub from the help scripts ## Code After: // PiscoBot Script var commandDescription = { name: 'GitHub Mention', author: 'Daniel Gallegos [@that_taco_guy]', trigger: '[none]', version: 1.0, description: 'Have the bot reply to new commits on GitHub.', module: 'Core' }; var _ = require('underscore'); global.piscobot.hears(['PiscoBot.*new commits by', 'PiscoBot.*new commit by'], ['bot_message'], function(bot, message) { var emoji = [ 'thinking_face', 'open_mouth', 'face_with_rolling_eyes', 'sweat_smile' ]; bot.api.reactions.add({ timestamp: message.ts, channel: message.channel, name: _.sample(emoji) }, function(err) { if(err) { bot.botkit.log('Failed to add emoji reaction :(', err); } }); } );
// PiscoBot Script var commandDescription = { name: 'GitHub Mention', author: 'Daniel Gallegos [@that_taco_guy]', trigger: '[none]', version: 1.0, description: 'Have the bot reply to new commits on GitHub.', module: 'Core' }; - - global.botHelp.push(commandDescription); var _ = require('underscore'); global.piscobot.hears(['PiscoBot.*new commits by', 'PiscoBot.*new commit by'], ['bot_message'], function(bot, message) { var emoji = [ 'thinking_face', 'open_mouth', 'face_with_rolling_eyes', 'sweat_smile' ]; bot.api.reactions.add({ timestamp: message.ts, channel: message.channel, name: _.sample(emoji) }, function(err) { if(err) { bot.botkit.log('Failed to add emoji reaction :(', err); } }); } );
2
0.058824
0
2
427f22a903dc618fde836cfa4e55032174fb50c6
modules/member/queries/updateMemberPassword.xml
modules/member/queries/updateMemberPassword.xml
<query id="updateMemberPassword" action="update"> <tables> <table name="member" /> </tables> <columns> <column name="password" var="password" notnull="notnull" /> <column name="denied" var="denied" /> </columns> <conditions> <condition operation="equal" column="member_srl" var="member_srl" notnull="notnull" filter="number" /> </conditions> </query>
<query id="updateMemberPassword" action="update"> <tables> <table name="member" /> </tables> <columns> <column name="password" var="password" /> <column name="denied" var="denied" /> </columns> <conditions> <condition operation="equal" column="member_srl" var="member_srl" notnull="notnull" filter="number" /> </conditions> </query>
Fix password required in initial auth mail
Fix password required in initial auth mail
XML
lgpl-2.1
xetown/xe-core,xetown/xe-core,xetown/xe-core
xml
## Code Before: <query id="updateMemberPassword" action="update"> <tables> <table name="member" /> </tables> <columns> <column name="password" var="password" notnull="notnull" /> <column name="denied" var="denied" /> </columns> <conditions> <condition operation="equal" column="member_srl" var="member_srl" notnull="notnull" filter="number" /> </conditions> </query> ## Instruction: Fix password required in initial auth mail ## Code After: <query id="updateMemberPassword" action="update"> <tables> <table name="member" /> </tables> <columns> <column name="password" var="password" /> <column name="denied" var="denied" /> </columns> <conditions> <condition operation="equal" column="member_srl" var="member_srl" notnull="notnull" filter="number" /> </conditions> </query>
<query id="updateMemberPassword" action="update"> <tables> <table name="member" /> </tables> <columns> - <column name="password" var="password" notnull="notnull" /> ? ------------------ + <column name="password" var="password" /> <column name="denied" var="denied" /> </columns> <conditions> <condition operation="equal" column="member_srl" var="member_srl" notnull="notnull" filter="number" /> </conditions> </query>
2
0.166667
1
1
2e79c42b8d003df020076511f0b978897245d1ea
packages/no/non-empty-zipper.yaml
packages/no/non-empty-zipper.yaml
homepage: '' changelog-type: markdown hash: 7cb42774a3712b82037e1705b8414d1d2684ce59b0454379ada0c3a4edac9009 test-bench-deps: checkers: -any base: -any QuickCheck: -any non-empty-zipper: -any maintainer: [email protected] synopsis: The Zipper for NonEmpty changelog: ! '# Revision history for NonEmptyZipper ## 0.1.0.0 -- YYYY-mm-dd * First version. Released on an unsuspecting world. ' basic-deps: base: ! '>=4.9 && <4.10' all-versions: - '0.1.0.0' - '0.1.0.1' - '0.1.0.2' - '0.1.0.3' - '0.1.0.4' author: Isaac Shapira latest: '0.1.0.4' description-type: haddock description: ! 'The Zipper for NonEmpty. Useful for things like tabs, button groups, and slideshows. Basically any case in which you want to ensure you have one selected value from a list of values.' license-name: BSD3
homepage: '' changelog-type: markdown hash: 607814b4dc4149540c3dbc50c38b04574e9cf4c5650b5983936725d3faf7d8a1 test-bench-deps: checkers: -any base: -any QuickCheck: -any non-empty-zipper: -any maintainer: [email protected] synopsis: The Zipper for NonEmpty changelog: ! '# Revision history for NonEmptyZipper ## 0.1.0.0 -- YYYY-mm-dd * First version. Released on an unsuspecting world. ' basic-deps: base: ! '>=4.9 && <4.10' all-versions: - '0.1.0.0' - '0.1.0.1' - '0.1.0.2' - '0.1.0.3' - '0.1.0.4' - '0.1.0.5' author: Isaac Shapira latest: '0.1.0.5' description-type: haddock description: ! 'The Zipper for NonEmpty. Useful for things like tabs, button groups, and slideshows. Basically any case in which you want to ensure you have one selected value from a list of values.' license-name: BSD3
Update from Hackage at 2017-01-24T06:13:47Z
Update from Hackage at 2017-01-24T06:13:47Z
YAML
mit
commercialhaskell/all-cabal-metadata
yaml
## Code Before: homepage: '' changelog-type: markdown hash: 7cb42774a3712b82037e1705b8414d1d2684ce59b0454379ada0c3a4edac9009 test-bench-deps: checkers: -any base: -any QuickCheck: -any non-empty-zipper: -any maintainer: [email protected] synopsis: The Zipper for NonEmpty changelog: ! '# Revision history for NonEmptyZipper ## 0.1.0.0 -- YYYY-mm-dd * First version. Released on an unsuspecting world. ' basic-deps: base: ! '>=4.9 && <4.10' all-versions: - '0.1.0.0' - '0.1.0.1' - '0.1.0.2' - '0.1.0.3' - '0.1.0.4' author: Isaac Shapira latest: '0.1.0.4' description-type: haddock description: ! 'The Zipper for NonEmpty. Useful for things like tabs, button groups, and slideshows. Basically any case in which you want to ensure you have one selected value from a list of values.' license-name: BSD3 ## Instruction: Update from Hackage at 2017-01-24T06:13:47Z ## Code After: homepage: '' changelog-type: markdown hash: 607814b4dc4149540c3dbc50c38b04574e9cf4c5650b5983936725d3faf7d8a1 test-bench-deps: checkers: -any base: -any QuickCheck: -any non-empty-zipper: -any maintainer: [email protected] synopsis: The Zipper for NonEmpty changelog: ! '# Revision history for NonEmptyZipper ## 0.1.0.0 -- YYYY-mm-dd * First version. Released on an unsuspecting world. ' basic-deps: base: ! '>=4.9 && <4.10' all-versions: - '0.1.0.0' - '0.1.0.1' - '0.1.0.2' - '0.1.0.3' - '0.1.0.4' - '0.1.0.5' author: Isaac Shapira latest: '0.1.0.5' description-type: haddock description: ! 'The Zipper for NonEmpty. Useful for things like tabs, button groups, and slideshows. Basically any case in which you want to ensure you have one selected value from a list of values.' license-name: BSD3
homepage: '' changelog-type: markdown - hash: 7cb42774a3712b82037e1705b8414d1d2684ce59b0454379ada0c3a4edac9009 + hash: 607814b4dc4149540c3dbc50c38b04574e9cf4c5650b5983936725d3faf7d8a1 test-bench-deps: checkers: -any base: -any QuickCheck: -any non-empty-zipper: -any maintainer: [email protected] synopsis: The Zipper for NonEmpty changelog: ! '# Revision history for NonEmptyZipper ## 0.1.0.0 -- YYYY-mm-dd * First version. Released on an unsuspecting world. ' basic-deps: base: ! '>=4.9 && <4.10' all-versions: - '0.1.0.0' - '0.1.0.1' - '0.1.0.2' - '0.1.0.3' - '0.1.0.4' + - '0.1.0.5' author: Isaac Shapira - latest: '0.1.0.4' ? ^ + latest: '0.1.0.5' ? ^ description-type: haddock description: ! 'The Zipper for NonEmpty. Useful for things like tabs, button groups, and slideshows. Basically any case in which you want to ensure you have one selected value from a list of values.' license-name: BSD3
5
0.131579
3
2
63d6e68dc4150568bd72330c53a387c7213769b4
CHANGELOG.md
CHANGELOG.md
* Register coffee extension for rake notes. *Roberto Miranda* ## 4.0.1 (October 17, 2013) ## * Drop support to Rails `4.0.0.rc` releases *Rafael Mendonça França* ## 4.0.0 (April 18, 2013) ## * Bump railties version to 4.0.0.beta. *José Valim* ## 3.2.2 (January 26, 2012) ## * Bump railties version to ~> 3.2.0. *Aaron Patterson* ## 3.2.1 (January 5, 2012) ## * No changes. ## 3.2.0 (December 17, 2011) ## * Add coffee-script.js for asset pipeline. Now your app will support `<script type="text/coffeescript">` in views. *Guillermo Iguaran* * Add Action View template handler for coffee views. *Guillermo Iguaran*
* Default to .coffee extension instead of .js.coffee *Joshua Peek* * Register coffee extension for rake notes. *Roberto Miranda* ## 4.0.1 (October 17, 2013) ## * Drop support to Rails `4.0.0.rc` releases *Rafael Mendonça França* ## 4.0.0 (April 18, 2013) ## * Bump railties version to 4.0.0.beta. *José Valim* ## 3.2.2 (January 26, 2012) ## * Bump railties version to ~> 3.2.0. *Aaron Patterson* ## 3.2.1 (January 5, 2012) ## * No changes. ## 3.2.0 (December 17, 2011) ## * Add coffee-script.js for asset pipeline. Now your app will support `<script type="text/coffeescript">` in views. *Guillermo Iguaran* * Add Action View template handler for coffee views. *Guillermo Iguaran*
Add .coffee change to changelog
Add .coffee change to changelog
Markdown
mit
slobodankovacevic/coffee-rails,rails/coffee-rails,rails/coffee-rails,rails/coffee-rails
markdown
## Code Before: * Register coffee extension for rake notes. *Roberto Miranda* ## 4.0.1 (October 17, 2013) ## * Drop support to Rails `4.0.0.rc` releases *Rafael Mendonça França* ## 4.0.0 (April 18, 2013) ## * Bump railties version to 4.0.0.beta. *José Valim* ## 3.2.2 (January 26, 2012) ## * Bump railties version to ~> 3.2.0. *Aaron Patterson* ## 3.2.1 (January 5, 2012) ## * No changes. ## 3.2.0 (December 17, 2011) ## * Add coffee-script.js for asset pipeline. Now your app will support `<script type="text/coffeescript">` in views. *Guillermo Iguaran* * Add Action View template handler for coffee views. *Guillermo Iguaran* ## Instruction: Add .coffee change to changelog ## Code After: * Default to .coffee extension instead of .js.coffee *Joshua Peek* * Register coffee extension for rake notes. *Roberto Miranda* ## 4.0.1 (October 17, 2013) ## * Drop support to Rails `4.0.0.rc` releases *Rafael Mendonça França* ## 4.0.0 (April 18, 2013) ## * Bump railties version to 4.0.0.beta. *José Valim* ## 3.2.2 (January 26, 2012) ## * Bump railties version to ~> 3.2.0. *Aaron Patterson* ## 3.2.1 (January 5, 2012) ## * No changes. ## 3.2.0 (December 17, 2011) ## * Add coffee-script.js for asset pipeline. Now your app will support `<script type="text/coffeescript">` in views. *Guillermo Iguaran* * Add Action View template handler for coffee views. *Guillermo Iguaran*
+ + * Default to .coffee extension instead of .js.coffee + + *Joshua Peek* * Register coffee extension for rake notes. *Roberto Miranda* ## 4.0.1 (October 17, 2013) ## * Drop support to Rails `4.0.0.rc` releases *Rafael Mendonça França* ## 4.0.0 (April 18, 2013) ## * Bump railties version to 4.0.0.beta. *José Valim* ## 3.2.2 (January 26, 2012) ## * Bump railties version to ~> 3.2.0. *Aaron Patterson* ## 3.2.1 (January 5, 2012) ## * No changes. ## 3.2.0 (December 17, 2011) ## * Add coffee-script.js for asset pipeline. Now your app will support `<script type="text/coffeescript">` in views. *Guillermo Iguaran* * Add Action View template handler for coffee views. *Guillermo Iguaran*
4
0.097561
4
0
66d814ef3f47dd4085052bc5fdacd667df1406d8
app/assets/javascripts/attachments.js
app/assets/javascripts/attachments.js
Dropzone.autoDiscover = false; document.addEventListener("turbolinks:load", function() { //Only work at words form page if ($('#word_body').length == 0) { return; } //Dropzone.js var de = $("#upload-dropzone"); de.dropzone( { url: de.attr('url'), paramName: 'attachment[file]', previewTemplate: '<div style="display:none"></div>', init: function(){ this.on('sending', function(file, xhr, formData){ formData.append('authenticity_token', de.attr('authenticity-token')); }), this.on('addedfile', function(file, json) { console.log(file); simplemde.codemirror.replaceSelection("<!-- Uploading " + file.name + " -->") }), this.on('success', function(file, json) { var code = "![" + json.file.url + "](" + json.file.url + ")" var text = simplemde.value(); simplemde.value(text.replace("<!-- Uploading " + file.name + " -->", code)); }); } }); });
Dropzone.autoDiscover = false; document.addEventListener("turbolinks:load", function() { //Only work at words form page if ($('#word_body').length == 0) { return; } //Dropzone.js var myDropzone; var de = $("#upload-dropzone"); de.dropzone( { url: de.attr('url'), paramName: 'attachment[file]', previewTemplate: '<div style="display:none"></div>', init: function(){ this.on('sending', function(file, xhr, formData){ formData.append('authenticity_token', de.attr('authenticity-token')); }), this.on('addedfile', function(file, json) { console.log(file); simplemde.codemirror.replaceSelection("<!-- Uploading " + file.name + " -->") }), this.on('success', function(file, json) { var code = "![" + json.file.url + "](" + json.file.url + ")" var text = simplemde.value(); simplemde.value(text.replace("<!-- Uploading " + file.name + " -->", code)); }); myDropzone = this; } }); document.onpaste = function(event){ var items = (event.clipboardData || event.originalEvent.clipboardData).items; for (index in items) { var item = items[index]; if (item.kind === 'file') { myDropzone.addFile(item.getAsFile()) } } } });
Enable to upload image by paste
Enable to upload image by paste
JavaScript
mit
toyoshi/wikigo,toyoshi/wikigo,toyoshi/wikigo
javascript
## Code Before: Dropzone.autoDiscover = false; document.addEventListener("turbolinks:load", function() { //Only work at words form page if ($('#word_body').length == 0) { return; } //Dropzone.js var de = $("#upload-dropzone"); de.dropzone( { url: de.attr('url'), paramName: 'attachment[file]', previewTemplate: '<div style="display:none"></div>', init: function(){ this.on('sending', function(file, xhr, formData){ formData.append('authenticity_token', de.attr('authenticity-token')); }), this.on('addedfile', function(file, json) { console.log(file); simplemde.codemirror.replaceSelection("<!-- Uploading " + file.name + " -->") }), this.on('success', function(file, json) { var code = "![" + json.file.url + "](" + json.file.url + ")" var text = simplemde.value(); simplemde.value(text.replace("<!-- Uploading " + file.name + " -->", code)); }); } }); }); ## Instruction: Enable to upload image by paste ## Code After: Dropzone.autoDiscover = false; document.addEventListener("turbolinks:load", function() { //Only work at words form page if ($('#word_body').length == 0) { return; } //Dropzone.js var myDropzone; var de = $("#upload-dropzone"); de.dropzone( { url: de.attr('url'), paramName: 'attachment[file]', previewTemplate: '<div style="display:none"></div>', init: function(){ this.on('sending', function(file, xhr, formData){ formData.append('authenticity_token', de.attr('authenticity-token')); }), this.on('addedfile', function(file, json) { console.log(file); simplemde.codemirror.replaceSelection("<!-- Uploading " + file.name + " -->") }), this.on('success', function(file, json) { var code = "![" + json.file.url + "](" + json.file.url + ")" var text = simplemde.value(); simplemde.value(text.replace("<!-- Uploading " + file.name + " -->", code)); }); myDropzone = this; } }); document.onpaste = function(event){ var items = (event.clipboardData || event.originalEvent.clipboardData).items; for (index in items) { var item = items[index]; if (item.kind === 'file') { myDropzone.addFile(item.getAsFile()) } } } });
Dropzone.autoDiscover = false; document.addEventListener("turbolinks:load", function() { //Only work at words form page if ($('#word_body').length == 0) { return; } //Dropzone.js + var myDropzone; var de = $("#upload-dropzone"); + de.dropzone( { url: de.attr('url'), paramName: 'attachment[file]', previewTemplate: '<div style="display:none"></div>', init: function(){ this.on('sending', function(file, xhr, formData){ formData.append('authenticity_token', de.attr('authenticity-token')); }), this.on('addedfile', function(file, json) { console.log(file); simplemde.codemirror.replaceSelection("<!-- Uploading " + file.name + " -->") }), this.on('success', function(file, json) { var code = "![" + json.file.url + "](" + json.file.url + ")" var text = simplemde.value(); simplemde.value(text.replace("<!-- Uploading " + file.name + " -->", code)); }); + myDropzone = this; } }); + + document.onpaste = function(event){ + var items = (event.clipboardData || event.originalEvent.clipboardData).items; + for (index in items) { + var item = items[index]; + if (item.kind === 'file') { + myDropzone.addFile(item.getAsFile()) + } + } + } });
13
0.448276
13
0
06b72eee4537b7757a43e23752a0af41200c62cf
README.md
README.md
[![](https://images.microbadger.com/badges/image/jgeusebroek/duplicity-duply.svg)](https://microbadger.com/images/jgeusebroek/duplicity-duply "Get your own image badge on microbadger.com")
[![](https://images.microbadger.com/badges/image/jgeusebroek/duplicity-duply:mariadb.svg)](https://microbadger.com/images/jgeusebroek/duplicity-duply:mariadb "Get your own image badge on microbadger.com")
Update microbadge for mariadb tag
Update microbadge for mariadb tag
Markdown
mit
jgeusebroek/docker-duplicity-duply
markdown
## Code Before: [![](https://images.microbadger.com/badges/image/jgeusebroek/duplicity-duply.svg)](https://microbadger.com/images/jgeusebroek/duplicity-duply "Get your own image badge on microbadger.com") ## Instruction: Update microbadge for mariadb tag ## Code After: [![](https://images.microbadger.com/badges/image/jgeusebroek/duplicity-duply:mariadb.svg)](https://microbadger.com/images/jgeusebroek/duplicity-duply:mariadb "Get your own image badge on microbadger.com")
- [![](https://images.microbadger.com/badges/image/jgeusebroek/duplicity-duply.svg)](https://microbadger.com/images/jgeusebroek/duplicity-duply "Get your own image badge on microbadger.com") + [![](https://images.microbadger.com/badges/image/jgeusebroek/duplicity-duply:mariadb.svg)](https://microbadger.com/images/jgeusebroek/duplicity-duply:mariadb "Get your own image badge on microbadger.com") ? ++++++++ ++++++++
2
2
1
1
e59d1354e8c2983aba64dbf3dd83f03d3845ceb9
activerecord/test/fixtures/readers.yml
activerecord/test/fixtures/readers.yml
michael_welcome: post_id: 1 person_id: 1 first_post_id: 2 michael_authorless: post_id: 3 person_id: 1 first_post_id: 3
michael_welcome: post_id: 1 person: michael first_post_id: 2 michael_authorless: post_id: 3 person: michael first_post_id: 3
Fix random test failure of test_create_resets_cached_counters
Fix random test failure of test_create_resets_cached_counters - In earlier commit, I removed setting id manually for readers fixtures but that did not fix the randomly failing test_create_resets_cached_counters from has_many_associations tests. - Because the problem was with the `person_id` of the readers. As it is set to 1 in fixtures, if a post gets created with id 1 then that post automatically has 2 readers. - Fixed by removing the person_id.
YAML
mit
illacceptanything/illacceptanything,joonyou/rails,schuetzm/rails,Erol/rails,printercu/rails,Erol/rails,kirs/rails-1,yasslab/railsguides.jp,Vasfed/rails,Sen-Zhang/rails,illacceptanything/illacceptanything,mathieujobin/reduced-rails-for-travis,baerjam/rails,assain/rails,BlakeWilliams/rails,Stellenticket/rails,shioyama/rails,yalab/rails,yasslab/railsguides.jp,repinel/rails,mechanicles/rails,tjschuck/rails,Edouard-chin/rails,odedniv/rails,joonyou/rails,kmcphillips/rails,Edouard-chin/rails,jeremy/rails,alecspopa/rails,esparta/rails,yahonda/rails,notapatch/rails,rails/rails,illacceptanything/illacceptanything,pvalena/rails,baerjam/rails,yhirano55/rails,tjschuck/rails,betesh/rails,deraru/rails,kaspth/rails,palkan/rails,illacceptanything/illacceptanything,lcreid/rails,Envek/rails,bogdanvlviv/rails,tgxworld/rails,kamipo/rails,gauravtiwari/rails,flanger001/rails,felipecvo/rails,notapatch/rails,kmayer/rails,fabianoleittes/rails,ledestin/rails,esparta/rails,palkan/rails,tgxworld/rails,Edouard-chin/rails,illacceptanything/illacceptanything,flanger001/rails,deraru/rails,baerjam/rails,utilum/rails,Envek/rails,esparta/rails,MSP-Greg/rails,mohitnatoo/rails,illacceptanything/illacceptanything,yhirano55/rails,joonyou/rails,tjschuck/rails,Envek/rails,georgeclaghorn/rails,travisofthenorth/rails,palkan/rails,untidy-hair/rails,shioyama/rails,tgxworld/rails,shioyama/rails,bogdanvlviv/rails,gfvcastro/rails,Sen-Zhang/rails,felipecvo/rails,iainbeeston/rails,palkan/rails,starknx/rails,MSP-Greg/rails,jeremy/rails,printercu/rails,iainbeeston/rails,mohitnatoo/rails,utilum/rails,aditya-kapoor/rails,iainbeeston/rails,rails/rails,Edouard-chin/rails,deraru/rails,kddeisz/rails,prathamesh-sonpatki/rails,MSP-Greg/rails,yawboakye/rails,fabianoleittes/rails,EmmaB/rails-1,Stellenticket/rails,yalab/rails,kamipo/rails,kirs/rails-1,brchristian/rails,aditya-kapoor/rails,arunagw/rails,kddeisz/rails,deraru/rails,illacceptanything/illacceptanything,brchristian/rails,fabianoleittes/rails,arunagw/rails,georgeclaghorn/rails,schuetzm/rails,tjschuck/rails,Vasfed/rails,kddeisz/rails,yalab/rails,illacceptanything/illacceptanything,kaspth/rails,Sen-Zhang/rails,gcourtemanche/rails,mathieujobin/reduced-rails-for-travis,yasslab/railsguides.jp,eileencodes/rails,Erol/rails,prathamesh-sonpatki/rails,gauravtiwari/rails,mechanicles/rails,vipulnsward/rails,bogdanvlviv/rails,kmcphillips/rails,iainbeeston/rails,georgeclaghorn/rails,kmayer/rails,pvalena/rails,gcourtemanche/rails,mohitnatoo/rails,illacceptanything/illacceptanything,tgxworld/rails,mohitnatoo/rails,esparta/rails,alecspopa/rails,rafaelfranca/omg-rails,lcreid/rails,rafaelfranca/omg-rails,lcreid/rails,printercu/rails,untidy-hair/rails,jeremy/rails,felipecvo/rails,aditya-kapoor/rails,yawboakye/rails,untidy-hair/rails,BlakeWilliams/rails,gfvcastro/rails,shioyama/rails,illacceptanything/illacceptanything,illacceptanything/illacceptanything,yawboakye/rails,yalab/rails,vipulnsward/rails,georgeclaghorn/rails,odedniv/rails,flanger001/rails,rafaelfranca/omg-rails,utilum/rails,eileencodes/rails,assain/rails,notapatch/rails,kmcphillips/rails,kamipo/rails,yawboakye/rails,assain/rails,prathamesh-sonpatki/rails,yasslab/railsguides.jp,rails/rails,BlakeWilliams/rails,vipulnsward/rails,yahonda/rails,travisofthenorth/rails,Vasfed/rails,aditya-kapoor/rails,schuetzm/rails,betesh/rails,repinel/rails,gcourtemanche/rails,gfvcastro/rails,kmayer/rails,starknx/rails,rails/rails,betesh/rails,arunagw/rails,kddeisz/rails,pvalena/rails,arunagw/rails,travisofthenorth/rails,schuetzm/rails,mechanicles/rails,ledestin/rails,kmcph
illips/rails,notapatch/rails,brchristian/rails,printercu/rails,fabianoleittes/rails,mathieujobin/reduced-rails-for-travis,mechanicles/rails,untidy-hair/rails,gfvcastro/rails,flanger001/rails,illacceptanything/illacceptanything,Stellenticket/rails,yahonda/rails,betesh/rails,Stellenticket/rails,kaspth/rails,yhirano55/rails,yhirano55/rails,joonyou/rails,prathamesh-sonpatki/rails,lcreid/rails,travisofthenorth/rails,gauravtiwari/rails,illacceptanything/illacceptanything,EmmaB/rails-1,yahonda/rails,eileencodes/rails,repinel/rails,ledestin/rails,Erol/rails,Envek/rails,Vasfed/rails,pvalena/rails,eileencodes/rails,utilum/rails,EmmaB/rails-1,alecspopa/rails,odedniv/rails,bogdanvlviv/rails,jeremy/rails,kirs/rails-1,assain/rails,illacceptanything/illacceptanything,BlakeWilliams/rails,repinel/rails,illacceptanything/illacceptanything,vipulnsward/rails,baerjam/rails,MSP-Greg/rails,starknx/rails
yaml
## Code Before:
michael_welcome:
  post_id: 1
  person_id: 1
  first_post_id: 2

michael_authorless:
  post_id: 3
  person_id: 1
  first_post_id: 3
## Instruction:
Fix random test failure of test_create_resets_cached_counters

- In earlier commit, I removed setting id manually for readers fixtures but that did not fix the randomly failing test_create_resets_cached_counters from has_many_associations tests.
- Because the problem was with the `person_id` of the readers. As it is set to 1 in fixtures, if a post gets created with id 1 then that post automatically has 2 readers.
- Fixed by removing the person_id.
## Code After:
michael_welcome:
  post_id: 1
  person: michael
  first_post_id: 2

michael_authorless:
  post_id: 3
  person: michael
  first_post_id: 3
  michael_welcome:
    post_id: 1
-   person_id: 1
+   person: michael
    first_post_id: 2

  michael_authorless:
    post_id: 3
-   person_id: 1
+   person: michael
    first_post_id: 3
4
0.444444
2
2
6310045542516b110329c9fe70825ef1d2eeaf8c
api_classes/dsl_proxy.rb
api_classes/dsl_proxy.rb
require "api_classes/api_dsl" class Proxy < ZabbixAPI_Base end Proxy.get Proxy.create Proxy.delete
require "api_classes/api_dsl" class Proxy < ZabbixAPI_Base end Proxy.get Proxy.create Proxy.delete Proxy.update
Add method "update" tp proxy
Add method "update" tp proxy
Ruby
lgpl-2.1
red-tux/zbxapi
ruby
## Code Before:
require "api_classes/api_dsl"

class Proxy < ZabbixAPI_Base
end

Proxy.get
Proxy.create
Proxy.delete
## Instruction:
Add method "update" tp proxy
## Code After:
require "api_classes/api_dsl"

class Proxy < ZabbixAPI_Base
end

Proxy.get
Proxy.create
Proxy.delete
Proxy.update
require "api_classes/api_dsl" class Proxy < ZabbixAPI_Base end Proxy.get Proxy.create Proxy.delete + Proxy.update
1
0.111111
1
0
26544cc1840f52356a9e4107eaff63f401c5b848
test/factories/unpublishings.rb
test/factories/unpublishings.rb
FactoryGirl.define do
  factory :unpublishing do
    unpublishing_reason_id UnpublishingReason::PublishedInError.id
    edition { create(:published_case_study, state: 'draft', first_published_at: 2.days.ago) }

    after(:build) do |unpublishing|
      unpublishing.document_type = unpublishing.edition.class.name
      unpublishing.slug = unpublishing.edition.slug
    end
  end
end
FactoryGirl.define do
  factory :unpublishing do
    unpublishing_reason_id UnpublishingReason::PublishedInError.id
    edition { create(:published_case_study, state: 'draft', first_published_at: 2.days.ago) }

    after(:build) do |unpublishing|
      unpublishing.document_type = unpublishing.edition.class.name
      unpublishing.slug = unpublishing.edition.slug
    end
  end

  factory :redirect_unpublishing, parent: :unpublishing do
    redirect true
    alternative_url (Whitehall.public_root + '/government/another/page')
  end

  factory :consolidated_unpublishing, parent: :unpublishing do
    unpublishing_reason_id UnpublishingReason::Consolidated.id
    alternative_url (Whitehall.public_root + '/government/another/page')
  end
end
Add factories for different types of unpublishing
Add factories for different types of unpublishing

This makes it a bit clearer what the main variations of an unpublishing are.
Ruby
mit
robinwhittleton/whitehall,hotvulcan/whitehall,robinwhittleton/whitehall,YOTOV-LIMITED/whitehall,YOTOV-LIMITED/whitehall,alphagov/whitehall,hotvulcan/whitehall,askl56/whitehall,hotvulcan/whitehall,hotvulcan/whitehall,ggoral/whitehall,ggoral/whitehall,alphagov/whitehall,ggoral/whitehall,askl56/whitehall,YOTOV-LIMITED/whitehall,askl56/whitehall,alphagov/whitehall,alphagov/whitehall,robinwhittleton/whitehall,ggoral/whitehall,askl56/whitehall,YOTOV-LIMITED/whitehall,robinwhittleton/whitehall
ruby
## Code Before:
FactoryGirl.define do
  factory :unpublishing do
    unpublishing_reason_id UnpublishingReason::PublishedInError.id
    edition { create(:published_case_study, state: 'draft', first_published_at: 2.days.ago) }

    after(:build) do |unpublishing|
      unpublishing.document_type = unpublishing.edition.class.name
      unpublishing.slug = unpublishing.edition.slug
    end
  end
end
## Instruction:
Add factories for different types of unpublishing

This makes it a bit clearer what the main variations of an unpublishing are.
## Code After:
FactoryGirl.define do
  factory :unpublishing do
    unpublishing_reason_id UnpublishingReason::PublishedInError.id
    edition { create(:published_case_study, state: 'draft', first_published_at: 2.days.ago) }

    after(:build) do |unpublishing|
      unpublishing.document_type = unpublishing.edition.class.name
      unpublishing.slug = unpublishing.edition.slug
    end
  end

  factory :redirect_unpublishing, parent: :unpublishing do
    redirect true
    alternative_url (Whitehall.public_root + '/government/another/page')
  end

  factory :consolidated_unpublishing, parent: :unpublishing do
    unpublishing_reason_id UnpublishingReason::Consolidated.id
    alternative_url (Whitehall.public_root + '/government/another/page')
  end
end
  FactoryGirl.define do
    factory :unpublishing do
      unpublishing_reason_id UnpublishingReason::PublishedInError.id
      edition { create(:published_case_study, state: 'draft', first_published_at: 2.days.ago) }

      after(:build) do |unpublishing|
        unpublishing.document_type = unpublishing.edition.class.name
        unpublishing.slug = unpublishing.edition.slug
      end
    end
+
+   factory :redirect_unpublishing, parent: :unpublishing do
+     redirect true
+     alternative_url (Whitehall.public_root + '/government/another/page')
+   end
+
+   factory :consolidated_unpublishing, parent: :unpublishing do
+     unpublishing_reason_id UnpublishingReason::Consolidated.id
+     alternative_url (Whitehall.public_root + '/government/another/page')
+   end
  end
10
0.909091
10
0
aee8c70dc969710a544394cda98f816d40909270
code/forms/QuantityField.php
code/forms/QuantityField.php
<?php /** * Text input field with validation for numeric values. * * @package forms * @subpackage fields-formattedinput */ class QuantityField extends NumericField { public function Type() { return 'quantity numeric text'; } /** PHP Validation **/ public function validate($validator) { if ($this->value && !is_int(floatval(trim($this->value)))) { $validator->validationError( $this->name, _t( 'NumericField.VALIDATION', "'{value}' is not a valid number, only whole numbers can be accepted for this field", array('value' => $this->value) ), "validation" ); return false; } elseif (!$this->value) { $validator->validationError( $this->name, sprintf(_t('Form.FIELDISREQUIRED', '%s is required'), $this->title), "validation" ); return false; } else { return true; } } public function dataValue() { return (is_numeric($this->value)) ? $this->value : 0; } }
<?php /** * Text input field with validation for numeric values. * * @package forms * @subpackage fields-formattedinput */ class QuantityField extends NumericField { public function Type() { return 'quantity numeric text'; } /** PHP Validation **/ public function validate($validator) { // First check if value is numeric if ($this->value && $this->isNumeric()) { // Convert to a number to check $value = $this->value + 0; if(is_int($value)) { return true; } } $validator->validationError( $this->name, _t( 'Checkout.VALIDATION', '{value} is not a valid number, only whole numbers can be accepted for this field', array('value' => $this->value) ), "validation" ); return false; } public function dataValue() { return (is_numeric($this->value)) ? $this->value : 0; } }
Fix validation issues with quantity field
Fix validation issues with quantity field
PHP
bsd-3-clause
i-lateral/silverstripe-checkout
php
## Code Before: <?php /** * Text input field with validation for numeric values. * * @package forms * @subpackage fields-formattedinput */ class QuantityField extends NumericField { public function Type() { return 'quantity numeric text'; } /** PHP Validation **/ public function validate($validator) { if ($this->value && !is_int(floatval(trim($this->value)))) { $validator->validationError( $this->name, _t( 'NumericField.VALIDATION', "'{value}' is not a valid number, only whole numbers can be accepted for this field", array('value' => $this->value) ), "validation" ); return false; } elseif (!$this->value) { $validator->validationError( $this->name, sprintf(_t('Form.FIELDISREQUIRED', '%s is required'), $this->title), "validation" ); return false; } else { return true; } } public function dataValue() { return (is_numeric($this->value)) ? $this->value : 0; } } ## Instruction: Fix validation issues with quantity field ## Code After: <?php /** * Text input field with validation for numeric values. * * @package forms * @subpackage fields-formattedinput */ class QuantityField extends NumericField { public function Type() { return 'quantity numeric text'; } /** PHP Validation **/ public function validate($validator) { // First check if value is numeric if ($this->value && $this->isNumeric()) { // Convert to a number to check $value = $this->value + 0; if(is_int($value)) { return true; } } $validator->validationError( $this->name, _t( 'Checkout.VALIDATION', '{value} is not a valid number, only whole numbers can be accepted for this field', array('value' => $this->value) ), "validation" ); return false; } public function dataValue() { return (is_numeric($this->value)) ? $this->value : 0; } }
<?php /** * Text input field with validation for numeric values. * * @package forms * @subpackage fields-formattedinput */ class QuantityField extends NumericField { public function Type() { return 'quantity numeric text'; } /** PHP Validation **/ public function validate($validator) { + // First check if value is numeric + if ($this->value && $this->isNumeric()) { + // Convert to a number to check + $value = $this->value + 0; - if ($this->value && !is_int(floatval(trim($this->value)))) { - $validator->validationError( - $this->name, - _t( - 'NumericField.VALIDATION', "'{value}' is not a valid number, only whole numbers can be accepted for this field", - array('value' => $this->value) - ), - "validation" - ); ? -- + + if(is_int($value)) { - return false; - } elseif (!$this->value) { - $validator->validationError( - $this->name, - sprintf(_t('Form.FIELDISREQUIRED', '%s is required'), $this->title), - "validation" - ); - return false; - } else { - return true; + return true; ? ++++ + } } + + $validator->validationError( + $this->name, + _t( + 'Checkout.VALIDATION', '{value} is not a valid number, only whole numbers can be accepted for this field', + array('value' => $this->value) + ), + "validation" + ); + return false; } public function dataValue() { return (is_numeric($this->value)) ? $this->value : 0; } }
37
0.822222
18
19
8037e90f0a8c00c09317bb52e0abcd35a32373ab
README.md
README.md
[![Build Status](https://travis-ci.org/luser/read-process-memory.svg?branch=master)](https://travis-ci.org/luser/read-process-memory) [![Build status](https://ci.appveyor.com/api/projects/status/9x0yse13l060659f/branch/master?svg=true)](https://ci.appveyor.com/project/luser/read-process-memory/branch/master) [![crates.io](https://img.shields.io/crates/v/read-process-memory.svg)](https://crates.io/crates/read-process-memory) A crate to read memory from another process. Code originally taken from Julia Evans' excellent [ruby-stacktrace](https://github.com/jvns/ruby-stacktrace/) project. # Example ```rust,no_run extern crate read_process_memory; use std::io; use read_process_memory::{Pid, TryIntoProcessHandle, CopyAddress, copy_address}; // Try to read `size` bytes at `address` from the process `pid`. fn read_some_memory(pid: Pid, address: usize, size: usize) -> io::Result<()> { let handle = try!(pid.try_into_process_handle()); let _bytes = try!(copy_address(address, size, &handle)); println!("Read {} bytes", size); Ok(()) } ```
[![Build Status](https://travis-ci.org/luser/read-process-memory.svg?branch=master)](https://travis-ci.org/luser/read-process-memory) [![Build status](https://ci.appveyor.com/api/projects/status/9x0yse13l060659f/branch/master?svg=true)](https://ci.appveyor.com/project/luser/read-process-memory/branch/master) [![crates.io](https://img.shields.io/crates/v/read-process-memory.svg)](https://crates.io/crates/read-process-memory) [![](https://docs.rs/read-process-memory/badge.svg)](https://docs.rs/read-process-memory) A crate to read memory from another process. Code originally taken from Julia Evans' excellent [ruby-stacktrace](https://github.com/jvns/ruby-stacktrace/) project. # Example ```rust,no_run extern crate read_process_memory; use std::io; use read_process_memory::{Pid, TryIntoProcessHandle, CopyAddress, copy_address}; // Try to read `size` bytes at `address` from the process `pid`. fn read_some_memory(pid: Pid, address: usize, size: usize) -> io::Result<()> { let handle = try!(pid.try_into_process_handle()); let _bytes = try!(copy_address(address, size, &handle)); println!("Read {} bytes", size); Ok(()) } ``` # Documentation [https://docs.rs/read-process-memory](https://docs.rs/read-process-memory)
Add a link to documentation on docs.rs and badge
Add a link to documentation on docs.rs and badge
Markdown
mit
luser/read-process-memory
markdown
## Code Before: [![Build Status](https://travis-ci.org/luser/read-process-memory.svg?branch=master)](https://travis-ci.org/luser/read-process-memory) [![Build status](https://ci.appveyor.com/api/projects/status/9x0yse13l060659f/branch/master?svg=true)](https://ci.appveyor.com/project/luser/read-process-memory/branch/master) [![crates.io](https://img.shields.io/crates/v/read-process-memory.svg)](https://crates.io/crates/read-process-memory) A crate to read memory from another process. Code originally taken from Julia Evans' excellent [ruby-stacktrace](https://github.com/jvns/ruby-stacktrace/) project. # Example ```rust,no_run extern crate read_process_memory; use std::io; use read_process_memory::{Pid, TryIntoProcessHandle, CopyAddress, copy_address}; // Try to read `size` bytes at `address` from the process `pid`. fn read_some_memory(pid: Pid, address: usize, size: usize) -> io::Result<()> { let handle = try!(pid.try_into_process_handle()); let _bytes = try!(copy_address(address, size, &handle)); println!("Read {} bytes", size); Ok(()) } ``` ## Instruction: Add a link to documentation on docs.rs and badge ## Code After: [![Build Status](https://travis-ci.org/luser/read-process-memory.svg?branch=master)](https://travis-ci.org/luser/read-process-memory) [![Build status](https://ci.appveyor.com/api/projects/status/9x0yse13l060659f/branch/master?svg=true)](https://ci.appveyor.com/project/luser/read-process-memory/branch/master) [![crates.io](https://img.shields.io/crates/v/read-process-memory.svg)](https://crates.io/crates/read-process-memory) [![](https://docs.rs/read-process-memory/badge.svg)](https://docs.rs/read-process-memory) A crate to read memory from another process. Code originally taken from Julia Evans' excellent [ruby-stacktrace](https://github.com/jvns/ruby-stacktrace/) project. # Example ```rust,no_run extern crate read_process_memory; use std::io; use read_process_memory::{Pid, TryIntoProcessHandle, CopyAddress, copy_address}; // Try to read `size` bytes at `address` from the process `pid`. fn read_some_memory(pid: Pid, address: usize, size: usize) -> io::Result<()> { let handle = try!(pid.try_into_process_handle()); let _bytes = try!(copy_address(address, size, &handle)); println!("Read {} bytes", size); Ok(()) } ``` # Documentation [https://docs.rs/read-process-memory](https://docs.rs/read-process-memory)
- [![Build Status](https://travis-ci.org/luser/read-process-memory.svg?branch=master)](https://travis-ci.org/luser/read-process-memory) [![Build status](https://ci.appveyor.com/api/projects/status/9x0yse13l060659f/branch/master?svg=true)](https://ci.appveyor.com/project/luser/read-process-memory/branch/master) [![crates.io](https://img.shields.io/crates/v/read-process-memory.svg)](https://crates.io/crates/read-process-memory) + [![Build Status](https://travis-ci.org/luser/read-process-memory.svg?branch=master)](https://travis-ci.org/luser/read-process-memory) [![Build status](https://ci.appveyor.com/api/projects/status/9x0yse13l060659f/branch/master?svg=true)](https://ci.appveyor.com/project/luser/read-process-memory/branch/master) [![crates.io](https://img.shields.io/crates/v/read-process-memory.svg)](https://crates.io/crates/read-process-memory) [![](https://docs.rs/read-process-memory/badge.svg)](https://docs.rs/read-process-memory) ? ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ A crate to read memory from another process. Code originally taken from Julia Evans' excellent [ruby-stacktrace](https://github.com/jvns/ruby-stacktrace/) project. # Example ```rust,no_run extern crate read_process_memory; use std::io; use read_process_memory::{Pid, TryIntoProcessHandle, CopyAddress, copy_address}; // Try to read `size` bytes at `address` from the process `pid`. fn read_some_memory(pid: Pid, address: usize, size: usize) -> io::Result<()> { let handle = try!(pid.try_into_process_handle()); let _bytes = try!(copy_address(address, size, &handle)); println!("Read {} bytes", size); Ok(()) } ``` + + # Documentation + + [https://docs.rs/read-process-memory](https://docs.rs/read-process-memory)
6
0.3
5
1
0c55074a323bf3e5fdade954ddcdd41f1a50ad02
activerecord-jdbcpostgresql-adapter/activerecord-jdbcpostgresql-adapter.gemspec
activerecord-jdbcpostgresql-adapter/activerecord-jdbcpostgresql-adapter.gemspec
arjdbc_lib = File.expand_path("../../lib", __FILE__) $:.push arjdbc_lib unless $:.include?(arjdbc_lib) require 'arjdbc/version' version = ArJdbc::Version::VERSION Gem::Specification.new do |s| s.name = "activerecord-jdbcpostgresql-adapter" s.version = version s.platform = Gem::Platform::RUBY s.authors = ["Nick Sieger, Ola Bini and JRuby contributors"] s.description = %q{Install this gem to use Postgres with JRuby on Rails.} s.email = %q{[email protected], [email protected]} s.files = [ "Rakefile", "README.txt", "LICENSE.txt", "lib/active_record/connection_adapters/jdbcpostgresql_adapter.rb" ] s.homepage = %q{https://github.com/jruby/activerecord-jdbc-adapter} s.require_paths = ["lib"] s.rubyforge_project = %q{jruby-extras} s.summary = %q{Postgres JDBC adapter for JRuby on Rails.} s.add_dependency 'activerecord-jdbc-adapter', "~>#{version}" s.add_dependency 'jdbc-postgres', '~> 9.0.0' end
arjdbc_lib = File.expand_path("../../lib", __FILE__) $:.push arjdbc_lib unless $:.include?(arjdbc_lib) require 'arjdbc/version' version = ArJdbc::Version::VERSION Gem::Specification.new do |s| s.name = "activerecord-jdbcpostgresql-adapter" s.version = version s.platform = Gem::Platform::RUBY s.authors = ["Nick Sieger, Ola Bini and JRuby contributors"] s.description = %q{Install this gem to use Postgres with JRuby on Rails.} s.email = %q{[email protected], [email protected]} s.files = [ "Rakefile", "README.txt", "LICENSE.txt", "lib/active_record/connection_adapters/jdbcpostgresql_adapter.rb" ] s.homepage = %q{https://github.com/jruby/activerecord-jdbc-adapter} s.require_paths = ["lib"] s.rubyforge_project = %q{jruby-extras} s.summary = %q{Postgres JDBC adapter for JRuby on Rails.} s.add_dependency 'activerecord-jdbc-adapter', "~>#{version}" s.add_dependency 'jdbc-postgres', '>= 9.0', '< 9.2' end
Expand postgresql-adapter dependency on jdbc-postgres to include 9.1 series
Expand postgresql-adapter dependency on jdbc-postgres to include 9.1 series
Ruby
bsd-2-clause
keeguon/activerecord-jdbc-adapter,kares/activerecord-jdbc-adapter,jruby/activerecord-jdbc-adapter,keeguon/activerecord-jdbc-adapter,bruceadams/activerecord-jdbc-adapter,bruceadams/activerecord-jdbc-adapter,bruceadams/activerecord-jdbc-adapter,jruby/activerecord-jdbc-adapter,jruby/activerecord-jdbc-adapter,kares/activerecord-jdbc-adapter,keeguon/activerecord-jdbc-adapter,kares/activerecord-jdbc-adapter
ruby
## Code Before: arjdbc_lib = File.expand_path("../../lib", __FILE__) $:.push arjdbc_lib unless $:.include?(arjdbc_lib) require 'arjdbc/version' version = ArJdbc::Version::VERSION Gem::Specification.new do |s| s.name = "activerecord-jdbcpostgresql-adapter" s.version = version s.platform = Gem::Platform::RUBY s.authors = ["Nick Sieger, Ola Bini and JRuby contributors"] s.description = %q{Install this gem to use Postgres with JRuby on Rails.} s.email = %q{[email protected], [email protected]} s.files = [ "Rakefile", "README.txt", "LICENSE.txt", "lib/active_record/connection_adapters/jdbcpostgresql_adapter.rb" ] s.homepage = %q{https://github.com/jruby/activerecord-jdbc-adapter} s.require_paths = ["lib"] s.rubyforge_project = %q{jruby-extras} s.summary = %q{Postgres JDBC adapter for JRuby on Rails.} s.add_dependency 'activerecord-jdbc-adapter', "~>#{version}" s.add_dependency 'jdbc-postgres', '~> 9.0.0' end ## Instruction: Expand postgresql-adapter dependency on jdbc-postgres to include 9.1 series ## Code After: arjdbc_lib = File.expand_path("../../lib", __FILE__) $:.push arjdbc_lib unless $:.include?(arjdbc_lib) require 'arjdbc/version' version = ArJdbc::Version::VERSION Gem::Specification.new do |s| s.name = "activerecord-jdbcpostgresql-adapter" s.version = version s.platform = Gem::Platform::RUBY s.authors = ["Nick Sieger, Ola Bini and JRuby contributors"] s.description = %q{Install this gem to use Postgres with JRuby on Rails.} s.email = %q{[email protected], [email protected]} s.files = [ "Rakefile", "README.txt", "LICENSE.txt", "lib/active_record/connection_adapters/jdbcpostgresql_adapter.rb" ] s.homepage = %q{https://github.com/jruby/activerecord-jdbc-adapter} s.require_paths = ["lib"] s.rubyforge_project = %q{jruby-extras} s.summary = %q{Postgres JDBC adapter for JRuby on Rails.} s.add_dependency 'activerecord-jdbc-adapter', "~>#{version}" s.add_dependency 'jdbc-postgres', '>= 9.0', '< 9.2' end
arjdbc_lib = File.expand_path("../../lib", __FILE__) $:.push arjdbc_lib unless $:.include?(arjdbc_lib) require 'arjdbc/version' version = ArJdbc::Version::VERSION Gem::Specification.new do |s| s.name = "activerecord-jdbcpostgresql-adapter" s.version = version s.platform = Gem::Platform::RUBY s.authors = ["Nick Sieger, Ola Bini and JRuby contributors"] s.description = %q{Install this gem to use Postgres with JRuby on Rails.} s.email = %q{[email protected], [email protected]} s.files = [ "Rakefile", "README.txt", "LICENSE.txt", "lib/active_record/connection_adapters/jdbcpostgresql_adapter.rb" ] s.homepage = %q{https://github.com/jruby/activerecord-jdbc-adapter} s.require_paths = ["lib"] s.rubyforge_project = %q{jruby-extras} s.summary = %q{Postgres JDBC adapter for JRuby on Rails.} s.add_dependency 'activerecord-jdbc-adapter', "~>#{version}" - s.add_dependency 'jdbc-postgres', '~> 9.0.0' ? - ^ + s.add_dependency 'jdbc-postgres', '>= 9.0', '< 9.2' ? + +++++++ ^ end
2
0.076923
1
1
34f6b5da6675da9c316c5d6bf30b33a2e35906e3
surveyPassthru.php
surveyPassthru.php
<?php namespace Vanderbilt\EmailTriggerExternalModule; use ExternalModules\AbstractExternalModule; use ExternalModules\ExternalModules; require_once 'EmailTriggerExternalModule.php'; $passthruData = $module->resetSurveyAndGetCodes($_REQUEST['pid'], $_REQUEST['record'], $_REQUEST['instrument'], $_REQUEST['event']); $returnCode = $passthruData['return_code']; $hash = $passthruData['hash']; if($returnCode == $_REQUEST['returnCode']){ $surveyLink = APP_PATH_SURVEY_FULL."?s=".$hash; $link = ($_REQUEST['returnCode'] == "NULL")? "":"<input type='hidden' value='".$returnCode."' name='__code'/>"; ?> <html> <body> <form id='passthruform' name='passthruform' action='<?=$surveyLink?>' method='post' enctype='multipart/form-data'>      <?=$link?>     <input type='hidden' value='1' name='__prefill' /> </form>     <script type='text/javascript'> window.onload = function(){ document.passthruform.submit(); } </script> </body> </html> <?php } else { echo "Error: Incorrect return code specified"; }?>
<?php namespace Vanderbilt\EmailTriggerExternalModule; use ExternalModules\AbstractExternalModule; use ExternalModules\ExternalModules; require_once 'EmailTriggerExternalModule.php'; $passthruData = $module->resetSurveyAndGetCodes($_REQUEST['pid'], $_REQUEST['record'], $_REQUEST['instrument'], $_REQUEST['event']); $returnCode = $passthruData['return_code']; $hash = $passthruData['hash']; if($returnCode == $_REQUEST['returnCode']){ $surveyLink = APP_PATH_SURVEY_FULL."?s=".$hash; $link = ($_REQUEST['returnCode'] == "NULL")? "":"<input type='hidden' value='".$returnCode."' name='__code'/>"; ?> <html> <body> <form id='passthruform' name='passthruform' action='<?=$surveyLink?>' method='post' enctype='multipart/form-data'>      <?=$link?>     <input type='hidden' value='1' name='__prefill' /> </form>     <script type='text/javascript'> window.onload = function(){ document.passthruform.submit(); } </script> </body> </html> <?php } else { echo "Error: Incorrect return code specified.<br /><br />This error can also be caused by using an outdated version of the External Modules framework with a longitudinal study project. You may be able to correct this error by updating to a version of REDCap above 8.7.0"; }?>
Expand the error message to recommend updating REDCap versions
Expand the error message to recommend updating REDCap versions
PHP
mit
vanderbilt-redcap/email-alerts-module,vanderbilt-redcap/email-alerts-module
php
## Code Before: <?php namespace Vanderbilt\EmailTriggerExternalModule; use ExternalModules\AbstractExternalModule; use ExternalModules\ExternalModules; require_once 'EmailTriggerExternalModule.php'; $passthruData = $module->resetSurveyAndGetCodes($_REQUEST['pid'], $_REQUEST['record'], $_REQUEST['instrument'], $_REQUEST['event']); $returnCode = $passthruData['return_code']; $hash = $passthruData['hash']; if($returnCode == $_REQUEST['returnCode']){ $surveyLink = APP_PATH_SURVEY_FULL."?s=".$hash; $link = ($_REQUEST['returnCode'] == "NULL")? "":"<input type='hidden' value='".$returnCode."' name='__code'/>"; ?> <html> <body> <form id='passthruform' name='passthruform' action='<?=$surveyLink?>' method='post' enctype='multipart/form-data'>      <?=$link?>     <input type='hidden' value='1' name='__prefill' /> </form>     <script type='text/javascript'> window.onload = function(){ document.passthruform.submit(); } </script> </body> </html> <?php } else { echo "Error: Incorrect return code specified"; }?> ## Instruction: Expand the error message to recommend updating REDCap versions ## Code After: <?php namespace Vanderbilt\EmailTriggerExternalModule; use ExternalModules\AbstractExternalModule; use ExternalModules\ExternalModules; require_once 'EmailTriggerExternalModule.php'; $passthruData = $module->resetSurveyAndGetCodes($_REQUEST['pid'], $_REQUEST['record'], $_REQUEST['instrument'], $_REQUEST['event']); $returnCode = $passthruData['return_code']; $hash = $passthruData['hash']; if($returnCode == $_REQUEST['returnCode']){ $surveyLink = APP_PATH_SURVEY_FULL."?s=".$hash; $link = ($_REQUEST['returnCode'] == "NULL")? "":"<input type='hidden' value='".$returnCode."' name='__code'/>"; ?> <html> <body> <form id='passthruform' name='passthruform' action='<?=$surveyLink?>' method='post' enctype='multipart/form-data'>      <?=$link?>     <input type='hidden' value='1' name='__prefill' /> </form>     <script type='text/javascript'> window.onload = function(){ document.passthruform.submit(); } </script> </body> </html> <?php } else { echo "Error: Incorrect return code specified.<br /><br />This error can also be caused by using an outdated version of the External Modules framework with a longitudinal study project. You may be able to correct this error by updating to a version of REDCap above 8.7.0"; }?>
<?php namespace Vanderbilt\EmailTriggerExternalModule; use ExternalModules\AbstractExternalModule; use ExternalModules\ExternalModules; require_once 'EmailTriggerExternalModule.php'; $passthruData = $module->resetSurveyAndGetCodes($_REQUEST['pid'], $_REQUEST['record'], $_REQUEST['instrument'], $_REQUEST['event']); $returnCode = $passthruData['return_code']; $hash = $passthruData['hash']; if($returnCode == $_REQUEST['returnCode']){ $surveyLink = APP_PATH_SURVEY_FULL."?s=".$hash; $link = ($_REQUEST['returnCode'] == "NULL")? "":"<input type='hidden' value='".$returnCode."' name='__code'/>"; ?> <html> <body> <form id='passthruform' name='passthruform' action='<?=$surveyLink?>' method='post' enctype='multipart/form-data'>      <?=$link?>     <input type='hidden' value='1' name='__prefill' /> </form>     <script type='text/javascript'> window.onload = function(){ document.passthruform.submit(); } </script> </body> </html> <?php } else { - echo "Error: Incorrect return code specified"; + echo "Error: Incorrect return code specified.<br /><br />This error can also be caused by using an outdated version of the External Modules framework with a longitudinal study project. You may be able to correct this error by updating to a version of REDCap above 8.7.0"; }?>
2
0.054054
1
1
2dece45476170e24e14903f19f9bf400c10ebf42
djangocms_wow/cms_plugins.py
djangocms_wow/cms_plugins.py
from django.utils.translation import ugettext_lazy as _ from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from . import models class AnimationPlugin(CMSPluginBase): model = models.Animation name = _('Animation') render_template = 'djangocms_wow/animation.html' allow_children = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(AnimationPlugin) class WOWAnimationPlugin(CMSPluginBase): model = models.WOWAnimation name = _("Wow Animation") render_template = 'djangocms_wow/wow_animation.html' allow_children = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(WOWAnimationPlugin)
from django.utils.translation import ugettext_lazy as _ from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from . import models class AnimationPlugin(CMSPluginBase): model = models.Animation name = _('Animation') render_template = 'djangocms_wow/animation.html' allow_children = True text_enabled = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(AnimationPlugin) class WOWAnimationPlugin(CMSPluginBase): model = models.WOWAnimation name = _("Wow Animation") render_template = 'djangocms_wow/wow_animation.html' allow_children = True text_enabled = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(WOWAnimationPlugin)
Allow WOW animations to be used in text plugin.
Allow WOW animations to be used in text plugin.
Python
bsd-3-clause
narayanaditya95/djangocms-wow,narayanaditya95/djangocms-wow,narayanaditya95/djangocms-wow
python
## Code Before: from django.utils.translation import ugettext_lazy as _ from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from . import models class AnimationPlugin(CMSPluginBase): model = models.Animation name = _('Animation') render_template = 'djangocms_wow/animation.html' allow_children = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(AnimationPlugin) class WOWAnimationPlugin(CMSPluginBase): model = models.WOWAnimation name = _("Wow Animation") render_template = 'djangocms_wow/wow_animation.html' allow_children = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(WOWAnimationPlugin) ## Instruction: Allow WOW animations to be used in text plugin. ## Code After: from django.utils.translation import ugettext_lazy as _ from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from . import models class AnimationPlugin(CMSPluginBase): model = models.Animation name = _('Animation') render_template = 'djangocms_wow/animation.html' allow_children = True text_enabled = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(AnimationPlugin) class WOWAnimationPlugin(CMSPluginBase): model = models.WOWAnimation name = _("Wow Animation") render_template = 'djangocms_wow/wow_animation.html' allow_children = True text_enabled = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(WOWAnimationPlugin)
from django.utils.translation import ugettext_lazy as _ from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from . import models class AnimationPlugin(CMSPluginBase): model = models.Animation name = _('Animation') render_template = 'djangocms_wow/animation.html' allow_children = True + text_enabled = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(AnimationPlugin) class WOWAnimationPlugin(CMSPluginBase): model = models.WOWAnimation name = _("Wow Animation") render_template = 'djangocms_wow/wow_animation.html' allow_children = True + text_enabled = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(WOWAnimationPlugin)
2
0.055556
2
0
e8b74ba4400614cb064a8d6452a7dd76044daf8d
_posts/2015-11-20-ilios-3-is-here.markdown
_posts/2015-11-20-ilios-3-is-here.markdown
---
layout: post
title: "Ilios 3 is here!"
date: 2015-11-20 11:00:00
categories: ilios release
---
The latest prerelease of the Ilios 3 code is now available for download and testing. Please use the download link to see this release and notes for the core code and API. This, as well as all pending release iterations, will be applied to the public ilios 3 demo site. For access to the demo (if you don't already have it), please contact us at [email protected].

Live API documentation is available here: https://ilios3-demo.ucsf.edu/api/doc.

Interested in the data model updates for the latest Ilios version? You can get the ERD here!
---
layout: post
title: "Ilios 3 is here!"
date: 2015-11-20 11:00:00
categories: ilios release
---
The latest prerelease of the Ilios 3 code is now available for download and testing. Please use the download link to see this release and notes for the core code and API.

This, as well as all pending release iterations, will be applied to the public ilios 3 demo site. For access to the demo (if you don't already have it), please contact us at [[email protected]](mailto:[email protected]).

Live API documentation is available here: [https://ilios3-demo.ucsf.edu/api/doc](https://ilios3-demo.ucsf.edu/api/doc).

Interested in the data model updates for the latest Ilios version? You can get the ERD [here](https://github.com/ilios/ilios/wiki/Database-ERD)!
Fix links in ilios is here post
Fix links in ilios is here post
Markdown
mit
ilios/iliosproject.org,ilios/iliosproject.org,ilios/ilios.github.io,ilios/ilios.github.io,jrjohnson/ilios.github.io,jrjohnson/ilios.github.io
markdown
## Code Before:
---
layout: post
title: "Ilios 3 is here!"
date: 2015-11-20 11:00:00
categories: ilios release
---
The latest prerelease of the Ilios 3 code is now available for download and testing. Please use the download link to see this release and notes for the core code and API. This, as well as all pending release iterations, will be applied to the public ilios 3 demo site. For access to the demo (if you don't already have it), please contact us at [email protected].

Live API documentation is available here: https://ilios3-demo.ucsf.edu/api/doc.

Interested in the data model updates for the latest Ilios version? You can get the ERD here!
## Instruction:
Fix links in ilios is here post
## Code After:
---
layout: post
title: "Ilios 3 is here!"
date: 2015-11-20 11:00:00
categories: ilios release
---
The latest prerelease of the Ilios 3 code is now available for download and testing. Please use the download link to see this release and notes for the core code and API.

This, as well as all pending release iterations, will be applied to the public ilios 3 demo site. For access to the demo (if you don't already have it), please contact us at [[email protected]](mailto:[email protected]).

Live API documentation is available here: [https://ilios3-demo.ucsf.edu/api/doc](https://ilios3-demo.ucsf.edu/api/doc).

Interested in the data model updates for the latest Ilios version? You can get the ERD [here](https://github.com/ilios/ilios/wiki/Database-ERD)!
--- layout: post title: "Ilios 3 is here!" date: 2015-11-20 11:00:00 categories: ilios release --- - The latest prerelease of the Ilios 3 code is now available for download and testing. Please use the download link to see this release and notes for the core code and API. This, as well as all pending release iterations, will be applied to the public ilios 3 demo site. For access to the demo (if you don't already have it), please contact us at [email protected]. + The latest prerelease of the Ilios 3 code is now available for download and testing. Please use the download link to see this release and notes for the core code and API. - Live API documentation is available here: https://ilios3-demo.ucsf.edu/api/doc. + This, as well as all pending release iterations, will be applied to the public ilios 3 demo site. For access to the demo (if you don't already have it), please contact us at [[email protected]](mailto:[email protected]). + Live API documentation is available here: [https://ilios3-demo.ucsf.edu/api/doc](https://ilios3-demo.ucsf.edu/api/doc). + - Interested in the data model updates for the latest Ilios version? You can get the ERD here! + Interested in the data model updates for the latest Ilios version? You can get the ERD [here](https://github.com/ilios/ilios/wiki/Database-ERD)! ? + +++++++++++++++++++++++++++++++++++++++++++++++++++
8
0.727273
5
3
f9e00ac4297e729c2d6c72f57ad7156cce976017
README.md
README.md
[![Build Status](https://travis-ci.org/goinstant/delayd.svg?branch=master)](https://travis-ci.org/goinstant/delayd) [![BSD License](http://img.shields.io/badge/license-BSD-blue.svg)](https://github.com/goinstant/delayd/blob/master/LICENSE) ## Developing `delayd` is built/developed with `go1.2`. I recommend using the excellent [gvm](https://github.com/moovweb/gvm) to install it. To get started: ```bash make deps # install required modules make check # run fmt, vet, lint make test # run unit tests make # build the executable ``` ## Running ```bash ./delayd -c delayd.toml ``` ## License &copy; 2014 GoInstant Inc., a salesforce.com company. Licensed under the BSD 3-clause license. [![GoInstant](http://goinstant.com/static/img/logo.png)](http://goinstant.com)
[![Build Status](https://travis-ci.org/goinstant/delayd.svg?branch=master)](https://travis-ci.org/goinstant/delayd) [![BSD License](http://img.shields.io/badge/license-BSD-blue.svg)](https://github.com/goinstant/delayd/blob/master/LICENSE) ## Developing `delayd` is built/developed with `go1.2`. I recommend using the excellent [gvm](https://github.com/moovweb/gvm) to install it. To get started: ```bash make deps # install required modules make check # run fmt, vet, lint make test # run unit tests make # build the executable ``` ## Running ```bash ./delayd -c delayd.toml ``` ## License &copy; 2014 salesforce.com. Licensed under the BSD 3-clause license. [![GoInstant](http://goinstant.com/static/img/logo.png)](http://goinstant.com)
Fix copyright to be consistent with LICENSE
Fix copyright to be consistent with LICENSE

Signed-off-by: Jeremy Stashewsky <[email protected]>
Markdown
bsd-3-clause
nabeken/delayd,goinstant/delayd
markdown
## Code Before: [![Build Status](https://travis-ci.org/goinstant/delayd.svg?branch=master)](https://travis-ci.org/goinstant/delayd) [![BSD License](http://img.shields.io/badge/license-BSD-blue.svg)](https://github.com/goinstant/delayd/blob/master/LICENSE) ## Developing `delayd` is built/developed with `go1.2`. I recommend using the excellent [gvm](https://github.com/moovweb/gvm) to install it. To get started: ```bash make deps # install required modules make check # run fmt, vet, lint make test # run unit tests make # build the executable ``` ## Running ```bash ./delayd -c delayd.toml ``` ## License &copy; 2014 GoInstant Inc., a salesforce.com company. Licensed under the BSD 3-clause license. [![GoInstant](http://goinstant.com/static/img/logo.png)](http://goinstant.com) ## Instruction: Fix copyright to be consistent with LICENSE Signed-off-by: Jeremy Stashewsky <[email protected]> ## Code After: [![Build Status](https://travis-ci.org/goinstant/delayd.svg?branch=master)](https://travis-ci.org/goinstant/delayd) [![BSD License](http://img.shields.io/badge/license-BSD-blue.svg)](https://github.com/goinstant/delayd/blob/master/LICENSE) ## Developing `delayd` is built/developed with `go1.2`. I recommend using the excellent [gvm](https://github.com/moovweb/gvm) to install it. To get started: ```bash make deps # install required modules make check # run fmt, vet, lint make test # run unit tests make # build the executable ``` ## Running ```bash ./delayd -c delayd.toml ``` ## License &copy; 2014 salesforce.com. Licensed under the BSD 3-clause license. [![GoInstant](http://goinstant.com/static/img/logo.png)](http://goinstant.com)
[![Build Status](https://travis-ci.org/goinstant/delayd.svg?branch=master)](https://travis-ci.org/goinstant/delayd) [![BSD License](http://img.shields.io/badge/license-BSD-blue.svg)](https://github.com/goinstant/delayd/blob/master/LICENSE) ## Developing `delayd` is built/developed with `go1.2`. I recommend using the excellent [gvm](https://github.com/moovweb/gvm) to install it. To get started: ```bash make deps # install required modules make check # run fmt, vet, lint make test # run unit tests make # build the executable ``` ## Running ```bash ./delayd -c delayd.toml ``` ## License + &copy; 2014 salesforce.com. Licensed under the BSD 3-clause license. - &copy; 2014 GoInstant Inc., a salesforce.com company. Licensed under the BSD - 3-clause license. [![GoInstant](http://goinstant.com/static/img/logo.png)](http://goinstant.com)
3
0.107143
1
2
051a0be631d544f7128e368eeccdef912907e31d
.travis.yml
.travis.yml
branches:
  only:
    - master
language: perl6
perl6:
  - latest
branches:
  only:
    - master
language: perl6
perl6:
  - latest
  - 2015.09
Add latest stable Rakudo release for Travis CI
Add latest stable Rakudo release for Travis CI
YAML
artistic-2.0
bbkr/text_markov
yaml
## Code Before:
branches:
  only:
    - master
language: perl6
perl6:
  - latest
## Instruction:
Add latest stable Rakudo release for Travis CI
## Code After:
branches:
  only:
    - master
language: perl6
perl6:
  - latest
  - 2015.09
  branches:
    only:
      - master
  language: perl6
  perl6:
    - latest
+   - 2015.09
1
0.166667
1
0
da8efb34fe00f4c625c6ab7d3cf5651193d972d0
mopidy/backends/__init__.py
mopidy/backends/__init__.py
import logging
import time

from mopidy.exceptions import MpdNotImplemented
from mopidy.models import Playlist

logger = logging.getLogger('backends.base')

class BaseBackend(object):
    current_playlist = None
    library = None
    playback = None
    stored_playlists = None
    uri_handlers = []

class BaseCurrentPlaylistController(object):
    def __init__(self, backend):
        self.backend = backend

class BasePlaybackController(object):
    PAUSED = 'paused'
    PLAYING = 'playing'
    STOPPED = 'stopped'

    def __init__(self, backend):
        self.backend = backend
        self.state = self.STOPPED
        self.current_track = None
        self.playlist_position = None

    def play(self, id=None, position=None):
        raise NotImplementedError

    def next(self):
        raise NotImplementedError
import logging
import time

from mopidy.exceptions import MpdNotImplemented
from mopidy.models import Playlist

logger = logging.getLogger('backends.base')

class BaseBackend(object):
    current_playlist = None
    library = None
    playback = None
    stored_playlists = None
    uri_handlers = []

class BaseCurrentPlaylistController(object):
    def __init__(self, backend):
        self.backend = backend

    def add(self, track, at_position=None):
        raise NotImplementedError

class BasePlaybackController(object):
    PAUSED = 'paused'
    PLAYING = 'playing'
    STOPPED = 'stopped'

    def __init__(self, backend):
        self.backend = backend
        self.state = self.STOPPED
        self.current_track = None
        self.playlist_position = None

    def play(self, id=None, position=None):
        raise NotImplementedError

    def next(self):
        raise NotImplementedError
Add add method to BaseCurrentPlaylistController
Add add method to BaseCurrentPlaylistController
Python
apache-2.0
priestd09/mopidy,jcass77/mopidy,mokieyue/mopidy,ZenithDK/mopidy,SuperStarPL/mopidy,mopidy/mopidy,bencevans/mopidy,tkem/mopidy,quartz55/mopidy,rawdlite/mopidy,tkem/mopidy,quartz55/mopidy,glogiotatidis/mopidy,SuperStarPL/mopidy,adamcik/mopidy,woutervanwijk/mopidy,bencevans/mopidy,pacificIT/mopidy,hkariti/mopidy,bacontext/mopidy,abarisain/mopidy,SuperStarPL/mopidy,adamcik/mopidy,vrs01/mopidy,jcass77/mopidy,priestd09/mopidy,diandiankan/mopidy,jmarsik/mopidy,ZenithDK/mopidy,swak/mopidy,hkariti/mopidy,ZenithDK/mopidy,jmarsik/mopidy,ali/mopidy,quartz55/mopidy,SuperStarPL/mopidy,jodal/mopidy,ali/mopidy,rawdlite/mopidy,diandiankan/mopidy,rawdlite/mopidy,dbrgn/mopidy,quartz55/mopidy,tkem/mopidy,mopidy/mopidy,mokieyue/mopidy,swak/mopidy,tkem/mopidy,mokieyue/mopidy,liamw9534/mopidy,glogiotatidis/mopidy,jmarsik/mopidy,kingosticks/mopidy,hkariti/mopidy,jodal/mopidy,swak/mopidy,bencevans/mopidy,jmarsik/mopidy,glogiotatidis/mopidy,diandiankan/mopidy,pacificIT/mopidy,bacontext/mopidy,dbrgn/mopidy,rawdlite/mopidy,priestd09/mopidy,ali/mopidy,hkariti/mopidy,liamw9534/mopidy,woutervanwijk/mopidy,mokieyue/mopidy,dbrgn/mopidy,adamcik/mopidy,glogiotatidis/mopidy,abarisain/mopidy,diandiankan/mopidy,dbrgn/mopidy,pacificIT/mopidy,ali/mopidy,bacontext/mopidy,mopidy/mopidy,pacificIT/mopidy,bacontext/mopidy,swak/mopidy,vrs01/mopidy,vrs01/mopidy,bencevans/mopidy,ZenithDK/mopidy,vrs01/mopidy,jcass77/mopidy,kingosticks/mopidy,jodal/mopidy,kingosticks/mopidy
python
## Code Before:
import logging
import time

from mopidy.exceptions import MpdNotImplemented
from mopidy.models import Playlist

logger = logging.getLogger('backends.base')

class BaseBackend(object):
    current_playlist = None
    library = None
    playback = None
    stored_playlists = None
    uri_handlers = []

class BaseCurrentPlaylistController(object):
    def __init__(self, backend):
        self.backend = backend

class BasePlaybackController(object):
    PAUSED = 'paused'
    PLAYING = 'playing'
    STOPPED = 'stopped'

    def __init__(self, backend):
        self.backend = backend
        self.state = self.STOPPED
        self.current_track = None
        self.playlist_position = None

    def play(self, id=None, position=None):
        raise NotImplementedError

    def next(self):
        raise NotImplementedError
## Instruction:
Add add method to BaseCurrentPlaylistController
## Code After:
import logging
import time

from mopidy.exceptions import MpdNotImplemented
from mopidy.models import Playlist

logger = logging.getLogger('backends.base')

class BaseBackend(object):
    current_playlist = None
    library = None
    playback = None
    stored_playlists = None
    uri_handlers = []

class BaseCurrentPlaylistController(object):
    def __init__(self, backend):
        self.backend = backend

    def add(self, track, at_position=None):
        raise NotImplementedError

class BasePlaybackController(object):
    PAUSED = 'paused'
    PLAYING = 'playing'
    STOPPED = 'stopped'

    def __init__(self, backend):
        self.backend = backend
        self.state = self.STOPPED
        self.current_track = None
        self.playlist_position = None

    def play(self, id=None, position=None):
        raise NotImplementedError

    def next(self):
        raise NotImplementedError
  import logging
  import time

  from mopidy.exceptions import MpdNotImplemented
  from mopidy.models import Playlist

  logger = logging.getLogger('backends.base')

  class BaseBackend(object):
      current_playlist = None
      library = None
      playback = None
      stored_playlists = None
      uri_handlers = []

  class BaseCurrentPlaylistController(object):
      def __init__(self, backend):
          self.backend = backend

+     def add(self, track, at_position=None):
+         raise NotImplementedError
+
  class BasePlaybackController(object):
      PAUSED = 'paused'
      PLAYING = 'playing'
      STOPPED = 'stopped'

      def __init__(self, backend):
          self.backend = backend
          self.state = self.STOPPED
          self.current_track = None
          self.playlist_position = None

      def play(self, id=None, position=None):
          raise NotImplementedError

      def next(self):
          raise NotImplementedError
3
0.085714
3
0
c6e5e7de2c316b61cba7aea16346867aebaf5c9d
examples/apps/chatbot/server.js
examples/apps/chatbot/server.js
/** * This is the main server entry point into the * chatbot example. */ import Chat from './app/components/chat' import ChatBot from './app/chatbot' import DOM from 'react-dom/server' import Elizabot from 'elizabot' import React from 'react' import uid from 'uid' const bot = new Elizabot() export function answer (request, reply) { return reply({ id : uid(), message : bot.transform(request.payload), time : new Date(), user : 'Eliza' }) } export function render (request, reply) { var app = new ChatBot() app.start(function(error) { if (error) { return reply(error).code(500) } return reply.view('apps/chatbot/index', { markup : DOM.renderToString(React.createElement(Chat, { app })), payload : JSON.stringify(app) }) }) } export default function register (server, _options, next) { server.route([ { method : 'GET', path : '/chatbot', handler : render }, { method : 'POST', path : '/chatbot/message', handler : answer } ]) next() } register.attributes = { name : 'Chatbot', description : 'A chatbot app that demonstrates optimistic updates.', example : true, path : '/chatbot' }
/** * This is the main server entry point into the * chatbot example. */ import Chat from './app/components/chat' import ChatBot from './app/chatbot' import DOM from 'react-dom/server' import Elizabot from 'elizabot' import React from 'react' import uid from 'uid' const bot = new Elizabot() export function answer (request, reply) { return reply({ id : uid(), message : bot.transform(request.payload), time : new Date(), user : 'Eliza' }) } export function render (request, reply) { var app = new ChatBot() app.start(function(error) { if (error) { return reply(error).code(500) } return reply.view('apps/chatbot/index', { markup : DOM.renderToString(React.createElement(Chat, { app })), payload : JSON.stringify(app) }) }) } export default function register (server, _options, next) { server.route([ { method : 'GET', path : '/chatbot', handler : render }, { method : 'POST', path : '/chatbot/message', handler(request, reply) { setTimeout(answer, Math.random() * 2500, request, reply) } } ]) next() } register.attributes = { name : 'Chatbot', description : 'A chatbot app that demonstrates optimistic updates.', example : true, path : '/chatbot' }
Add delay to chat response
Add delay to chat response
JavaScript
mit
leobauza/microcosm,vigetlabs/microcosm,vigetlabs/microcosm,vigetlabs/microcosm,leobauza/microcosm,leobauza/microcosm
javascript
## Code Before: /** * This is the main server entry point into the * chatbot example. */ import Chat from './app/components/chat' import ChatBot from './app/chatbot' import DOM from 'react-dom/server' import Elizabot from 'elizabot' import React from 'react' import uid from 'uid' const bot = new Elizabot() export function answer (request, reply) { return reply({ id : uid(), message : bot.transform(request.payload), time : new Date(), user : 'Eliza' }) } export function render (request, reply) { var app = new ChatBot() app.start(function(error) { if (error) { return reply(error).code(500) } return reply.view('apps/chatbot/index', { markup : DOM.renderToString(React.createElement(Chat, { app })), payload : JSON.stringify(app) }) }) } export default function register (server, _options, next) { server.route([ { method : 'GET', path : '/chatbot', handler : render }, { method : 'POST', path : '/chatbot/message', handler : answer } ]) next() } register.attributes = { name : 'Chatbot', description : 'A chatbot app that demonstrates optimistic updates.', example : true, path : '/chatbot' } ## Instruction: Add delay to chat response ## Code After: /** * This is the main server entry point into the * chatbot example. */ import Chat from './app/components/chat' import ChatBot from './app/chatbot' import DOM from 'react-dom/server' import Elizabot from 'elizabot' import React from 'react' import uid from 'uid' const bot = new Elizabot() export function answer (request, reply) { return reply({ id : uid(), message : bot.transform(request.payload), time : new Date(), user : 'Eliza' }) } export function render (request, reply) { var app = new ChatBot() app.start(function(error) { if (error) { return reply(error).code(500) } return reply.view('apps/chatbot/index', { markup : DOM.renderToString(React.createElement(Chat, { app })), payload : JSON.stringify(app) }) }) } export default function register (server, _options, next) { server.route([ { method : 'GET', path : '/chatbot', handler : render }, { method : 'POST', path : '/chatbot/message', handler(request, reply) { setTimeout(answer, Math.random() * 2500, request, reply) } } ]) next() } register.attributes = { name : 'Chatbot', description : 'A chatbot app that demonstrates optimistic updates.', example : true, path : '/chatbot' }
/** * This is the main server entry point into the * chatbot example. */ import Chat from './app/components/chat' import ChatBot from './app/chatbot' import DOM from 'react-dom/server' import Elizabot from 'elizabot' import React from 'react' import uid from 'uid' const bot = new Elizabot() export function answer (request, reply) { return reply({ id : uid(), message : bot.transform(request.payload), time : new Date(), user : 'Eliza' }) } export function render (request, reply) { var app = new ChatBot() app.start(function(error) { if (error) { return reply(error).code(500) } return reply.view('apps/chatbot/index', { markup : DOM.renderToString(React.createElement(Chat, { app })), payload : JSON.stringify(app) }) }) } export default function register (server, _options, next) { server.route([ { method : 'GET', path : '/chatbot', handler : render }, { method : 'POST', path : '/chatbot/message', - handler : answer + handler(request, reply) { + setTimeout(answer, Math.random() * 2500, request, reply) + } } ]) next() } register.attributes = { name : 'Chatbot', description : 'A chatbot app that demonstrates optimistic updates.', example : true, path : '/chatbot' }
4
0.063492
3
1
f68dcd5a761afbfe1097e087ed3576680c8273a2
src/main/kotlin/rhmodding/tickompiler/cli/NotepadppLangCommand.kt
src/main/kotlin/rhmodding/tickompiler/cli/NotepadppLangCommand.kt
package rhmodding.tickompiler.cli import picocli.CommandLine import java.io.File @CommandLine.Command(name = "notepad++", description = ["Outputs a Notepad++-suitable custom user-defined language XML file. If the output directory is not specified, it will be placed next to this executable."], mixinStandardHelpOptions = true) class NotepadppLangCommand : Runnable { private val FILE_NAME = "tickflow.xml" @CommandLine.Option(names = ["-ow", "--overwrite"], description = ["Overwrite even if a file already exists."]) var overwrite: Boolean = false @CommandLine.Parameters(index = "0", arity = "0..1", description = ["output file or directory"]) var output: File = File("") override fun run() { output.mkdirs() val file = if (output.isDirectory) output.resolve(FILE_NAME) else output if (file.exists() && !overwrite) { println("Cannot output ${file.name}, already exists in the target directory (${file.parentFile.absolutePath}). Please move, rename, or delete the file first.") } else { val internal = NotepadppLangCommand::class.java.getResource("/notepadplusplustickflowlang.xml") file.writeBytes(internal.readBytes()) println("Outputted ${file.name} to ${file.parentFile.absolutePath}. Import it into Notepad++ via: Language > Define your language... > Import") } } }
package rhmodding.tickompiler.cli import picocli.CommandLine import java.io.File @CommandLine.Command(name = "notepad++", description = ["Outputs a Notepad++-suitable custom user-defined language XML file. If the output directory is not specified, it will be placed next to this executable."], mixinStandardHelpOptions = true) class NotepadppLangCommand : Runnable { private val FILE_NAME = "tickflow.xml" @CommandLine.Option(names = ["-ow", "--overwrite"], description = ["Overwrite even if a file already exists."]) var overwrite: Boolean = false @CommandLine.Parameters(index = "0", arity = "0..1", description = ["output file or directory"]) var output: File = File("./") override fun run() { output.mkdirs() val file = if (output.isDirectory) output.resolve(FILE_NAME) else output if (file.exists() && !overwrite) { println("Cannot output ${file.name}, already exists in the target directory (${file.parentFile.absolutePath}). Please move, rename, or delete the file first.") } else { val internal = NotepadppLangCommand::class.java.getResource("/notepadplusplustickflowlang.xml") file.createNewFile() file.writeBytes(internal.readBytes()) println("Outputted ${file.name} to ${file.parentFile.canonicalPath}\nImport it into Notepad++ via: Language > Define your language... > Import") } } }
Fix issues with default path for notepad++ command
Fix issues with default path for notepad++ command
Kotlin
mit
SneakySpook/Tickompiler,chrislo27/Tickompiler
kotlin
## Code Before: package rhmodding.tickompiler.cli import picocli.CommandLine import java.io.File @CommandLine.Command(name = "notepad++", description = ["Outputs a Notepad++-suitable custom user-defined language XML file. If the output directory is not specified, it will be placed next to this executable."], mixinStandardHelpOptions = true) class NotepadppLangCommand : Runnable { private val FILE_NAME = "tickflow.xml" @CommandLine.Option(names = ["-ow", "--overwrite"], description = ["Overwrite even if a file already exists."]) var overwrite: Boolean = false @CommandLine.Parameters(index = "0", arity = "0..1", description = ["output file or directory"]) var output: File = File("") override fun run() { output.mkdirs() val file = if (output.isDirectory) output.resolve(FILE_NAME) else output if (file.exists() && !overwrite) { println("Cannot output ${file.name}, already exists in the target directory (${file.parentFile.absolutePath}). Please move, rename, or delete the file first.") } else { val internal = NotepadppLangCommand::class.java.getResource("/notepadplusplustickflowlang.xml") file.writeBytes(internal.readBytes()) println("Outputted ${file.name} to ${file.parentFile.absolutePath}. Import it into Notepad++ via: Language > Define your language... > Import") } } } ## Instruction: Fix issues with default path for notepad++ command ## Code After: package rhmodding.tickompiler.cli import picocli.CommandLine import java.io.File @CommandLine.Command(name = "notepad++", description = ["Outputs a Notepad++-suitable custom user-defined language XML file. If the output directory is not specified, it will be placed next to this executable."], mixinStandardHelpOptions = true) class NotepadppLangCommand : Runnable { private val FILE_NAME = "tickflow.xml" @CommandLine.Option(names = ["-ow", "--overwrite"], description = ["Overwrite even if a file already exists."]) var overwrite: Boolean = false @CommandLine.Parameters(index = "0", arity = "0..1", description = ["output file or directory"]) var output: File = File("./") override fun run() { output.mkdirs() val file = if (output.isDirectory) output.resolve(FILE_NAME) else output if (file.exists() && !overwrite) { println("Cannot output ${file.name}, already exists in the target directory (${file.parentFile.absolutePath}). Please move, rename, or delete the file first.") } else { val internal = NotepadppLangCommand::class.java.getResource("/notepadplusplustickflowlang.xml") file.createNewFile() file.writeBytes(internal.readBytes()) println("Outputted ${file.name} to ${file.parentFile.canonicalPath}\nImport it into Notepad++ via: Language > Define your language... > Import") } } }
package rhmodding.tickompiler.cli import picocli.CommandLine import java.io.File @CommandLine.Command(name = "notepad++", description = ["Outputs a Notepad++-suitable custom user-defined language XML file. If the output directory is not specified, it will be placed next to this executable."], mixinStandardHelpOptions = true) class NotepadppLangCommand : Runnable { private val FILE_NAME = "tickflow.xml" @CommandLine.Option(names = ["-ow", "--overwrite"], description = ["Overwrite even if a file already exists."]) var overwrite: Boolean = false @CommandLine.Parameters(index = "0", arity = "0..1", description = ["output file or directory"]) - var output: File = File("") + var output: File = File("./") ? ++ override fun run() { output.mkdirs() val file = if (output.isDirectory) output.resolve(FILE_NAME) else output if (file.exists() && !overwrite) { println("Cannot output ${file.name}, already exists in the target directory (${file.parentFile.absolutePath}). Please move, rename, or delete the file first.") } else { val internal = NotepadppLangCommand::class.java.getResource("/notepadplusplustickflowlang.xml") + file.createNewFile() file.writeBytes(internal.readBytes()) - println("Outputted ${file.name} to ${file.parentFile.absolutePath}. Import it into Notepad++ via: Language > Define your language... > Import") ? ^^ --- ^^ + println("Outputted ${file.name} to ${file.parentFile.canonicalPath}\nImport it into Notepad++ via: Language > Define your language... > Import") ? + ^ ++++ ^^ } } }
5
0.166667
3
2
e1d9d0e9f784df298df41967cfba947fa65c2698
commands/access/remove.js
commands/access/remove.js
'use strict'; var Heroku = require('heroku-client'); var co = require('co'); var heroku; module.exports = { topic: 'access', needsAuth: true, needsApp: true, command: 'remove', description: 'Remove users from your app', help: 'heroku access:remove [email protected] --app APP', args: [{name: 'user', optional: false}], run: function (context) { let appName; appName = context.app; co(function* () { heroku = new Heroku({token: context.auth.password}); yield heroku.apps(appName).collaborators(context.args.user).delete(function (err) { if (err) { throw err; } console.log(`Removing ${context.args.user} from application ${cli.color.cyan(appName)}...done`); }); }).catch(function (err) { console.error(err); }); } };
'use strict'; var Heroku = require('heroku-client'); var co = require('co'); var heroku; module.exports = { topic: 'access', needsAuth: true, needsApp: true, command: 'remove', description: 'Remove users from your app', help: 'heroku access:remove [email protected] --app APP', args: [{name: 'user', optional: false}], run: function (context) { let appName; appName = context.app; co(function* () { heroku = new Heroku({token: context.auth.password}); yield heroku.apps(appName).collaborators(context.args.user).delete(function (err) { if (err) { throw err; } console.log(`Removing ${context.args.user} from application appName...done`); }); }).catch(function (err) { console.error(err); }); } };
Remove is not ready yet
Remove is not ready yet
JavaScript
isc
heroku/heroku-access,heroku/heroku-orgs
javascript
## Code Before: 'use strict'; var Heroku = require('heroku-client'); var co = require('co'); var heroku; module.exports = { topic: 'access', needsAuth: true, needsApp: true, command: 'remove', description: 'Remove users from your app', help: 'heroku access:remove [email protected] --app APP', args: [{name: 'user', optional: false}], run: function (context) { let appName; appName = context.app; co(function* () { heroku = new Heroku({token: context.auth.password}); yield heroku.apps(appName).collaborators(context.args.user).delete(function (err) { if (err) { throw err; } console.log(`Removing ${context.args.user} from application ${cli.color.cyan(appName)}...done`); }); }).catch(function (err) { console.error(err); }); } }; ## Instruction: Remove is not ready yet ## Code After: 'use strict'; var Heroku = require('heroku-client'); var co = require('co'); var heroku; module.exports = { topic: 'access', needsAuth: true, needsApp: true, command: 'remove', description: 'Remove users from your app', help: 'heroku access:remove [email protected] --app APP', args: [{name: 'user', optional: false}], run: function (context) { let appName; appName = context.app; co(function* () { heroku = new Heroku({token: context.auth.password}); yield heroku.apps(appName).collaborators(context.args.user).delete(function (err) { if (err) { throw err; } console.log(`Removing ${context.args.user} from application appName...done`); }); }).catch(function (err) { console.error(err); }); } };
'use strict'; var Heroku = require('heroku-client'); var co = require('co'); var heroku; module.exports = { topic: 'access', needsAuth: true, needsApp: true, command: 'remove', description: 'Remove users from your app', help: 'heroku access:remove [email protected] --app APP', args: [{name: 'user', optional: false}], run: function (context) { let appName; appName = context.app; co(function* () { heroku = new Heroku({token: context.auth.password}); yield heroku.apps(appName).collaborators(context.args.user).delete(function (err) { if (err) { throw err; } - console.log(`Removing ${context.args.user} from application ${cli.color.cyan(appName)}...done`); ? ----------------- -- + console.log(`Removing ${context.args.user} from application appName...done`); }); }).catch(function (err) { console.error(err); }); } };
2
0.064516
1
1
df8251912fc3eb35c311a96b77a559bd1a65f42b
roles/gnome-shell/tasks/main.yml
roles/gnome-shell/tasks/main.yml
--- - name: Gnome Shell | Install drivers for OPENVPN apt: pkg=network-manager-openvpn-gnome state=installed
--- - name: Gnome Shell | Install drivers for OPENVPN apt: pkg=network-manager-openvpn-gnome state=installed # Config theme Gnome Shell - name: "Gnome 3 | Change default template by template dark" copy: src=/usr/share/themes/Adwaita/gtk-3.0/gtk-dark.css dest=/usr/share/themes/Adwaita/gtk-3.0/gtk.css backup=yes
Add package for creating a VPN
Add package for creating a VPN
YAML
mit
ericjsilva/ansible-desktop-ubuntu
yaml
## Code Before:
---
- name: Gnome Shell | Install drivers for OPENVPN
  apt: pkg=network-manager-openvpn-gnome state=installed

## Instruction:
Add package for creating a VPN

## Code After:
---
- name: Gnome Shell | Install drivers for OPENVPN
  apt: pkg=network-manager-openvpn-gnome state=installed


# Config theme Gnome Shell
- name: "Gnome 3 | Change default template by template dark"
  copy: src=/usr/share/themes/Adwaita/gtk-3.0/gtk-dark.css dest=/usr/share/themes/Adwaita/gtk-3.0/gtk.css backup=yes
--- - name: Gnome Shell | Install drivers for OPENVPN apt: pkg=network-manager-openvpn-gnome state=installed + + + # Config theme Gnome Shell + - name: "Gnome 3 | Change default template by template dark" + copy: src=/usr/share/themes/Adwaita/gtk-3.0/gtk-dark.css dest=/usr/share/themes/Adwaita/gtk-3.0/gtk.css backup=yes
5
1.25
5
0
ce0afe74b001c76d57449f5deb0a525df36b9756
benchmarks/benchmark-file-output/src/main/java/org/tinylog/benchmarks/file/LogFileState.java
benchmarks/benchmark-file-output/src/main/java/org/tinylog/benchmarks/file/LogFileState.java
package org.tinylog.benchmarks.file; import java.io.IOException; import java.nio.file.Path; import org.openjdk.jmh.annotations.Param; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.State; import org.tinylog.impl.writers.file.LogFile; /** * State for writing strings by using {@link LogFile}. */ @State(Scope.Thread) public class LogFileState extends AbstractState<LogFile> { /** * The buffer sizes to benchmark. */ @Param({ "1024", "2048", "4096", "8192", "16384", "32768", "65536", "131072" }) private int bufferSize; /** */ public LogFileState() { } /** * @param bufferSize The buffer size in bytes */ public LogFileState(int bufferSize) { this.bufferSize = bufferSize; } @Override public void write(String content) throws IOException { instance.write(content); } @Override protected LogFile create(Path path) throws IOException { return new LogFile(path.toString(), bufferSize, CHARSET, false); } }
package org.tinylog.benchmarks.file; import java.io.IOException; import java.nio.file.Path; import org.openjdk.jmh.annotations.Param; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.State; import org.tinylog.impl.writers.file.LogFile; /** * State for writing strings by using {@link LogFile}. */ @State(Scope.Thread) public class LogFileState extends AbstractState<LogFile> { /** * The buffer sizes to benchmark. */ @Param({ "1024", "2048", "4096", "8192", "16384", "32768", "65536", "131072" }) private int bufferSize; /** */ public LogFileState() { } /** * @param bufferSize The buffer size in bytes */ public LogFileState(int bufferSize) { this.bufferSize = bufferSize; } @Override public void write(String content) throws IOException { instance.write(content.getBytes(CHARSET)); } @Override protected LogFile create(Path path) throws IOException { return new LogFile(path.toString(), bufferSize, CHARSET, false); } }
Fix writing strings in FileOutputBenchmark via FileLogState
Fix writing strings in FileOutputBenchmark via FileLogState
Java
apache-2.0
pmwmedia/tinylog,pmwmedia/tinylog
java
## Code Before: package org.tinylog.benchmarks.file; import java.io.IOException; import java.nio.file.Path; import org.openjdk.jmh.annotations.Param; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.State; import org.tinylog.impl.writers.file.LogFile; /** * State for writing strings by using {@link LogFile}. */ @State(Scope.Thread) public class LogFileState extends AbstractState<LogFile> { /** * The buffer sizes to benchmark. */ @Param({ "1024", "2048", "4096", "8192", "16384", "32768", "65536", "131072" }) private int bufferSize; /** */ public LogFileState() { } /** * @param bufferSize The buffer size in bytes */ public LogFileState(int bufferSize) { this.bufferSize = bufferSize; } @Override public void write(String content) throws IOException { instance.write(content); } @Override protected LogFile create(Path path) throws IOException { return new LogFile(path.toString(), bufferSize, CHARSET, false); } } ## Instruction: Fix writing strings in FileOutputBenchmark via FileLogState ## Code After: package org.tinylog.benchmarks.file; import java.io.IOException; import java.nio.file.Path; import org.openjdk.jmh.annotations.Param; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.State; import org.tinylog.impl.writers.file.LogFile; /** * State for writing strings by using {@link LogFile}. */ @State(Scope.Thread) public class LogFileState extends AbstractState<LogFile> { /** * The buffer sizes to benchmark. */ @Param({ "1024", "2048", "4096", "8192", "16384", "32768", "65536", "131072" }) private int bufferSize; /** */ public LogFileState() { } /** * @param bufferSize The buffer size in bytes */ public LogFileState(int bufferSize) { this.bufferSize = bufferSize; } @Override public void write(String content) throws IOException { instance.write(content.getBytes(CHARSET)); } @Override protected LogFile create(Path path) throws IOException { return new LogFile(path.toString(), bufferSize, CHARSET, false); } }
package org.tinylog.benchmarks.file; import java.io.IOException; import java.nio.file.Path; import org.openjdk.jmh.annotations.Param; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.State; import org.tinylog.impl.writers.file.LogFile; /** * State for writing strings by using {@link LogFile}. */ @State(Scope.Thread) public class LogFileState extends AbstractState<LogFile> { /** * The buffer sizes to benchmark. */ @Param({ "1024", "2048", "4096", "8192", "16384", "32768", "65536", "131072" }) private int bufferSize; /** */ public LogFileState() { } /** * @param bufferSize The buffer size in bytes */ public LogFileState(int bufferSize) { this.bufferSize = bufferSize; } @Override public void write(String content) throws IOException { - instance.write(content); + instance.write(content.getBytes(CHARSET)); } @Override protected LogFile create(Path path) throws IOException { return new LogFile(path.toString(), bufferSize, CHARSET, false); } }
2
0.045455
1
1
40c210a30716af1408048dfda47dd095fc66e0c3
custom.css
custom.css
.docblock pre.rust { background: #eeeeff; } pre.trait, pre.macro, pre.fn, pre.struct, pre.enum, pre.typedef { background: #fcfefc; } /* Small “example” label for doc examples */ .docblock pre.rust::before { content: "example"; float: right; font-style: italic; font-size: 0.8em; margin-top: -10px; margin-right: -5px; } /* Fixup where display in trait listing */ pre.trait .where::before { content: '\a '; } .docblock code { background-color: inherit; font-weight: bold; padding: 0 0.1em; } em.deprecated { font-weight: bold; font-style: italic; } /* custom fix for rustdoc summary bug */ .short pre.rust { display: none; }
.docblock pre.rust { background: #eeeeff; } pre.trait, pre.macro, pre.fn, pre.struct, pre.enum, pre.typedef { background: #fcfefc; } /* Small “example” label for doc examples */ .docblock pre.rust::before { content: "example"; float: right; font-style: italic; font-size: 0.8em; margin-top: -10px; margin-right: -5px; } /* Fixup where display in trait listing */ pre.trait .where::before { content: '\a '; } .docblock code { background-color: inherit; font-weight: bold; padding: 0 0.1em; } em.deprecated { font-weight: bold; font-style: italic; }
Remove css workaround for rustdoc bug that was fixed
Remove css workaround for rustdoc bug that was fixed
CSS
apache-2.0
Pireax/rust-ndarray,SuperFluffy/rust-ndarray,Pireax/rust-ndarray,SuperFluffy/rust-ndarray
css
## Code Before: .docblock pre.rust { background: #eeeeff; } pre.trait, pre.macro, pre.fn, pre.struct, pre.enum, pre.typedef { background: #fcfefc; } /* Small “example” label for doc examples */ .docblock pre.rust::before { content: "example"; float: right; font-style: italic; font-size: 0.8em; margin-top: -10px; margin-right: -5px; } /* Fixup where display in trait listing */ pre.trait .where::before { content: '\a '; } .docblock code { background-color: inherit; font-weight: bold; padding: 0 0.1em; } em.deprecated { font-weight: bold; font-style: italic; } /* custom fix for rustdoc summary bug */ .short pre.rust { display: none; } ## Instruction: Remove css workaround for rustdoc bug that was fixed ## Code After: .docblock pre.rust { background: #eeeeff; } pre.trait, pre.macro, pre.fn, pre.struct, pre.enum, pre.typedef { background: #fcfefc; } /* Small “example” label for doc examples */ .docblock pre.rust::before { content: "example"; float: right; font-style: italic; font-size: 0.8em; margin-top: -10px; margin-right: -5px; } /* Fixup where display in trait listing */ pre.trait .where::before { content: '\a '; } .docblock code { background-color: inherit; font-weight: bold; padding: 0 0.1em; } em.deprecated { font-weight: bold; font-style: italic; }
.docblock pre.rust { background: #eeeeff; } pre.trait, pre.macro, pre.fn, pre.struct, pre.enum, pre.typedef { background: #fcfefc; } /* Small “example” label for doc examples */ .docblock pre.rust::before { content: "example"; float: right; font-style: italic; font-size: 0.8em; margin-top: -10px; margin-right: -5px; } /* Fixup where display in trait listing */ pre.trait .where::before { content: '\a '; } .docblock code { background-color: inherit; font-weight: bold; padding: 0 0.1em; } em.deprecated { font-weight: bold; font-style: italic; } - - /* custom fix for rustdoc summary bug */ - - .short pre.rust { - display: none; - }
6
0.166667
0
6
d4fa49e9a44eb12e5831923ad64c6fb71384aabe
modules/05_fasd.zsh
modules/05_fasd.zsh
function { emulate -LR zsh local fasd_cache=$ZDOTDIR/fasd-init-cache local fasd_path=$ZDOTDIR/fasd/fasd source $fasd_path if [[ ! -w $fasd_cache ]]; then touch $fasd_cache $fasd_path --init \ zsh-hook \ zsh-wcomp \ zsh-wcomp-install \ > $fasd_cache fi source $fasd_cache } # interactive directory selection # interactive file selection alias sd='fasd -sid' alias sf='fasd -sif' # cd, same functionality as j in autojump alias j='fasd -e cd -d'
function { emulate -LR zsh local fasd_cache=$ZDOTDIR/fasd-init-cache local fasd_path=$ZDOTDIR/fasd/fasd source $fasd_path if [[ ! -w $fasd_cache ]]; then touch $fasd_cache $fasd_path --init \ zsh-hook \ zsh-wcomp \ zsh-wcomp-install \ > $fasd_cache fi source $fasd_cache } # interactive directory selection # interactive file selection alias sd='fasd -sid' alias sf='fasd -sif' # cd, same functionality as j in autojump alias j='fasd -e cd -d' _mydirstack() { local -a lines list for d in $dirstack; do lines+="$(($#lines+1)) -- $d" list+="$#lines" done _wanted -V directory-stack expl 'directory stack' \ compadd "$@" -ld lines -S']/' -Q -a list } zsh_directory_name() { case $1 in (c) _mydirstack;; (n) case $2 in (<0-9>) reply=($dirstack[$2]);; (*) reply=($dirstack[(r)*$2*]);; esac;; (d) false;; esac }
Add a useful zsh_directory_name handler
Add a useful zsh_directory_name handler
Shell
mit
PythonNut/zsh-config
shell
## Code Before: function { emulate -LR zsh local fasd_cache=$ZDOTDIR/fasd-init-cache local fasd_path=$ZDOTDIR/fasd/fasd source $fasd_path if [[ ! -w $fasd_cache ]]; then touch $fasd_cache $fasd_path --init \ zsh-hook \ zsh-wcomp \ zsh-wcomp-install \ > $fasd_cache fi source $fasd_cache } # interactive directory selection # interactive file selection alias sd='fasd -sid' alias sf='fasd -sif' # cd, same functionality as j in autojump alias j='fasd -e cd -d' ## Instruction: Add a useful zsh_directory_name handler ## Code After: function { emulate -LR zsh local fasd_cache=$ZDOTDIR/fasd-init-cache local fasd_path=$ZDOTDIR/fasd/fasd source $fasd_path if [[ ! -w $fasd_cache ]]; then touch $fasd_cache $fasd_path --init \ zsh-hook \ zsh-wcomp \ zsh-wcomp-install \ > $fasd_cache fi source $fasd_cache } # interactive directory selection # interactive file selection alias sd='fasd -sid' alias sf='fasd -sif' # cd, same functionality as j in autojump alias j='fasd -e cd -d' _mydirstack() { local -a lines list for d in $dirstack; do lines+="$(($#lines+1)) -- $d" list+="$#lines" done _wanted -V directory-stack expl 'directory stack' \ compadd "$@" -ld lines -S']/' -Q -a list } zsh_directory_name() { case $1 in (c) _mydirstack;; (n) case $2 in (<0-9>) reply=($dirstack[$2]);; (*) reply=($dirstack[(r)*$2*]);; esac;; (d) false;; esac }
function { emulate -LR zsh local fasd_cache=$ZDOTDIR/fasd-init-cache local fasd_path=$ZDOTDIR/fasd/fasd source $fasd_path if [[ ! -w $fasd_cache ]]; then touch $fasd_cache $fasd_path --init \ zsh-hook \ zsh-wcomp \ zsh-wcomp-install \ > $fasd_cache fi source $fasd_cache } # interactive directory selection # interactive file selection alias sd='fasd -sid' alias sf='fasd -sif' # cd, same functionality as j in autojump alias j='fasd -e cd -d' + + _mydirstack() { + local -a lines list + for d in $dirstack; do + lines+="$(($#lines+1)) -- $d" + list+="$#lines" + done + _wanted -V directory-stack expl 'directory stack' \ + compadd "$@" -ld lines -S']/' -Q -a list + } + + zsh_directory_name() { + case $1 in + (c) _mydirstack;; + (n) case $2 in + (<0-9>) reply=($dirstack[$2]);; + (*) reply=($dirstack[(r)*$2*]);; + esac;; + (d) false;; + esac + }
21
0.777778
21
0
c1290ad36b0aff4ab1661233eae6e043858c2813
libkoshka_mm.h
libkoshka_mm.h
typedef struct { unsigned long long int width,height; long long int handle_x,handle_y; GLuint texture; SDL_Surface *surface; GLuint mask; } Image; void bb_fatal_error(char *msg);
typedef struct { long long int width,height; long long int width_frames,height_frames; long long int handle_x,handle_y; GLuint *textures; SDL_Surface *surface; GLuint mask_color; unsigned long long int **masks; unsigned long long int mask_width; unsigned long long int mask_height; } Image; void bb_fatal_error(char *msg);
Change definition of Image struct so all width/height fields are signed integers. Add fields to Image struct that deal with collision masking.
Change definition of Image struct so all width/height fields are signed integers. Add fields to Image struct that deal with collision masking.
C
bsd-2-clause
clockworkdevstudio/Idlewild-Lang,clockworkdevstudio/Idlewild-Lang,clockworkdevstudio/Idlewild-Lang
c
## Code Before: typedef struct { unsigned long long int width,height; long long int handle_x,handle_y; GLuint texture; SDL_Surface *surface; GLuint mask; } Image; void bb_fatal_error(char *msg); ## Instruction: Change definition of Image struct so all width/height fields are signed integers. Add fields to Image struct that deal with collision masking. ## Code After: typedef struct { long long int width,height; long long int width_frames,height_frames; long long int handle_x,handle_y; GLuint *textures; SDL_Surface *surface; GLuint mask_color; unsigned long long int **masks; unsigned long long int mask_width; unsigned long long int mask_height; } Image; void bb_fatal_error(char *msg);
typedef struct { - unsigned long long int width,height; ? --------- + long long int width,height; + long long int width_frames,height_frames; long long int handle_x,handle_y; - GLuint texture; + GLuint *textures; ? + + SDL_Surface *surface; - GLuint mask; + GLuint mask_color; ? ++++++ + unsigned long long int **masks; + unsigned long long int mask_width; + unsigned long long int mask_height; } Image; void bb_fatal_error(char *msg);
10
1
7
3
9efb883a2110f1c052aae4b82300c3872a39ebc7
app/views/documentation/storing_data/_perl.html.haml
app/views/documentation/storing_data/_perl.html.haml
= render "missing_documentation"
:markdown The `Database::DumpTruck` Perl module makes it easy to store data in an SQLite database. To save data all you need to do is use the [`Database::DumpTruck` module](http://search.cpan.org/dist/Database-DumpTruck/lib/Database/DumpTruck.pm). :coderay #!perl use Database::DumpTruck; # Open a database handle my $dt = Database::DumpTruck->new({dbname => 'data.sqlite', table => 'data'}); #insert content $dt->insert({ name => 'Susan', occupation => 'software developer' }); #create index $dt->create_index(['name']); #update content. You can use upsert when storing new data too. $dt->upsert({ name => 'Susan', occupation => 'product owner' });
Add perl documentation for storing data
Add perl documentation for storing data
Haml
agpl-3.0
openaustralia/morph,openaustralia/morph,openaustralia/morph,openaustralia/morph,openaustralia/morph,openaustralia/morph,openaustralia/morph
haml
## Code Before: = render "missing_documentation" ## Instruction: Add perl documentation for storing data ## Code After: :markdown The `Database::DumpTruck` Perl module makes it easy to store data in an SQLite database. To save data all you need to do is use the [`Database::DumpTruck` module](http://search.cpan.org/dist/Database-DumpTruck/lib/Database/DumpTruck.pm). :coderay #!perl use Database::DumpTruck; # Open a database handle my $dt = Database::DumpTruck->new({dbname => 'data.sqlite', table => 'data'}); #insert content $dt->insert({ name => 'Susan', occupation => 'software developer' }); #create index $dt->create_index(['name']); #update content. You can use upsert when storing new data too. $dt->upsert({ name => 'Susan', occupation => 'product owner' });
- = render "missing_documentation" + :markdown + The `Database::DumpTruck` Perl module makes it easy to store data in an SQLite database. + + To save data all you need to do is use the [`Database::DumpTruck` module](http://search.cpan.org/dist/Database-DumpTruck/lib/Database/DumpTruck.pm). + + :coderay + #!perl + use Database::DumpTruck; + + # Open a database handle + my $dt = Database::DumpTruck->new({dbname => 'data.sqlite', table => 'data'}); + + #insert content + $dt->insert({ + name => 'Susan', + occupation => 'software developer' + }); + + #create index + $dt->create_index(['name']); + + #update content. You can use upsert when storing new data too. + $dt->upsert({ + name => 'Susan', + occupation => 'product owner' + });
27
27
26
1
16ef3a240259eefd63001c9f797bfbd3e2d811e0
chars.css
chars.css
margin-top: 10px; } #chars_container span.avatar { width: 25px; height: 25px; display: inline-block; text-align: center; } #chars_container span.avatar img { max-width: 100%; max-height: 100%; margin-bottom: -7px; } #chars_container li { margin-top: 7px; } #chars_container .switch { float: right; }
margin-top: 10px; } #chars_container span.avatar { width: 25px; height: 25px; display: inline-block; text-align: center; } #chars_container span.avatar img { max-width: 100%; max-height: 100%; margin-bottom: -7px; } #chars_container li { margin-top: 7px; } #chars_container .switch { float: right; } /* And the profile menu can use some love */ #profile_menu_top img.avatar, #profile_menu img.avatar { display: none; }
Fix indentation and don't bother showing user generic avatar in the profile popup since it won't ever be anything else.
Fix indentation and don't bother showing user generic avatar in the profile popup since it won't ever be anything else.
CSS
mit
Arantor/smf-characters,Arantor/smf-characters
css
## Code Before: margin-top: 10px; } #chars_container span.avatar { width: 25px; height: 25px; display: inline-block; text-align: center; } #chars_container span.avatar img { max-width: 100%; max-height: 100%; margin-bottom: -7px; } #chars_container li { margin-top: 7px; } #chars_container .switch { float: right; } ## Instruction: Fix indentation and don't bother showing user generic avatar in the profile popup since it won't ever be anything else. ## Code After: margin-top: 10px; } #chars_container span.avatar { width: 25px; height: 25px; display: inline-block; text-align: center; } #chars_container span.avatar img { max-width: 100%; max-height: 100%; margin-bottom: -7px; } #chars_container li { margin-top: 7px; } #chars_container .switch { float: right; } /* And the profile menu can use some love */ #profile_menu_top img.avatar, #profile_menu img.avatar { display: none; }
margin-top: 10px; } #chars_container span.avatar { - width: 25px; ? ^^^^ + width: 25px; ? ^ - height: 25px; ? ^^^^ + height: 25px; ? ^ - display: inline-block; ? ^^^^ + display: inline-block; ? ^ text-align: center; } #chars_container span.avatar img { - max-width: 100%; ? ^^^^ + max-width: 100%; ? ^ - max-height: 100%; ? ^^^^ + max-height: 100%; ? ^ - margin-bottom: -7px; ? ^^^^ + margin-bottom: -7px; ? ^ } #chars_container li { margin-top: 7px; } #chars_container .switch { float: right; } + + /* And the profile menu can use some love */ + #profile_menu_top img.avatar, #profile_menu img.avatar { + display: none; + }
17
0.73913
11
6
f96d396bc66c5b3f486acb95c5407d441912f7a9
src/sprites/Water/interact.js
src/sprites/Water/interact.js
export default function interact() { const game = this.game.state.states.Game; const player = game.player; player.inventory.water++; // this should be its own function somewhere game.hud.water.setText('Water: ' + player.inventory.water); }
export default function interact() { const game = this.game.state.states.Game; const player = game.player; player.inventory.water++; }
Handle updating water count elsewhere
Handle updating water count elsewhere
JavaScript
mit
ThomasMays/incremental-forest,ThomasMays/incremental-forest
javascript
## Code Before: export default function interact() { const game = this.game.state.states.Game; const player = game.player; player.inventory.water++; // this should be its own function somewhere game.hud.water.setText('Water: ' + player.inventory.water); } ## Instruction: Handle updating water count elsewhere ## Code After: export default function interact() { const game = this.game.state.states.Game; const player = game.player; player.inventory.water++; }
export default function interact() { const game = this.game.state.states.Game; const player = game.player; player.inventory.water++; - - // this should be its own function somewhere - game.hud.water.setText('Water: ' + player.inventory.water); }
3
0.3
0
3
6d3f920eab29161d48297315aa934f94953a95f2
autoload/airline/extensions/neomake.vim
autoload/airline/extensions/neomake.vim
let s:spc = g:airline_symbols.space function! airline#extensions#neomake#apply(...) let w:airline_section_warning = get(w:, 'airline_section_warning', g:airline_section_warning) let w:airline_section_warning .= s:spc.'%{neomake#statusline#LoclistStatus()}' endfunction function! airline#extensions#neomake#init(ext) call airline#parts#define_raw('neomake', '%{neomake#statusline#LoclistStatus()}') call a:ext.add_statusline_func('airline#extensions#neomake#apply') endfunction
" vim: ts=4 sw=4 et if exists('g:neomake_airline') && g:neomake_airline == 0 finish endif let s:spc = g:airline_symbols.space function! airline#extensions#neomake#apply(...) let w:airline_section_warning = get(w:, 'airline_section_warning', g:airline_section_warning) let w:airline_section_warning .= s:spc.'%{neomake#statusline#LoclistStatus()}' endfunction function! airline#extensions#neomake#init(ext) call airline#parts#define_raw('neomake', '%{neomake#statusline#LoclistStatus()}') call a:ext.add_statusline_func('airline#extensions#neomake#apply') endfunction
Add option to disable Airline integration.
Add option to disable Airline integration.
VimL
mit
euclio/neomake,codeinabox/neomake,datanoise/neomake,bonds/neomake,KariusDx/neomake,neomake/neomake,euclio/neomake,neomake/neomake,nerdrew/neomake,nerdrew/neomake,bonds/neomake
viml
## Code Before: let s:spc = g:airline_symbols.space function! airline#extensions#neomake#apply(...) let w:airline_section_warning = get(w:, 'airline_section_warning', g:airline_section_warning) let w:airline_section_warning .= s:spc.'%{neomake#statusline#LoclistStatus()}' endfunction function! airline#extensions#neomake#init(ext) call airline#parts#define_raw('neomake', '%{neomake#statusline#LoclistStatus()}') call a:ext.add_statusline_func('airline#extensions#neomake#apply') endfunction ## Instruction: Add option to disable Airline integration. ## Code After: " vim: ts=4 sw=4 et if exists('g:neomake_airline') && g:neomake_airline == 0 finish endif let s:spc = g:airline_symbols.space function! airline#extensions#neomake#apply(...) let w:airline_section_warning = get(w:, 'airline_section_warning', g:airline_section_warning) let w:airline_section_warning .= s:spc.'%{neomake#statusline#LoclistStatus()}' endfunction function! airline#extensions#neomake#init(ext) call airline#parts#define_raw('neomake', '%{neomake#statusline#LoclistStatus()}') call a:ext.add_statusline_func('airline#extensions#neomake#apply') endfunction
+ " vim: ts=4 sw=4 et + + if exists('g:neomake_airline') && g:neomake_airline == 0 + finish + endif + let s:spc = g:airline_symbols.space function! airline#extensions#neomake#apply(...) let w:airline_section_warning = get(w:, 'airline_section_warning', g:airline_section_warning) let w:airline_section_warning .= s:spc.'%{neomake#statusline#LoclistStatus()}' endfunction function! airline#extensions#neomake#init(ext) call airline#parts#define_raw('neomake', '%{neomake#statusline#LoclistStatus()}') call a:ext.add_statusline_func('airline#extensions#neomake#apply') endfunction
6
0.545455
6
0
92e68c724d97c659ee2040ca88e617c934df83f1
ci/pipelines/auto-update/tasks/auto-update-gems.sh
ci/pipelines/auto-update/tasks/auto-update-gems.sh
set -e set -x pushd bosh-cpi-src-in echo "Check if latest auto-update commit has already been merged to master" git fetch origin auto-update:refs/remotes/origin/auto-update pr_open=$(git branch master --contains $(git rev-parse origin/auto-update)) if [ -z ${pr_open} ]; then echo "PR is open. Merge first" exit 1 fi popd cp -r bosh-cpi-src-in bosh-cpi-src-out cd bosh-cpi-src-out/src/bosh_openstack_cpi echo "Looking for new gem versions" rm Gemfile.lock ./vendor_gems git diff --exit-code Gemfile.lock || exit_code=$? if [ -v exit_code ]; then echo "Running unit tests" bundle install bundle exec rspec spec/unit/* echo "Creating new pull request" git add . git config --global user.email [email protected] git config --global user.name CI git commit -m "Bump gems" else echo "No new gem versions found" fi
set -e set -x pushd bosh-cpi-src-in echo "Check if latest auto-update commit has already been merged to master" git fetch origin auto-update:refs/remotes/origin/auto-update pr_open=$(git branch master --contains $(git rev-parse origin/auto-update)) if [ -z ${pr_open} ]; then echo "PR is open. Merge first" exit 1 fi popd cp -r bosh-cpi-src-in bosh-cpi-src-out/repo cd bosh-cpi-src-out/repo/src/bosh_openstack_cpi echo "Looking for new gem versions" rm Gemfile.lock ./vendor_gems git diff --exit-code Gemfile.lock || exit_code=$? if [ -v exit_code ]; then echo "Running unit tests" bundle install bundle exec rspec spec/unit/* echo "Creating new pull request" git add . git config --global user.email [email protected] git config --global user.name CI git commit -m "Bump gems" else echo "No new gem versions found" fi
Copy source to correct folder
Copy source to correct folder [#149794797](https://www.pivotaltracker.com/story/show/149794797) Signed-off-by: Tom Kiemes <[email protected]>
Shell
apache-2.0
cloudfoundry-incubator/bosh-openstack-cpi-release,cloudfoundry-incubator/bosh-openstack-cpi-release,cloudfoundry-incubator/bosh-openstack-cpi-release
shell
## Code Before: set -e set -x pushd bosh-cpi-src-in echo "Check if latest auto-update commit has already been merged to master" git fetch origin auto-update:refs/remotes/origin/auto-update pr_open=$(git branch master --contains $(git rev-parse origin/auto-update)) if [ -z ${pr_open} ]; then echo "PR is open. Merge first" exit 1 fi popd cp -r bosh-cpi-src-in bosh-cpi-src-out cd bosh-cpi-src-out/src/bosh_openstack_cpi echo "Looking for new gem versions" rm Gemfile.lock ./vendor_gems git diff --exit-code Gemfile.lock || exit_code=$? if [ -v exit_code ]; then echo "Running unit tests" bundle install bundle exec rspec spec/unit/* echo "Creating new pull request" git add . git config --global user.email [email protected] git config --global user.name CI git commit -m "Bump gems" else echo "No new gem versions found" fi ## Instruction: Copy source to correct folder [#149794797](https://www.pivotaltracker.com/story/show/149794797) Signed-off-by: Tom Kiemes <[email protected]> ## Code After: set -e set -x pushd bosh-cpi-src-in echo "Check if latest auto-update commit has already been merged to master" git fetch origin auto-update:refs/remotes/origin/auto-update pr_open=$(git branch master --contains $(git rev-parse origin/auto-update)) if [ -z ${pr_open} ]; then echo "PR is open. Merge first" exit 1 fi popd cp -r bosh-cpi-src-in bosh-cpi-src-out/repo cd bosh-cpi-src-out/repo/src/bosh_openstack_cpi echo "Looking for new gem versions" rm Gemfile.lock ./vendor_gems git diff --exit-code Gemfile.lock || exit_code=$? if [ -v exit_code ]; then echo "Running unit tests" bundle install bundle exec rspec spec/unit/* echo "Creating new pull request" git add . git config --global user.email [email protected] git config --global user.name CI git commit -m "Bump gems" else echo "No new gem versions found" fi
set -e set -x pushd bosh-cpi-src-in echo "Check if latest auto-update commit has already been merged to master" git fetch origin auto-update:refs/remotes/origin/auto-update pr_open=$(git branch master --contains $(git rev-parse origin/auto-update)) if [ -z ${pr_open} ]; then echo "PR is open. Merge first" exit 1 fi popd - cp -r bosh-cpi-src-in bosh-cpi-src-out + cp -r bosh-cpi-src-in bosh-cpi-src-out/repo ? +++++ - cd bosh-cpi-src-out/src/bosh_openstack_cpi + cd bosh-cpi-src-out/repo/src/bosh_openstack_cpi ? +++++ echo "Looking for new gem versions" rm Gemfile.lock ./vendor_gems git diff --exit-code Gemfile.lock || exit_code=$? if [ -v exit_code ]; then echo "Running unit tests" bundle install bundle exec rspec spec/unit/* echo "Creating new pull request" git add . git config --global user.email [email protected] git config --global user.name CI git commit -m "Bump gems" else echo "No new gem versions found" fi
4
0.117647
2
2
334a3c2ff706b2a40379c90f88049cd1f643d900
zsh.d/autoload.d/50-toolchains.zsh
zsh.d/autoload.d/50-toolchains.zsh
if [ -d "/opt/msp430" ]; then #Add to paths export MSP430ROOT="/opt/msp430" export PATH="${PATH}:${MSP430ROOT}/bin" fi #NodeJS if [ -d /usr/local/node ]; then export PATH="${PATH}:/usr/local/node/bin" fi if [ -d ${HOME}/Workspace/FlameGraph ]; then # useful: checkout https://github.com/brendangregg/FlameGraph.git to ~/Workspace for flame utils export PATH="${PATH}:${HOME}/Workspace/FlameGraph" fi
if [ -d "/opt/msp430" ]; then #Add to paths export MSP430ROOT="/opt/msp430" export PATH="${PATH}:${MSP430ROOT}/bin" fi #NodeJS if [ -d /usr/local/node ]; then export PATH="${PATH}:/usr/local/node/bin" fi if [ -d ${WORKSPACE}/FlameGraph ]; then # useful: checkout https://github.com/brendangregg/FlameGraph.git to ~/Workspace for flame utils export PATH="${PATH}:${HOME}/Workspace/FlameGraph" fi
Use $WORKSPACE rather than hardcoding a path
Use $WORKSPACE rather than hardcoding a path
Shell
unlicense
npotts/dotfiles
shell
## Code Before: if [ -d "/opt/msp430" ]; then #Add to paths export MSP430ROOT="/opt/msp430" export PATH="${PATH}:${MSP430ROOT}/bin" fi #NodeJS if [ -d /usr/local/node ]; then export PATH="${PATH}:/usr/local/node/bin" fi if [ -d ${HOME}/Workspace/FlameGraph ]; then # useful: checkout https://github.com/brendangregg/FlameGraph.git to ~/Workspace for flame utils export PATH="${PATH}:${HOME}/Workspace/FlameGraph" fi ## Instruction: Use $WORKSPACE rather than hardcoding a path ## Code After: if [ -d "/opt/msp430" ]; then #Add to paths export MSP430ROOT="/opt/msp430" export PATH="${PATH}:${MSP430ROOT}/bin" fi #NodeJS if [ -d /usr/local/node ]; then export PATH="${PATH}:/usr/local/node/bin" fi if [ -d ${WORKSPACE}/FlameGraph ]; then # useful: checkout https://github.com/brendangregg/FlameGraph.git to ~/Workspace for flame utils export PATH="${PATH}:${HOME}/Workspace/FlameGraph" fi
if [ -d "/opt/msp430" ]; then #Add to paths export MSP430ROOT="/opt/msp430" export PATH="${PATH}:${MSP430ROOT}/bin" fi #NodeJS if [ -d /usr/local/node ]; then export PATH="${PATH}:/usr/local/node/bin" fi - if [ -d ${HOME}/Workspace/FlameGraph ]; then ? ^ ^ ---------- + if [ -d ${WORKSPACE}/FlameGraph ]; then ? ^ ^^^^^^ # useful: checkout https://github.com/brendangregg/FlameGraph.git to ~/Workspace for flame utils export PATH="${PATH}:${HOME}/Workspace/FlameGraph" fi
2
0.125
1
1
d75d910339409c67a3fba80a9ce7a48d23afb81b
db/migrate/20160303110710_add_status_to_users.rb
db/migrate/20160303110710_add_status_to_users.rb
class AddStatusToUsers < ActiveRecord::Migration def change add_column :users, :status, :integer User.update_all(:status => User.statuses[:active]) end end
class AddStatusToUsers < ActiveRecord::Migration USER_STATUS_ACTIVE = 1 def change add_column :users, :status, :integer User.update_all(status: USER_STATUS_ACTIVE) end end
Fix old migration by hard-coding value. It doesn't work since it references a method that doesn't exist any more.
Fix old migration by hard-coding value. It doesn't work since it references a method that doesn't exist any more.
Ruby
mit
diegoaad/lale-help,lale-help/lale-help,jprokay/lale-help,jprokay/lale-help,diegoaad/lale-help,jprokay/lale-help,lale-help/lale-help,diegoaad/lale-help,lale-help/lale-help,diegoaad/lale-help,jprokay/lale-help,lale-help/lale-help
ruby
## Code Before:
class AddStatusToUsers < ActiveRecord::Migration
  def change
    add_column :users, :status, :integer
    User.update_all(:status => User.statuses[:active])
  end
end

## Instruction:
Fix old migration by hard-coding value. It doesn't work since it references a method that doesn't exist any more.

## Code After:
class AddStatusToUsers < ActiveRecord::Migration

  USER_STATUS_ACTIVE = 1

  def change
    add_column :users, :status, :integer
    User.update_all(status: USER_STATUS_ACTIVE)
  end
end
class AddStatusToUsers < ActiveRecord::Migration + + USER_STATUS_ACTIVE = 1 + def change add_column :users, :status, :integer - User.update_all(:status => User.statuses[:active]) + User.update_all(status: USER_STATUS_ACTIVE) end end
5
0.833333
4
1
6cc4553589851b7eff813e2388c2cbcd874512aa
package.json
package.json
{ "name": "gfn", "description": "Module to create a graph in node and output it as a .dot file to be used with graphviz.", "author": "Nikolas Poniros <[email protected]>", "version": "0.0.1", "dependencies": {}, "devDependencies": {}, "directories": { "lib": "./lib" }, "main": "main", "keywords": ["graphviz", "dot", "graph"], "engine": "node >= 0.6.6", "private": true }
{ "name": "gfn", "description": "Module to create a graph in node and output it as a .dot file to be used with graphviz.", "author": "Nikolas Poniros <[email protected]>", "version": "0.0.1", "dependencies": {}, "devDependencies": { "mocha": "1.18.2", "jshint": "2.5.0" }, "directories": { "lib": "./lib" }, "scripts": { "test": "make jshint && make test" }, "main": "main", "keywords": ["graphviz", "dot", "graph"], "engine": "node >= 0.10.0", "private": true }
Add devDependencies and test script
Add devDependencies and test script
JSON
mit
nponiros/graphviz-node
json
## Code Before: { "name": "gfn", "description": "Module to create a graph in node and output it as a .dot file to be used with graphviz.", "author": "Nikolas Poniros <[email protected]>", "version": "0.0.1", "dependencies": {}, "devDependencies": {}, "directories": { "lib": "./lib" }, "main": "main", "keywords": ["graphviz", "dot", "graph"], "engine": "node >= 0.6.6", "private": true } ## Instruction: Add devDependencies and test script ## Code After: { "name": "gfn", "description": "Module to create a graph in node and output it as a .dot file to be used with graphviz.", "author": "Nikolas Poniros <[email protected]>", "version": "0.0.1", "dependencies": {}, "devDependencies": { "mocha": "1.18.2", "jshint": "2.5.0" }, "directories": { "lib": "./lib" }, "scripts": { "test": "make jshint && make test" }, "main": "main", "keywords": ["graphviz", "dot", "graph"], "engine": "node >= 0.10.0", "private": true }
{ "name": "gfn", "description": "Module to create a graph in node and output it as a .dot file to be used with graphviz.", "author": "Nikolas Poniros <[email protected]>", "version": "0.0.1", "dependencies": {}, - "devDependencies": {}, ? -- + "devDependencies": { + "mocha": "1.18.2", + "jshint": "2.5.0" + }, "directories": { "lib": "./lib" }, + "scripts": { + "test": "make jshint && make test" + }, "main": "main", "keywords": ["graphviz", "dot", "graph"], - "engine": "node >= 0.6.6", ? ^ ^ + "engine": "node >= 0.10.0", ? ^^ ^ "private": true }
10
0.666667
8
2
d7689783bf23fbe43c395b07572a1380654652cd
requirements.txt
requirements.txt
Django==1.11.2 boto3==1.4.4 dj-database-url==0.4.2 django-stdimage==2.4.1 django-storages==1.5.2 gunicorn==19.7.1 mock==2.0.0 psycopg2==2.7.1 pytest==3.1.2 whitenoise==3.3.0
Django==1.11.2 boto3==1.5.8 dj-database-url==0.4.2 django-stdimage==3.0.0 django-storages==1.6.5 gunicorn==19.7.1 mock==2.0.0 psycopg2==2.7.3.2 pytest==3.3.1 whitenoise==3.3.1
Update all dependencies but django
Update all dependencies but django
Text
mit
jordifierro/abidria-api,jordifierro/abidria-api,jordifierro/abidria-api
text
## Code Before: Django==1.11.2 boto3==1.4.4 dj-database-url==0.4.2 django-stdimage==2.4.1 django-storages==1.5.2 gunicorn==19.7.1 mock==2.0.0 psycopg2==2.7.1 pytest==3.1.2 whitenoise==3.3.0 ## Instruction: Update all dependencies but django ## Code After: Django==1.11.2 boto3==1.5.8 dj-database-url==0.4.2 django-stdimage==3.0.0 django-storages==1.6.5 gunicorn==19.7.1 mock==2.0.0 psycopg2==2.7.3.2 pytest==3.3.1 whitenoise==3.3.1
Django==1.11.2 - boto3==1.4.4 ? ^ ^ + boto3==1.5.8 ? ^ ^ dj-database-url==0.4.2 - django-stdimage==2.4.1 ? ^ ^ ^ + django-stdimage==3.0.0 ? ^ ^ ^ - django-storages==1.5.2 ? -- + django-storages==1.6.5 ? ++ gunicorn==19.7.1 mock==2.0.0 - psycopg2==2.7.1 ? ^ + psycopg2==2.7.3.2 ? ^^^ - pytest==3.1.2 ? -- + pytest==3.3.1 ? ++ - whitenoise==3.3.0 ? ^ + whitenoise==3.3.1 ? ^
12
1.2
6
6
a6cd139f9cf6a5e27c4f19e81571c300215c17e6
.appveyor.yml
.appveyor.yml
init: - git config --global core.autocrlf true install: - ps: Install-Product node 8 x64 branches: only: - dev - /^release\/.*$/ - /^(.*\/)?ci-.*$/ clone_depth: 1 environment: global: DOTNET_SKIP_FIRST_TIME_EXPERIENCE: true DOTNET_CLI_TELEMETRY_OPTOUT: 1 test: 'off' deploy: 'off' os: Visual Studio 2017 Preview build_script: - dotnet build Blazor.sln before_test: - choco install googlechrome - npm install -g selenium-standalone - selenium-standalone install - ps: $SeleniumProcess = Start-Process "selenium-standalone" -ArgumentList "start" -PassThru test_script: - dotnet test test\AllTests.proj after_test: - ps: Stop-Process -Id $SeleniumProcess.Id
init: - git config --global core.autocrlf true install: - ps: Install-Product node 8 x64 branches: only: - dev - /^release\/.*$/ - /^(.*\/)?ci-.*$/ environment: global: DOTNET_SKIP_FIRST_TIME_EXPERIENCE: true DOTNET_CLI_TELEMETRY_OPTOUT: 1 test: 'off' deploy: 'off' os: Visual Studio 2017 Preview build_script: - dotnet build Blazor.sln before_test: - choco install googlechrome - npm install -g selenium-standalone - selenium-standalone install - ps: $SeleniumProcess = Start-Process "selenium-standalone" -ArgumentList "start" -PassThru test_script: - dotnet test test\AllTests.proj after_test: - ps: Stop-Process -Id $SeleniumProcess.Id
Make AppVeyor fetch all commits to avoid "reference is not a tree" errors in CI
Make AppVeyor fetch all commits to avoid "reference is not a tree" errors in CI
YAML
apache-2.0
aspnet/AspNetCore,aspnet/AspNetCore,aspnet/AspNetCore,aspnet/AspNetCore,aspnet/AspNetCore,aspnet/AspNetCore,aspnet/AspNetCore,aspnet/AspNetCore,aspnet/AspNetCore
yaml
## Code Before: init: - git config --global core.autocrlf true install: - ps: Install-Product node 8 x64 branches: only: - dev - /^release\/.*$/ - /^(.*\/)?ci-.*$/ clone_depth: 1 environment: global: DOTNET_SKIP_FIRST_TIME_EXPERIENCE: true DOTNET_CLI_TELEMETRY_OPTOUT: 1 test: 'off' deploy: 'off' os: Visual Studio 2017 Preview build_script: - dotnet build Blazor.sln before_test: - choco install googlechrome - npm install -g selenium-standalone - selenium-standalone install - ps: $SeleniumProcess = Start-Process "selenium-standalone" -ArgumentList "start" -PassThru test_script: - dotnet test test\AllTests.proj after_test: - ps: Stop-Process -Id $SeleniumProcess.Id ## Instruction: Make AppVeyor fetch all commits to avoid "reference is not a tree" errors in CI ## Code After: init: - git config --global core.autocrlf true install: - ps: Install-Product node 8 x64 branches: only: - dev - /^release\/.*$/ - /^(.*\/)?ci-.*$/ environment: global: DOTNET_SKIP_FIRST_TIME_EXPERIENCE: true DOTNET_CLI_TELEMETRY_OPTOUT: 1 test: 'off' deploy: 'off' os: Visual Studio 2017 Preview build_script: - dotnet build Blazor.sln before_test: - choco install googlechrome - npm install -g selenium-standalone - selenium-standalone install - ps: $SeleniumProcess = Start-Process "selenium-standalone" -ArgumentList "start" -PassThru test_script: - dotnet test test\AllTests.proj after_test: - ps: Stop-Process -Id $SeleniumProcess.Id
init: - git config --global core.autocrlf true install: - ps: Install-Product node 8 x64 branches: only: - dev - /^release\/.*$/ - /^(.*\/)?ci-.*$/ - clone_depth: 1 environment: global: DOTNET_SKIP_FIRST_TIME_EXPERIENCE: true DOTNET_CLI_TELEMETRY_OPTOUT: 1 test: 'off' deploy: 'off' os: Visual Studio 2017 Preview build_script: - dotnet build Blazor.sln before_test: - choco install googlechrome - npm install -g selenium-standalone - selenium-standalone install - ps: $SeleniumProcess = Start-Process "selenium-standalone" -ArgumentList "start" -PassThru test_script: - dotnet test test\AllTests.proj after_test: - ps: Stop-Process -Id $SeleniumProcess.Id
1
0.035714
0
1
8e3b3fa365080d9ab3ecd679010497667b918d91
lib/extract_urls.rb
lib/extract_urls.rb
require 'hpricot' module ExtractUrls # Extract image URLs from HTML. def extract_image_urls(url, body) relative_url = url.gsub(/(https?:\/\/[^?]*)(\?.*)$*/, '\1'); if relative_url !~ /\/$/ then relative_url += "/" end url_head = relative_url.gsub(/(https?:\/\/[^\/]+\/).*/, '\1'); urls = [] doc = Hpricot(body) doc.search("a[@href]").each do |param| href = param.attributes["href"] if href.nil? then next end if href !~ /\.(png|jpg|jpeg)$/i then next end if href =~ /https?:\/\// then elsif href =~ /^\// then href = url_head + href elsif href !~ /https?:\/\// then href = relative_url + href end urls.push(href) end return urls end module_function :extract_image_urls end
require 'nokogiri' module ExtractUrls # Extract image URLs from HTML. def extract_image_urls(url, body) urls = [] Nokogiri::HTML(body).xpath('//a[@href]').each do |link| urls += [URI.join(url, link[:href]).to_s] if link[:href] =~ /\.(png|jpg|jpeg)\z/i end return urls end module_function :extract_image_urls end
Use nokogiri + URI - much simpler now.
Use nokogiri + URI - much simpler now.
Ruby
isc
moebooru/moebooru,nanaya/moebooru,nanaya/moebooru,euank/moebooru-thin,euank/moebooru-thin,euank/moebooru-thin,moebooru/moebooru,nanaya/moebooru,moebooru/moebooru,nanaya/moebooru,euank/moebooru-thin,euank/moebooru-thin,moebooru/moebooru,nanaya/moebooru,moebooru/moebooru
ruby
## Code Before: require 'hpricot' module ExtractUrls # Extract image URLs from HTML. def extract_image_urls(url, body) relative_url = url.gsub(/(https?:\/\/[^?]*)(\?.*)$*/, '\1'); if relative_url !~ /\/$/ then relative_url += "/" end url_head = relative_url.gsub(/(https?:\/\/[^\/]+\/).*/, '\1'); urls = [] doc = Hpricot(body) doc.search("a[@href]").each do |param| href = param.attributes["href"] if href.nil? then next end if href !~ /\.(png|jpg|jpeg)$/i then next end if href =~ /https?:\/\// then elsif href =~ /^\// then href = url_head + href elsif href !~ /https?:\/\// then href = relative_url + href end urls.push(href) end return urls end module_function :extract_image_urls end ## Instruction: Use nokogiri + URI - much simpler now. ## Code After: require 'nokogiri' module ExtractUrls # Extract image URLs from HTML. def extract_image_urls(url, body) urls = [] Nokogiri::HTML(body).xpath('//a[@href]').each do |link| urls += [URI.join(url, link[:href]).to_s] if link[:href] =~ /\.(png|jpg|jpeg)\z/i end return urls end module_function :extract_image_urls end
- require 'hpricot' + require 'nokogiri' module ExtractUrls # Extract image URLs from HTML. def extract_image_urls(url, body) - relative_url = url.gsub(/(https?:\/\/[^?]*)(\?.*)$*/, '\1'); - if relative_url !~ /\/$/ then relative_url += "/" end - - url_head = relative_url.gsub(/(https?:\/\/[^\/]+\/).*/, '\1'); - urls = [] + Nokogiri::HTML(body).xpath('//a[@href]').each do |link| + urls += [URI.join(url, link[:href]).to_s] if link[:href] =~ /\.(png|jpg|jpeg)\z/i - doc = Hpricot(body) - doc.search("a[@href]").each do |param| - href = param.attributes["href"] - if href.nil? then next end - if href !~ /\.(png|jpg|jpeg)$/i then next end - if href =~ /https?:\/\// then - elsif href =~ /^\// then - href = url_head + href - elsif href !~ /https?:\/\// then - href = relative_url + href - end - - urls.push(href) end return urls end module_function :extract_image_urls end
22
0.709677
3
19
3c6594ef6a30c02377eb6ecb02af709b41e52c22
db/seeds.rb
db/seeds.rb
require "colorize" attributes = { login: "thedude", email: "[email protected]", password: "abc123", password_confirmation: "abc123", terms_of_use: "1", activated_at: Time.now, is_admin: true } user = User.new(attributes) user.login = attributes[:login] user.save! puts attributes.to_yaml.green course = Course.new() # Because of strange validation behaviour course.save(:validation => false) course_code = CourseCode.new({code: "TDA289"}) course_code.course = course course_code.save! puts course_code.to_yaml.green
require "colorize" attributes = { login: "thedude", email: "[email protected]", password: "abc123", password_confirmation: "abc123", terms_of_use: "1", activated_at: Time.now, is_admin: true } user = User.new(attributes) user.login = attributes[:login] user.save! puts attributes.to_yaml.green more_attributes = { login: "examiner", email: "[email protected]", password: "abc123", password_confirmation: "abc123", terms_of_use: "1", activated_at: Time.now, is_admin: true } user2 = User.new(more_attributes) user2.login = more_attributes[:login] user.save! student = Student.create(user: user) examiner = Examiner.create(user: user2) ################### # Courses and codes codes = ["TDA289", "DAT255", "FFR101", "EDA343", "DAT036", "EDA331", "EDA451"] codes.each do |code| course_code = CourseCode.new({code: code}) course = Course.new course.save!(validation: false) course_code.course = course course_code.save! end
Add additional user to seed script
Add additional user to seed script
Ruby
agpl-3.0
water/mainline,water/mainline,water/mainline
ruby
## Code Before: require "colorize" attributes = { login: "thedude", email: "[email protected]", password: "abc123", password_confirmation: "abc123", terms_of_use: "1", activated_at: Time.now, is_admin: true } user = User.new(attributes) user.login = attributes[:login] user.save! puts attributes.to_yaml.green course = Course.new() # Because of strange validation behaviour course.save(:validation => false) course_code = CourseCode.new({code: "TDA289"}) course_code.course = course course_code.save! puts course_code.to_yaml.green ## Instruction: Add additional user to seed script ## Code After: require "colorize" attributes = { login: "thedude", email: "[email protected]", password: "abc123", password_confirmation: "abc123", terms_of_use: "1", activated_at: Time.now, is_admin: true } user = User.new(attributes) user.login = attributes[:login] user.save! puts attributes.to_yaml.green more_attributes = { login: "examiner", email: "[email protected]", password: "abc123", password_confirmation: "abc123", terms_of_use: "1", activated_at: Time.now, is_admin: true } user2 = User.new(more_attributes) user2.login = more_attributes[:login] user.save! student = Student.create(user: user) examiner = Examiner.create(user: user2) ################### # Courses and codes codes = ["TDA289", "DAT255", "FFR101", "EDA343", "DAT036", "EDA331", "EDA451"] codes.each do |code| course_code = CourseCode.new({code: code}) course = Course.new course.save!(validation: false) course_code.course = course course_code.save! end
require "colorize" attributes = { login: "thedude", email: "[email protected]", password: "abc123", password_confirmation: "abc123", terms_of_use: "1", activated_at: Time.now, is_admin: true } user = User.new(attributes) user.login = attributes[:login] user.save! puts attributes.to_yaml.green - course = Course.new() - # Because of strange validation behaviour - course.save(:validation => false) + more_attributes = { + login: "examiner", + email: "[email protected]", + password: "abc123", + password_confirmation: "abc123", + terms_of_use: "1", + activated_at: Time.now, + is_admin: true + } + user2 = User.new(more_attributes) + user2.login = more_attributes[:login] + user.save! - course_code = CourseCode.new({code: "TDA289"}) - course_code.course = course - course_code.save! - puts course_code.to_yaml.green + student = Student.create(user: user) + examiner = Examiner.create(user: user2) + + ################### + # Courses and codes + codes = ["TDA289", "DAT255", "FFR101", "EDA343", "DAT036", "EDA331", "EDA451"] + codes.each do |code| + course_code = CourseCode.new({code: code}) + course = Course.new + course.save!(validation: false) + course_code.course = course + course_code.save! + end + + +
35
1.346154
28
7
03fc71b00978c39fab14847f702a28c49418d242
git/install.sh
git/install.sh
git config --global user.name "Christophe Bliard" git config --global color.branch auto git config --global color.diff auto git config --global color.status auto git config --global color.ui true git config --global core.editor vim git config --global core.excludesfile $HOME/.gitignore git config --global push.default simple git config --global fetch.prune true
git config --global user.name "Christophe Bliard" git config --global color.branch auto git config --global color.diff auto git config --global color.status auto git config --global color.ui true git config --global core.editor vim git config --global core.excludesfile $HOME/.gitignore git config --global push.default simple git config --global fetch.prune true # Three ways diff, so we have the common text git config --global merge.conflict diff3
Configure git to have diff3 for conflicts
Configure git to have diff3 for conflicts
Shell
mit
cbliard/dotfiles,cbliard/dotfiles,cbliard/dotfiles
shell
## Code Before: git config --global user.name "Christophe Bliard" git config --global color.branch auto git config --global color.diff auto git config --global color.status auto git config --global color.ui true git config --global core.editor vim git config --global core.excludesfile $HOME/.gitignore git config --global push.default simple git config --global fetch.prune true ## Instruction: Configure git to have diff3 for conflicts ## Code After: git config --global user.name "Christophe Bliard" git config --global color.branch auto git config --global color.diff auto git config --global color.status auto git config --global color.ui true git config --global core.editor vim git config --global core.excludesfile $HOME/.gitignore git config --global push.default simple git config --global fetch.prune true # Three ways diff, so we have the common text git config --global merge.conflict diff3
git config --global user.name "Christophe Bliard" git config --global color.branch auto git config --global color.diff auto git config --global color.status auto git config --global color.ui true git config --global core.editor vim git config --global core.excludesfile $HOME/.gitignore git config --global push.default simple git config --global fetch.prune true + + # Three ways diff, so we have the common text + git config --global merge.conflict diff3
3
0.3
3
0
c95878e0713a7abe8321a93b1087990ddceed853
lib/kangaruby/diagram.rb
lib/kangaruby/diagram.rb
module KangaRuby # Creates a diagram and writes it out to an `SVG` file. A diagram is represented by a collection of objects. class Diagram # Set of arrows in the diagram, from top to bottom. attr_reader :arrows # Set of lifelines in the diagram, from left to right. attr_reader :lifelines # Creates an empty diagram. def initialize @arrows = [] @lifelines = [] end end end
module KangaRuby # Creates a diagram by constructing an `SVG` document and returning it as a string. # # @example Basic flow # diagram = Diagram.new # # diagram.lifelines << Lifeline.new('Alice') # diagram.lifelines << Lifeline.new('Bob') # diagram.arrows << Arrow.new(1, 2) # # diagram.draw class Diagram # Set of arrows in the diagram, from top to bottom. attr_reader :arrows # Set of lifelines in the diagram, from left to right. attr_reader :lifelines # Creates an empty diagram. def initialize @arrows = [] @lifelines = [] end end end
Add some example code to Diagram documentation
Add some example code to Diagram documentation
Ruby
mit
lee-dohm/kangaruby
ruby
## Code Before: module KangaRuby # Creates a diagram and writes it out to an `SVG` file. A diagram is represented by a collection of objects. class Diagram # Set of arrows in the diagram, from top to bottom. attr_reader :arrows # Set of lifelines in the diagram, from left to right. attr_reader :lifelines # Creates an empty diagram. def initialize @arrows = [] @lifelines = [] end end end ## Instruction: Add some example code to Diagram documentation ## Code After: module KangaRuby # Creates a diagram by constructing an `SVG` document and returning it as a string. # # @example Basic flow # diagram = Diagram.new # # diagram.lifelines << Lifeline.new('Alice') # diagram.lifelines << Lifeline.new('Bob') # diagram.arrows << Arrow.new(1, 2) # # diagram.draw class Diagram # Set of arrows in the diagram, from top to bottom. attr_reader :arrows # Set of lifelines in the diagram, from left to right. attr_reader :lifelines # Creates an empty diagram. def initialize @arrows = [] @lifelines = [] end end end
module KangaRuby - # Creates a diagram and writes it out to an `SVG` file. A diagram is represented by a collection of objects. + # Creates a diagram by constructing an `SVG` document and returning it as a string. + # + # @example Basic flow + # diagram = Diagram.new + # + # diagram.lifelines << Lifeline.new('Alice') + # diagram.lifelines << Lifeline.new('Bob') + # diagram.arrows << Arrow.new(1, 2) + # + # diagram.draw class Diagram # Set of arrows in the diagram, from top to bottom. attr_reader :arrows # Set of lifelines in the diagram, from left to right. attr_reader :lifelines # Creates an empty diagram. def initialize @arrows = [] @lifelines = [] end end end
11
0.647059
10
1
24b7b5650ad9c9446f68316883b17b1e2ee3ae48
app/services/downloads_cache.rb
app/services/downloads_cache.rb
module DownloadsCache def self.downloads_path(dir) "#{Rails.root}/public/downloads/#{dir}" end def self.clear_dirs(dirs) dirs.each do |dir| Rails.logger.debug("Clearing #{dir}") FileUtils.rm_rf(Dir["#{downloads_path(dir)}/*"], :secure => true) end end # cleared after save & destroy def self.clear_shipments clear_dirs(['shipments']) clear_dirs(['comptab']) clear_dirs(['gross_exports']) clear_dirs(['gross_imports']) clear_dirs(['net_exports']) clear_dirs(['net_imports']) end end
module DownloadsCache def self.clear_shipments response = HTTParty.get("#{Rails.application.secrets.sapi_path}/api/trade_downloads_cache_cleanup") end end
Use httparty to invoke cache cleaning on SAPI
Use httparty to invoke cache cleaning on SAPI
Ruby
mit
unepwcmc/trade_reporting_tool,unepwcmc/trade_reporting_tool,unepwcmc/trade_reporting_tool
ruby
## Code Before: module DownloadsCache def self.downloads_path(dir) "#{Rails.root}/public/downloads/#{dir}" end def self.clear_dirs(dirs) dirs.each do |dir| Rails.logger.debug("Clearing #{dir}") FileUtils.rm_rf(Dir["#{downloads_path(dir)}/*"], :secure => true) end end # cleared after save & destroy def self.clear_shipments clear_dirs(['shipments']) clear_dirs(['comptab']) clear_dirs(['gross_exports']) clear_dirs(['gross_imports']) clear_dirs(['net_exports']) clear_dirs(['net_imports']) end end ## Instruction: Use httparty to invoke cache cleaning on SAPI ## Code After: module DownloadsCache def self.clear_shipments response = HTTParty.get("#{Rails.application.secrets.sapi_path}/api/trade_downloads_cache_cleanup") end end
module DownloadsCache - - def self.downloads_path(dir) - "#{Rails.root}/public/downloads/#{dir}" - end - - def self.clear_dirs(dirs) - dirs.each do |dir| - Rails.logger.debug("Clearing #{dir}") - FileUtils.rm_rf(Dir["#{downloads_path(dir)}/*"], :secure => true) - end - end - - # cleared after save & destroy def self.clear_shipments + response = HTTParty.get("#{Rails.application.secrets.sapi_path}/api/trade_downloads_cache_cleanup") - clear_dirs(['shipments']) - clear_dirs(['comptab']) - clear_dirs(['gross_exports']) - clear_dirs(['gross_imports']) - clear_dirs(['net_exports']) - clear_dirs(['net_imports']) end end
20
0.869565
1
19
9cda2512cc827a08170a83e4f00e8d4302a484d3
webdebugger/build/assets/triangle_right.svg
webdebugger/build/assets/triangle_right.svg
<?xml version="1.0" encoding="UTF-8" standalone="no"?> <svg width="8px" height="12px" viewBox="0 0 8 12" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> <!-- Generator: Sketch 3.8.3 (29802) - http://www.bohemiancoding.com/sketch --> <title>Shape</title> <desc>Created with Sketch.</desc> <defs></defs> <g id="Sketch-App-Sources" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" opacity="0.539999962"> <g id="system-icons" transform="translate(-372.000000, -105.000000)" fill="#FFFFFF"> <g id="chevron-right" transform="translate(364.000000, 99.000000)"> <polygon id="Shape" points="8.59997559 7.4 9.99997559 6 15.9999756 12 9.99997559 18 8.59997559 16.6 13.1999756 12"></polygon> </g> </g> </g> </svg>
<?xml version="1.0" encoding="UTF-8" standalone="no"?> <svg width="8px" height="12px" viewBox="0 0 8 12" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> <!-- Generator: Sketch 3.8.3 (29802) - http://www.bohemiancoding.com/sketch --> <title>Shape</title> <desc>Created with Sketch.</desc> <defs></defs> <g id="Sketch-App-Sources" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" opacity="1"> <g id="system-icons" transform="translate(-372.000000, -105.000000)" fill="#FFFFFF"> <g id="chevron-right" transform="translate(364.000000, 99.000000)"> <polygon id="Shape" points="8.59997559 7.4 9.99997559 6 15.9999756 12 9.99997559 18 8.59997559 16.6 13.1999756 12"></polygon> </g> </g> </g> </svg>
Revert "Commit changed triangle-right icon in build/"
Revert "Commit changed triangle-right icon in build/" This reverts commit e918f5f36611c9a992e6ca9077985c5012e062f7.
SVG
mit
andywer/postcss-debug,andywer/postcss-debug
svg
## Code Before: <?xml version="1.0" encoding="UTF-8" standalone="no"?> <svg width="8px" height="12px" viewBox="0 0 8 12" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> <!-- Generator: Sketch 3.8.3 (29802) - http://www.bohemiancoding.com/sketch --> <title>Shape</title> <desc>Created with Sketch.</desc> <defs></defs> <g id="Sketch-App-Sources" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" opacity="0.539999962"> <g id="system-icons" transform="translate(-372.000000, -105.000000)" fill="#FFFFFF"> <g id="chevron-right" transform="translate(364.000000, 99.000000)"> <polygon id="Shape" points="8.59997559 7.4 9.99997559 6 15.9999756 12 9.99997559 18 8.59997559 16.6 13.1999756 12"></polygon> </g> </g> </g> </svg> ## Instruction: Revert "Commit changed triangle-right icon in build/" This reverts commit e918f5f36611c9a992e6ca9077985c5012e062f7. ## Code After: <?xml version="1.0" encoding="UTF-8" standalone="no"?> <svg width="8px" height="12px" viewBox="0 0 8 12" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> <!-- Generator: Sketch 3.8.3 (29802) - http://www.bohemiancoding.com/sketch --> <title>Shape</title> <desc>Created with Sketch.</desc> <defs></defs> <g id="Sketch-App-Sources" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" opacity="1"> <g id="system-icons" transform="translate(-372.000000, -105.000000)" fill="#FFFFFF"> <g id="chevron-right" transform="translate(364.000000, 99.000000)"> <polygon id="Shape" points="8.59997559 7.4 9.99997559 6 15.9999756 12 9.99997559 18 8.59997559 16.6 13.1999756 12"></polygon> </g> </g> </g> </svg>
<?xml version="1.0" encoding="UTF-8" standalone="no"?> <svg width="8px" height="12px" viewBox="0 0 8 12" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> <!-- Generator: Sketch 3.8.3 (29802) - http://www.bohemiancoding.com/sketch --> <title>Shape</title> <desc>Created with Sketch.</desc> <defs></defs> - <g id="Sketch-App-Sources" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" opacity="0.539999962"> ? ^^^^^^^^^^^ + <g id="Sketch-App-Sources" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" opacity="1"> ? ^ <g id="system-icons" transform="translate(-372.000000, -105.000000)" fill="#FFFFFF"> <g id="chevron-right" transform="translate(364.000000, 99.000000)"> <polygon id="Shape" points="8.59997559 7.4 9.99997559 6 15.9999756 12 9.99997559 18 8.59997559 16.6 13.1999756 12"></polygon> </g> </g> </g> </svg>
2
0.142857
1
1
74f482d35e3aeadc75860a6f1c3777f4772e8ff8
src/userprofiles/queries/index.js
src/userprofiles/queries/index.js
// export default { // FETCH_USER_CONSTELLATIONS_REQUEST: function (action) { // }, // CREATE_CONSTELLATIONS_REQUEST: function (action) { // }, // };
export default { FETCH_USERS_REQUEST: function (action) { }, USER_LOGIN_REQUEST: function (action) { }, USER_LOGOUT_REQUEST: function (action) { }, CREATE_USER_REQUEST: function (action) { }, EDIT_USER_REQUEST: function (action) { }, DELETE_USER_REQUEST: function (action) { }, ADD_TRUST_REQUEST: function (action) { }, REVOKE_TRUST_REQUEST: function (action) { }, ADD_WATCH_REQUEST: function (action) { }, REVOKE_WATCH_REQUEST: function (action) { }, ADD_SUPPORT_REQUEST: function (action) { }, REVOKE_SUPPORT_REQUEST: function (action) { }, SET_TRUST_DEGREES_REQUEST: function (action) { }, };
Set queries for User Profiles
Set queries for User Profiles
JavaScript
mit
GetGee/G,GetGee/G
javascript
## Code Before: // export default { // FETCH_USER_CONSTELLATIONS_REQUEST: function (action) { // }, // CREATE_CONSTELLATIONS_REQUEST: function (action) { // }, // }; ## Instruction: Set queries for User Profiles ## Code After: export default { FETCH_USERS_REQUEST: function (action) { }, USER_LOGIN_REQUEST: function (action) { }, USER_LOGOUT_REQUEST: function (action) { }, CREATE_USER_REQUEST: function (action) { }, EDIT_USER_REQUEST: function (action) { }, DELETE_USER_REQUEST: function (action) { }, ADD_TRUST_REQUEST: function (action) { }, REVOKE_TRUST_REQUEST: function (action) { }, ADD_WATCH_REQUEST: function (action) { }, REVOKE_WATCH_REQUEST: function (action) { }, ADD_SUPPORT_REQUEST: function (action) { }, REVOKE_SUPPORT_REQUEST: function (action) { }, SET_TRUST_DEGREES_REQUEST: function (action) { }, };
- // export default { ? --- + export default { - // FETCH_USER_CONSTELLATIONS_REQUEST: function (action) { ? --- -------------- + FETCH_USERS_REQUEST: function (action) { - // }, ? --- + }, - // CREATE_CONSTELLATIONS_REQUEST: function (action) { + USER_LOGIN_REQUEST: function (action) { - // }, ? --- + }, - // }; + USER_LOGOUT_REQUEST: function (action) { + + }, + CREATE_USER_REQUEST: function (action) { + + }, + EDIT_USER_REQUEST: function (action) { + + }, + DELETE_USER_REQUEST: function (action) { + + }, + ADD_TRUST_REQUEST: function (action) { + + }, + REVOKE_TRUST_REQUEST: function (action) { + + }, + ADD_WATCH_REQUEST: function (action) { + + }, + REVOKE_WATCH_REQUEST: function (action) { + + }, + ADD_SUPPORT_REQUEST: function (action) { + + }, + REVOKE_SUPPORT_REQUEST: function (action) { + + }, + SET_TRUST_DEGREES_REQUEST: function (action) { + + }, + };
45
5.625
39
6
18b3aaeb6ac486f6e3713f1014757923edc419cc
app/containers/NavigationContainer/reducer.js
app/containers/NavigationContainer/reducer.js
/* * * NavigationContainer reducer * */ import { fromJS } from 'immutable'; import { REQUEST_TOPICS_SUCCEEDED, } from './constants'; const initialState = fromJS({ topics: [], }); function navigationContainerReducer(state = initialState, action) { switch (action.type) { case REQUEST_TOPICS_SUCCEEDED: return state.set('topics', action.topics); default: return state; } } export default navigationContainerReducer;
/* * * NavigationContainer reducer * */ import { fromJS } from 'immutable'; import { REQUEST_TOPICS_SUCCEEDED, SELECT_TOPIC, } from './constants'; const initialState = fromJS({ topics: [], }); function navigationContainerReducer(state = initialState, action) { switch (action.type) { case REQUEST_TOPICS_SUCCEEDED: return state.set('topics', action.topics); case SELECT_TOPIC: return state.set('selectedTopic', action.topic); default: return state; } } export default navigationContainerReducer;
Set selected topic on state
Set selected topic on state
JavaScript
mit
GeertHuls/react-async-saga-example,GeertHuls/react-async-saga-example
javascript
## Code Before: /* * * NavigationContainer reducer * */ import { fromJS } from 'immutable'; import { REQUEST_TOPICS_SUCCEEDED, } from './constants'; const initialState = fromJS({ topics: [], }); function navigationContainerReducer(state = initialState, action) { switch (action.type) { case REQUEST_TOPICS_SUCCEEDED: return state.set('topics', action.topics); default: return state; } } export default navigationContainerReducer; ## Instruction: Set selected topic on state ## Code After: /* * * NavigationContainer reducer * */ import { fromJS } from 'immutable'; import { REQUEST_TOPICS_SUCCEEDED, SELECT_TOPIC, } from './constants'; const initialState = fromJS({ topics: [], }); function navigationContainerReducer(state = initialState, action) { switch (action.type) { case REQUEST_TOPICS_SUCCEEDED: return state.set('topics', action.topics); case SELECT_TOPIC: return state.set('selectedTopic', action.topic); default: return state; } } export default navigationContainerReducer;
/* * * NavigationContainer reducer * */ import { fromJS } from 'immutable'; import { REQUEST_TOPICS_SUCCEEDED, + SELECT_TOPIC, } from './constants'; const initialState = fromJS({ topics: [], }); function navigationContainerReducer(state = initialState, action) { switch (action.type) { case REQUEST_TOPICS_SUCCEEDED: return state.set('topics', action.topics); + case SELECT_TOPIC: + return state.set('selectedTopic', action.topic); default: return state; } } export default navigationContainerReducer;
3
0.115385
3
0