Dataset columns (each record below lists these fields in this order, separated by `|`):

| Column | Type | Size / range |
| --- | --- | --- |
| commit | string | length 40 |
| old_file | string | length 4-184 |
| new_file | string | length 4-184 |
| old_contents | string | length 1-3.6k |
| new_contents | string | length 5-3.38k |
| subject | string | length 15-778 |
| message | string | length 16-6.74k |
| lang | string | 201 classes |
| license | string | 13 classes |
| repos | string | length 6-116k |
| config | string | 201 classes |
| content | string | length 137-7.24k |
| diff | string | length 26-5.55k |
| diff_length | int64 | 1-123 |
| relative_diff_length | float64 | 0.01-89 |
| n_lines_added | int64 | 0-108 |
| n_lines_deleted | int64 | 0-106 |
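As a minimal sketch of how records with this schema might be consumed, the snippet below loads them with the Hugging Face `datasets` library and filters on the `lang` and `diff_length` columns. The file name `commit_diffs.jsonl` is a hypothetical placeholder (this assumes the rows are exported as JSON lines), not an identifier taken from this dataset.

```python
# Minimal sketch: load commit-diff records with the schema above and filter them.
# "commit_diffs.jsonl" is a hypothetical local export, not the real dataset path.
from datasets import load_dataset

ds = load_dataset("json", data_files="commit_diffs.jsonl", split="train")

# Keep only small HTML+ERB changes, using the `lang` and `diff_length` columns.
small_erb = ds.filter(lambda r: r["lang"] == "HTML+ERB" and r["diff_length"] <= 10)

# Print a few matching records: commit hash, subject line, and the diff text.
for record in small_erb.select(range(min(3, len(small_erb)))):
    print(record["commit"], record["subject"])
    print(record["diff"])
```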
cd3f1fc991ee7e34bfdae699b2b795338be3dde1 | puppet/modules/elasticsearch/templates/CirrusSearch.php.erb | puppet/modules/elasticsearch/templates/CirrusSearch.php.erb |
include_once "$IP/extensions/CirrusSearch/tests/jenkins/FullyFeaturedConfig.php";
$wgCirrusSearchExtraIndexes[ NS_FILE ] = array( 'commonswiki_file' );
$wgCirrusSearchDevelOptions['allow_nuke'] = true;
<% if @enable_eventgate -%>
$wgCirrusSearchRequestEventSampling = 1.0;
// Log api-requests via eventgate-analytics service.
$wgMWLoggerDefaultSpi['args'][0]['loggers']['cirrussearch-request'] = array(
'handlers' => array( 'eventgate-analytics' ),
);
<% end -%>
|
include_once "$IP/extensions/CirrusSearch/tests/jenkins/FullyFeaturedConfig.php";
$wgCirrusSearchExtraIndexes[ NS_FILE ] = array( 'commonswiki_file' );
$wgCirrusSearchDevelOptions['allow_nuke'] = true;
<% if @enable_eventgate -%>
$wgCirrusSearchRequestEventSampling = 1.0;
// Log api-requests via eventgate-analytics service.
$wgMWLoggerDefaultSpi['args'][0]['loggers']['cirrussearch-request'] = array(
'handlers' => array( 'eventgate-analytics' ),
);
<% end -%>
$wgCirrusSearchWMFExtraFeatures = [
'ores_articletopics' => [
'build' => true,
],
];
| Enable ORES articletopic handling in cirrussearch role | Enable ORES articletopic handling in cirrussearch role
Bug: T240550
Change-Id: Id1f7b76de89292d19f3c7bd9cfbea13ec8a280cc
| HTML+ERB | mit | wikimedia/mediawiki-vagrant,wikimedia/mediawiki-vagrant,wikimedia/mediawiki-vagrant,wikimedia/mediawiki-vagrant,wikimedia/mediawiki-vagrant,wikimedia/mediawiki-vagrant | html+erb | ## Code Before:
include_once "$IP/extensions/CirrusSearch/tests/jenkins/FullyFeaturedConfig.php";
$wgCirrusSearchExtraIndexes[ NS_FILE ] = array( 'commonswiki_file' );
$wgCirrusSearchDevelOptions['allow_nuke'] = true;
<% if @enable_eventgate -%>
$wgCirrusSearchRequestEventSampling = 1.0;
// Log api-requests via eventgate-analytics service.
$wgMWLoggerDefaultSpi['args'][0]['loggers']['cirrussearch-request'] = array(
'handlers' => array( 'eventgate-analytics' ),
);
<% end -%>
## Instruction:
Enable ORES articletopic handling in cirrussearch role
Bug: T240550
Change-Id: Id1f7b76de89292d19f3c7bd9cfbea13ec8a280cc
## Code After:
include_once "$IP/extensions/CirrusSearch/tests/jenkins/FullyFeaturedConfig.php";
$wgCirrusSearchExtraIndexes[ NS_FILE ] = array( 'commonswiki_file' );
$wgCirrusSearchDevelOptions['allow_nuke'] = true;
<% if @enable_eventgate -%>
$wgCirrusSearchRequestEventSampling = 1.0;
// Log api-requests via eventgate-analytics service.
$wgMWLoggerDefaultSpi['args'][0]['loggers']['cirrussearch-request'] = array(
'handlers' => array( 'eventgate-analytics' ),
);
<% end -%>
$wgCirrusSearchWMFExtraFeatures = [
'ores_articletopics' => [
'build' => true,
],
];
|
include_once "$IP/extensions/CirrusSearch/tests/jenkins/FullyFeaturedConfig.php";
$wgCirrusSearchExtraIndexes[ NS_FILE ] = array( 'commonswiki_file' );
$wgCirrusSearchDevelOptions['allow_nuke'] = true;
<% if @enable_eventgate -%>
$wgCirrusSearchRequestEventSampling = 1.0;
// Log api-requests via eventgate-analytics service.
$wgMWLoggerDefaultSpi['args'][0]['loggers']['cirrussearch-request'] = array(
'handlers' => array( 'eventgate-analytics' ),
);
<% end -%>
+
+ $wgCirrusSearchWMFExtraFeatures = [
+ 'ores_articletopics' => [
+ 'build' => true,
+ ],
+ ]; | 6 | 0.428571 | 6 | 0 |
d40eab44fbe0be77545758e7251be8b71fd38317 | packages/coreutils/src/time.ts | packages/coreutils/src/time.ts | // Copyright (c) Jupyter Development Team.
// Distributed under the terms of the Modified BSD License.
import moment from 'moment';
/**
* The namespace for date functions.
*/
export namespace Time {
/**
* Convert a timestring to a human readable string (e.g. 'two minutes ago').
*
* @param value - The date timestring or date object.
*
* @returns A formatted date.
*/
export function formatHuman(value: string | Date): string {
let time = moment(value).fromNow();
time = time === 'a few seconds ago' ? 'seconds ago' : time;
return time;
}
/**
* Convert a timestring to a date format.
*
* @param value - The date timestring or date object.
*
* @param format - The format string.
*
* @returns A formatted date.
*/
export function format(
value: string | Date,
format = 'YYYY-MM-DD HH:mm'
): string {
return moment(value).format(format);
}
}
| // Copyright (c) Jupyter Development Team.
// Distributed under the terms of the Modified BSD License.
import moment from 'moment';
/**
* The namespace for date functions.
*/
export namespace Time {
/**
* Convert a timestring to a human readable string (e.g. 'two minutes ago').
*
* @param value - The date timestring or date object.
*
* @returns A formatted date.
*/
export function formatHuman(value: string | Date): string {
let time = moment(value).fromNow();
time = time === 'a few seconds ago' ? 'seconds ago' : time;
return time;
}
/**
* Convert a timestring to a date format.
*
* @param value - The date timestring or date object.
*
* @param format - The format string.
*
* @returns A formatted date.
*/
export function format(
value: string | Date,
timeFormat = 'YYYY-MM-DD HH:mm'
): string {
return moment(value).format(timeFormat);
}
}
| Fix duplicate parameter in function with default parameter values | Fix duplicate parameter in function with default parameter values
| TypeScript | bsd-3-clause | jupyter/jupyterlab,jupyter/jupyterlab,jupyter/jupyterlab,jupyter/jupyterlab,jupyter/jupyterlab | typescript | ## Code Before:
// Copyright (c) Jupyter Development Team.
// Distributed under the terms of the Modified BSD License.
import moment from 'moment';
/**
* The namespace for date functions.
*/
export namespace Time {
/**
* Convert a timestring to a human readable string (e.g. 'two minutes ago').
*
* @param value - The date timestring or date object.
*
* @returns A formatted date.
*/
export function formatHuman(value: string | Date): string {
let time = moment(value).fromNow();
time = time === 'a few seconds ago' ? 'seconds ago' : time;
return time;
}
/**
* Convert a timestring to a date format.
*
* @param value - The date timestring or date object.
*
* @param format - The format string.
*
* @returns A formatted date.
*/
export function format(
value: string | Date,
format = 'YYYY-MM-DD HH:mm'
): string {
return moment(value).format(format);
}
}
## Instruction:
Fix duplicate parameter in function with default parameter values
## Code After:
// Copyright (c) Jupyter Development Team.
// Distributed under the terms of the Modified BSD License.
import moment from 'moment';
/**
* The namespace for date functions.
*/
export namespace Time {
/**
* Convert a timestring to a human readable string (e.g. 'two minutes ago').
*
* @param value - The date timestring or date object.
*
* @returns A formatted date.
*/
export function formatHuman(value: string | Date): string {
let time = moment(value).fromNow();
time = time === 'a few seconds ago' ? 'seconds ago' : time;
return time;
}
/**
* Convert a timestring to a date format.
*
* @param value - The date timestring or date object.
*
* @param format - The format string.
*
* @returns A formatted date.
*/
export function format(
value: string | Date,
timeFormat = 'YYYY-MM-DD HH:mm'
): string {
return moment(value).format(timeFormat);
}
}
| // Copyright (c) Jupyter Development Team.
// Distributed under the terms of the Modified BSD License.
import moment from 'moment';
/**
* The namespace for date functions.
*/
export namespace Time {
/**
* Convert a timestring to a human readable string (e.g. 'two minutes ago').
*
* @param value - The date timestring or date object.
*
* @returns A formatted date.
*/
export function formatHuman(value: string | Date): string {
let time = moment(value).fromNow();
time = time === 'a few seconds ago' ? 'seconds ago' : time;
return time;
}
/**
* Convert a timestring to a date format.
*
* @param value - The date timestring or date object.
*
* @param format - The format string.
*
* @returns A formatted date.
*/
export function format(
value: string | Date,
- format = 'YYYY-MM-DD HH:mm'
? ^
+ timeFormat = 'YYYY-MM-DD HH:mm'
? ^^^^^
): string {
- return moment(value).format(format);
? ^
+ return moment(value).format(timeFormat);
? ^^^^^
}
} | 4 | 0.105263 | 2 | 2 |
661f4a968c65c877bd3930159317dbadd455f287 | core/src/main/scala/org/http4s/ContextResponse.scala | core/src/main/scala/org/http4s/ContextResponse.scala | /*
* Copyright 2013-2020 http4s.org
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.http4s
import cats._
final case class ContextResponse[F[_], A](context: A, response: Response[F]) {
def mapContext[B](f: A => B): ContextResponse[F, B] =
ContextResponse(f(context), response)
def mapK[G[_]](fk: F ~> G): ContextResponse[G, A] =
ContextResponse(context, response.mapK(fk))
}
object ContextResponse{}
| /*
* Copyright 2013-2020 http4s.org
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.http4s
import cats._
final case class ContextResponse[F[_], A](context: A, response: Response[F]) {
def mapContext[B](f: A => B): ContextResponse[F, B] =
ContextResponse(f(context), response)
def mapK[G[_]](fk: F ~> G): ContextResponse[G, A] =
ContextResponse(context, response.mapK(fk))
}
// Include to avoid binary compatibility issues with the apply method if/when
// we ever need a companion object in the future.
object ContextResponse {}
| Add Comment About Why Companion Object Is Empty | Add Comment About Why Companion Object Is Empty
| Scala | apache-2.0 | rossabaker/http4s,http4s/http4s,rossabaker/http4s | scala | ## Code Before:
/*
* Copyright 2013-2020 http4s.org
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.http4s
import cats._
final case class ContextResponse[F[_], A](context: A, response: Response[F]) {
def mapContext[B](f: A => B): ContextResponse[F, B] =
ContextResponse(f(context), response)
def mapK[G[_]](fk: F ~> G): ContextResponse[G, A] =
ContextResponse(context, response.mapK(fk))
}
object ContextResponse{}
## Instruction:
Add Comment About Why Companion Object Is Empty
## Code After:
/*
* Copyright 2013-2020 http4s.org
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.http4s
import cats._
final case class ContextResponse[F[_], A](context: A, response: Response[F]) {
def mapContext[B](f: A => B): ContextResponse[F, B] =
ContextResponse(f(context), response)
def mapK[G[_]](fk: F ~> G): ContextResponse[G, A] =
ContextResponse(context, response.mapK(fk))
}
// Include to avoid binary compatibility issues with the apply method if/when
// we ever need a companion object in the future.
object ContextResponse {}
| /*
* Copyright 2013-2020 http4s.org
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.http4s
import cats._
final case class ContextResponse[F[_], A](context: A, response: Response[F]) {
def mapContext[B](f: A => B): ContextResponse[F, B] =
ContextResponse(f(context), response)
def mapK[G[_]](fk: F ~> G): ContextResponse[G, A] =
ContextResponse(context, response.mapK(fk))
}
+ // Include to avoid binary compatibility issues with the apply method if/when
+ // we ever need a companion object in the future.
- object ContextResponse{}
+ object ContextResponse {}
? +
| 4 | 0.210526 | 3 | 1 |
06bd941d102759823c7a86a34e8af81a09be3b96 | raml/openwebslides.raml | raml/openwebslides.raml |
title: OpenWebslides API
version: 1
baseUri: http://localhost:5000/api/
mediaType: application/vnd.api+json
/users:
displayName: Users
description: User API
get:
description: List of all users
post:
description: Create a new user (sign up)
/{id}:
get:
description: Get user
patch:
description: Update user
put:
description: Update user
delete:
description: Delete user
/decks:
displayName: Decks
description: Slidedeck API
get:
description: List of all visible decks
post:
description: Create a new deck
/{id}:
get:
description: Get deck
patch:
description: Update deck
put:
description: Update deck
delete:
description: Delete deck
|
title: OpenWebslides
version: 1
baseUri: http://localhost:5000/api/
mediaType: application/vnd.api+json
/users:
displayName: Users
description: User API
get:
description: List of all users
post:
description: Create a new user (sign up)
/{id}:
get:
description: Get user
patch:
description: Update user
put:
description: Update user
delete:
description: Delete user
/decks:
displayName: Decks
description: Slidedeck API
get:
description: List of all visible decks
post:
description: Create a new deck
/{id}:
get:
description: Get deck
patch:
description: Update deck
put:
description: Update deck
delete:
description: Delete deck
/auth:
displayName: Authentication
description: Authentication API
/{provider}:
get:
description: OAuth2 redirection URL
/callback:
get:
description: OAuth2 Provider callback URL
/token:
post:
description: Obtain an authentication token
body:
application/json:
type: object
properties:
email: string
password: string
responses:
201:
body:
application/json:
type: object
properties:
jwt: string
example:
{ "jwt": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE0OTI1OTMxNTcsInN1YiI6MSwidmVyIjoxfQ.cFLhHCC4vFmkGg3FkFaDUA2qnk-RxWkz_pt2SY8VOcQ" }
403:
body:
application/json:
type: object
properties:
error: string
example: |
{ "error": "Your account has not been activated yet." }
/confirm:
get:
description: Verify a user's email address
queryParameters:
confirmation_token:
description: Confirmation token sent by email
required: true
type: string
example: Ez2B7waoUofjCrGqVT9vz7Fy
| Add draft auth API description | Add draft auth API description
| RAML | mit | OpenWebslides/OpenWebslides,OpenWebslides/OpenWebslides,OpenWebslides/OpenWebslides,OpenWebslides/OpenWebslides | raml | ## Code Before:
title: OpenWebslides API
version: 1
baseUri: http://localhost:5000/api/
mediaType: application/vnd.api+json
/users:
displayName: Users
description: User API
get:
description: List of all users
post:
description: Create a new user (sign up)
/{id}:
get:
description: Get user
patch:
description: Update user
put:
description: Update user
delete:
description: Delete user
/decks:
displayName: Decks
description: Slidedeck API
get:
description: List of all visible decks
post:
description: Create a new deck
/{id}:
get:
description: Get deck
patch:
description: Update deck
put:
description: Update deck
delete:
description: Delete deck
## Instruction:
Add draft auth API description
## Code After:
title: OpenWebslides
version: 1
baseUri: http://localhost:5000/api/
mediaType: application/vnd.api+json
/users:
displayName: Users
description: User API
get:
description: List of all users
post:
description: Create a new user (sign up)
/{id}:
get:
description: Get user
patch:
description: Update user
put:
description: Update user
delete:
description: Delete user
/decks:
displayName: Decks
description: Slidedeck API
get:
description: List of all visible decks
post:
description: Create a new deck
/{id}:
get:
description: Get deck
patch:
description: Update deck
put:
description: Update deck
delete:
description: Delete deck
/auth:
displayName: Authentication
description: Authentication API
/{provider}:
get:
description: OAuth2 redirection URL
/callback:
get:
description: OAuth2 Provider callback URL
/token:
post:
description: Obtain an authentication token
body:
application/json:
type: object
properties:
email: string
password: string
responses:
201:
body:
application/json:
type: object
properties:
jwt: string
example:
{ "jwt": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE0OTI1OTMxNTcsInN1YiI6MSwidmVyIjoxfQ.cFLhHCC4vFmkGg3FkFaDUA2qnk-RxWkz_pt2SY8VOcQ" }
403:
body:
application/json:
type: object
properties:
error: string
example: |
{ "error": "Your account has not been activated yet." }
/confirm:
get:
description: Verify a user's email address
queryParameters:
confirmation_token:
description: Confirmation token sent by email
required: true
type: string
example: Ez2B7waoUofjCrGqVT9vz7Fy
|
- title: OpenWebslides API
? ----
+ title: OpenWebslides
version: 1
baseUri: http://localhost:5000/api/
mediaType: application/vnd.api+json
/users:
displayName: Users
description: User API
get:
description: List of all users
post:
description: Create a new user (sign up)
/{id}:
get:
description: Get user
patch:
description: Update user
put:
description: Update user
delete:
description: Delete user
/decks:
displayName: Decks
description: Slidedeck API
get:
description: List of all visible decks
post:
description: Create a new deck
/{id}:
get:
description: Get deck
patch:
description: Update deck
put:
description: Update deck
delete:
description: Delete deck
+ /auth:
+ displayName: Authentication
+ description: Authentication API
+ /{provider}:
+ get:
+ description: OAuth2 redirection URL
+ /callback:
+ get:
+ description: OAuth2 Provider callback URL
+ /token:
+ post:
+ description: Obtain an authentication token
+ body:
+ application/json:
+ type: object
+ properties:
+ email: string
+ password: string
+ responses:
+ 201:
+ body:
+ application/json:
+ type: object
+ properties:
+ jwt: string
+ example:
+ { "jwt": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE0OTI1OTMxNTcsInN1YiI6MSwidmVyIjoxfQ.cFLhHCC4vFmkGg3FkFaDUA2qnk-RxWkz_pt2SY8VOcQ" }
+ 403:
+ body:
+ application/json:
+ type: object
+ properties:
+ error: string
+ example: |
+ { "error": "Your account has not been activated yet." }
+ /confirm:
+ get:
+ description: Verify a user's email address
+ queryParameters:
+ confirmation_token:
+ description: Confirmation token sent by email
+ required: true
+ type: string
+ example: Ez2B7waoUofjCrGqVT9vz7Fy | 46 | 1.243243 | 45 | 1 |
640c5633198086b9c0332d3d220dc82b49e5eba7 | run.sh | run.sh |
cd `dirname $0`
path=`curl -s http://www.bing.com | grep -m1 -oP 'url:\s*("[^"]+")' | cut -d: -f2 | jq '.' | tr -d '"'`
url=http://www.bing.com$path
file=`echo $url | rev | cut -d/ -f1 | rev`
out=wallpapers/$file
if [ -f "$out" ]; then
echo "Wallpaper already exists, exiting."
exit -1
fi
mkdir -p wallpapers
curl -s -o "$out" "$url"
git checkout -b wallpapers
git add -v wallpapers
git commit -m "Add wallpaper."
ssh-agent bash -c "ssh-add bing-wallpapers-key && git push -u origin wallpapers"
|
cd `dirname $0`
path=`curl -s http://www.bing.com | grep -m1 -oP 'url:\s*("[^"]+")' | cut -d: -f2 | jq '.' | tr -d '"'`
if [ -z "$path" ]; then
echo "Unable to find wallpaper URL, exiting."
exit 1
fi
url=http://www.bing.com$path
file=`echo $url | rev | cut -d/ -f1 | rev`
out=wallpapers/$file
if [ -f "$out" ]; then
echo "Wallpaper already exists, exiting."
exit 1
fi
mkdir -p wallpapers
curl -s -o "$out" "$url"
git checkout -b wallpapers
git add -v wallpapers
git commit -m "Add wallpaper."
ssh-agent bash -c "ssh-add bing-wallpapers-key && git push -u origin wallpapers"
| Handle error condition when finding URL. | Handle error condition when finding URL.
| Shell | mit | schmich/wallpapers | shell | ## Code Before:
cd `dirname $0`
path=`curl -s http://www.bing.com | grep -m1 -oP 'url:\s*("[^"]+")' | cut -d: -f2 | jq '.' | tr -d '"'`
url=http://www.bing.com$path
file=`echo $url | rev | cut -d/ -f1 | rev`
out=wallpapers/$file
if [ -f "$out" ]; then
echo "Wallpaper already exists, exiting."
exit -1
fi
mkdir -p wallpapers
curl -s -o "$out" "$url"
git checkout -b wallpapers
git add -v wallpapers
git commit -m "Add wallpaper."
ssh-agent bash -c "ssh-add bing-wallpapers-key && git push -u origin wallpapers"
## Instruction:
Handle error condition when finding URL.
## Code After:
cd `dirname $0`
path=`curl -s http://www.bing.com | grep -m1 -oP 'url:\s*("[^"]+")' | cut -d: -f2 | jq '.' | tr -d '"'`
if [ -z "$path" ]; then
echo "Unable to find wallpaper URL, exiting."
exit 1
fi
url=http://www.bing.com$path
file=`echo $url | rev | cut -d/ -f1 | rev`
out=wallpapers/$file
if [ -f "$out" ]; then
echo "Wallpaper already exists, exiting."
exit 1
fi
mkdir -p wallpapers
curl -s -o "$out" "$url"
git checkout -b wallpapers
git add -v wallpapers
git commit -m "Add wallpaper."
ssh-agent bash -c "ssh-add bing-wallpapers-key && git push -u origin wallpapers"
|
cd `dirname $0`
path=`curl -s http://www.bing.com | grep -m1 -oP 'url:\s*("[^"]+")' | cut -d: -f2 | jq '.' | tr -d '"'`
+ if [ -z "$path" ]; then
+ echo "Unable to find wallpaper URL, exiting."
+ exit 1
+ fi
+
url=http://www.bing.com$path
file=`echo $url | rev | cut -d/ -f1 | rev`
out=wallpapers/$file
if [ -f "$out" ]; then
echo "Wallpaper already exists, exiting."
- exit -1
? -
+ exit 1
fi
mkdir -p wallpapers
curl -s -o "$out" "$url"
git checkout -b wallpapers
git add -v wallpapers
git commit -m "Add wallpaper."
ssh-agent bash -c "ssh-add bing-wallpapers-key && git push -u origin wallpapers" | 7 | 0.333333 | 6 | 1 |
70c45647bef1bc7c96f97dc4b0e939d7b1034279 | lib/generate.sh | lib/generate.sh | pushd `dirname $0` > /dev/null
THISDIR=`pwd`
popd > /dev/null
PARENTDIR="$(dirname "$THISDIR")"
PUBLICDIR="$PARENTDIR/public"
BUILDDIR="$PARENTDIR/build"
# Copy all static files from the public directory to the build directory
rm -rf $BUILDDIR
cp -r $PUBLICDIR $BUILDDIR
# Create location file for the frontend app
echo "window.locationEntries = " > "$BUILDDIR/js/location-data.js"
node lib/json-content.js >> "$BUILDDIR/js/location-data.js" | set -e # stop on error
# Adapted from http://stackoverflow.com/a/4774063
pushd `dirname $0` > /dev/null
THISDIR=`pwd`
popd > /dev/null
PARENTDIR="$(dirname "$THISDIR")"
PUBLICDIR="$PARENTDIR/public"
BUILDDIR="$PARENTDIR/build"
# Copy all static files from the public directory to the build directory
rm -rf $BUILDDIR
cp -r $PUBLICDIR $BUILDDIR
# Create location file for the frontend app
echo "window.locationEntries = " > "$BUILDDIR/js/location-data.js"
node lib/json-content.js >> "$BUILDDIR/js/location-data.js" | Make generation script more failure friendly | Make generation script more failure friendly
| Shell | mit | kaplas/camino-tracker,kaplas/camino-tracker | shell | ## Code Before:
pushd `dirname $0` > /dev/null
THISDIR=`pwd`
popd > /dev/null
PARENTDIR="$(dirname "$THISDIR")"
PUBLICDIR="$PARENTDIR/public"
BUILDDIR="$PARENTDIR/build"
# Copy all static files from the public directory to the build directory
rm -rf $BUILDDIR
cp -r $PUBLICDIR $BUILDDIR
# Create location file for the frontend app
echo "window.locationEntries = " > "$BUILDDIR/js/location-data.js"
node lib/json-content.js >> "$BUILDDIR/js/location-data.js"
## Instruction:
Make generation script more failure friendly
## Code After:
set -e # stop on error
# Adapted from http://stackoverflow.com/a/4774063
pushd `dirname $0` > /dev/null
THISDIR=`pwd`
popd > /dev/null
PARENTDIR="$(dirname "$THISDIR")"
PUBLICDIR="$PARENTDIR/public"
BUILDDIR="$PARENTDIR/build"
# Copy all static files from the public directory to the build directory
rm -rf $BUILDDIR
cp -r $PUBLICDIR $BUILDDIR
# Create location file for the frontend app
echo "window.locationEntries = " > "$BUILDDIR/js/location-data.js"
node lib/json-content.js >> "$BUILDDIR/js/location-data.js" | + set -e # stop on error
+
+ # Adapted from http://stackoverflow.com/a/4774063
pushd `dirname $0` > /dev/null
THISDIR=`pwd`
popd > /dev/null
PARENTDIR="$(dirname "$THISDIR")"
PUBLICDIR="$PARENTDIR/public"
BUILDDIR="$PARENTDIR/build"
# Copy all static files from the public directory to the build directory
rm -rf $BUILDDIR
cp -r $PUBLICDIR $BUILDDIR
# Create location file for the frontend app
echo "window.locationEntries = " > "$BUILDDIR/js/location-data.js"
node lib/json-content.js >> "$BUILDDIR/js/location-data.js" | 3 | 0.2 | 3 | 0 |
daf40eba1e5a3e9764ab2425e5661b08bc543dfc | tests/machine.nix | tests/machine.nix | {disnix, dysnomia}:
{config, pkgs, ...}:
{
virtualisation.writableStore = true;
virtualisation.pathsInNixDB = [ pkgs.stdenv ];
ids.gids = { disnix = 200; };
users.extraGroups = [ { gid = 200; name = "disnix"; } ];
users.extraUsers = [
{ uid = 1000;
name = "unprivileged";
group = "users";
shell = "/bin/sh";
description = "Unprivileged user for the disnix-service";
}
{ uid = 1001;
name = "privileged";
group = "users";
shell = "/bin/sh";
extraGroups = [ "disnix" ];
description = "Privileged user for the disnix-service";
}
];
services.dbus.enable = true;
services.dbus.packages = [ disnix ];
services.openssh.enable = true;
jobs.disnix =
{ description = "Disnix server";
wantedBy = [ "multi-user.target" ];
after = [ "dbus.service" ];
path = [ pkgs.nix pkgs.getopt disnix dysnomia ];
environment = {
HOME = "/root";
};
exec = "disnix-service";
};
environment.systemPackages = [ disnix dysnomia ];
}
| {disnix, dysnomia}:
{config, pkgs, ...}:
{
virtualisation.writableStore = true;
virtualisation.pathsInNixDB = [ pkgs.stdenv ];
ids.gids = { disnix = 200; };
users.extraGroups = [ { gid = 200; name = "disnix"; } ];
users.extraUsers = [
{ uid = 1000;
name = "unprivileged";
group = "users";
shell = "/bin/sh";
description = "Unprivileged user for the disnix-service";
}
{ uid = 1001;
name = "privileged";
group = "users";
shell = "/bin/sh";
extraGroups = [ "disnix" ];
description = "Privileged user for the disnix-service";
}
];
services.dbus.enable = true;
services.dbus.packages = [ disnix ];
services.openssh.enable = true;
systemd.services.disnix =
{ description = "Disnix server";
wantedBy = [ "multi-user.target" ];
after = [ "dbus.service" ];
path = [ pkgs.nix pkgs.getopt disnix dysnomia ];
environment = {
HOME = "/root";
};
serviceConfig.ExecStart = "${disnix}/bin/disnix-service";
};
environment.systemPackages = [ disnix dysnomia ];
}
| Convert jobs definition to the new systemd convention | Convert jobs definition to the new systemd convention
| Nix | lgpl-2.1 | svanderburg/disnix,svanderburg/disnix | nix | ## Code Before:
{disnix, dysnomia}:
{config, pkgs, ...}:
{
virtualisation.writableStore = true;
virtualisation.pathsInNixDB = [ pkgs.stdenv ];
ids.gids = { disnix = 200; };
users.extraGroups = [ { gid = 200; name = "disnix"; } ];
users.extraUsers = [
{ uid = 1000;
name = "unprivileged";
group = "users";
shell = "/bin/sh";
description = "Unprivileged user for the disnix-service";
}
{ uid = 1001;
name = "privileged";
group = "users";
shell = "/bin/sh";
extraGroups = [ "disnix" ];
description = "Privileged user for the disnix-service";
}
];
services.dbus.enable = true;
services.dbus.packages = [ disnix ];
services.openssh.enable = true;
jobs.disnix =
{ description = "Disnix server";
wantedBy = [ "multi-user.target" ];
after = [ "dbus.service" ];
path = [ pkgs.nix pkgs.getopt disnix dysnomia ];
environment = {
HOME = "/root";
};
exec = "disnix-service";
};
environment.systemPackages = [ disnix dysnomia ];
}
## Instruction:
Convert jobs definition to the new systemd convention
## Code After:
{disnix, dysnomia}:
{config, pkgs, ...}:
{
virtualisation.writableStore = true;
virtualisation.pathsInNixDB = [ pkgs.stdenv ];
ids.gids = { disnix = 200; };
users.extraGroups = [ { gid = 200; name = "disnix"; } ];
users.extraUsers = [
{ uid = 1000;
name = "unprivileged";
group = "users";
shell = "/bin/sh";
description = "Unprivileged user for the disnix-service";
}
{ uid = 1001;
name = "privileged";
group = "users";
shell = "/bin/sh";
extraGroups = [ "disnix" ];
description = "Privileged user for the disnix-service";
}
];
services.dbus.enable = true;
services.dbus.packages = [ disnix ];
services.openssh.enable = true;
systemd.services.disnix =
{ description = "Disnix server";
wantedBy = [ "multi-user.target" ];
after = [ "dbus.service" ];
path = [ pkgs.nix pkgs.getopt disnix dysnomia ];
environment = {
HOME = "/root";
};
serviceConfig.ExecStart = "${disnix}/bin/disnix-service";
};
environment.systemPackages = [ disnix dysnomia ];
}
| {disnix, dysnomia}:
{config, pkgs, ...}:
{
virtualisation.writableStore = true;
virtualisation.pathsInNixDB = [ pkgs.stdenv ];
ids.gids = { disnix = 200; };
users.extraGroups = [ { gid = 200; name = "disnix"; } ];
users.extraUsers = [
{ uid = 1000;
name = "unprivileged";
group = "users";
shell = "/bin/sh";
description = "Unprivileged user for the disnix-service";
}
{ uid = 1001;
name = "privileged";
group = "users";
shell = "/bin/sh";
extraGroups = [ "disnix" ];
description = "Privileged user for the disnix-service";
}
];
services.dbus.enable = true;
services.dbus.packages = [ disnix ];
services.openssh.enable = true;
- jobs.disnix =
+ systemd.services.disnix =
{ description = "Disnix server";
wantedBy = [ "multi-user.target" ];
after = [ "dbus.service" ];
path = [ pkgs.nix pkgs.getopt disnix dysnomia ];
environment = {
HOME = "/root";
};
- exec = "disnix-service";
+ serviceConfig.ExecStart = "${disnix}/bin/disnix-service";
};
environment.systemPackages = [ disnix dysnomia ];
} | 4 | 0.086957 | 2 | 2 |
b5fc62d022cd773a0333560f30d8c8c0d6dbd25e | txircd/utils.py | txircd/utils.py | def unescapeEndpointDescription(desc):
result = []
escape = []
depth = 0
desc = iter(desc)
for char in desc:
if char == "\\":
try:
char = desc.next()
except StopIteration:
raise ValueError ("Endpoint description not valid: escaped end of string")
if char not in "{}":
char = "\\{}".format(char)
if depth == 0:
result.extend(char)
else:
escape.extend(char)
elif char == "{":
if depth > 0:
escape.append("{")
depth += 1
elif char == "}":
depth -= 1
if depth < 0:
raise ValueError ("Endpoint description not valid: mismatched end brace")
if depth == 0:
result.extend(unescapeEndpointDescription("".join(escape)).replace("\\", "\\\\").replace(":", "\\:").replace("=", "\\="))
else:
escape.append("}")
else:
if depth == 0:
result.append(char)
else:
escape.append(char)
if depth != 0:
raise ValueError ("Endpoint description not valid: mismatched opening brace")
return "".join(result) | def _enum(**enums):
return type('Enum', (), enums)
ModeType = _enum(List=0, ParamOnUnset=1, Param=2, NoParam=3, Status=4)
def unescapeEndpointDescription(desc):
result = []
escape = []
depth = 0
desc = iter(desc)
for char in desc:
if char == "\\":
try:
char = desc.next()
except StopIteration:
raise ValueError ("Endpoint description not valid: escaped end of string")
if char not in "{}":
char = "\\{}".format(char)
if depth == 0:
result.extend(char)
else:
escape.extend(char)
elif char == "{":
if depth > 0:
escape.append("{")
depth += 1
elif char == "}":
depth -= 1
if depth < 0:
raise ValueError ("Endpoint description not valid: mismatched end brace")
if depth == 0:
result.extend(unescapeEndpointDescription("".join(escape)).replace("\\", "\\\\").replace(":", "\\:").replace("=", "\\="))
else:
escape.append("}")
else:
if depth == 0:
result.append(char)
else:
escape.append(char)
if depth != 0:
raise ValueError ("Endpoint description not valid: mismatched opening brace")
return "".join(result) | Add a ModeType enum for later benefit | Add a ModeType enum for later benefit
| Python | bsd-3-clause | ElementalAlchemist/txircd,Heufneutje/txircd | python | ## Code Before:
def unescapeEndpointDescription(desc):
result = []
escape = []
depth = 0
desc = iter(desc)
for char in desc:
if char == "\\":
try:
char = desc.next()
except StopIteration:
raise ValueError ("Endpoint description not valid: escaped end of string")
if char not in "{}":
char = "\\{}".format(char)
if depth == 0:
result.extend(char)
else:
escape.extend(char)
elif char == "{":
if depth > 0:
escape.append("{")
depth += 1
elif char == "}":
depth -= 1
if depth < 0:
raise ValueError ("Endpoint description not valid: mismatched end brace")
if depth == 0:
result.extend(unescapeEndpointDescription("".join(escape)).replace("\\", "\\\\").replace(":", "\\:").replace("=", "\\="))
else:
escape.append("}")
else:
if depth == 0:
result.append(char)
else:
escape.append(char)
if depth != 0:
raise ValueError ("Endpoint description not valid: mismatched opening brace")
return "".join(result)
## Instruction:
Add a ModeType enum for later benefit
## Code After:
def _enum(**enums):
return type('Enum', (), enums)
ModeType = _enum(List=0, ParamOnUnset=1, Param=2, NoParam=3, Status=4)
def unescapeEndpointDescription(desc):
result = []
escape = []
depth = 0
desc = iter(desc)
for char in desc:
if char == "\\":
try:
char = desc.next()
except StopIteration:
raise ValueError ("Endpoint description not valid: escaped end of string")
if char not in "{}":
char = "\\{}".format(char)
if depth == 0:
result.extend(char)
else:
escape.extend(char)
elif char == "{":
if depth > 0:
escape.append("{")
depth += 1
elif char == "}":
depth -= 1
if depth < 0:
raise ValueError ("Endpoint description not valid: mismatched end brace")
if depth == 0:
result.extend(unescapeEndpointDescription("".join(escape)).replace("\\", "\\\\").replace(":", "\\:").replace("=", "\\="))
else:
escape.append("}")
else:
if depth == 0:
result.append(char)
else:
escape.append(char)
if depth != 0:
raise ValueError ("Endpoint description not valid: mismatched opening brace")
return "".join(result) | + def _enum(**enums):
+ return type('Enum', (), enums)
+
+ ModeType = _enum(List=0, ParamOnUnset=1, Param=2, NoParam=3, Status=4)
+
def unescapeEndpointDescription(desc):
result = []
escape = []
depth = 0
desc = iter(desc)
for char in desc:
if char == "\\":
try:
char = desc.next()
except StopIteration:
raise ValueError ("Endpoint description not valid: escaped end of string")
if char not in "{}":
char = "\\{}".format(char)
if depth == 0:
result.extend(char)
else:
escape.extend(char)
elif char == "{":
if depth > 0:
escape.append("{")
depth += 1
elif char == "}":
depth -= 1
if depth < 0:
raise ValueError ("Endpoint description not valid: mismatched end brace")
if depth == 0:
result.extend(unescapeEndpointDescription("".join(escape)).replace("\\", "\\\\").replace(":", "\\:").replace("=", "\\="))
else:
escape.append("}")
else:
if depth == 0:
result.append(char)
else:
escape.append(char)
if depth != 0:
raise ValueError ("Endpoint description not valid: mismatched opening brace")
return "".join(result) | 5 | 0.135135 | 5 | 0 |
fd3c866d9e50699643a4173aa33baffb10020af6 | src/utils/fd-axios.js | src/utils/fd-axios.js | /* global localStorage */
import axios from 'axios'
const FD_API_URL = process.env.FD_API_URL || 'http://localhost:9111/v3'
export default axios.create({
baseURL: FD_API_URL,
headers: {
'Content-Type': 'application/json'
}
})
| /* global localStorage */
import axios from 'axios'
const FD_API_URL = process.env.FD_API_URL || 'http://localhost:9111/v3'
export default axios.create({
baseURL: FD_API_URL,
withCredentials: true,
headers: {
'Content-Type': 'application/json'
}
})
| Fix issue with websocket and Chrome (add Authorization header) | Fix issue with websocket and Chrome (add Authorization header)
| JavaScript | agpl-3.0 | freedomotic/fd-vue-webapp,freedomotic/fd-vue-webapp | javascript | ## Code Before:
/* global localStorage */
import axios from 'axios'
const FD_API_URL = process.env.FD_API_URL || 'http://localhost:9111/v3'
export default axios.create({
baseURL: FD_API_URL,
headers: {
'Content-Type': 'application/json'
}
})
## Instruction:
Fix issue with websocket and Chrome (add Authorization header)
## Code After:
/* global localStorage */
import axios from 'axios'
const FD_API_URL = process.env.FD_API_URL || 'http://localhost:9111/v3'
export default axios.create({
baseURL: FD_API_URL,
withCredentials: true,
headers: {
'Content-Type': 'application/json'
}
})
| /* global localStorage */
import axios from 'axios'
const FD_API_URL = process.env.FD_API_URL || 'http://localhost:9111/v3'
export default axios.create({
baseURL: FD_API_URL,
+ withCredentials: true,
headers: {
'Content-Type': 'application/json'
}
})
| 1 | 0.076923 | 1 | 0 |
b44861a7403bf42dfa034f08da3b51f44f3570ba | index.rst | index.rst | .. include:: global.rst
Pyramid Technologies Thermal Printer Command Set
================================================
|logo|
- `Documentation has moved <https://escpos.readthedocs.io/>`_
.. toctree::
:maxdepth: 4
self
Indices and tables
==================
* :ref:`genindex`
| .. include:: global.rst
Pyramid Technologies Thermal Printer Command Set
================================================
|logo|
This documentation has moved_.
.. _moved: https://escpos.readthedocs.io/
| Remove extra links from home page | Remove extra links from home page
| reStructuredText | mit | PyramidTechnologies/Reliance-ESCPOS-Commands | restructuredtext | ## Code Before:
.. include:: global.rst
Pyramid Technologies Thermal Printer Command Set
================================================
|logo|
- `Documentation has moved <https://escpos.readthedocs.io/>`_
.. toctree::
:maxdepth: 4
self
Indices and tables
==================
* :ref:`genindex`
## Instruction:
Remove extra links from home page
## Code After:
.. include:: global.rst
Pyramid Technologies Thermal Printer Command Set
================================================
|logo|
This documentation has moved_.
.. _moved: https://escpos.readthedocs.io/
| .. include:: global.rst
Pyramid Technologies Thermal Printer Command Set
================================================
+
|logo|
+ This documentation has moved_.
- - `Documentation has moved <https://escpos.readthedocs.io/>`_
+ .. _moved: https://escpos.readthedocs.io/
-
- .. toctree::
- :maxdepth: 4
-
- self
-
- Indices and tables
- ==================
-
- * :ref:`genindex`
- | 15 | 0.75 | 3 | 12 |
ed79bff080253f33878cbf9aba848cc84b607722 | index.js | index.js | /* jshint node: true */
'use strict';
var pickFiles = require('broccoli-static-compiler');
var merge = require('lodash.merge');
module.exports = {
name: 'ember-cli-chosen',
included: function(app) {
this._super.included(app);
// Setup default options for ember-cli-chosen
var options = merge({
'jQuery': true,
'importChosenCSS': true
}, app.options['ember-cli-chosen'] || {});
options.chosenJSType = options.jQuery ? 'jquery' : 'proto';
// Update `ember-cli-chosen` options on our `app` with updated hash
app.options['ember-cli-chosen'] = options;
// Import the correct JS for chosen
app.import(app.bowerDirectory + '/chosen/chosen.' + options.chosenJSType + '.js');
// Import Chosen CSS (done by default)
if(options.importChosenCSS) { app.import(app.bowerDirectory + '/chosen/chosen.css'); }
},
treeFor: function(treeName) {
var tree;
// Only include the Chosen sprites if we're including Chosen CSS in the build
if(treeName === 'public' && this.app.options['ember-cli-chosen'].importChosenCSS) {
tree = pickFiles(this.app.bowerDirectory + '/chosen', {
srcDir: '/',
files: ['*.png'],
destDir: '/assets'
});
}
return tree;
}
};
| /* jshint node: true */
'use strict';
var pickFiles = require('broccoli-static-compiler');
var merge = require('lodash.merge');
module.exports = {
name: 'ember-cli-chosen',
included: function(app) {
this._super.included(app);
// Setup default options for ember-cli-chosen
var options = merge({
'jQuery': true,
'importChosenCSS': true
}, app.options['ember-cli-chosen'] || {});
options.chosenJSType = options.jQuery ? 'jquery' : 'proto';
// Update `ember-cli-chosen` options on our `app` with updated hash
app.options['ember-cli-chosen'] = options;
// Import the correct JS for chosen
app.import(app.bowerDirectory + '/chosen/chosen.' + options.chosenJSType + '.js');
// Import Chosen CSS (done by default)
if(options.importChosenCSS) { app.import(app.bowerDirectory + '/chosen/chosen.css'); }
},
treeForPublic: function(treeName) {
var tree;
// Only include the Chosen sprites if we're including Chosen CSS in the build
if(this.app.options['ember-cli-chosen'].importChosenCSS) {
tree = pickFiles(this.app.bowerDirectory + '/chosen', {
srcDir: '/',
files: ['*.png'],
destDir: '/assets'
});
}
return tree;
}
};
| Allow other trees to be returned. | Allow other trees to be returned.
If you override `treeFor` but do not return anything for
`treeFor('addon')` or `treeFor('app')` no code will be shipped to the
browser.
| JavaScript | mit | meszike123/ember-cli-chosen,meszike123/ember-cli-chosen,green-arrow/ember-cli-chosen,dandehavilland/ember-cli-chosen,dandehavilland/ember-cli-chosen,green-arrow/ember-cli-chosen | javascript | ## Code Before:
/* jshint node: true */
'use strict';
var pickFiles = require('broccoli-static-compiler');
var merge = require('lodash.merge');
module.exports = {
name: 'ember-cli-chosen',
included: function(app) {
this._super.included(app);
// Setup default options for ember-cli-chosen
var options = merge({
'jQuery': true,
'importChosenCSS': true
}, app.options['ember-cli-chosen'] || {});
options.chosenJSType = options.jQuery ? 'jquery' : 'proto';
// Update `ember-cli-chosen` options on our `app` with updated hash
app.options['ember-cli-chosen'] = options;
// Import the correct JS for chosen
app.import(app.bowerDirectory + '/chosen/chosen.' + options.chosenJSType + '.js');
// Import Chosen CSS (done by default)
if(options.importChosenCSS) { app.import(app.bowerDirectory + '/chosen/chosen.css'); }
},
treeFor: function(treeName) {
var tree;
// Only include the Chosen sprites if we're including Chosen CSS in the build
if(treeName === 'public' && this.app.options['ember-cli-chosen'].importChosenCSS) {
tree = pickFiles(this.app.bowerDirectory + '/chosen', {
srcDir: '/',
files: ['*.png'],
destDir: '/assets'
});
}
return tree;
}
};
## Instruction:
Allow other trees to be returned.
If you override `treeFor` but do not return anything for
`treeFor('addon')` or `treeFor('app')` no code will be shipped to the
browser.
## Code After:
/* jshint node: true */
'use strict';
var pickFiles = require('broccoli-static-compiler');
var merge = require('lodash.merge');
module.exports = {
name: 'ember-cli-chosen',
included: function(app) {
this._super.included(app);
// Setup default options for ember-cli-chosen
var options = merge({
'jQuery': true,
'importChosenCSS': true
}, app.options['ember-cli-chosen'] || {});
options.chosenJSType = options.jQuery ? 'jquery' : 'proto';
// Update `ember-cli-chosen` options on our `app` with updated hash
app.options['ember-cli-chosen'] = options;
// Import the correct JS for chosen
app.import(app.bowerDirectory + '/chosen/chosen.' + options.chosenJSType + '.js');
// Import Chosen CSS (done by default)
if(options.importChosenCSS) { app.import(app.bowerDirectory + '/chosen/chosen.css'); }
},
treeForPublic: function(treeName) {
var tree;
// Only include the Chosen sprites if we're including Chosen CSS in the build
if(this.app.options['ember-cli-chosen'].importChosenCSS) {
tree = pickFiles(this.app.bowerDirectory + '/chosen', {
srcDir: '/',
files: ['*.png'],
destDir: '/assets'
});
}
return tree;
}
};
| /* jshint node: true */
'use strict';
var pickFiles = require('broccoli-static-compiler');
var merge = require('lodash.merge');
module.exports = {
name: 'ember-cli-chosen',
included: function(app) {
this._super.included(app);
// Setup default options for ember-cli-chosen
var options = merge({
'jQuery': true,
'importChosenCSS': true
}, app.options['ember-cli-chosen'] || {});
options.chosenJSType = options.jQuery ? 'jquery' : 'proto';
// Update `ember-cli-chosen` options on our `app` with updated hash
app.options['ember-cli-chosen'] = options;
// Import the correct JS for chosen
app.import(app.bowerDirectory + '/chosen/chosen.' + options.chosenJSType + '.js');
// Import Chosen CSS (done by default)
if(options.importChosenCSS) { app.import(app.bowerDirectory + '/chosen/chosen.css'); }
},
- treeFor: function(treeName) {
+ treeForPublic: function(treeName) {
? ++++++
var tree;
// Only include the Chosen sprites if we're including Chosen CSS in the build
- if(treeName === 'public' && this.app.options['ember-cli-chosen'].importChosenCSS) {
? -------------------------
+ if(this.app.options['ember-cli-chosen'].importChosenCSS) {
tree = pickFiles(this.app.bowerDirectory + '/chosen', {
srcDir: '/',
files: ['*.png'],
destDir: '/assets'
});
}
return tree;
}
}; | 4 | 0.093023 | 2 | 2 |
630c823706e28e66306828d6c3001b6e3773ce90 | ui/players/models.py | ui/players/models.py | from django.db import models
from django.contrib.auth.models import User
class Player(models.Model):
user = models.OneToOneField(User)
class Avatar(models.Model):
player = models.ForeignKey(User)
code = models.TextField() | from django.db import models
from django.contrib.auth.models import User
class Player(models.Model):
user = models.OneToOneField(User)
code = models.TextField()
class Avatar(models.Model):
player = models.ForeignKey(User)
| Move code into player (if only for now) | Move code into player (if only for now)
| Python | agpl-3.0 | Spycho/aimmo,Spycho/aimmo,Spycho/aimmo,Spycho/aimmo | python | ## Code Before:
from django.db import models
from django.contrib.auth.models import User
class Player(models.Model):
user = models.OneToOneField(User)
class Avatar(models.Model):
player = models.ForeignKey(User)
code = models.TextField()
## Instruction:
Move code into player (if only for now)
## Code After:
from django.db import models
from django.contrib.auth.models import User
class Player(models.Model):
user = models.OneToOneField(User)
code = models.TextField()
class Avatar(models.Model):
player = models.ForeignKey(User)
| from django.db import models
from django.contrib.auth.models import User
class Player(models.Model):
user = models.OneToOneField(User)
+ code = models.TextField()
class Avatar(models.Model):
player = models.ForeignKey(User)
- code = models.TextField()
+ | 3 | 0.272727 | 2 | 1 |
7d804e27fe4f420e1437f36922a76da645d50249 | index.js | index.js | var AWS = require('aws-sdk');
var s3 = new AWS.S3();
var yaml = require('js-yaml');
// const s3EnvVars = require('s3-env-vars');
// var doc = s3EnvVars("mybucketname", "folderpathinbucket", "filename", function(err, data) {
// if(err) console.log(err);
// else console.log(data);
// });
module.exports = function(bucket, path, filename, callback) {
var s3Params = {
Bucket: bucket,
Key: path + "/" + filename
};
s3.getObject(s3Params, function(err, data) {
if (err) callback(err, err.stack); // an error occurred
else {
try {
console.log('info: ', "Retrieved s3 object.");
var doc = yaml.safeLoad(data.Body);
console.log('data: ', "yml file contents: ", doc);
callback(null, doc);
} catch (e) {
callback(err, err.stack); // an error occurred reading the yml file
}
}
});
}; | var AWS = require('aws-sdk');
var s3 = new AWS.S3();
var yaml = require('js-yaml');
// const s3EnvVars = require('s3-env-vars');
// var doc = s3EnvVars("mybucketname", "folderpathinbucket", "filename", function(err, data) {
// if(err) console.log(err);
// else console.log(data);
// });
module.exports = function(bucket, path, filename, callback) {
var s3Params = {
Bucket: bucket,
Key: path + "/" + filename
};
s3.getObject(s3Params, function(err, data) {
if (err) callback(err, err.stack); // an error occurred
else {
try {
var doc = yaml.safeLoad(data.Body);
callback(null, doc);
} catch (e) {
callback(err, err.stack); // an error occurred reading the yml file
}
}
});
}; | Remove extra logging that exposed secrets to cloud watch log | Remove extra logging that exposed secrets to cloud watch log
| JavaScript | mit | Referly/lambda-s3-yml | javascript | ## Code Before:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
var yaml = require('js-yaml');
// const s3EnvVars = require('s3-env-vars');
// var doc = s3EnvVars("mybucketname", "folderpathinbucket", "filename", function(err, data) {
// if(err) console.log(err);
// else console.log(data);
// });
module.exports = function(bucket, path, filename, callback) {
var s3Params = {
Bucket: bucket,
Key: path + "/" + filename
};
s3.getObject(s3Params, function(err, data) {
if (err) callback(err, err.stack); // an error occurred
else {
try {
console.log('info: ', "Retrieved s3 object.");
var doc = yaml.safeLoad(data.Body);
console.log('data: ', "yml file contents: ", doc);
callback(null, doc);
} catch (e) {
callback(err, err.stack); // an error occurred reading the yml file
}
}
});
};
## Instruction:
Remove extra logging that exposed secrets to cloud watch log
## Code After:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
var yaml = require('js-yaml');
// const s3EnvVars = require('s3-env-vars');
// var doc = s3EnvVars("mybucketname", "folderpathinbucket", "filename", function(err, data) {
// if(err) console.log(err);
// else console.log(data);
// });
module.exports = function(bucket, path, filename, callback) {
var s3Params = {
Bucket: bucket,
Key: path + "/" + filename
};
s3.getObject(s3Params, function(err, data) {
if (err) callback(err, err.stack); // an error occurred
else {
try {
var doc = yaml.safeLoad(data.Body);
callback(null, doc);
} catch (e) {
callback(err, err.stack); // an error occurred reading the yml file
}
}
});
}; | var AWS = require('aws-sdk');
var s3 = new AWS.S3();
var yaml = require('js-yaml');
// const s3EnvVars = require('s3-env-vars');
// var doc = s3EnvVars("mybucketname", "folderpathinbucket", "filename", function(err, data) {
// if(err) console.log(err);
// else console.log(data);
// });
module.exports = function(bucket, path, filename, callback) {
var s3Params = {
Bucket: bucket,
Key: path + "/" + filename
};
s3.getObject(s3Params, function(err, data) {
if (err) callback(err, err.stack); // an error occurred
else {
try {
- console.log('info: ', "Retrieved s3 object.");
var doc = yaml.safeLoad(data.Body);
- console.log('data: ', "yml file contents: ", doc);
callback(null, doc);
} catch (e) {
callback(err, err.stack); // an error occurred reading the yml file
}
}
});
-
}; | 3 | 0.103448 | 0 | 3 |
d01a4a45254ae0c3c22a054a575631999c48641e | src/codeManager.js | src/codeManager.js | var utils = require("./utils");
function CodeManager() {
this.codes = [];
this.currentSession = -1;
this.codesGenerated = false;
}
CodeManager.prototype.nextSession = function() {
if (this.codes[this.currentSession + 1]) {
this.currentSession++;
} else if (this.currentSession === -1) {
throw "No codes generated";
} else {
this.currentSession = -1;
throw "Out of generated codes";
}
};
CodeManager.prototype.generateCodes = function(nbrOfUsers, nbrOfCodesPerUser, lengthOfCodes) {
this.codesGenerated = true;
return this.codes = utils.generateCodes(nbrOfUsers, nbrOfCodesPerUser, lengthOfCodes);
};
CodeManager.prototype.currentSessionCodes = function() {
return this.codes[this.currentSession];
};
CodeManager.prototype.isValidCode = function(code) {
return this.currentSessionCodes().indexOf(code) !== -1;
};
CodeManager.prototype.invalidateCode = function(code) {
var previousLength = this.currentSessionCodes().length;
this.codes[this.currentSession] = this.currentSessionCodes().filter(function(sessionCode) {
return sessionCode !== code;
});
return previousLength - 1 === this.currentSessionCodes().length;
};
module.exports = CodeManager;
| var utils = require("./utils");
function CodeManager() {
this.codes = [];
this.currentSession = -1;
this.codesGenerated = false;
}
CodeManager.prototype.nextSession = function() {
if (!this.codesGenerated) {
throw "No codes generated";
}
if (this.codes[this.currentSession + 1]) {
this.currentSession++;
} else {
this.currentSession = -1;
throw "Out of generated codes";
}
};
CodeManager.prototype.generateCodes = function(nbrOfUsers, nbrOfCodesPerUser, lengthOfCodes) {
this.codesGenerated = true;
return this.codes = utils.generateCodes(nbrOfUsers, nbrOfCodesPerUser, lengthOfCodes);
};
CodeManager.prototype.currentSessionCodes = function() {
return this.codes[this.currentSession];
};
CodeManager.prototype.isValidCode = function(code) {
return this.currentSessionCodes().indexOf(code) !== -1;
};
CodeManager.prototype.invalidateCode = function(code) {
var previousLength = this.currentSessionCodes().length;
this.codes[this.currentSession] = this.currentSessionCodes().filter(function(sessionCode) {
return sessionCode !== code;
});
return previousLength - 1 === this.currentSessionCodes().length;
};
module.exports = CodeManager;
| Clarify logic around codesGenerated boolean | Clarify logic around codesGenerated boolean
| JavaScript | mit | cthit/VoteIT,cthit/VoteIT | javascript | ## Code Before:
var utils = require("./utils");
function CodeManager() {
this.codes = [];
this.currentSession = -1;
this.codesGenerated = false;
}
CodeManager.prototype.nextSession = function() {
if (this.codes[this.currentSession + 1]) {
this.currentSession++;
} else if (this.currentSession === -1) {
throw "No codes generated";
} else {
this.currentSession = -1;
throw "Out of generated codes";
}
};
CodeManager.prototype.generateCodes = function(nbrOfUsers, nbrOfCodesPerUser, lengthOfCodes) {
this.codesGenerated = true;
return this.codes = utils.generateCodes(nbrOfUsers, nbrOfCodesPerUser, lengthOfCodes);
};
CodeManager.prototype.currentSessionCodes = function() {
return this.codes[this.currentSession];
};
CodeManager.prototype.isValidCode = function(code) {
return this.currentSessionCodes().indexOf(code) !== -1;
};
CodeManager.prototype.invalidateCode = function(code) {
var previousLength = this.currentSessionCodes().length;
this.codes[this.currentSession] = this.currentSessionCodes().filter(function(sessionCode) {
return sessionCode !== code;
});
return previousLength - 1 === this.currentSessionCodes().length;
};
module.exports = CodeManager;
## Instruction:
Clarify logic around codesGenerated boolean
## Code After:
var utils = require("./utils");
function CodeManager() {
this.codes = [];
this.currentSession = -1;
this.codesGenerated = false;
}
CodeManager.prototype.nextSession = function() {
if (!this.codesGenerated) {
throw "No codes generated";
}
if (this.codes[this.currentSession + 1]) {
this.currentSession++;
} else {
this.currentSession = -1;
throw "Out of generated codes";
}
};
CodeManager.prototype.generateCodes = function(nbrOfUsers, nbrOfCodesPerUser, lengthOfCodes) {
this.codesGenerated = true;
return this.codes = utils.generateCodes(nbrOfUsers, nbrOfCodesPerUser, lengthOfCodes);
};
CodeManager.prototype.currentSessionCodes = function() {
return this.codes[this.currentSession];
};
CodeManager.prototype.isValidCode = function(code) {
return this.currentSessionCodes().indexOf(code) !== -1;
};
CodeManager.prototype.invalidateCode = function(code) {
var previousLength = this.currentSessionCodes().length;
this.codes[this.currentSession] = this.currentSessionCodes().filter(function(sessionCode) {
return sessionCode !== code;
});
return previousLength - 1 === this.currentSessionCodes().length;
};
module.exports = CodeManager;
| var utils = require("./utils");
function CodeManager() {
this.codes = [];
this.currentSession = -1;
this.codesGenerated = false;
}
CodeManager.prototype.nextSession = function() {
+ if (!this.codesGenerated) {
+ throw "No codes generated";
+ }
+
if (this.codes[this.currentSession + 1]) {
this.currentSession++;
- } else if (this.currentSession === -1) {
- throw "No codes generated";
} else {
this.currentSession = -1;
throw "Out of generated codes";
}
};
CodeManager.prototype.generateCodes = function(nbrOfUsers, nbrOfCodesPerUser, lengthOfCodes) {
this.codesGenerated = true;
return this.codes = utils.generateCodes(nbrOfUsers, nbrOfCodesPerUser, lengthOfCodes);
};
CodeManager.prototype.currentSessionCodes = function() {
return this.codes[this.currentSession];
};
CodeManager.prototype.isValidCode = function(code) {
return this.currentSessionCodes().indexOf(code) !== -1;
};
CodeManager.prototype.invalidateCode = function(code) {
var previousLength = this.currentSessionCodes().length;
this.codes[this.currentSession] = this.currentSessionCodes().filter(function(sessionCode) {
return sessionCode !== code;
});
return previousLength - 1 === this.currentSessionCodes().length;
};
module.exports = CodeManager; | 6 | 0.142857 | 4 | 2 |
7570f23a9f2e73863f5e84a652646df348ad35df | salt/slack-irc/config/slack-irc.json.jinja | salt/slack-irc/config/slack-irc.json.jinja | [
{
"nickname": "PSFSlack",
"server": "irc.freenode.net",
"token": "{{ slack_token }}",
"autoSendCommands": [
["PRIVMSG", "NickServ", "IDENTIFY {{ irc_password }}"]
],
"channelMapping": {
"#infrastructure": "#python-infra",
"psf-board": "#psf-board"
},
"commandCharacters": ["!", "."]
}
]
| [
{
"nickname": "PSFSlack",
"server": "irc.freenode.net",
"token": "{{ slack_token }}",
"autoSendCommands": [
["PRIVMSG", "NickServ", "IDENTIFY {{ irc_password }}"]
],
"channelMapping": {
"#infrastructure": "#python-infra",
"psf-board": "#psf-board",
"psf-board-private": "#psf-board-private"
},
"commandCharacters": ["!", "."]
}
]
| Add another slack <-> IRC bridge | Add another slack <-> IRC bridge
| HTML+Django | mit | python/psf-salt,python/psf-salt,python/psf-salt,zware/psf-salt,python/psf-salt,zware/psf-salt,zware/psf-salt,zware/psf-salt | html+django | ## Code Before:
[
{
"nickname": "PSFSlack",
"server": "irc.freenode.net",
"token": "{{ slack_token }}",
"autoSendCommands": [
["PRIVMSG", "NickServ", "IDENTIFY {{ irc_password }}"]
],
"channelMapping": {
"#infrastructure": "#python-infra",
"psf-board": "#psf-board"
},
"commandCharacters": ["!", "."]
}
]
## Instruction:
Add another slack <-> IRC bridge
## Code After:
[
{
"nickname": "PSFSlack",
"server": "irc.freenode.net",
"token": "{{ slack_token }}",
"autoSendCommands": [
["PRIVMSG", "NickServ", "IDENTIFY {{ irc_password }}"]
],
"channelMapping": {
"#infrastructure": "#python-infra",
"psf-board": "#psf-board",
"psf-board-private": "#psf-board-private"
},
"commandCharacters": ["!", "."]
}
]
| [
{
"nickname": "PSFSlack",
"server": "irc.freenode.net",
"token": "{{ slack_token }}",
"autoSendCommands": [
["PRIVMSG", "NickServ", "IDENTIFY {{ irc_password }}"]
],
"channelMapping": {
"#infrastructure": "#python-infra",
- "psf-board": "#psf-board"
+ "psf-board": "#psf-board",
? +
+ "psf-board-private": "#psf-board-private"
},
"commandCharacters": ["!", "."]
}
] | 3 | 0.2 | 2 | 1 |
0b70bfac935e4e5f57f3239b9a0aeb5c8d043610 | Sodium.podspec | Sodium.podspec | Pod::Spec.new do |s|
s.name = 'Sodium'
s.version = '0.8.0'
s.swift_version = '5.0'
s.license = { :type => "ISC", :file => 'LICENSE' }
s.summary = 'Swift-Sodium provides a safe and easy to use interface to perform common cryptographic operations on iOS and OSX.'
s.homepage = 'https://github.com/jedisct1/swift-sodium'
s.social_media_url = 'https://twitter.com/jedisct1'
s.authors = { 'Frank Denis' => '' }
s.source = { :git => 'https://github.com/jedisct1/swift-sodium.git',
:tag => '0.8.0' }
s.ios.deployment_target = '8.0'
s.osx.deployment_target = '10.11'
s.watchos.deployment_target = '5.0'
s.ios.vendored_library = 'Sodium/libsodium/libsodium-ios.a'
s.osx.vendored_library = 'Sodium/libsodium/libsodium-osx.a'
s.watchos.vendored_library = 'Sodium/libsodium/libsodium-watchos.a'
s.source_files = 'Sodium/**/*.{swift,h}'
s.private_header_files = 'Sodium/libsodium/*.h'
s.preserve_paths = 'Sodium/libsodium/module.modulemap'
s.pod_target_xcconfig = {
'SWIFT_INCLUDE_PATHS' => '$(PODS_TARGET_SRCROOT)/Sodium/libsodium',
}
s.requires_arc = true
end
| Pod::Spec.new do |s|
s.name = 'Sodium'
s.version = '0.8.0'
s.swift_version = '5.0'
s.license = { :type => "ISC", :file => 'LICENSE' }
s.summary = 'Swift-Sodium provides a safe and easy to use interface to perform common cryptographic operations on iOS and OSX.'
s.homepage = 'https://github.com/jedisct1/swift-sodium'
s.social_media_url = 'https://twitter.com/jedisct1'
s.authors = { 'Frank Denis' => '' }
s.source = { :git => 'https://github.com/jedisct1/swift-sodium.git',
:tag => '0.8.0' }
s.ios.deployment_target = '8.0'
s.osx.deployment_target = '10.11'
s.watchos.deployment_target = '5.0'
s.source_files = 'Sodium/**/*.{swift,h}'
s.private_header_files = 'Sodium/libsodium/*.h'
s.pod_target_xcconfig = {
'SWIFT_INCLUDE_PATHS' => '$(PODS_TARGET_SRCROOT)/Sodium/libsodium',
}
s.requires_arc = true
end
| Remove references to nonexistent files | Remove references to nonexistent files
| Ruby | isc | jedisct1/swift-sodium,jedisct1/swift-sodium,jedisct1/swift-sodium | ruby | ## Code Before:
Pod::Spec.new do |s|
s.name = 'Sodium'
s.version = '0.8.0'
s.swift_version = '5.0'
s.license = { :type => "ISC", :file => 'LICENSE' }
s.summary = 'Swift-Sodium provides a safe and easy to use interface to perform common cryptographic operations on iOS and OSX.'
s.homepage = 'https://github.com/jedisct1/swift-sodium'
s.social_media_url = 'https://twitter.com/jedisct1'
s.authors = { 'Frank Denis' => '' }
s.source = { :git => 'https://github.com/jedisct1/swift-sodium.git',
:tag => '0.8.0' }
s.ios.deployment_target = '8.0'
s.osx.deployment_target = '10.11'
s.watchos.deployment_target = '5.0'
s.ios.vendored_library = 'Sodium/libsodium/libsodium-ios.a'
s.osx.vendored_library = 'Sodium/libsodium/libsodium-osx.a'
s.watchos.vendored_library = 'Sodium/libsodium/libsodium-watchos.a'
s.source_files = 'Sodium/**/*.{swift,h}'
s.private_header_files = 'Sodium/libsodium/*.h'
s.preserve_paths = 'Sodium/libsodium/module.modulemap'
s.pod_target_xcconfig = {
'SWIFT_INCLUDE_PATHS' => '$(PODS_TARGET_SRCROOT)/Sodium/libsodium',
}
s.requires_arc = true
end
## Instruction:
Remove references to nonexistent files
## Code After:
Pod::Spec.new do |s|
s.name = 'Sodium'
s.version = '0.8.0'
s.swift_version = '5.0'
s.license = { :type => "ISC", :file => 'LICENSE' }
s.summary = 'Swift-Sodium provides a safe and easy to use interface to perform common cryptographic operations on iOS and OSX.'
s.homepage = 'https://github.com/jedisct1/swift-sodium'
s.social_media_url = 'https://twitter.com/jedisct1'
s.authors = { 'Frank Denis' => '' }
s.source = { :git => 'https://github.com/jedisct1/swift-sodium.git',
:tag => '0.8.0' }
s.ios.deployment_target = '8.0'
s.osx.deployment_target = '10.11'
s.watchos.deployment_target = '5.0'
s.source_files = 'Sodium/**/*.{swift,h}'
s.private_header_files = 'Sodium/libsodium/*.h'
s.pod_target_xcconfig = {
'SWIFT_INCLUDE_PATHS' => '$(PODS_TARGET_SRCROOT)/Sodium/libsodium',
}
s.requires_arc = true
end
| Pod::Spec.new do |s|
s.name = 'Sodium'
s.version = '0.8.0'
s.swift_version = '5.0'
s.license = { :type => "ISC", :file => 'LICENSE' }
s.summary = 'Swift-Sodium provides a safe and easy to use interface to perform common cryptographic operations on iOS and OSX.'
s.homepage = 'https://github.com/jedisct1/swift-sodium'
s.social_media_url = 'https://twitter.com/jedisct1'
s.authors = { 'Frank Denis' => '' }
s.source = { :git => 'https://github.com/jedisct1/swift-sodium.git',
:tag => '0.8.0' }
s.ios.deployment_target = '8.0'
s.osx.deployment_target = '10.11'
s.watchos.deployment_target = '5.0'
- s.ios.vendored_library = 'Sodium/libsodium/libsodium-ios.a'
- s.osx.vendored_library = 'Sodium/libsodium/libsodium-osx.a'
- s.watchos.vendored_library = 'Sodium/libsodium/libsodium-watchos.a'
-
s.source_files = 'Sodium/**/*.{swift,h}'
s.private_header_files = 'Sodium/libsodium/*.h'
- s.preserve_paths = 'Sodium/libsodium/module.modulemap'
s.pod_target_xcconfig = {
'SWIFT_INCLUDE_PATHS' => '$(PODS_TARGET_SRCROOT)/Sodium/libsodium',
}
s.requires_arc = true
end | 5 | 0.166667 | 0 | 5 |
4ec043be37b474b22e19884653667b81c8a18885 | concourse/scripts/lib/google-oauth.sh | concourse/scripts/lib/google-oauth.sh | set -e
set -u
get_google_oauth_secrets() {
# shellcheck disable=SC2154
secrets_uri="s3://${state_bucket}/google-oauth-secrets.yml"
export oauth_client_id
export oauth_client_secret
if aws s3 ls "${secrets_uri}" > /dev/null ; then
secrets_file=$(mktemp -t google-oauth-secrets.XXXXXX)
aws s3 cp "${secrets_uri}" "${secrets_file}"
oauth_client_id=$("${SCRIPT_DIR}"/val_from_yaml.rb secrets.google_oauth_client_id "${secrets_file}")
oauth_client_secret=$("${SCRIPT_DIR}"/val_from_yaml.rb secrets.google_oauth_client_secret "${secrets_file}")
rm -f "${secrets_file}"
fi
}
| set -e
set -u
get_google_oauth_secrets() {
# shellcheck disable=SC2154
secrets_uri="s3://${state_bucket}/google-oauth-secrets.yml"
export oauth_client_id
export oauth_client_secret
secrets_size=$(aws s3 ls "${secrets_uri}" | awk '{print $3}')
if [ "${secrets_size}" != 0 ] && [ -n "${secrets_size}" ] ; then
secrets_file=$(mktemp -t google-oauth-secrets.XXXXXX)
aws s3 cp "${secrets_uri}" "${secrets_file}"
oauth_client_id=$("${SCRIPT_DIR}"/val_from_yaml.rb secrets.google_oauth_client_id "${secrets_file}")
oauth_client_secret=$("${SCRIPT_DIR}"/val_from_yaml.rb secrets.google_oauth_client_secret "${secrets_file}")
rm -f "${secrets_file}"
fi
}
| Check if google secrets file is not empty | Check if google secrets file is not empty
We always initialise that file even if we don't populate it, so in case
the size is 0 the creation of admins never happens and secrets are not
generated.
| Shell | mit | alphagov/paas-cf,alphagov/paas-cf,alphagov/paas-cf,alphagov/paas-cf,alphagov/paas-cf,alphagov/paas-cf,alphagov/paas-cf,alphagov/paas-cf | shell | ## Code Before:
set -e
set -u
get_google_oauth_secrets() {
# shellcheck disable=SC2154
secrets_uri="s3://${state_bucket}/google-oauth-secrets.yml"
export oauth_client_id
export oauth_client_secret
if aws s3 ls "${secrets_uri}" > /dev/null ; then
secrets_file=$(mktemp -t google-oauth-secrets.XXXXXX)
aws s3 cp "${secrets_uri}" "${secrets_file}"
oauth_client_id=$("${SCRIPT_DIR}"/val_from_yaml.rb secrets.google_oauth_client_id "${secrets_file}")
oauth_client_secret=$("${SCRIPT_DIR}"/val_from_yaml.rb secrets.google_oauth_client_secret "${secrets_file}")
rm -f "${secrets_file}"
fi
}
## Instruction:
Check if google secrets file is not empty
We always initialise that file even if we don't populate it, so in case
the size is 0 the creation of admins never happens and secrets are not
generated.
## Code After:
set -e
set -u
get_google_oauth_secrets() {
# shellcheck disable=SC2154
secrets_uri="s3://${state_bucket}/google-oauth-secrets.yml"
export oauth_client_id
export oauth_client_secret
secrets_size=$(aws s3 ls "${secrets_uri}" | awk '{print $3}')
if [ "${secrets_size}" != 0 ] && [ -n "${secrets_size}" ] ; then
secrets_file=$(mktemp -t google-oauth-secrets.XXXXXX)
aws s3 cp "${secrets_uri}" "${secrets_file}"
oauth_client_id=$("${SCRIPT_DIR}"/val_from_yaml.rb secrets.google_oauth_client_id "${secrets_file}")
oauth_client_secret=$("${SCRIPT_DIR}"/val_from_yaml.rb secrets.google_oauth_client_secret "${secrets_file}")
rm -f "${secrets_file}"
fi
}
| set -e
set -u
get_google_oauth_secrets() {
# shellcheck disable=SC2154
secrets_uri="s3://${state_bucket}/google-oauth-secrets.yml"
export oauth_client_id
export oauth_client_secret
- if aws s3 ls "${secrets_uri}" > /dev/null ; then
+ secrets_size=$(aws s3 ls "${secrets_uri}" | awk '{print $3}')
+ if [ "${secrets_size}" != 0 ] && [ -n "${secrets_size}" ] ; then
secrets_file=$(mktemp -t google-oauth-secrets.XXXXXX)
aws s3 cp "${secrets_uri}" "${secrets_file}"
oauth_client_id=$("${SCRIPT_DIR}"/val_from_yaml.rb secrets.google_oauth_client_id "${secrets_file}")
oauth_client_secret=$("${SCRIPT_DIR}"/val_from_yaml.rb secrets.google_oauth_client_secret "${secrets_file}")
rm -f "${secrets_file}"
fi
} | 3 | 0.166667 | 2 | 1 |
2f227554e85a64f73cdf86ff17c2f426d0e0d7f8 | config/googlebooks.php | config/googlebooks.php | <?php
return [
/*
|--------------------------------------------------------------------------
| API KEY
|--------------------------------------------------------------------------
*/
'key' => env('GOOGLE_BOOKS_KEY'),
];
| <?php
return [
/*
|--------------------------------------------------------------------------
| API KEY
|--------------------------------------------------------------------------
*/
'key' => env('GOOGLE_BOOKS_KEY'),
/*
|--------------------------------------------------------------------------
| 2 letter ISO 639 country code
|--------------------------------------------------------------------------
*/
'country' => env('GOOGLE_BOOKS_COUNTRY_CODE'),
];
| Fix country config for google books harvesting | Fix country config for google books harvesting
(should have been part of 5d5652b0)
| PHP | mit | scriptotek/colligator-backend,scriptotek/colligator-backend,scriptotek/colligator-backend | php | ## Code Before:
<?php
return [
/*
|--------------------------------------------------------------------------
| API KEY
|--------------------------------------------------------------------------
*/
'key' => env('GOOGLE_BOOKS_KEY'),
];
## Instruction:
Fix country config for google books harvesting
(should have been part of 5d5652b0)
## Code After:
<?php
return [
/*
|--------------------------------------------------------------------------
| API KEY
|--------------------------------------------------------------------------
*/
'key' => env('GOOGLE_BOOKS_KEY'),
/*
|--------------------------------------------------------------------------
| 2 letter ISO 639 country code
|--------------------------------------------------------------------------
*/
'country' => env('GOOGLE_BOOKS_COUNTRY_CODE'),
];
| <?php
return [
/*
|--------------------------------------------------------------------------
| API KEY
|--------------------------------------------------------------------------
*/
'key' => env('GOOGLE_BOOKS_KEY'),
+ /*
+ |--------------------------------------------------------------------------
+ | 2 letter ISO 639 country code
+ |--------------------------------------------------------------------------
+ */
+ 'country' => env('GOOGLE_BOOKS_COUNTRY_CODE'),
+
]; | 7 | 0.583333 | 7 | 0 |
c6146d440d92673a0784db9780cd527811be83fd | States/MovingEntity.hs | States/MovingEntity.hs | {-# LANGUAGE TupleSections #-}
module States.MovingEntity where
#include "Gamgine/Utils.cpp"
import Control.Applicative ((<$>))
import qualified Gamgine.Math.Vect as V
import Gamgine.Control (applyIf, (?))
import qualified States.State as ST
import qualified States.GameRunning as GR
import qualified GameData.Level as LV
import qualified GameData.Entity as E
import qualified Entity.Id as EI
import qualified Entity.Position as EP
import qualified AppData as AP
IMPORT_LENS
data MovingEntity = MovingEntity {
entityId :: Maybe Int,
startPos :: V.Vect,
basePos :: V.Vect
}
instance ST.State MovingEntity AP.AppData where
enterWithMousePos me app mp =
case LV.findEntityAt mp $ LE.getL AP.currentLevelL app of
Just e -> (me {entityId = Just $ EI.entityId e,
startPos = mp,
basePos = EP.position e}, app)
leave me app = (me {entityId = Nothing, startPos = V.nullVec, basePos = V.nullVec}, app)
render me app rs = (me,) <$> GR.render app rs
mouseMoved me@MovingEntity {entityId = Just id, startPos = sp, basePos = bp} app mp =
(me, E.eMap (\e -> id == EI.entityId e ? EP.setPosition e newPos $ e) app)
where
newPos = bp + (mp - sp)
mouseMoved me app _ = (me, app)
| {-# LANGUAGE TupleSections #-}
module States.MovingEntity where
#include "Gamgine/Utils.cpp"
import Control.Applicative ((<$>))
import qualified Gamgine.Math.Vect as V
import Gamgine.Control ((?))
import qualified States.State as ST
import qualified States.GameRunning as GR
import qualified GameData.Level as LV
import qualified GameData.Entity as E
import qualified GameData.Data as GD
import qualified Entity.Id as EI
import qualified Entity.Position as EP
IMPORT_LENS
data MovingEntity = MovingEntity {
entityId :: Maybe Int,
startPos :: V.Vect,
basePos :: V.Vect
}
instance ST.State MovingEntity GD.Data where
enterWithMousePos me gd mp =
case LV.findEntityAt mp $ LE.getL GD.currentLevelL gd of
Just e -> (me {entityId = Just $ EI.entityId e,
startPos = mp,
basePos = EP.position e}, gd)
leave me gd = (me {entityId = Nothing, startPos = V.nullVec, basePos = V.nullVec}, gd)
render me gd rs = (me,) <$> GR.render gd rs
mouseMoved me@MovingEntity {entityId = Just id, startPos = sp, basePos = bp} gd mp =
(me, E.eMap (\e -> id == EI.entityId e ? EP.setPosition e newPos $ e) gd)
where
newPos = bp + (mp - sp)
mouseMoved me gd _ = (me, gd)
| Change data type of MoveEntity | Change data type of MoveEntity
| Haskell | bsd-3-clause | dan-t/layers | haskell | ## Code Before:
{-# LANGUAGE TupleSections #-}
module States.MovingEntity where
#include "Gamgine/Utils.cpp"
import Control.Applicative ((<$>))
import qualified Gamgine.Math.Vect as V
import Gamgine.Control (applyIf, (?))
import qualified States.State as ST
import qualified States.GameRunning as GR
import qualified GameData.Level as LV
import qualified GameData.Entity as E
import qualified Entity.Id as EI
import qualified Entity.Position as EP
import qualified AppData as AP
IMPORT_LENS
data MovingEntity = MovingEntity {
entityId :: Maybe Int,
startPos :: V.Vect,
basePos :: V.Vect
}
instance ST.State MovingEntity AP.AppData where
enterWithMousePos me app mp =
case LV.findEntityAt mp $ LE.getL AP.currentLevelL app of
Just e -> (me {entityId = Just $ EI.entityId e,
startPos = mp,
basePos = EP.position e}, app)
leave me app = (me {entityId = Nothing, startPos = V.nullVec, basePos = V.nullVec}, app)
render me app rs = (me,) <$> GR.render app rs
mouseMoved me@MovingEntity {entityId = Just id, startPos = sp, basePos = bp} app mp =
(me, E.eMap (\e -> id == EI.entityId e ? EP.setPosition e newPos $ e) app)
where
newPos = bp + (mp - sp)
mouseMoved me app _ = (me, app)
## Instruction:
Change data type of MoveEntity
## Code After:
{-# LANGUAGE TupleSections #-}
module States.MovingEntity where
#include "Gamgine/Utils.cpp"
import Control.Applicative ((<$>))
import qualified Gamgine.Math.Vect as V
import Gamgine.Control ((?))
import qualified States.State as ST
import qualified States.GameRunning as GR
import qualified GameData.Level as LV
import qualified GameData.Entity as E
import qualified GameData.Data as GD
import qualified Entity.Id as EI
import qualified Entity.Position as EP
IMPORT_LENS
data MovingEntity = MovingEntity {
entityId :: Maybe Int,
startPos :: V.Vect,
basePos :: V.Vect
}
instance ST.State MovingEntity GD.Data where
enterWithMousePos me gd mp =
case LV.findEntityAt mp $ LE.getL GD.currentLevelL gd of
Just e -> (me {entityId = Just $ EI.entityId e,
startPos = mp,
basePos = EP.position e}, gd)
leave me gd = (me {entityId = Nothing, startPos = V.nullVec, basePos = V.nullVec}, gd)
render me gd rs = (me,) <$> GR.render gd rs
mouseMoved me@MovingEntity {entityId = Just id, startPos = sp, basePos = bp} gd mp =
(me, E.eMap (\e -> id == EI.entityId e ? EP.setPosition e newPos $ e) gd)
where
newPos = bp + (mp - sp)
mouseMoved me gd _ = (me, gd)
| {-# LANGUAGE TupleSections #-}
module States.MovingEntity where
#include "Gamgine/Utils.cpp"
import Control.Applicative ((<$>))
import qualified Gamgine.Math.Vect as V
- import Gamgine.Control (applyIf, (?))
? ---------
+ import Gamgine.Control ((?))
import qualified States.State as ST
import qualified States.GameRunning as GR
import qualified GameData.Level as LV
import qualified GameData.Entity as E
+ import qualified GameData.Data as GD
import qualified Entity.Id as EI
import qualified Entity.Position as EP
- import qualified AppData as AP
IMPORT_LENS
data MovingEntity = MovingEntity {
entityId :: Maybe Int,
startPos :: V.Vect,
basePos :: V.Vect
}
- instance ST.State MovingEntity AP.AppData where
? ^^ ---
+ instance ST.State MovingEntity GD.Data where
? ^^
- enterWithMousePos me app mp =
? ^^^
+ enterWithMousePos me gd mp =
? ^^
- case LV.findEntityAt mp $ LE.getL AP.currentLevelL app of
? ^^ ^^^
+ case LV.findEntityAt mp $ LE.getL GD.currentLevelL gd of
? ^^ ^^
Just e -> (me {entityId = Just $ EI.entityId e,
startPos = mp,
- basePos = EP.position e}, app)
? ^^^
+ basePos = EP.position e}, gd)
? ^^
- leave me app = (me {entityId = Nothing, startPos = V.nullVec, basePos = V.nullVec}, app)
? ^^^ ^^^
+ leave me gd = (me {entityId = Nothing, startPos = V.nullVec, basePos = V.nullVec}, gd)
? ^^ ^^
- render me app rs = (me,) <$> GR.render app rs
? ^^^ ^^^
+ render me gd rs = (me,) <$> GR.render gd rs
? ^^ ^^
- mouseMoved me@MovingEntity {entityId = Just id, startPos = sp, basePos = bp} app mp =
? ^^^
+ mouseMoved me@MovingEntity {entityId = Just id, startPos = sp, basePos = bp} gd mp =
? ^^
- (me, E.eMap (\e -> id == EI.entityId e ? EP.setPosition e newPos $ e) app)
? ^^^
+ (me, E.eMap (\e -> id == EI.entityId e ? EP.setPosition e newPos $ e) gd)
? ^^
where
newPos = bp + (mp - sp)
- mouseMoved me app _ = (me, app)
? ^^^ ^^^
+ mouseMoved me gd _ = (me, gd)
? ^^ ^^
| 22 | 0.564103 | 11 | 11 |
d08973c3854d10755e156b1457972a8aaebb251b | bottle_utils/form/__init__.py | bottle_utils/form/__init__.py | from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import Validator, Required, DateValidator, InRangeValidator
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
| from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import (Validator, Required, DateValidator, InRangeValidator,
LengthValidator)
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
 | Include LengthValidator in list of exported objects | Include LengthValidator in list of exported objects
Signed-off-by: Branko Vukelic <[email protected]>
| Python | bsd-2-clause | Outernet-Project/bottle-utils | python | ## Code Before:
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import Validator, Required, DateValidator, InRangeValidator
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
## Instruction:
Include LengthValidator in list of exported objects
Signed-off-by: Branko Vukelic <[email protected]>
## Code After:
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import (Validator, Required, DateValidator, InRangeValidator,
LengthValidator)
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
| from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
- from .validators import Validator, Required, DateValidator, InRangeValidator
+ from .validators import (Validator, Required, DateValidator, InRangeValidator,
? + +
+ LengthValidator)
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator'] | 3 | 0.081081 | 2 | 1 |
f49bc6c1d7baa789a7b9b4a26b3073da6f644574 | test/Interpreter/opaque_return_type_protocol_ext.swift | test/Interpreter/opaque_return_type_protocol_ext.swift | // RUN: %target-run-simple-swift | %FileCheck %s
// REQUIRES: executable_test
protocol P {
associatedtype AT
func foo() -> AT
}
struct Adapter<T: P>: P {
var inner: T
func foo() -> some P {
return inner
}
}
extension P {
func foo() -> some P {
return Adapter(inner: self)
}
}
func getPAT<T: P>(_: T.Type) -> Any.Type {
return T.AT.self
}
extension Int: P { }
// CHECK: Adapter<Int>
print(getPAT(Int.self))
| // RUN: %target-run-simple-swift | %FileCheck %s
// REQUIRES: executable_test
@available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
protocol P {
associatedtype AT
func foo() -> AT
}
@available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
struct Adapter<T: P>: P {
var inner: T
@available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
func foo() -> some P {
return inner
}
}
@available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
extension P {
@available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
func foo() -> some P {
return Adapter(inner: self)
}
}
@available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
func getPAT<T: P>(_: T.Type) -> Any.Type {
return T.AT.self
}
@available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
extension Int: P { }
// CHECK: {{Adapter<Int>|too old}}
if #available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *) {
print(getPAT(Int.self))
} else {
print("i'm getting too old for this sh")
}
| Correct availability for opaque type execution test. | Correct availability for opaque type execution test.
Opaque types require the next Swift runtime.
| Swift | apache-2.0 | aschwaighofer/swift,xwu/swift,gregomni/swift,parkera/swift,CodaFi/swift,nathawes/swift,harlanhaskins/swift,devincoughlin/swift,atrick/swift,JGiola/swift,gribozavr/swift,lorentey/swift,CodaFi/swift,stephentyrone/swift,xedin/swift,atrick/swift,benlangmuir/swift,tkremenek/swift,aschwaighofer/swift,airspeedswift/swift,lorentey/swift,lorentey/swift,allevato/swift,devincoughlin/swift,apple/swift,shahmishal/swift,karwa/swift,sschiau/swift,ahoppen/swift,stephentyrone/swift,shajrawi/swift,tkremenek/swift,rudkx/swift,gribozavr/swift,hooman/swift,harlanhaskins/swift,karwa/swift,xedin/swift,parkera/swift,xedin/swift,gregomni/swift,harlanhaskins/swift,parkera/swift,CodaFi/swift,jckarter/swift,allevato/swift,jckarter/swift,lorentey/swift,roambotics/swift,jckarter/swift,roambotics/swift,apple/swift,parkera/swift,sschiau/swift,roambotics/swift,gregomni/swift,JGiola/swift,hooman/swift,airspeedswift/swift,roambotics/swift,stephentyrone/swift,devincoughlin/swift,parkera/swift,sschiau/swift,roambotics/swift,aschwaighofer/swift,shahmishal/swift,CodaFi/swift,hooman/swift,airspeedswift/swift,CodaFi/swift,jckarter/swift,allevato/swift,ahoppen/swift,lorentey/swift,harlanhaskins/swift,nathawes/swift,tkremenek/swift,xedin/swift,karwa/swift,nathawes/swift,devincoughlin/swift,sschiau/swift,parkera/swift,stephentyrone/swift,hooman/swift,tkremenek/swift,allevato/swift,lorentey/swift,xedin/swift,karwa/swift,lorentey/swift,devincoughlin/swift,tkremenek/swift,parkera/swift,jmgc/swift,xedin/swift,shajrawi/swift,apple/swift,devincoughlin/swift,shajrawi/swift,xedin/swift,sschiau/swift,allevato/swift,JGiola/swift,jmgc/swift,xwu/swift,benlangmuir/swift,shahmishal/swift,karwa/swift,glessard/swift,shahmishal/swift,stephentyrone/swift,aschwaighofer/swift,gregomni/swift,harlanhaskins/swift,airspeedswift/swift,benlangmuir/swift,nathawes/swift,karwa/swift,shahmishal/swift,atrick/swift,jckarter/swift,nathawes/swift,glessard/swift,shajrawi/swift,rudkx/swift,devincoughlin/swift,xwu/swift,jmgc/swift,CodaFi/swift,jmgc/swift,glessard/swift,ahoppen/swift,atrick/swift,JGiola/swift,airspeedswift/swift,sschiau/swift,roambotics/swift,stephentyrone/swift,gregomni/swift,apple/swift,harlanhaskins/swift,benlangmuir/swift,hooman/swift,JGiola/swift,stephentyrone/swift,glessard/swift,lorentey/swift,shajrawi/swift,aschwaighofer/swift,jmgc/swift,rudkx/swift,airspeedswift/swift,atrick/swift,hooman/swift,tkremenek/swift,allevato/swift,gregomni/swift,sschiau/swift,aschwaighofer/swift,ahoppen/swift,airspeedswift/swift,nathawes/swift,aschwaighofer/swift,tkremenek/swift,harlanhaskins/swift,shahmishal/swift,CodaFi/swift,ahoppen/swift,hooman/swift,shajrawi/swift,benlangmuir/swift,allevato/swift,rudkx/swift,nathawes/swift,jmgc/swift,benlangmuir/swift,apple/swift,xwu/swift,apple/swift,devincoughlin/swift,glessard/swift,xwu/swift,gribozavr/swift,shajrawi/swift,shahmishal/swift,xedin/swift,gribozavr/swift,rudkx/swift,jckarter/swift,JGiola/swift,ahoppen/swift,atrick/swift,gribozavr/swift,xwu/swift,karwa/swift,shajrawi/swift,sschiau/swift,jmgc/swift,shahmishal/swift,xwu/swift,karwa/swift,gribozavr/swift,glessard/swift,gribozavr/swift,gribozavr/swift,rudkx/swift,jckarter/swift,parkera/swift | swift | ## Code Before:
// RUN: %target-run-simple-swift | %FileCheck %s
// REQUIRES: executable_test
protocol P {
associatedtype AT
func foo() -> AT
}
struct Adapter<T: P>: P {
var inner: T
func foo() -> some P {
return inner
}
}
extension P {
func foo() -> some P {
return Adapter(inner: self)
}
}
func getPAT<T: P>(_: T.Type) -> Any.Type {
return T.AT.self
}
extension Int: P { }
// CHECK: Adapter<Int>
print(getPAT(Int.self))
## Instruction:
Correct availability for opaque type execution test.
Opaque types require the next Swift runtime.
## Code After:
// RUN: %target-run-simple-swift | %FileCheck %s
// REQUIRES: executable_test
@available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
protocol P {
associatedtype AT
func foo() -> AT
}
@available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
struct Adapter<T: P>: P {
var inner: T
@available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
func foo() -> some P {
return inner
}
}
@available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
extension P {
@available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
func foo() -> some P {
return Adapter(inner: self)
}
}
@available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
func getPAT<T: P>(_: T.Type) -> Any.Type {
return T.AT.self
}
@available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
extension Int: P { }
// CHECK: {{Adapter<Int>|too old}}
if #available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *) {
print(getPAT(Int.self))
} else {
print("i'm getting too old for this sh")
}
| // RUN: %target-run-simple-swift | %FileCheck %s
// REQUIRES: executable_test
+ @available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
protocol P {
associatedtype AT
func foo() -> AT
}
+ @available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
struct Adapter<T: P>: P {
var inner: T
+
+ @available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
func foo() -> some P {
return inner
}
}
+ @available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
extension P {
+ @available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
func foo() -> some P {
return Adapter(inner: self)
}
}
+ @available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
func getPAT<T: P>(_: T.Type) -> Any.Type {
return T.AT.self
}
+ @available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *)
extension Int: P { }
- // CHECK: Adapter<Int>
+ // CHECK: {{Adapter<Int>|too old}}
? ++ ++++++++++
+ if #available(iOS 9999, macOS 9999, tvOS 9999, watchOS 9999, *) {
- print(getPAT(Int.self))
+ print(getPAT(Int.self))
? ++
+ } else {
+ print("i'm getting too old for this sh")
+ } | 16 | 0.551724 | 14 | 2 |
79c6c71ab6edd8313fd6c9c6441d69ad04d50721 | update-database/stackdoc/namespaces/microsoftkb.py | update-database/stackdoc/namespaces/microsoftkb.py | import re
import urllib
############### Functions called by stackdoc
def get_version():
return 1
def get_ids(title, body, tags):
ids = []
if "http://support.microsoft.com/":
urls = re.findall(r'<a href="([^"]+)"', body)
for url in urls:
m = re.match("http://support\.microsoft\.com/(?:default.aspx/)?kb/(\w+)", url)
if m:
ids.append(m.group(1))
return ids
def get_tags():
return None # There isn't a reliable set of tags to filter by. Null indicates that we're not filtering
| import re
import urllib
############### Functions called by stackdoc
def get_version():
return 1
def get_ids(title, body, tags):
ids = []
if "http://support.microsoft.com/":
urls = re.findall(r'<a href="([^"]+)"', body)
for url in urls:
m = re.match("http://support\.microsoft\.com/(?:default\.aspx/)?[kK][bB]/(\w+)", url)
if m:
ids.append(m.group(1))
m2 = re.match("http://support\.microsoft\.com/(?:default\.aspx)?\?scid=[kK][bB];[-\w]+;(\w+)", url)
if m2:
ids.append(m2.group(1))
return ids
def get_tags():
return None # There isn't a reliable set of tags to filter by. Null indicates that we're not filtering
| Support another form of KB URL. | Support another form of KB URL.
| Python | bsd-3-clause | alnorth/stackdoc,alnorth/stackdoc,alnorth/stackdoc | python | ## Code Before:
import re
import urllib
############### Functions called by stackdoc
def get_version():
return 1
def get_ids(title, body, tags):
ids = []
if "http://support.microsoft.com/":
urls = re.findall(r'<a href="([^"]+)"', body)
for url in urls:
m = re.match("http://support\.microsoft\.com/(?:default.aspx/)?kb/(\w+)", url)
if m:
ids.append(m.group(1))
return ids
def get_tags():
return None # There isn't a reliable set of tags to filter by. Null indicates that we're not filtering
## Instruction:
Support another form of KB URL.
## Code After:
import re
import urllib
############### Functions called by stackdoc
def get_version():
return 1
def get_ids(title, body, tags):
ids = []
if "http://support.microsoft.com/":
urls = re.findall(r'<a href="([^"]+)"', body)
for url in urls:
m = re.match("http://support\.microsoft\.com/(?:default\.aspx/)?[kK][bB]/(\w+)", url)
if m:
ids.append(m.group(1))
m2 = re.match("http://support\.microsoft\.com/(?:default\.aspx)?\?scid=[kK][bB];[-\w]+;(\w+)", url)
if m2:
ids.append(m2.group(1))
return ids
def get_tags():
return None # There isn't a reliable set of tags to filter by. Null indicates that we're not filtering
| import re
import urllib
############### Functions called by stackdoc
def get_version():
return 1
def get_ids(title, body, tags):
ids = []
if "http://support.microsoft.com/":
urls = re.findall(r'<a href="([^"]+)"', body)
for url in urls:
- m = re.match("http://support\.microsoft\.com/(?:default.aspx/)?kb/(\w+)", url)
+ m = re.match("http://support\.microsoft\.com/(?:default\.aspx/)?[kK][bB]/(\w+)", url)
? + + +++ ++
if m:
ids.append(m.group(1))
+ m2 = re.match("http://support\.microsoft\.com/(?:default\.aspx)?\?scid=[kK][bB];[-\w]+;(\w+)", url)
+ if m2:
+ ids.append(m2.group(1))
return ids
def get_tags():
return None # There isn't a reliable set of tags to filter by. Null indicates that we're not filtering
| 5 | 0.217391 | 4 | 1 |
cc321494762b6daee1054dccc1d33b613f12512d | lib/generators/sts_salesforce/install/install_generator.rb | lib/generators/sts_salesforce/install/install_generator.rb | require 'rails/generators/migration'
require 'rails/generators/active_record/migration'
module StsSalesforce
module Generators
class InstallGenerator < ::Rails::Generators::Base
include Rails::Generators::Migration
extend ActiveRecord::Generators::Migration
source_root File.expand_path('../templates', __FILE__ )
desc "add migrations"
def self.next_migration_number(path)
unless @prev_migration_nr
@prev_migration_nr = Time.now.utc.strftime("%Y%m%d%H%M%S").to_i
else
@prev_migration_nr += 1
end
@prev_migration_nr.to_s
end
def copy_migrations
copy_migration "create_salesforce_orgs"
copy_migration "add_disabled_to_salesforce_org"
copy_migration "add_error_message_to_salesforce_org"
copy_migration "expand_error_message_field_length"
copy_migration "rename_username_to_username_encrypted"
end
def install_assets
template 'sts_salesforce.js', 'app/assets/javascripts/sts_salesforce.js'
template 'sts_salesforce.css', 'app/assets/stylesheets/sts_salesforce.css'
end
def install_resources
template "salesforce_orgs.rb", "app/admin/salesforce_orgs.rb"
end
protected
def copy_migration(filename)
if self.class.migration_exists?("db/migrate", "#{filename}")
say_status("skipped", "Migration #{filename}.rb already exists")
else
migration_template "/migrations/#{filename}.rb", "db/migrate/#{filename}.rb"
end
end
end
end
end
| require 'rails/generators/migration'
require 'rails/generators/active_record/migration'
module StsSalesforce
module Generators
class InstallGenerator < ::Rails::Generators::Base
include Rails::Generators::Migration
extend ActiveRecord::Generators::Migration
source_root File.expand_path('../templates', __FILE__ )
desc "add migrations"
def copy_migrations
copy_migration "create_salesforce_orgs"
copy_migration "add_disabled_to_salesforce_org"
copy_migration "add_error_message_to_salesforce_org"
copy_migration "expand_error_message_field_length"
copy_migration "rename_username_to_username_encrypted"
end
def install_assets
template 'sts_salesforce.js', 'app/assets/javascripts/sts_salesforce.js'
template 'sts_salesforce.css', 'app/assets/stylesheets/sts_salesforce.css'
end
def install_resources
template "salesforce_orgs.rb", "app/admin/salesforce_orgs.rb"
end
protected
def copy_migration(filename)
if self.class.migration_exists?("db/migrate", "#{filename}")
say_status("skipped", "Migration #{filename}.rb already exists")
else
migration_template "/migrations/#{filename}.rb", "db/migrate/#{filename}.rb"
end
end
end
end
end
| Remove next_migration since using ActiveRecord | Remove next_migration since using ActiveRecord
| Ruby | mit | singletracksystems/sts_salesforce,singletracksystems/sts_salesforce,singletracksystems/sts_salesforce | ruby | ## Code Before:
require 'rails/generators/migration'
require 'rails/generators/active_record/migration'
module StsSalesforce
module Generators
class InstallGenerator < ::Rails::Generators::Base
include Rails::Generators::Migration
extend ActiveRecord::Generators::Migration
source_root File.expand_path('../templates', __FILE__ )
desc "add migrations"
def self.next_migration_number(path)
unless @prev_migration_nr
@prev_migration_nr = Time.now.utc.strftime("%Y%m%d%H%M%S").to_i
else
@prev_migration_nr += 1
end
@prev_migration_nr.to_s
end
def copy_migrations
copy_migration "create_salesforce_orgs"
copy_migration "add_disabled_to_salesforce_org"
copy_migration "add_error_message_to_salesforce_org"
copy_migration "expand_error_message_field_length"
copy_migration "rename_username_to_username_encrypted"
end
def install_assets
template 'sts_salesforce.js', 'app/assets/javascripts/sts_salesforce.js'
template 'sts_salesforce.css', 'app/assets/stylesheets/sts_salesforce.css'
end
def install_resources
template "salesforce_orgs.rb", "app/admin/salesforce_orgs.rb"
end
protected
def copy_migration(filename)
if self.class.migration_exists?("db/migrate", "#{filename}")
say_status("skipped", "Migration #{filename}.rb already exists")
else
migration_template "/migrations/#{filename}.rb", "db/migrate/#{filename}.rb"
end
end
end
end
end
## Instruction:
Remove next_migration since using ActiveRecord
## Code After:
require 'rails/generators/migration'
require 'rails/generators/active_record/migration'
module StsSalesforce
module Generators
class InstallGenerator < ::Rails::Generators::Base
include Rails::Generators::Migration
extend ActiveRecord::Generators::Migration
source_root File.expand_path('../templates', __FILE__ )
desc "add migrations"
def copy_migrations
copy_migration "create_salesforce_orgs"
copy_migration "add_disabled_to_salesforce_org"
copy_migration "add_error_message_to_salesforce_org"
copy_migration "expand_error_message_field_length"
copy_migration "rename_username_to_username_encrypted"
end
def install_assets
template 'sts_salesforce.js', 'app/assets/javascripts/sts_salesforce.js'
template 'sts_salesforce.css', 'app/assets/stylesheets/sts_salesforce.css'
end
def install_resources
template "salesforce_orgs.rb", "app/admin/salesforce_orgs.rb"
end
protected
def copy_migration(filename)
if self.class.migration_exists?("db/migrate", "#{filename}")
say_status("skipped", "Migration #{filename}.rb already exists")
else
migration_template "/migrations/#{filename}.rb", "db/migrate/#{filename}.rb"
end
end
end
end
end
| require 'rails/generators/migration'
require 'rails/generators/active_record/migration'
module StsSalesforce
module Generators
class InstallGenerator < ::Rails::Generators::Base
include Rails::Generators::Migration
extend ActiveRecord::Generators::Migration
source_root File.expand_path('../templates', __FILE__ )
desc "add migrations"
-
- def self.next_migration_number(path)
- unless @prev_migration_nr
- @prev_migration_nr = Time.now.utc.strftime("%Y%m%d%H%M%S").to_i
- else
- @prev_migration_nr += 1
- end
- @prev_migration_nr.to_s
- end
def copy_migrations
copy_migration "create_salesforce_orgs"
copy_migration "add_disabled_to_salesforce_org"
copy_migration "add_error_message_to_salesforce_org"
copy_migration "expand_error_message_field_length"
copy_migration "rename_username_to_username_encrypted"
end
def install_assets
template 'sts_salesforce.js', 'app/assets/javascripts/sts_salesforce.js'
template 'sts_salesforce.css', 'app/assets/stylesheets/sts_salesforce.css'
end
def install_resources
template "salesforce_orgs.rb", "app/admin/salesforce_orgs.rb"
end
protected
def copy_migration(filename)
if self.class.migration_exists?("db/migrate", "#{filename}")
say_status("skipped", "Migration #{filename}.rb already exists")
else
migration_template "/migrations/#{filename}.rb", "db/migrate/#{filename}.rb"
end
end
end
end
end | 9 | 0.173077 | 0 | 9 |
d8c382d735c4bb148c50c1309d128992baa75dbc | recipes/package.rb | recipes/package.rb |
case node['platform_family']
when 'debian', 'ubuntu'
package %w(transmission transmission-cli transmission-daemon)
else
package %w(transmission transmission-cli transmission-daemon)
end
include_recipe 'transmission::default'
|
package %w(transmission transmission-cli transmission-daemon)
include_recipe 'transmission::default'
| Remove a case statement for no reason | Remove a case statement for no reason
Signed-off-by: Tim Smith <[email protected]>
| Ruby | apache-2.0 | chef-cookbooks/transmission,opscode-cookbooks/transmission,chef-cookbooks/transmission,opscode-cookbooks/transmission,opscode-cookbooks/transmission,chef-cookbooks/transmission | ruby | ## Code Before:
case node['platform_family']
when 'debian', 'ubuntu'
package %w(transmission transmission-cli transmission-daemon)
else
package %w(transmission transmission-cli transmission-daemon)
end
include_recipe 'transmission::default'
## Instruction:
Remove a case statement for no reason
Signed-off-by: Tim Smith <[email protected]>
## Code After:
package %w(transmission transmission-cli transmission-daemon)
include_recipe 'transmission::default'
|
- case node['platform_family']
- when 'debian', 'ubuntu'
- package %w(transmission transmission-cli transmission-daemon)
? --
+ package %w(transmission transmission-cli transmission-daemon)
- else
- package %w(transmission transmission-cli transmission-daemon)
- end
include_recipe 'transmission::default' | 7 | 0.777778 | 1 | 6 |
b9654ffbbd1c2057d1ff377a0190b115f568d080 | knights/defaulttags.py | knights/defaulttags.py | from .library import Library
import datetime
register = Library()
@register.tag(name='now')
def now(parser, token):
args, kwargs = parser.parse_args(token)
def _now(context):
a, k = parser.resolve_args(context, args, kwargs)
val = datetime.datetime.now()
return val.strftime(a[0])
return _now
| from .library import Library
from .parse import BasicNode
import datetime
register = Library()
@register.tag(name='now')
class NowNode(BasicNode):
def render(self, fmt):
val = datetime.datetime.now()
return val.strftime(fmt)
| Rewrite 'now' tag to use BasicNode | Rewrite 'now' tag to use BasicNode
| Python | mit | funkybob/knights-templater,funkybob/knights-templater | python | ## Code Before:
from .library import Library
import datetime
register = Library()
@register.tag(name='now')
def now(parser, token):
args, kwargs = parser.parse_args(token)
def _now(context):
a, k = parser.resolve_args(context, args, kwargs)
val = datetime.datetime.now()
return val.strftime(a[0])
return _now
## Instruction:
Rewrite 'now' tag to use BasicNode
## Code After:
from .library import Library
from .parse import BasicNode
import datetime
register = Library()
@register.tag(name='now')
class NowNode(BasicNode):
def render(self, fmt):
val = datetime.datetime.now()
return val.strftime(fmt)
| from .library import Library
+ from .parse import BasicNode
import datetime
register = Library()
@register.tag(name='now')
+ class NowNode(BasicNode):
+ def render(self, fmt):
- def now(parser, token):
-
- args, kwargs = parser.parse_args(token)
-
- def _now(context):
- a, k = parser.resolve_args(context, args, kwargs)
val = datetime.datetime.now()
- return val.strftime(a[0])
? ^^^^
+ return val.strftime(fmt)
? ^^^
-
- return _now | 13 | 0.722222 | 4 | 9 |
268c4dce6cfa59e10cff7f4bf8456276c2e11f7d | main.py | main.py | # Copyright 2014 Dimitri "Tyrope" Molenaars
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use self file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from config import Config
import os
import versionComparator
def main():
cf = os.path.join(os.path.expanduser('~'), '.KSP-AVC','default.cfg')
cfg = Config(cf)
print cfg.get('install_dir')
#Shutdown procedure
if cfg.need_save():
cfg.save()
sys.exit(0)
if __name__ == '__main__':
main()
| # Copyright 2014 Dimitri "Tyrope" Molenaars
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import config
import os, sys
import versionComparator as verComp
def main():
# Find config folder.
cfg_dir = os.path.join(os.path.expanduser('~'), '.KSP-AVC')
# Create it if needed.
if not os.path.exists(cfg_dir):
os.makedirs(cfg_dir)
# Create config object.
cfg = config.Config(os.path.join(cfg_dir, 'default.cfg'))
for mod in findMods(cfg):
remote = verComp.getRemote(mod)
print "Found version file %s reporting remote %s" % (mod, remote)
# comp = verComp.versionComparator(mod, remote)
#Shutdown procedure
cfg.save()
sys.exit(0)
def findMods(cfg):
mods = set()
# Walk through the directories
for path, folders, files in os.walk(cfg.get('gamedata_dir')):
# Walk through the files.
for f in files:
# Found a version file.
if f.lower().endswith(".version"):
mods.add(os.path.join(path, f))
return mods
# Startup sequence
if __name__ == '__main__':
main()
| Create config paths if needed, find files and (for now) report remotes. | Create config paths if needed, find files and (for now) report remotes.
| Python | apache-2.0 | tyrope/KSP-addon-version-checker | python | ## Code Before:
# Copyright 2014 Dimitri "Tyrope" Molenaars
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use self file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from config import Config
import os
import versionComparator
def main():
cf = os.path.join(os.path.expanduser('~'), '.KSP-AVC','default.cfg')
cfg = Config(cf)
print cfg.get('install_dir')
#Shutdown procedure
if cfg.need_save():
cfg.save()
sys.exit(0)
if __name__ == '__main__':
main()
## Instruction:
Create config paths if needed, find files and (for now) report remotes.
## Code After:
# Copyright 2014 Dimitri "Tyrope" Molenaars
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import config
import os, sys
import versionComparator as verComp
def main():
# Find config folder.
cfg_dir = os.path.join(os.path.expanduser('~'), '.KSP-AVC')
# Create it if needed.
if not os.path.exists(cfg_dir):
os.makedirs(cfg_dir)
# Create config object.
cfg = config.Config(os.path.join(cfg_dir, 'default.cfg'))
for mod in findMods(cfg):
remote = verComp.getRemote(mod)
print "Found version file %s reporting remote %s" % (mod, remote)
# comp = verComp.versionComparator(mod, remote)
#Shutdown procedure
cfg.save()
sys.exit(0)
def findMods(cfg):
mods = set()
# Walk through the directories
for path, folders, files in os.walk(cfg.get('gamedata_dir')):
# Walk through the files.
for f in files:
# Found a version file.
if f.lower().endswith(".version"):
mods.add(os.path.join(path, f))
return mods
# Startup sequence
if __name__ == '__main__':
main()
| # Copyright 2014 Dimitri "Tyrope" Molenaars
# Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use self file except in compliance with the License.
? ---
+ # you may not use this file except in compliance with the License.
? +++
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
- from config import Config
+ import config
- import os
+ import os, sys
? +++++
- import versionComparator
+ import versionComparator as verComp
? +++++++++++
def main():
+ # Find config folder.
- cf = os.path.join(os.path.expanduser('~'), '.KSP-AVC','default.cfg')
? --------------
+ cfg_dir = os.path.join(os.path.expanduser('~'), '.KSP-AVC')
? +++++
- cfg = Config(cf)
- print cfg.get('install_dir')
+ # Create it if needed.
+ if not os.path.exists(cfg_dir):
+ os.makedirs(cfg_dir)
+ # Create config object.
+ cfg = config.Config(os.path.join(cfg_dir, 'default.cfg'))
+
+ for mod in findMods(cfg):
+ remote = verComp.getRemote(mod)
+ print "Found version file %s reporting remote %s" % (mod, remote)
+ # comp = verComp.versionComparator(mod, remote)
#Shutdown procedure
- if cfg.need_save():
- cfg.save()
? ----
+ cfg.save()
sys.exit(0)
+ def findMods(cfg):
+ mods = set()
+ # Walk through the directories
+ for path, folders, files in os.walk(cfg.get('gamedata_dir')):
+ # Walk through the files.
+ for f in files:
+ # Found a version file.
+ if f.lower().endswith(".version"):
+ mods.add(os.path.join(path, f))
+ return mods
+
+ # Startup sequence
if __name__ == '__main__':
main()
+ | 39 | 1.258065 | 30 | 9 |
ede8c47c3f41828e6f5a086454f8c052ef9c8da3 | .travis.yml | .travis.yml | sudo: false
language: node_js
node_js:
- "5.0.0"
- "4.2.2"
- "0.12"
| sudo: false
language: node_js
node_js:
- "5.0.0"
- "4.2.2"
| Remove support for node 0.12 | Remove support for node 0.12
| YAML | mit | css-modules/postcss-modules,outpunk/postcss-modules | yaml | ## Code Before:
sudo: false
language: node_js
node_js:
- "5.0.0"
- "4.2.2"
- "0.12"
## Instruction:
Remove support for node 0.12
## Code After:
sudo: false
language: node_js
node_js:
- "5.0.0"
- "4.2.2"
| sudo: false
language: node_js
node_js:
- "5.0.0"
- "4.2.2"
- - "0.12" | 1 | 0.166667 | 0 | 1 |
9547988a1a9ef8faf22d9bfa881f4e542637fd46 | utils.py | utils.py | import xmlrpclib
import cPickle
import subprocess
from time import sleep
p = None
s = None
def start_plot_server():
global p
if p is None:
p = subprocess.Popen(["python", "plot_server.py"])
def stop_plot_server():
if p is not None:
p.terminate()
sleep(0.01)
p.kill()
def plot_server_alive():
global s
try:
s.alive()
except Exception, e:
if str(e).endswith("Connection refused"):
return False
else:
raise
return True
def establish_connection():
global s
s = xmlrpclib.ServerProxy("http://localhost:8000/", allow_none=True)
if not plot_server_alive():
start_plot_server()
print "waiting for the plot server to start up..."
while not plot_server_alive():
sleep(0.05)
print " done."
def plot(vert, triangles):
print "plotting using mayavi..."
v = cPickle.dumps(vert)
t = cPickle.dumps(triangles)
s.plot(v, t)
print " done."
establish_connection()
| import xmlrpclib
import cPickle
import subprocess
from time import sleep
p = None
s = None
def start_plot_server():
global p
if p is None:
p = subprocess.Popen(["python", "plot_server.py"])
def stop_plot_server():
if p is not None:
p.terminate()
sleep(0.01)
p.kill()
def plot_server_alive():
global s
try:
s.alive()
except Exception, e:
if str(e).endswith("Connection refused"):
return False
else:
raise
return True
def establish_connection():
global s
if s is not None:
return
s = xmlrpclib.ServerProxy("http://localhost:8000/", allow_none=True)
if not plot_server_alive():
start_plot_server()
print "waiting for the plot server to start up..."
while not plot_server_alive():
sleep(0.05)
print " done."
def plot(vert, triangles):
establish_connection()
print "plotting using mayavi..."
v = cPickle.dumps(vert)
t = cPickle.dumps(triangles)
s.plot(v, t)
print " done."
| Establish connection only when needed | Establish connection only when needed
| Python | bsd-3-clause | certik/mhd-hermes,certik/mhd-hermes | python | ## Code Before:
import xmlrpclib
import cPickle
import subprocess
from time import sleep
p = None
s = None
def start_plot_server():
global p
if p is None:
p = subprocess.Popen(["python", "plot_server.py"])
def stop_plot_server():
if p is not None:
p.terminate()
sleep(0.01)
p.kill()
def plot_server_alive():
global s
try:
s.alive()
except Exception, e:
if str(e).endswith("Connection refused"):
return False
else:
raise
return True
def establish_connection():
global s
s = xmlrpclib.ServerProxy("http://localhost:8000/", allow_none=True)
if not plot_server_alive():
start_plot_server()
print "waiting for the plot server to start up..."
while not plot_server_alive():
sleep(0.05)
print " done."
def plot(vert, triangles):
print "plotting using mayavi..."
v = cPickle.dumps(vert)
t = cPickle.dumps(triangles)
s.plot(v, t)
print " done."
establish_connection()
## Instruction:
Establish connection only when needed
## Code After:
import xmlrpclib
import cPickle
import subprocess
from time import sleep
p = None
s = None
def start_plot_server():
global p
if p is None:
p = subprocess.Popen(["python", "plot_server.py"])
def stop_plot_server():
if p is not None:
p.terminate()
sleep(0.01)
p.kill()
def plot_server_alive():
global s
try:
s.alive()
except Exception, e:
if str(e).endswith("Connection refused"):
return False
else:
raise
return True
def establish_connection():
global s
if s is not None:
return
s = xmlrpclib.ServerProxy("http://localhost:8000/", allow_none=True)
if not plot_server_alive():
start_plot_server()
print "waiting for the plot server to start up..."
while not plot_server_alive():
sleep(0.05)
print " done."
def plot(vert, triangles):
establish_connection()
print "plotting using mayavi..."
v = cPickle.dumps(vert)
t = cPickle.dumps(triangles)
s.plot(v, t)
print " done."
| import xmlrpclib
import cPickle
import subprocess
from time import sleep
p = None
s = None
def start_plot_server():
global p
if p is None:
p = subprocess.Popen(["python", "plot_server.py"])
def stop_plot_server():
if p is not None:
p.terminate()
sleep(0.01)
p.kill()
def plot_server_alive():
global s
try:
s.alive()
except Exception, e:
if str(e).endswith("Connection refused"):
return False
else:
raise
return True
def establish_connection():
global s
+ if s is not None:
+ return
s = xmlrpclib.ServerProxy("http://localhost:8000/", allow_none=True)
if not plot_server_alive():
start_plot_server()
print "waiting for the plot server to start up..."
while not plot_server_alive():
sleep(0.05)
print " done."
def plot(vert, triangles):
+ establish_connection()
print "plotting using mayavi..."
v = cPickle.dumps(vert)
t = cPickle.dumps(triangles)
s.plot(v, t)
print " done."
- establish_connection() | 4 | 0.081633 | 3 | 1 |
ec2675e2d033fd315462615784783bd9b7a3101c | package.json | package.json | {
"name": "praline",
"version": "1.0.0-alpha",
"description": "Praline",
"author": "Nevena Gaj <[email protected]>",
"license": "MIT"
}
| {
"name": "praline",
"version": "1.0.0-alpha",
"description": "Praline",
"author": "Nevena Gaj <[email protected]>",
"license": "MIT",
"scripts": {
"container": "export NAME=praline WORKDIR=/$NAME; if docker ps -a | grep -q $NAME; then npm run container:start; else npm run container:run; fi",
"container:start": "docker start -ai praline",
"container:run": "docker run --name=$NAME -w /$NAME -p 8080:8080 -v $(pwd):/$NAME -v $(pwd)/package.json:/package.json -v /root/.npm -v /node_modules -e \"NODE_PATH=/node_modules\" -e \"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/node_modules/.bin\" --entrypoint=/bin/bash -it node:6"
}
}
| Add Docker container run scripts | Add Docker container run scripts
| JSON | mit | nevenagaj/praline | json | ## Code Before:
{
"name": "praline",
"version": "1.0.0-alpha",
"description": "Praline",
"author": "Nevena Gaj <[email protected]>",
"license": "MIT"
}
## Instruction:
Add Docker container run scripts
## Code After:
{
"name": "praline",
"version": "1.0.0-alpha",
"description": "Praline",
"author": "Nevena Gaj <[email protected]>",
"license": "MIT",
"scripts": {
"container": "export NAME=praline WORKDIR=/$NAME; if docker ps -a | grep -q $NAME; then npm run container:start; else npm run container:run; fi",
"container:start": "docker start -ai praline",
"container:run": "docker run --name=$NAME -w /$NAME -p 8080:8080 -v $(pwd):/$NAME -v $(pwd)/package.json:/package.json -v /root/.npm -v /node_modules -e \"NODE_PATH=/node_modules\" -e \"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/node_modules/.bin\" --entrypoint=/bin/bash -it node:6"
}
}
| {
"name": "praline",
"version": "1.0.0-alpha",
"description": "Praline",
"author": "Nevena Gaj <[email protected]>",
- "license": "MIT"
+ "license": "MIT",
? +
+ "scripts": {
+ "container": "export NAME=praline WORKDIR=/$NAME; if docker ps -a | grep -q $NAME; then npm run container:start; else npm run container:run; fi",
+ "container:start": "docker start -ai praline",
+ "container:run": "docker run --name=$NAME -w /$NAME -p 8080:8080 -v $(pwd):/$NAME -v $(pwd)/package.json:/package.json -v /root/.npm -v /node_modules -e \"NODE_PATH=/node_modules\" -e \"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/node_modules/.bin\" --entrypoint=/bin/bash -it node:6"
+ }
} | 7 | 1 | 6 | 1 |
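
The `container` script above packs a small decision into one shell line: reuse the existing `praline` container when `docker ps -a` already lists it, otherwise create it with `docker run`. The same branching is spelled out below as a rough Python sketch; only the flags needed for the illustration are kept, and this is not meant as a replacement for the npm scripts.

```python
import subprocess

NAME = "praline"   # container name taken from the scripts above

def container_exists(name: str) -> bool:
    # `docker ps -a --format {{.Names}}` lists one container name per line.
    result = subprocess.run(
        ["docker", "ps", "-a", "--format", "{{.Names}}"],
        capture_output=True, text=True, check=True,
    )
    return name in result.stdout.split()

def start_or_run() -> None:
    if container_exists(NAME):
        # Equivalent of `npm run container:start`: reattach to the container.
        subprocess.run(["docker", "start", "-ai", NAME], check=False)
    else:
        # Plays the role of `npm run container:run` (most flags omitted here).
        subprocess.run(["docker", "run", "--name", NAME, "-it", "node:6"], check=False)

if __name__ == "__main__":
    start_or_run()
```
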
ccb59e12cd4c9f3d831e530c103f24afc3c5fbaf | ckanext/stadtzhtheme/templates/base.html | ckanext/stadtzhtheme/templates/base.html | {% ckan_extends %}
{%- block title -%}
Open Data Zürich - Stadt Zürich
{%- endblock -%}
{% block links -%}
<link rel="shortcut icon" href="/favicon.ico" />
{% endblock -%}
{% block styles %}
{{ super() }}
{% resource 'stadtzhtheme/auto-complete.css' %}
{% resource 'stadtzhtheme/stadtzhtheme.css' %}
{% resource 'stadtzhtheme/masonry.pkgd.js' %}
{% resource 'stadtzhtheme/auto-complete.js' %}
{% resource 'stadtzhtheme/autocomplete-ogdzh-facet-search.js' %}
{% resource 'stadtzhtheme/autocomplete-ogdzh-header-search.js' %}
{% endblock %}
| {% ckan_extends %}
{% block meta %}
{{ super() }}
<meta name="robots" content="noindex" />
{% endblock %}
{%- block title -%}
Open Data Zürich - Stadt Zürich
{%- endblock -%}
{% block links -%}
<link rel="shortcut icon" href="/favicon.ico" />
{% endblock -%}
{% block styles %}
{{ super() }}
{% resource 'stadtzhtheme/auto-complete.css' %}
{% resource 'stadtzhtheme/stadtzhtheme.css' %}
{% resource 'stadtzhtheme/masonry.pkgd.js' %}
{% resource 'stadtzhtheme/auto-complete.js' %}
{% resource 'stadtzhtheme/autocomplete-ogdzh-facet-search.js' %}
{% resource 'stadtzhtheme/autocomplete-ogdzh-header-search.js' %}
{% endblock %}
| Add robots noindex meta tag | feat: Add robots noindex meta tag
| HTML | agpl-3.0 | opendatazurich/ckanext-stadtzh-theme,opendatazurich/ckanext-stadtzh-theme,opendatazurich/ckanext-stadtzh-theme | html | ## Code Before:
{% ckan_extends %}
{%- block title -%}
Open Data Zürich - Stadt Zürich
{%- endblock -%}
{% block links -%}
<link rel="shortcut icon" href="/favicon.ico" />
{% endblock -%}
{% block styles %}
{{ super() }}
{% resource 'stadtzhtheme/auto-complete.css' %}
{% resource 'stadtzhtheme/stadtzhtheme.css' %}
{% resource 'stadtzhtheme/masonry.pkgd.js' %}
{% resource 'stadtzhtheme/auto-complete.js' %}
{% resource 'stadtzhtheme/autocomplete-ogdzh-facet-search.js' %}
{% resource 'stadtzhtheme/autocomplete-ogdzh-header-search.js' %}
{% endblock %}
## Instruction:
feat: Add robots noindex meta tag
## Code After:
{% ckan_extends %}
{% block meta %}
{{ super() }}
<meta name="robots" content="noindex" />
{% endblock %}
{%- block title -%}
Open Data Zürich - Stadt Zürich
{%- endblock -%}
{% block links -%}
<link rel="shortcut icon" href="/favicon.ico" />
{% endblock -%}
{% block styles %}
{{ super() }}
{% resource 'stadtzhtheme/auto-complete.css' %}
{% resource 'stadtzhtheme/stadtzhtheme.css' %}
{% resource 'stadtzhtheme/masonry.pkgd.js' %}
{% resource 'stadtzhtheme/auto-complete.js' %}
{% resource 'stadtzhtheme/autocomplete-ogdzh-facet-search.js' %}
{% resource 'stadtzhtheme/autocomplete-ogdzh-header-search.js' %}
{% endblock %}
| {% ckan_extends %}
+
+ {% block meta %}
+ {{ super() }}
+ <meta name="robots" content="noindex" />
+ {% endblock %}
{%- block title -%}
Open Data Zürich - Stadt Zürich
{%- endblock -%}
{% block links -%}
<link rel="shortcut icon" href="/favicon.ico" />
{% endblock -%}
{% block styles %}
{{ super() }}
{% resource 'stadtzhtheme/auto-complete.css' %}
{% resource 'stadtzhtheme/stadtzhtheme.css' %}
{% resource 'stadtzhtheme/masonry.pkgd.js' %}
{% resource 'stadtzhtheme/auto-complete.js' %}
{% resource 'stadtzhtheme/autocomplete-ogdzh-facet-search.js' %}
{% resource 'stadtzhtheme/autocomplete-ogdzh-header-search.js' %}
{% endblock %} | 5 | 0.263158 | 5 | 0 |
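
The fix relies on ordinary Jinja2 template inheritance: the child theme overrides the parent's `meta` block, keeps the parent's output via `{{ super() }}`, and appends one extra tag. Here is a self-contained sketch of that mechanism using plain Jinja2 from Python; the two template strings are invented for the demo and are far smaller than CKAN's real base template.

```python
from jinja2 import Environment, DictLoader

templates = {
    "base.html": (
        "<head>{% block meta %}"
        '<meta charset="utf-8" />'
        "{% endblock %}</head>"
    ),
    "child.html": (
        "{% extends 'base.html' %}"
        "{% block meta %}{{ super() }}"                # keep the parent's tags
        '<meta name="robots" content="noindex" />'     # then add one more
        "{% endblock %}"
    ),
}

env = Environment(loader=DictLoader(templates))
print(env.get_template("child.html").render())
# -> <head><meta charset="utf-8" /><meta name="robots" content="noindex" /></head>
```
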
f784faf0f0119728b1c5020e5e8f4b926d4c30e9 | composer.json | composer.json | {
"name": "shalvah/unnportal-api",
"description": "Authenticate and get details about your users as UNN students",
"license": "MIT",
"keywords": ["UNN"],
"authors": [
{
"name": "Shalvah Adebayo",
"email": "[email protected]"
}
],
"require": {
"php": ">=5.3.3",
"ext-curl": "*"
},
"require-dev": {},
"autoload": {
"psr-4": {
"UnnPortal\\": "src"
}
}
}
| {
"name": "shalvah/unnportal-api",
"description": "Authenticate and get details about your users as UNN students",
"license": "MIT",
"keywords": ["UNN"],
"authors": [
{
"name": "Shalvah Adebayo",
"email": "[email protected]"
}
],
"require": {
"php": ">=5.6.4",
"ext-curl": "*",
"kitetail/zttp": "^0.3.0"
},
"require-dev": {},
"autoload": {
"psr-4": {
"UnnPortal\\": "src"
}
}
}
| Switch to Zttp for requests | Switch to Zttp for requests
| JSON | mit | shalvah/unnportal-php | json | ## Code Before:
{
"name": "shalvah/unnportal-api",
"description": "Authenticate and get details about your users as UNN students",
"license": "MIT",
"keywords": ["UNN"],
"authors": [
{
"name": "Shalvah Adebayo",
"email": "[email protected]"
}
],
"require": {
"php": ">=5.3.3",
"ext-curl": "*"
},
"require-dev": {},
"autoload": {
"psr-4": {
"UnnPortal\\": "src"
}
}
}
## Instruction:
Switch to Zttp for requests
## Code After:
{
"name": "shalvah/unnportal-api",
"description": "Authenticate and get details about your users as UNN students",
"license": "MIT",
"keywords": ["UNN"],
"authors": [
{
"name": "Shalvah Adebayo",
"email": "[email protected]"
}
],
"require": {
"php": ">=5.6.4",
"ext-curl": "*",
"kitetail/zttp": "^0.3.0"
},
"require-dev": {},
"autoload": {
"psr-4": {
"UnnPortal\\": "src"
}
}
}
| {
"name": "shalvah/unnportal-api",
"description": "Authenticate and get details about your users as UNN students",
"license": "MIT",
"keywords": ["UNN"],
"authors": [
{
"name": "Shalvah Adebayo",
"email": "[email protected]"
}
],
"require": {
- "php": ">=5.3.3",
? ^ ^
+ "php": ">=5.6.4",
? ^ ^
- "ext-curl": "*"
+ "ext-curl": "*",
? +
+ "kitetail/zttp": "^0.3.0"
},
"require-dev": {},
"autoload": {
"psr-4": {
"UnnPortal\\": "src"
}
}
} | 5 | 0.227273 | 3 | 2 |
19d45e44ab45caee8b0378323f8433df617cb9b6 | lib/generators/social_stream/documents/install_generator.rb | lib/generators/social_stream/documents/install_generator.rb | class SocialStream::Documents::InstallGenerator < Rails::Generators::Base
include Rails::Generators::Migration
source_root File.expand_path('../templates', __FILE__)
def create_migration_file
require 'rake'
Rails.application.load_tasks
Rake::Task['railties:install:migrations'].reenable
Rake::Task['social_stream_documents_engine:install:migrations'].invoke
end
end
| class SocialStream::Documents::InstallGenerator < Rails::Generators::Base
include Rails::Generators::Migration
source_root File.expand_path('../templates', __FILE__)
def create_migration_file
require 'rake'
Rails.application.load_tasks
Rake::Task['railties:install:migrations'].reenable
Rake::Task['social_stream_documents_engine:install:migrations'].invoke
end
def require_javascripts
inject_into_file 'app/assets/javascripts/application.js',
"//= require social_stream-documents\n",
:before => '//= require_tree .'
end
def require_stylesheets
inject_into_file 'app/assets/stylesheets/application.css',
" *= require social_stream-documents\n",
:before => ' *= require_tree .'
end
end
| Add javascripts and stylesheets to generator | Add javascripts and stylesheets to generator
| Ruby | mit | beder/social_stream,ging/social_stream,luca/social_stream-documents,honorlin/social_stream,luca/social_stream-documents,ging/social_stream,beder/social_stream,honorlin/social_stream,ging/social_stream,beder/social_stream,honorlin/social_stream,honorlin/social_stream,beder/social_stream | ruby | ## Code Before:
class SocialStream::Documents::InstallGenerator < Rails::Generators::Base
include Rails::Generators::Migration
source_root File.expand_path('../templates', __FILE__)
def create_migration_file
require 'rake'
Rails.application.load_tasks
Rake::Task['railties:install:migrations'].reenable
Rake::Task['social_stream_documents_engine:install:migrations'].invoke
end
end
## Instruction:
Add javascripts and stylesheets to generator
## Code After:
class SocialStream::Documents::InstallGenerator < Rails::Generators::Base
include Rails::Generators::Migration
source_root File.expand_path('../templates', __FILE__)
def create_migration_file
require 'rake'
Rails.application.load_tasks
Rake::Task['railties:install:migrations'].reenable
Rake::Task['social_stream_documents_engine:install:migrations'].invoke
end
def require_javascripts
inject_into_file 'app/assets/javascripts/application.js',
"//= require social_stream-documents\n",
:before => '//= require_tree .'
end
def require_stylesheets
inject_into_file 'app/assets/stylesheets/application.css',
" *= require social_stream-documents\n",
:before => ' *= require_tree .'
end
end
| class SocialStream::Documents::InstallGenerator < Rails::Generators::Base
include Rails::Generators::Migration
source_root File.expand_path('../templates', __FILE__)
def create_migration_file
require 'rake'
Rails.application.load_tasks
Rake::Task['railties:install:migrations'].reenable
Rake::Task['social_stream_documents_engine:install:migrations'].invoke
end
+
+ def require_javascripts
+ inject_into_file 'app/assets/javascripts/application.js',
+ "//= require social_stream-documents\n",
+ :before => '//= require_tree .'
+ end
+
+ def require_stylesheets
+ inject_into_file 'app/assets/stylesheets/application.css',
+ " *= require social_stream-documents\n",
+ :before => ' *= require_tree .'
+ end
end | 12 | 1 | 12 | 0 |
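
The `inject_into_file` calls above boil down to "insert this line immediately before a marker line in an existing file". A framework-free Python sketch of that operation follows, with the file name and marker mirroring the generator; the helper name is made up and error handling is kept minimal.

```python
def inject_before(path: str, new_line: str, marker: str) -> None:
    """Insert new_line immediately before the first line starting with marker."""
    with open(path, "r", encoding="utf-8") as fh:
        lines = fh.readlines()

    for i, line in enumerate(lines):
        if line.lstrip().startswith(marker):
            if not new_line.endswith("\n"):
                new_line += "\n"
            lines.insert(i, new_line)
            break
    else:
        raise ValueError(f"marker {marker!r} not found in {path}")

    with open(path, "w", encoding="utf-8") as fh:
        fh.writelines(lines)

# Roughly what the generator's require_javascripts step does:
# inject_before("app/assets/javascripts/application.js",
#               "//= require social_stream-documents",
#               "//= require_tree .")
```
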
17f01c2c9a97c9eaccbb13c24c71833ef642b685 | src/app/controllers/SettingsMainController.js | src/app/controllers/SettingsMainController.js | import Controller from "@ember/controller";
import { computed } from "@ember/object";
import { locales as localesConfig, themes as themesConfig } from "config";
const { locales } = localesConfig;
const { themes } = themesConfig;
export default Controller.extend({
contentGuiLanguages: computed(function() {
return Object.keys( locales )
.map( key => ({
id: key,
label: locales[ key ]
}) );
}),
contentGuiTheme: computed(function() {
return themes.map( id => ({ id }) );
})
});
| import Controller from "@ember/controller";
import { computed } from "@ember/object";
import { locales as localesConfig, themes as themesConfig } from "config";
const { locales } = localesConfig;
const { themes } = themesConfig;
export default Controller.extend({
contentGuiLanguages: computed(function() {
const compare = new Intl.Collator( "en", { sensitivity: "base" } ).compare;
return Object.keys( locales )
.map( key => ({
id: key,
label: locales[ key ]
}) )
// sort by localized language name in English order
.sort( ( a, b ) => compare( a.label, b.label ) );
}),
contentGuiTheme: computed(function() {
return themes.map( id => ({ id }) );
})
});
| Sort language dropdown by localized language name | Sort language dropdown by localized language name
| JavaScript | mit | chhe/livestreamer-twitch-gui,chhe/livestreamer-twitch-gui,bastimeyer/livestreamer-twitch-gui,bastimeyer/livestreamer-twitch-gui,streamlink/streamlink-twitch-gui,bastimeyer/livestreamer-twitch-gui,streamlink/streamlink-twitch-gui,chhe/livestreamer-twitch-gui,streamlink/streamlink-twitch-gui | javascript | ## Code Before:
import Controller from "@ember/controller";
import { computed } from "@ember/object";
import { locales as localesConfig, themes as themesConfig } from "config";
const { locales } = localesConfig;
const { themes } = themesConfig;
export default Controller.extend({
contentGuiLanguages: computed(function() {
return Object.keys( locales )
.map( key => ({
id: key,
label: locales[ key ]
}) );
}),
contentGuiTheme: computed(function() {
return themes.map( id => ({ id }) );
})
});
## Instruction:
Sort language dropdown by localized language name
## Code After:
import Controller from "@ember/controller";
import { computed } from "@ember/object";
import { locales as localesConfig, themes as themesConfig } from "config";
const { locales } = localesConfig;
const { themes } = themesConfig;
export default Controller.extend({
contentGuiLanguages: computed(function() {
const compare = new Intl.Collator( "en", { sensitivity: "base" } ).compare;
return Object.keys( locales )
.map( key => ({
id: key,
label: locales[ key ]
}) )
// sort by localized language name in English order
.sort( ( a, b ) => compare( a.label, b.label ) );
}),
contentGuiTheme: computed(function() {
return themes.map( id => ({ id }) );
})
});
| import Controller from "@ember/controller";
import { computed } from "@ember/object";
import { locales as localesConfig, themes as themesConfig } from "config";
const { locales } = localesConfig;
const { themes } = themesConfig;
export default Controller.extend({
contentGuiLanguages: computed(function() {
+ const compare = new Intl.Collator( "en", { sensitivity: "base" } ).compare;
+
return Object.keys( locales )
.map( key => ({
id: key,
label: locales[ key ]
- }) );
? -
+ }) )
+ // sort by localized language name in English order
+ .sort( ( a, b ) => compare( a.label, b.label ) );
}),
contentGuiTheme: computed(function() {
return themes.map( id => ({ id }) );
})
}); | 6 | 0.272727 | 5 | 1 |
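
The computed property now orders the `{ id, label }` entries by their human-readable label with a case-insensitive comparator instead of leaving them in object-key order. For comparison, the same idea in Python; the sample `locales` mapping is invented, and `str.casefold` only approximates what an `Intl.Collator` with sensitivity `"base"` does (it ignores case but not accents).

```python
locales = {"en": "english", "de": "Deutsch", "da": "dansk"}

# Build [{id, label}, ...] and order it by the displayed label, ignoring case.
content_languages = sorted(
    ({"id": key, "label": label} for key, label in locales.items()),
    key=lambda entry: entry["label"].casefold(),
)

print([entry["label"] for entry in content_languages])   # ['dansk', 'Deutsch', 'english']
# Without the casefold key, the capital "D" of "Deutsch" would sort before
# both lowercase labels because plain string order compares code points.
```
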
db2509009c8ee175654350d9dc892fbfcd5345ec | app.json | app.json | {
"name": "Caterblu",
"description": "A Django project to retrieve various informations on blu-ray releases.",
"image": "heroku/python",
"repository": "https://github.com/heroku/caterblu",
"keywords": ["python", "django" ],
"addons": [],
"buildpacks": [{
"url": "https://github.com/apiaryio/heroku-buildpack-nodejs-grunt"
}, {
"url": "https://github.com/heroku/heroku-buildpack-python"
}],
}
| {
"name": "Caterblu",
"description": "A Django project to retrieve various informations on blu-ray releases.",
"image": "heroku/python",
"repository": "https://github.com/heroku/caterblu",
"keywords": ["python", "django" ],
"addons": [],
"buildpacks": [{
"url": "https://github.com/heroku/heroku-buildpack-python"
}],
"scripts": {
"preinstall": "python3 manage.py bower install",
"preinstall": "python3 manage.py collectstatic"
},
}
| Move commands to other file | Move commands to other file
| JSON | mit | jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu | json | ## Code Before:
{
"name": "Caterblu",
"description": "A Django project to retrieve various informations on blu-ray releases.",
"image": "heroku/python",
"repository": "https://github.com/heroku/caterblu",
"keywords": ["python", "django" ],
"addons": [],
"buildpacks": [{
"url": "https://github.com/apiaryio/heroku-buildpack-nodejs-grunt"
}, {
"url": "https://github.com/heroku/heroku-buildpack-python"
}],
}
## Instruction:
Move commands to other file
## Code After:
{
"name": "Caterblu",
"description": "A Django project to retrieve various informations on blu-ray releases.",
"image": "heroku/python",
"repository": "https://github.com/heroku/caterblu",
"keywords": ["python", "django" ],
"addons": [],
"buildpacks": [{
"url": "https://github.com/heroku/heroku-buildpack-python"
}],
"scripts": {
"preinstall": "python3 manage.py bower install",
"preinstall": "python3 manage.py collectstatic"
},
}
| {
"name": "Caterblu",
"description": "A Django project to retrieve various informations on blu-ray releases.",
"image": "heroku/python",
"repository": "https://github.com/heroku/caterblu",
"keywords": ["python", "django" ],
"addons": [],
"buildpacks": [{
- "url": "https://github.com/apiaryio/heroku-buildpack-nodejs-grunt"
- }, {
- "url": "https://github.com/heroku/heroku-buildpack-python"
? ----
+ "url": "https://github.com/heroku/heroku-buildpack-python"
- }],
? --
+ }],
+ "scripts": {
+ "preinstall": "python3 manage.py bower install",
+ "preinstall": "python3 manage.py collectstatic"
+ },
} | 10 | 0.769231 | 6 | 4 |
a044aedea1e69145430f4cbe8249b014e9645e18 | app/views/admin/local_transactions/new.html.erb | app/views/admin/local_transactions/new.html.erb | <h2>New Local Transaction</h2>
<p>We need a bit more information to create your local transaction.</p>
<%= semantic_form_for([:admin, @publication]) do |f| %>
<%= f.inputs :lgsl_code %>
<%= f.inputs do %>
<%= f.input :panopticon_id, :as => :hidden %>
<% end %>
<%= f.buttons %>
<% end %>
| <h2>New Local Transaction</h2>
<p>We need a bit more information to create your local transaction.</p>
<%= semantic_form_for([:admin, @publication]) do |f| %>
<%= f.inputs :lgsl_code %>
<%= f.inputs do %>
<%= f.input :panopticon_id, :as => :hidden %>
<%= f.input :title, :as => :hidden %>
<%= f.input :slug, :as => :hidden %>
<%= f.input :section, :as => :hidden %>
<%= f.input :department, :as => :hidden %>
<%= f.input :related_items, :as => :hidden %>
<% end %>
<%= f.buttons %>
<% end %>
| Hide more information in the local transaction form | Hide more information in the local transaction form
| HTML+ERB | mit | leftees/publisher,alphagov/publisher,alphagov/publisher,telekomatrix/publisher,telekomatrix/publisher,leftees/publisher,theodi/publisher,alphagov/publisher,theodi/publisher,telekomatrix/publisher,theodi/publisher,leftees/publisher,telekomatrix/publisher,leftees/publisher,theodi/publisher | html+erb | ## Code Before:
<h2>New Local Transaction</h2>
<p>We need a bit more information to create your local transaction.</p>
<%= semantic_form_for([:admin, @publication]) do |f| %>
<%= f.inputs :lgsl_code %>
<%= f.inputs do %>
<%= f.input :panopticon_id, :as => :hidden %>
<% end %>
<%= f.buttons %>
<% end %>
## Instruction:
Hide more information in the local transaction form
## Code After:
<h2>New Local Transaction</h2>
<p>We need a bit more information to create your local transaction.</p>
<%= semantic_form_for([:admin, @publication]) do |f| %>
<%= f.inputs :lgsl_code %>
<%= f.inputs do %>
<%= f.input :panopticon_id, :as => :hidden %>
<%= f.input :title, :as => :hidden %>
<%= f.input :slug, :as => :hidden %>
<%= f.input :section, :as => :hidden %>
<%= f.input :department, :as => :hidden %>
<%= f.input :related_items, :as => :hidden %>
<% end %>
<%= f.buttons %>
<% end %>
| <h2>New Local Transaction</h2>
<p>We need a bit more information to create your local transaction.</p>
<%= semantic_form_for([:admin, @publication]) do |f| %>
<%= f.inputs :lgsl_code %>
<%= f.inputs do %>
<%= f.input :panopticon_id, :as => :hidden %>
+ <%= f.input :title, :as => :hidden %>
+ <%= f.input :slug, :as => :hidden %>
+ <%= f.input :section, :as => :hidden %>
+ <%= f.input :department, :as => :hidden %>
+ <%= f.input :related_items, :as => :hidden %>
<% end %>
<%= f.buttons %>
<% end %> | 5 | 0.454545 | 5 | 0 |
8056de5d6052e55c78633c8c28292b115a8ba56b | js/app.js | js/app.js | var app = angular.module('tokenKidApp', ['ngRoute']);
// create the controller and inject Angular's $scope
app.controller('mainController', function($scope) {
// create a message to display in our view
$scope.message = 'Everyone come and see how good I look!';
$scope.loginClicked = function() {
console.log('click! ' + $scope.username);
};
});
app.config(function($routeProvider){
$routeProvider
.when('/',{
templateUrl: 'login.html'
})
.when('/list',{
templateUrl: 'list.html'
});
});
| var app = angular.module('tokenKidApp', ['ngRoute']);
// create the controller and inject Angular's $scope
app.controller('mainController', function($scope) {
// create a message to display in our view
$scope.message = 'Everyone come and see how good I look!';
$scope.loginClicked = function() {
window.location.href = "/#/list";
};
});
app.config(function($routeProvider){
$routeProvider
.when('/',{
templateUrl: 'login.html'
})
.when('/list',{
templateUrl: 'list.html'
});
});
| Add some links and things | Add some links and things
| JavaScript | apache-2.0 | itsjason/TokenKid,itsjason/TokenKid | javascript | ## Code Before:
var app = angular.module('tokenKidApp', ['ngRoute']);
// create the controller and inject Angular's $scope
app.controller('mainController', function($scope) {
// create a message to display in our view
$scope.message = 'Everyone come and see how good I look!';
$scope.loginClicked = function() {
console.log('click! ' + $scope.username);
};
});
app.config(function($routeProvider){
$routeProvider
.when('/',{
templateUrl: 'login.html'
})
.when('/list',{
templateUrl: 'list.html'
});
});
## Instruction:
Add some links and things
## Code After:
var app = angular.module('tokenKidApp', ['ngRoute']);
// create the controller and inject Angular's $scope
app.controller('mainController', function($scope) {
// create a message to display in our view
$scope.message = 'Everyone come and see how good I look!';
$scope.loginClicked = function() {
window.location.href = "/#/list";
};
});
app.config(function($routeProvider){
$routeProvider
.when('/',{
templateUrl: 'login.html'
})
.when('/list',{
templateUrl: 'list.html'
});
});
| var app = angular.module('tokenKidApp', ['ngRoute']);
// create the controller and inject Angular's $scope
app.controller('mainController', function($scope) {
// create a message to display in our view
$scope.message = 'Everyone come and see how good I look!';
$scope.loginClicked = function() {
- console.log('click! ' + $scope.username);
+ window.location.href = "/#/list";
};
});
app.config(function($routeProvider){
$routeProvider
.when('/',{
templateUrl: 'login.html'
})
.when('/list',{
templateUrl: 'list.html'
});
});
| 2 | 0.086957 | 1 | 1 |
c6e130682712e8534e773036ba3d87c09b91ff1c | knowledge_repo/postprocessors/format_checks.py | knowledge_repo/postprocessors/format_checks.py | from ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, input in HEADER_REQUIRED_FIELD_TYPES:
assert field in headers, "Required field `{}` missing from headers.".format(
field)
assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
field, type(headers[field]), typ)
for field, typ, input in HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
field, type(headers[field]), typ)
| from ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, input in HEADER_REQUIRED_FIELD_TYPES:
assert field in headers, \
"Required field `{field}` missing from headers."
assert isinstance(headers[field], typ), \
f"Value for field `{field}` is of type " + \
f"{type(headers[field])}, and needs to be of type {typ}."
for field, typ, input in HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
assert isinstance(headers[field], typ), \
f"Value for field `{field}` is of type " + \
f"{type(headers[field])}, and needs to be of type {typ}."
| Fix lint issues related to long lines | Fix lint issues related to long lines
| Python | apache-2.0 | airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo | python | ## Code Before:
from ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, input in HEADER_REQUIRED_FIELD_TYPES:
assert field in headers, "Required field `{}` missing from headers.".format(
field)
assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
field, type(headers[field]), typ)
for field, typ, input in HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
field, type(headers[field]), typ)
## Instruction:
Fix lint issues related to long lines
## Code After:
from ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, input in HEADER_REQUIRED_FIELD_TYPES:
assert field in headers, \
"Required field `{field}` missing from headers."
assert isinstance(headers[field], typ), \
f"Value for field `{field}` is of type " + \
f"{type(headers[field])}, and needs to be of type {typ}."
for field, typ, input in HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
assert isinstance(headers[field], typ), \
f"Value for field `{field}` is of type " + \
f"{type(headers[field])}, and needs to be of type {typ}."
| from ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, input in HEADER_REQUIRED_FIELD_TYPES:
+ assert field in headers, \
- assert field in headers, "Required field `{}` missing from headers.".format(
? ------ ----- -- -------- --------
+ "Required field `{field}` missing from headers."
? +++++
- field)
- assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
- field, type(headers[field]), typ)
+ assert isinstance(headers[field], typ), \
+ f"Value for field `{field}` is of type " + \
+ f"{type(headers[field])}, and needs to be of type {typ}."
for field, typ, input in HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
- assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
- field, type(headers[field]), typ)
+ assert isinstance(headers[field], typ), \
+ f"Value for field `{field}` is of type " + \
+ f"{type(headers[field])}, and needs to be of type {typ}." | 14 | 0.736842 | 8 | 6 |
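
The rewrite above gets under the line-length limit by splitting each assertion message across continuation lines and switching to f-strings. Two equivalent ways to wrap such a message are shown below; the sample values are arbitrary, and note that every literal containing `{...}` placeholders needs its own `f` prefix to interpolate.

```python
field, typ = "title", str
value = 42

# 1) Explicit continuation with string concatenation (the style in the diff above).
msg_a = f"Value for field `{field}` is of type " + \
        f"{type(value)}, and needs to be of type {typ}."

# 2) Implicit concatenation of adjacent literals inside parentheses,
#    which avoids the backslashes entirely.
msg_b = (
    f"Value for field `{field}` is of type "
    f"{type(value)}, and needs to be of type {typ}."
)

assert msg_a == msg_b
```
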
b33c1b70bcb7a5303c1731cb6699466610ee54af | pyedgar/__init__.py | pyedgar/__init__.py |
__title__ = 'pyedgar'
__version__ = '0.0.3a1'
__author__ = 'Mac Gaulin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2018 Mac Gaulin'
# Include top level modules
from . import filing
from . import downloader
# Include sub-modules
from . import utilities
from . import exceptions
from .exceptions import (InputTypeError, WrongFormType,
NoFormTypeFound, NoCIKFound)
# __all__ = [edgarweb, forms, localstore, plaintext, #downloader,
# InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound]
|
__title__ = 'pyedgar'
__version__ = '0.0.4a1'
__author__ = 'Mac Gaulin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2018 Mac Gaulin'
# Include sub-modules
from . import utilities
from . import exceptions
from .exceptions import (InputTypeError, WrongFormType,
NoFormTypeFound, NoCIKFound)
# __all__ = [edgarweb, forms, localstore, plaintext, #downloader,
# InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound]
| Remove top level imports to avoid cyclical import | Remove top level imports to avoid cyclical import
| Python | mit | gaulinmp/pyedgar | python | ## Code Before:
__title__ = 'pyedgar'
__version__ = '0.0.3a1'
__author__ = 'Mac Gaulin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2018 Mac Gaulin'
# Include top level modules
from . import filing
from . import downloader
# Include sub-modules
from . import utilities
from . import exceptions
from .exceptions import (InputTypeError, WrongFormType,
NoFormTypeFound, NoCIKFound)
# __all__ = [edgarweb, forms, localstore, plaintext, #downloader,
# InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound]
## Instruction:
Remove top level imports to avoid cyclical import
## Code After:
__title__ = 'pyedgar'
__version__ = '0.0.4a1'
__author__ = 'Mac Gaulin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2018 Mac Gaulin'
# Include sub-modules
from . import utilities
from . import exceptions
from .exceptions import (InputTypeError, WrongFormType,
NoFormTypeFound, NoCIKFound)
# __all__ = [edgarweb, forms, localstore, plaintext, #downloader,
# InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound]
|
__title__ = 'pyedgar'
- __version__ = '0.0.3a1'
? ^
+ __version__ = '0.0.4a1'
? ^
__author__ = 'Mac Gaulin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2018 Mac Gaulin'
-
- # Include top level modules
- from . import filing
- from . import downloader
# Include sub-modules
from . import utilities
from . import exceptions
from .exceptions import (InputTypeError, WrongFormType,
NoFormTypeFound, NoCIKFound)
# __all__ = [edgarweb, forms, localstore, plaintext, #downloader,
# InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound] | 6 | 0.3 | 1 | 5 |
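
Removing the package-level `from . import filing` / `downloader` lines breaks the cycle because nothing in `__init__.py` reaches back into those modules while the package is still half-initialised. Code that still needs them can defer the import into the function that uses it, a common companion to this kind of cleanup. A sketch of that pattern is below; the module name `reports.py` and the `process` call are hypothetical, not pyedgar's actual API.

```python
# pkg/reports.py -- a hypothetical module inside the same package
def build_report(args):
    # Deferred import: resolved on the first call, long after pkg/__init__.py
    # has finished executing, so an import-time cycle with `filing` cannot
    # start while the package itself is still being initialised.
    from . import filing          # instead of a module-level import
    return filing.process(args)   # `process` is a made-up stand-in
```
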
f62dd2ff0eedfca086f833d8dd710534512a51a1 | main.rb | main.rb | Dir[File.dirname(__FILE__) + '/lib/**/*.rb'].each {|file| require file }
class TernaryExpression
def initialize(variable_terminals, depth)
@operator = get_operator
@operand_1 = ::Expression::OperandCreator.create(variable_terminals, depth)
@operand_2 = ::Expression::OperandCreator.create(variable_terminals, depth)
@conditional = ConditionalExpression.new(variable_terminals, depth)
end
def get_operator
end
def to_s
"( " + @conditional.to_s + " ? " + @operand_1.to_s + " : " + @operand_2.to_s + " )"
end
end
class BinaryExpression < Expression::Base
def initialize(variable_terminals, depth)
super
@operand_2 = ::Expression::OperandCreator.create(variable_terminals, depth)
end
def get_operator
['+', '-', '*', '/', '**'].sample
end
def to_s
"( " + @operand_1.to_s + " " + @operator.to_s + " " + @operand_2.to_s + " )"
end
end
class ConditionalExpression < BinaryExpression
def get_operator
['<', '<=', '>', '>='].sample
end
end
class UnaryExpression < Expression::Base
def get_operator
'-'
end
def to_s
"( " + @operator.to_s + " " + @operand_1.to_s + " )"
end
end
10.times do
e = Expression.create
puts e.to_s
end
| Dir[File.dirname(__FILE__) + '/lib/**/*.rb'].each {|file| require file }
class Expression
class TernaryExpression
def initialize(variable_terminals, depth)
@operator = get_operator
@operand_1 = ::Expression::OperandCreator.create(variable_terminals, depth)
@operand_2 = ::Expression::OperandCreator.create(variable_terminals, depth)
@conditional = ConditionalExpression.new(variable_terminals, depth)
end
def get_operator
end
def to_s
"( " + @conditional.to_s + " ? " + @operand_1.to_s + " : " + @operand_2.to_s + " )"
end
end
class BinaryExpression < Expression::Base
def initialize(variable_terminals, depth)
super
@operand_2 = ::Expression::OperandCreator.create(variable_terminals, depth)
end
def get_operator
['+', '-', '*', '/', '**'].sample
end
def to_s
"( " + @operand_1.to_s + " " + @operator.to_s + " " + @operand_2.to_s + " )"
end
end
class ConditionalExpression < BinaryExpression
def get_operator
['<', '<=', '>', '>='].sample
end
end
class UnaryExpression < Expression::Base
def get_operator
'-'
end
def to_s
"( " + @operator.to_s + " " + @operand_1.to_s + " )"
end
end
end
10.times do
e = Expression.create
puts e.to_s
end
| Add class Expression for namespacing | Add class Expression for namespacing
| Ruby | mit | nick1123/abstract_syntax_tree | ruby | ## Code Before:
Dir[File.dirname(__FILE__) + '/lib/**/*.rb'].each {|file| require file }
class TernaryExpression
def initialize(variable_terminals, depth)
@operator = get_operator
@operand_1 = ::Expression::OperandCreator.create(variable_terminals, depth)
@operand_2 = ::Expression::OperandCreator.create(variable_terminals, depth)
@conditional = ConditionalExpression.new(variable_terminals, depth)
end
def get_operator
end
def to_s
"( " + @conditional.to_s + " ? " + @operand_1.to_s + " : " + @operand_2.to_s + " )"
end
end
class BinaryExpression < Expression::Base
def initialize(variable_terminals, depth)
super
@operand_2 = ::Expression::OperandCreator.create(variable_terminals, depth)
end
def get_operator
['+', '-', '*', '/', '**'].sample
end
def to_s
"( " + @operand_1.to_s + " " + @operator.to_s + " " + @operand_2.to_s + " )"
end
end
class ConditionalExpression < BinaryExpression
def get_operator
['<', '<=', '>', '>='].sample
end
end
class UnaryExpression < Expression::Base
def get_operator
'-'
end
def to_s
"( " + @operator.to_s + " " + @operand_1.to_s + " )"
end
end
10.times do
e = Expression.create
puts e.to_s
end
## Instruction:
Add class Expression for namespacing
## Code After:
Dir[File.dirname(__FILE__) + '/lib/**/*.rb'].each {|file| require file }
class Expression
class TernaryExpression
def initialize(variable_terminals, depth)
@operator = get_operator
@operand_1 = ::Expression::OperandCreator.create(variable_terminals, depth)
@operand_2 = ::Expression::OperandCreator.create(variable_terminals, depth)
@conditional = ConditionalExpression.new(variable_terminals, depth)
end
def get_operator
end
def to_s
"( " + @conditional.to_s + " ? " + @operand_1.to_s + " : " + @operand_2.to_s + " )"
end
end
class BinaryExpression < Expression::Base
def initialize(variable_terminals, depth)
super
@operand_2 = ::Expression::OperandCreator.create(variable_terminals, depth)
end
def get_operator
['+', '-', '*', '/', '**'].sample
end
def to_s
"( " + @operand_1.to_s + " " + @operator.to_s + " " + @operand_2.to_s + " )"
end
end
class ConditionalExpression < BinaryExpression
def get_operator
['<', '<=', '>', '>='].sample
end
end
class UnaryExpression < Expression::Base
def get_operator
'-'
end
def to_s
"( " + @operator.to_s + " " + @operand_1.to_s + " )"
end
end
end
10.times do
e = Expression.create
puts e.to_s
end
| Dir[File.dirname(__FILE__) + '/lib/**/*.rb'].each {|file| require file }
+ class Expression
- class TernaryExpression
+ class TernaryExpression
? ++
- def initialize(variable_terminals, depth)
+ def initialize(variable_terminals, depth)
? ++
- @operator = get_operator
+ @operator = get_operator
? ++
- @operand_1 = ::Expression::OperandCreator.create(variable_terminals, depth)
+ @operand_1 = ::Expression::OperandCreator.create(variable_terminals, depth)
? ++
- @operand_2 = ::Expression::OperandCreator.create(variable_terminals, depth)
+ @operand_2 = ::Expression::OperandCreator.create(variable_terminals, depth)
? ++
- @conditional = ConditionalExpression.new(variable_terminals, depth)
+ @conditional = ConditionalExpression.new(variable_terminals, depth)
? ++
+ end
+
+ def get_operator
+ end
+
+ def to_s
+ "( " + @conditional.to_s + " ? " + @operand_1.to_s + " : " + @operand_2.to_s + " )"
+ end
end
+ class BinaryExpression < Expression::Base
+ def initialize(variable_terminals, depth)
+ super
+ @operand_2 = ::Expression::OperandCreator.create(variable_terminals, depth)
+ end
+
- def get_operator
+ def get_operator
? ++
+ ['+', '-', '*', '/', '**'].sample
+ end
+
+ def to_s
+ "( " + @operand_1.to_s + " " + @operator.to_s + " " + @operand_2.to_s + " )"
+ end
end
- def to_s
- "( " + @conditional.to_s + " ? " + @operand_1.to_s + " : " + @operand_2.to_s + " )"
+ class ConditionalExpression < BinaryExpression
+ def get_operator
+ ['<', '<=', '>', '>='].sample
- end
+ end
? ++
- end
-
- class BinaryExpression < Expression::Base
- def initialize(variable_terminals, depth)
- super
- @operand_2 = ::Expression::OperandCreator.create(variable_terminals, depth)
end
+ class UnaryExpression < Expression::Base
- def get_operator
+ def get_operator
? ++
- ['+', '-', '*', '/', '**'].sample
+ '-'
- end
+ end
? ++
- def to_s
+ def to_s
? ++
- "( " + @operand_1.to_s + " " + @operator.to_s + " " + @operand_2.to_s + " )"
- end
- end
-
- class ConditionalExpression < BinaryExpression
- def get_operator
- ['<', '<=', '>', '>='].sample
- end
- end
-
- class UnaryExpression < Expression::Base
- def get_operator
- '-'
- end
-
- def to_s
- "( " + @operator.to_s + " " + @operand_1.to_s + " )"
+ "( " + @operator.to_s + " " + @operand_1.to_s + " )"
? ++
+ end
end
end
10.times do
e = Expression.create
puts e.to_s
end
| 76 | 1.407407 | 39 | 37 |
ff8a574864e8cee4245095a67ac0adfbb43e7c42 | index.php | index.php | <?php
/**
* RockMongo startup
*
* In here we define some default settings and start the configuration files
* @package rockmongo
*/
/**
* Defining version number and enabling error reporting
*/
define("ROCK_MONGO_VERSION", "1.1.8");
error_reporting(E_ALL);
/**
* Environment detection
*/
if (!version_compare(PHP_VERSION, "5.0")) {
exit("To make things right, you must install PHP5");
}
if (!class_exists("Mongo") && !class_exists("MongoClient")) {
exit("To make things right, you must install php_mongo module. <a href=\"http://www.php.net/manual/en/mongo.installation.php\" target=\"_blank\">Here for installation documents on PHP.net.</a>");
}
// enforce Mongo support for int64 data type (Kyryl Bilokurov <[email protected]>)
ini_set("mongo.native_long", 1);
ini_set("mongo.long_as_object", 1);
/**
* Initializing configuration files and RockMongo
*/
require "config.php";
require "rock.php";
rock_check_version();
rock_init_lang();
rock_init_plugins();
Rock::start();
?> | <?php
/**
* RockMongo startup
*
* In here we define some default settings and start the configuration files
* @package rockmongo
*/
/**
* Defining version number and enabling error reporting
*/
define("ROCK_MONGO_VERSION", "1.1.8");
error_reporting(E_ALL);
/**
* Environment detection
*/
if (!version_compare(PHP_VERSION, "5.0")) {
exit("To make things right, you must install PHP5");
}
if (!class_exists("Mongo") && !class_exists("MongoClient")) {
exit("To make things right, you must install php_mongo module. <a href=\"http://www.php.net/manual/en/mongo.installation.php\" target=\"_blank\">Here for installation documents on PHP.net.</a>");
}
// enforce Mongo support for int64 data type (Kyryl Bilokurov <[email protected]>)
if (PHP_INT_SIZE == 8) {
ini_set("mongo.native_long", 1);
ini_set("mongo.long_as_object", 1);
}
/**
* Initializing configuration files and RockMongo
*/
require "config.php";
require "rock.php";
rock_check_version();
rock_init_lang();
rock_init_plugins();
Rock::start();
?> | Set mongo.native_long only under 64bit PHP build | Set mongo.native_long only under 64bit PHP build
| PHP | bsd-3-clause | lzpfmh/rockmongo,FGM/rockmongo,asalem/rockmongo,asalem/rockmongo,bajian/rockmongo,pristavu/rockmongo,anapsix/rockmongo,lzpfmh/rockmongo,pataquets/rockmongo,bajian/rockmongo,anapsix/rockmongo,vvarada1/rockmongo,vvarada1/rockmongo,pristavu/rockmongo,pataquets/rockmongo,FGM/rockmongo | php | ## Code Before:
<?php
/**
* RockMongo startup
*
* In here we define some default settings and start the configuration files
* @package rockmongo
*/
/**
* Defining version number and enabling error reporting
*/
define("ROCK_MONGO_VERSION", "1.1.8");
error_reporting(E_ALL);
/**
* Environment detection
*/
if (!version_compare(PHP_VERSION, "5.0")) {
exit("To make things right, you must install PHP5");
}
if (!class_exists("Mongo") && !class_exists("MongoClient")) {
exit("To make things right, you must install php_mongo module. <a href=\"http://www.php.net/manual/en/mongo.installation.php\" target=\"_blank\">Here for installation documents on PHP.net.</a>");
}
// enforce Mongo support for int64 data type (Kyryl Bilokurov <[email protected]>)
ini_set("mongo.native_long", 1);
ini_set("mongo.long_as_object", 1);
/**
* Initializing configuration files and RockMongo
*/
require "config.php";
require "rock.php";
rock_check_version();
rock_init_lang();
rock_init_plugins();
Rock::start();
?>
## Instruction:
Set mongo.native_long only under 64bit PHP build
## Code After:
<?php
/**
* RockMongo startup
*
* In here we define some default settings and start the configuration files
* @package rockmongo
*/
/**
* Defining version number and enabling error reporting
*/
define("ROCK_MONGO_VERSION", "1.1.8");
error_reporting(E_ALL);
/**
* Environment detection
*/
if (!version_compare(PHP_VERSION, "5.0")) {
exit("To make things right, you must install PHP5");
}
if (!class_exists("Mongo") && !class_exists("MongoClient")) {
exit("To make things right, you must install php_mongo module. <a href=\"http://www.php.net/manual/en/mongo.installation.php\" target=\"_blank\">Here for installation documents on PHP.net.</a>");
}
// enforce Mongo support for int64 data type (Kyryl Bilokurov <[email protected]>)
if (PHP_INT_SIZE == 8) {
ini_set("mongo.native_long", 1);
ini_set("mongo.long_as_object", 1);
}
/**
* Initializing configuration files and RockMongo
*/
require "config.php";
require "rock.php";
rock_check_version();
rock_init_lang();
rock_init_plugins();
Rock::start();
?> | <?php
/**
* RockMongo startup
*
* In here we define some default settings and start the configuration files
* @package rockmongo
*/
/**
* Defining version number and enabling error reporting
*/
define("ROCK_MONGO_VERSION", "1.1.8");
error_reporting(E_ALL);
/**
* Environment detection
*/
if (!version_compare(PHP_VERSION, "5.0")) {
exit("To make things right, you must install PHP5");
}
if (!class_exists("Mongo") && !class_exists("MongoClient")) {
exit("To make things right, you must install php_mongo module. <a href=\"http://www.php.net/manual/en/mongo.installation.php\" target=\"_blank\">Here for installation documents on PHP.net.</a>");
}
// enforce Mongo support for int64 data type (Kyryl Bilokurov <[email protected]>)
+ if (PHP_INT_SIZE == 8) {
- ini_set("mongo.native_long", 1);
+ ini_set("mongo.native_long", 1);
? +
- ini_set("mongo.long_as_object", 1);
+ ini_set("mongo.long_as_object", 1);
? +
+ }
/**
* Initializing configuration files and RockMongo
*/
require "config.php";
require "rock.php";
rock_check_version();
rock_init_lang();
rock_init_plugins();
Rock::start();
?> | 6 | 0.15 | 4 | 2 |
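
`PHP_INT_SIZE` is the width of PHP's native integer in bytes, so the new guard reads as "only enable native int64 handling on a 64-bit build, where the value actually fits". Purely as a point of comparison, the analogous probes in Python are sketched below; this illustrates the 4-versus-8-byte distinction and is not part of RockMongo.

```python
import struct
import sys

# 8 on a 64-bit interpreter, 4 on a 32-bit one (size of a C pointer).
word_size = struct.calcsize("P")

# Rule of thumb from the Python docs: sys.maxsize is 2**63 - 1 on 64-bit
# builds, so this comparison is True only there.
is_64bit = sys.maxsize > 2**32

print(word_size, is_64bit)      # e.g. "8 True" on a 64-bit machine
if word_size == 8:
    # Only here is it safe to map 64-bit values onto the native int type,
    # which mirrors the intent of the PHP_INT_SIZE == 8 guard above.
    pass
```
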
7a04764896d808e8a72f153d49318bfe9cdf523c | test/e2e/chatbotSpec.js | test/e2e/chatbotSpec.js | /*
* Copyright (c) 2014-2020 Bjoern Kimminich.
* SPDX-License-Identifier: MIT
*/
const config = require('config')
describe('/chatbot', () => {
let username, submitButton, messageBox
protractor.beforeEach.login({ email: 'admin@' + config.get('application.domain'), password: 'admin123' })
describe('challenge "killJuicy"', () => {
it('should be possible to kill the chatbot by setting the process to null', () => {
browser.waitForAngularEnabled(false)
browser.get(protractor.basePath + '/profile')
username = element(by.id('username'))
submitButton = element(by.id('submit'))
username.sendKeys('admin"); process=null; usersusers.addUser("1337", "test')
submitButton.click()
browser.get(protractor.basePath + '/#/chatbot')
browser.driver.sleep(5000)
browser.waitForAngularEnabled(true)
messageBox = element(by.id('message-input'))
messageBox.sendKeys('hi')
messageBox.sendKeys(protractor.Key.ENTER)
browser.driver.sleep(5000)
protractor.expect.challengeSolved({ challenge: 'Kill Chatbot' })
})
})
})
| /*
* Copyright (c) 2014-2020 Bjoern Kimminich.
* SPDX-License-Identifier: MIT
*/
const config = require('config')
describe('/chatbot', () => {
let username, submitButton, messageBox
protractor.beforeEach.login({ email: 'admin@' + config.get('application.domain'), password: 'admin123' })
describe('challenge "killJuicy"', () => {
it('should be possible to kill the chatbot by setting the process to null', () => {
browser.waitForAngularEnabled(false)
browser.get(protractor.basePath + '/profile')
username = element(by.id('username'))
submitButton = element(by.id('submit'))
username.sendKeys('admin"); process=null; usersusers.addUser("1337", "test')
submitButton.click()
browser.driver.sleep(5000)
browser.waitForAngularEnabled(true)
browser.get(protractor.basePath + '/#/chatbot')
messageBox = element(by.id('message-input'))
messageBox.sendKeys('hi')
messageBox.sendKeys(protractor.Key.ENTER)
})
protractor.expect.challengeSolved({ challenge: 'Kill Chatbot' })
})
})
| Enable waiting for Angular earlier and fix solved-check | Enable waiting for Angular earlier and fix solved-check | JavaScript | mit | bkimminich/juice-shop,bkimminich/juice-shop,bkimminich/juice-shop,bkimminich/juice-shop,bkimminich/juice-shop,bkimminich/juice-shop | javascript | ## Code Before:
/*
* Copyright (c) 2014-2020 Bjoern Kimminich.
* SPDX-License-Identifier: MIT
*/
const config = require('config')
describe('/chatbot', () => {
let username, submitButton, messageBox
protractor.beforeEach.login({ email: 'admin@' + config.get('application.domain'), password: 'admin123' })
describe('challenge "killJuicy"', () => {
it('should be possible to kill the chatbot by setting the process to null', () => {
browser.waitForAngularEnabled(false)
browser.get(protractor.basePath + '/profile')
username = element(by.id('username'))
submitButton = element(by.id('submit'))
username.sendKeys('admin"); process=null; usersusers.addUser("1337", "test')
submitButton.click()
browser.get(protractor.basePath + '/#/chatbot')
browser.driver.sleep(5000)
browser.waitForAngularEnabled(true)
messageBox = element(by.id('message-input'))
messageBox.sendKeys('hi')
messageBox.sendKeys(protractor.Key.ENTER)
browser.driver.sleep(5000)
protractor.expect.challengeSolved({ challenge: 'Kill Chatbot' })
})
})
})
## Instruction:
Enable waiting for Angular earlier and fix solved-check
## Code After:
/*
* Copyright (c) 2014-2020 Bjoern Kimminich.
* SPDX-License-Identifier: MIT
*/
const config = require('config')
describe('/chatbot', () => {
let username, submitButton, messageBox
protractor.beforeEach.login({ email: 'admin@' + config.get('application.domain'), password: 'admin123' })
describe('challenge "killJuicy"', () => {
it('should be possible to kill the chatbot by setting the process to null', () => {
browser.waitForAngularEnabled(false)
browser.get(protractor.basePath + '/profile')
username = element(by.id('username'))
submitButton = element(by.id('submit'))
username.sendKeys('admin"); process=null; usersusers.addUser("1337", "test')
submitButton.click()
browser.driver.sleep(5000)
browser.waitForAngularEnabled(true)
browser.get(protractor.basePath + '/#/chatbot')
messageBox = element(by.id('message-input'))
messageBox.sendKeys('hi')
messageBox.sendKeys(protractor.Key.ENTER)
})
protractor.expect.challengeSolved({ challenge: 'Kill Chatbot' })
})
})
| /*
* Copyright (c) 2014-2020 Bjoern Kimminich.
* SPDX-License-Identifier: MIT
*/
const config = require('config')
describe('/chatbot', () => {
let username, submitButton, messageBox
protractor.beforeEach.login({ email: 'admin@' + config.get('application.domain'), password: 'admin123' })
describe('challenge "killJuicy"', () => {
it('should be possible to kill the chatbot by setting the process to null', () => {
browser.waitForAngularEnabled(false)
browser.get(protractor.basePath + '/profile')
username = element(by.id('username'))
submitButton = element(by.id('submit'))
username.sendKeys('admin"); process=null; usersusers.addUser("1337", "test')
submitButton.click()
- browser.get(protractor.basePath + '/#/chatbot')
browser.driver.sleep(5000)
browser.waitForAngularEnabled(true)
+
+ browser.get(protractor.basePath + '/#/chatbot')
messageBox = element(by.id('message-input'))
messageBox.sendKeys('hi')
messageBox.sendKeys(protractor.Key.ENTER)
- browser.driver.sleep(5000)
- protractor.expect.challengeSolved({ challenge: 'Kill Chatbot' })
})
+ protractor.expect.challengeSolved({ challenge: 'Kill Chatbot' })
})
}) | 6 | 0.2 | 3 | 3 |
70e36fb4fd5514411308e6838cd356dc11cf8f70 | app/controllers/misc_controller.rb | app/controllers/misc_controller.rb | class MiscController < ApplicationController
def show
redirect_to misc_information_path
end
def information
@env = ENV
@plugins = Plugin.installed
end
end
| require "fluent/version"
class MiscController < ApplicationController
def show
redirect_to misc_information_path
end
def information
@env = ENV
@plugins = Plugin.installed
end
end
| Fix system information page didn't load fluent/version | Fix system information page didn't load fluent/version
| Ruby | apache-2.0 | fluent/fluentd-ui,fluent/fluentd-ui,mt0803/fluentd-ui,fluent/fluentd-ui,mt0803/fluentd-ui,mt0803/fluentd-ui | ruby | ## Code Before:
class MiscController < ApplicationController
def show
redirect_to misc_information_path
end
def information
@env = ENV
@plugins = Plugin.installed
end
end
## Instruction:
Fix system information page didn't load fluent/version
## Code After:
require "fluent/version"
class MiscController < ApplicationController
def show
redirect_to misc_information_path
end
def information
@env = ENV
@plugins = Plugin.installed
end
end
| + require "fluent/version"
+
class MiscController < ApplicationController
def show
redirect_to misc_information_path
end
def information
@env = ENV
@plugins = Plugin.installed
end
end | 2 | 0.2 | 2 | 0 |
cddcc7e5735022c7a4faeee5331e7b80a6349406 | src/functions.py | src/functions.py | def getTableColumnLabel(c):
label = ''
while True:
label += 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'[c % 26]
if c <= 26:
break
c = int(c/26)
return label
def parseTableColumnLabel(label):
ret = 0
for c in map(ord, reversed(label)):
if 0x41 <= c <= 0x5A:
ret = ret*26 + (c-0x41)
else:
raise ValueError('Invalid label: %s' % label)
return ret
| def getTableColumnLabel(c):
label = ''
while True:
label = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'[c % 26] + label
if c < 26:
break
c = c//26-1
return label
def parseTableColumnLabel(label):
if not label:
raise ValueError('Invalid label: %s' % label)
ret = -1
for c in map(ord, label):
if 0x41 <= c <= 0x5A:
ret = (ret+1)*26 + (c-0x41)
else:
raise ValueError('Invalid label: %s' % label)
return ret
| Fix (parse|generate) table header label function | Fix (parse|generate) table header label function
| Python | mit | takumak/tuna,takumak/tuna | python | ## Code Before:
def getTableColumnLabel(c):
label = ''
while True:
label += 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'[c % 26]
if c <= 26:
break
c = int(c/26)
return label
def parseTableColumnLabel(label):
ret = 0
for c in map(ord, reversed(label)):
if 0x41 <= c <= 0x5A:
ret = ret*26 + (c-0x41)
else:
raise ValueError('Invalid label: %s' % label)
return ret
## Instruction:
Fix (parse|generate) table header label function
## Code After:
def getTableColumnLabel(c):
label = ''
while True:
label = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'[c % 26] + label
if c < 26:
break
c = c//26-1
return label
def parseTableColumnLabel(label):
if not label:
raise ValueError('Invalid label: %s' % label)
ret = -1
for c in map(ord, label):
if 0x41 <= c <= 0x5A:
ret = (ret+1)*26 + (c-0x41)
else:
raise ValueError('Invalid label: %s' % label)
return ret
| def getTableColumnLabel(c):
label = ''
while True:
- label += 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'[c % 26]
? -
+ label = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'[c % 26] + label
? ++++++++
- if c <= 26:
? -
+ if c < 26:
break
- c = int(c/26)
? ---- ^
+ c = c//26-1
? + ^^
return label
def parseTableColumnLabel(label):
+ if not label:
+ raise ValueError('Invalid label: %s' % label)
- ret = 0
? ^
+ ret = -1
? ^^
- for c in map(ord, reversed(label)):
? --------- -
+ for c in map(ord, label):
if 0x41 <= c <= 0x5A:
- ret = ret*26 + (c-0x41)
+ ret = (ret+1)*26 + (c-0x41)
? + +++
else:
raise ValueError('Invalid label: %s' % label)
return ret | 14 | 0.823529 | 8 | 6 |
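
After the fix, the pair implements zero-indexed, Excel-style bijective base-26 labels (`0 -> A`, `25 -> Z`, `26 -> AA`, `701 -> ZZ`, `702 -> AAA`), and generation and parsing are exact inverses. A quick check against the corrected functions, assuming `src/` is on the path so the module imports as `functions`:

```python
from functions import getTableColumnLabel, parseTableColumnLabel

expected = {0: "A", 25: "Z", 26: "AA", 27: "AB", 701: "ZZ", 702: "AAA"}
for index, label in expected.items():
    assert getTableColumnLabel(index) == label
    assert parseTableColumnLabel(label) == index

# The two functions are inverses over a larger range as well.
assert all(parseTableColumnLabel(getTableColumnLabel(i)) == i for i in range(10_000))
print("column labels round-trip")
```
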
c24ef1cd65f2d0843172111d13a8f5a830d721ac | application/controllers/newpostcont.php | application/controllers/newpostcont.php | <?php
class newpostcont extends CI_Controller{
public function __construct(){
parent::__construct();
$this->load->library('session');
$this->load->helper('url');
$this->load->model('newpostmodel');
}
public function index(){
$this->write();
}
public function write(){
$this->load->helper('form');
$this->load->library('form_validation');
$this->load->view('templates/header.html');
$this->load->view('newpost/newpost.php');
$this->load->view('templates/footer.html');
}
public function addpost(){
echo 'We will talk with the model now.';
$this->newpostmodel->addNewPost();
}
}
| <?php
class newpostcont extends CI_Controller{
public function __construct(){
parent::__construct();
$this->load->library('session');
$this->load->helper('url');
}
public function index(){
$this->write();
}
public function write(){
$this->load->helper('form');
$this->load->library('form_validation');
$this->load->view('templates/header.html');
$this->load->view('newpost/newpost.php');
$this->load->view('templates/footer.html');
}
public function addpost(){
echo 'We will talk with the model now.';
$this->load->model('newpostmodel');
$this->newpostmodel->addNewPost();
}
}
| Load the model only when necessary | Load the model only when necessary | PHP | mit | icyflame/create-together,icyflame/create-together,icyflame/create-together | php | ## Code Before:
<?php
class newpostcont extends CI_Controller{
public function __construct(){
parent::__construct();
$this->load->library('session');
$this->load->helper('url');
$this->load->model('newpostmodel');
}
public function index(){
$this->write();
}
public function write(){
$this->load->helper('form');
$this->load->library('form_validation');
$this->load->view('templates/header.html');
$this->load->view('newpost/newpost.php');
$this->load->view('templates/footer.html');
}
public function addpost(){
echo 'We will talk with the model now.';
$this->newpostmodel->addNewPost();
}
}
## Instruction:
Load the model only when necessary
## Code After:
<?php
class newpostcont extends CI_Controller{
public function __construct(){
parent::__construct();
$this->load->library('session');
$this->load->helper('url');
}
public function index(){
$this->write();
}
public function write(){
$this->load->helper('form');
$this->load->library('form_validation');
$this->load->view('templates/header.html');
$this->load->view('newpost/newpost.php');
$this->load->view('templates/footer.html');
}
public function addpost(){
echo 'We will talk with the model now.';
$this->load->model('newpostmodel');
$this->newpostmodel->addNewPost();
}
}
| <?php
class newpostcont extends CI_Controller{
public function __construct(){
parent::__construct();
$this->load->library('session');
$this->load->helper('url');
- $this->load->model('newpostmodel');
+
}
public function index(){
$this->write();
}
public function write(){
$this->load->helper('form');
$this->load->library('form_validation');
$this->load->view('templates/header.html');
$this->load->view('newpost/newpost.php');
$this->load->view('templates/footer.html');
}
public function addpost(){
echo 'We will talk with the model now.';
-
+ $this->load->model('newpostmodel');
$this->newpostmodel->addNewPost();
}
} | 4 | 0.105263 | 2 | 2 |
28b68d6220456eacdf837e81bd470939e1d5480c | .travis.yml | .travis.yml | language: go
go:
- 1.6.1
install:
- mkdir ${GOPATH}/src/github.com/freewil
- mkdir ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box
- git clone https://github.com/freewil/bitcoin-testnet-box.git ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box
- wget https://bitcoin.org/bin/bitcoin-core-0.11.2/bitcoin-0.11.2-linux64.tar.gz -O /tmp/bitcoin.tar.gz
- tar -xvf /tmp/bitcoin.tar.gz
- export PATH=$PATH:$(pwd)/bitcoin-0.11.2/bin
- make -C ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box start
- bash -c 'while ! make -C ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box/ getinfo; do sleep 1; done;'
- make -C ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box stop
script:
- go test -v github.com/AutoRoute/node/... -race
| language: go
go:
- 1.6.1
install:
- mkdir ${GOPATH}/src/github.com/freewil
- mkdir ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box
- git clone https://github.com/freewil/bitcoin-testnet-box.git ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box
- wget https://bitcoin.org/bin/bitcoin-core-0.12.1/bitcoin-0.12.1-linux64.tar.gz -O /tmp/bitcoin.tar.gz
- tar -xvf /tmp/bitcoin.tar.gz
- export PATH=$PATH:$(pwd)/bitcoin-0.12.1/bin
- make -C ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box start
- bash -c 'while ! make -C ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box/ getinfo; do sleep 1; done;'
- make -C ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box stop
script:
- go test -v ./... -race
| Use a newer version of bitcoin. | Use a newer version of bitcoin.
| YAML | mit | AutoRoute/node,AutoRoute/node | yaml | ## Code Before:
language: go
go:
- 1.6.1
install:
- mkdir ${GOPATH}/src/github.com/freewil
- mkdir ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box
- git clone https://github.com/freewil/bitcoin-testnet-box.git ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box
- wget https://bitcoin.org/bin/bitcoin-core-0.11.2/bitcoin-0.11.2-linux64.tar.gz -O /tmp/bitcoin.tar.gz
- tar -xvf /tmp/bitcoin.tar.gz
- export PATH=$PATH:$(pwd)/bitcoin-0.11.2/bin
- make -C ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box start
- bash -c 'while ! make -C ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box/ getinfo; do sleep 1; done;'
- make -C ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box stop
script:
- go test -v github.com/AutoRoute/node/... -race
## Instruction:
Use a newer version of bitcoin.
## Code After:
language: go
go:
- 1.6.1
install:
- mkdir ${GOPATH}/src/github.com/freewil
- mkdir ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box
- git clone https://github.com/freewil/bitcoin-testnet-box.git ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box
- wget https://bitcoin.org/bin/bitcoin-core-0.12.1/bitcoin-0.12.1-linux64.tar.gz -O /tmp/bitcoin.tar.gz
- tar -xvf /tmp/bitcoin.tar.gz
- export PATH=$PATH:$(pwd)/bitcoin-0.12.1/bin
- make -C ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box start
- bash -c 'while ! make -C ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box/ getinfo; do sleep 1; done;'
- make -C ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box stop
script:
- go test -v ./... -race
| language: go
go:
- 1.6.1
install:
- mkdir ${GOPATH}/src/github.com/freewil
- mkdir ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box
- git clone https://github.com/freewil/bitcoin-testnet-box.git ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box
- - wget https://bitcoin.org/bin/bitcoin-core-0.11.2/bitcoin-0.11.2-linux64.tar.gz -O /tmp/bitcoin.tar.gz
? -- --
+ - wget https://bitcoin.org/bin/bitcoin-core-0.12.1/bitcoin-0.12.1-linux64.tar.gz -O /tmp/bitcoin.tar.gz
? ++ ++
- tar -xvf /tmp/bitcoin.tar.gz
- - export PATH=$PATH:$(pwd)/bitcoin-0.11.2/bin
? --
+ - export PATH=$PATH:$(pwd)/bitcoin-0.12.1/bin
? ++
- make -C ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box start
- bash -c 'while ! make -C ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box/ getinfo; do sleep 1; done;'
- make -C ${GOPATH}/src/github.com/freewil/bitcoin-testnet-box stop
script:
- - go test -v github.com/AutoRoute/node/... -race
+ - go test -v ./... -race | 6 | 0.333333 | 3 | 3 |
7bfc05b48658ee6c6b20327ac43de9e3d75f93eb | libs/daux.io/docs/config.json | libs/daux.io/docs/config.json | {
"title": "Pulsar Documentation",
"tagline": "Beautiful user interfaces, Stellar user experiences.",
"clean_urls": false,
"docs_path": "../../docs",
"image": "img/app.png",
"theme": "navy",
"date_modified": true,
"float": false,
"ignore": {
"files": ["index.php"],
"folders": ["images"]
}
} | {
"title": "Pulsar Documentation",
"tagline": "Beautiful user interfaces, Stellar user experiences.",
"clean_urls": false,
"docs_path": "../../docs",
"image": "img/app.png",
"theme": "navy",
"timezone": "Europe/London",
"date_modified": true,
"float": false,
"ignore": {
"files": ["index.php"],
"folders": ["images"]
}
} | Add timezone to documentation runner | Add timezone to documentation runner
This should prevent the warning message on the hosted version
| JSON | mit | jadu/pulsar,jadu/pulsar,jadu/pulsar | json | ## Code Before:
{
"title": "Pulsar Documentation",
"tagline": "Beautiful user interfaces, Stellar user experiences.",
"clean_urls": false,
"docs_path": "../../docs",
"image": "img/app.png",
"theme": "navy",
"date_modified": true,
"float": false,
"ignore": {
"files": ["index.php"],
"folders": ["images"]
}
}
## Instruction:
Add timezone to documentation runner
This should prevent the warning message on the hosted version
## Code After:
{
"title": "Pulsar Documentation",
"tagline": "Beautiful user interfaces, Stellar user experiences.",
"clean_urls": false,
"docs_path": "../../docs",
"image": "img/app.png",
"theme": "navy",
"timezone": "Europe/London",
"date_modified": true,
"float": false,
"ignore": {
"files": ["index.php"],
"folders": ["images"]
}
} | {
"title": "Pulsar Documentation",
"tagline": "Beautiful user interfaces, Stellar user experiences.",
"clean_urls": false,
"docs_path": "../../docs",
"image": "img/app.png",
"theme": "navy",
+ "timezone": "Europe/London",
"date_modified": true,
"float": false,
"ignore": {
"files": ["index.php"],
"folders": ["images"]
}
} | 1 | 0.071429 | 1 | 0 |
3bee1c7d09464ef109305ce7c8c12fed613105a4 | client/app/templates/components/f-checkout.hbs | client/app/templates/components/f-checkout.hbs | <form class="f-default f-checkout">
<h2 class="f-default__title">Оформление заказа</h2>
<div class="f-checkout__order-info">
{{pr-order order=model.order}}
</div>
<div class="f-default__row">
<div class="portion-list">
{{#each model.portions as |portion|}}
<div class="portion-list__item">
<div class="portion-list__row">
<span class="portion-list__member">Вася</span>
<span class="portion-list__cost">{{portion.cost}}</span>
<div class="portion-list__paid">
<label class="f-default__checkbox-label">
<input type="checkbox" class="f-default__checkbox"/>
<span class="f-default__checkbox-text"></span>
</label>
</div>
</div>
<div class="portion-list__row">
<div class="list-member__text">{{portion.text}}</div>
</div>
</div>
{{/each}}
</div>
<button {{action "submit"}} type="submit" class="button _submit">Сформировать</button>
</div>
<div class="f-default__row">
<textarea class="f-default__textarea" rows="4" required></textarea>
<button {{action "submit"}} type="submit" class="button _submit">Отправить уведомление</button>
</div>
</form>
| <form class="f-default f-checkout">
<h2 class="f-default__title">Оформление заказа</h2>
<div class="f-checkout__order-info">
{{pr-order order=model.order}}
</div>
<div class="f-default__row">
<div class="portion-list">
{{#each model.portions as |portion|}}
<div class="portion-list__item">
<div class="portion-list__row">
<span class="portion-list__member">Вася</span>
<span class="portion-list__cost">{{portion.cost}}</span>
<div class="portion-list__paid">
<label class="f-default__checkbox-label">
<input type="checkbox" class="f-default__checkbox"/>
<span class="f-default__checkbox-text"></span>
</label>
</div>
</div>
<div class="portion-list__row">
<div class="list-member__text">{{portion.text}}</div>
</div>
</div>
{{/each}}
</div>
<button {{action "submit"}} type="submit" class="button _submit">Сформировать</button>
</div>
<div class="f-default__row">
<textarea class="f-default__textarea" rows="4" required></textarea>
<button {{action "submit"}} type="submit" class="button _submit">Отправить уведомление</button>
</div>
</form>
| Fix indentation in checkout form template | Fix indentation in checkout form template
Closes #89.
| Handlebars | mit | yandex-shri-minsk-2016/yummy-time,yandex-shri-minsk-2016/yummy-time,yandex-shri-minsk-2016/yummy-time | handlebars | ## Code Before:
<form class="f-default f-checkout">
<h2 class="f-default__title">Оформление заказа</h2>
<div class="f-checkout__order-info">
{{pr-order order=model.order}}
</div>
<div class="f-default__row">
<div class="portion-list">
{{#each model.portions as |portion|}}
<div class="portion-list__item">
<div class="portion-list__row">
<span class="portion-list__member">Вася</span>
<span class="portion-list__cost">{{portion.cost}}</span>
<div class="portion-list__paid">
<label class="f-default__checkbox-label">
<input type="checkbox" class="f-default__checkbox"/>
<span class="f-default__checkbox-text"></span>
</label>
</div>
</div>
<div class="portion-list__row">
<div class="list-member__text">{{portion.text}}</div>
</div>
</div>
{{/each}}
</div>
<button {{action "submit"}} type="submit" class="button _submit">Сформировать</button>
</div>
<div class="f-default__row">
<textarea class="f-default__textarea" rows="4" required></textarea>
<button {{action "submit"}} type="submit" class="button _submit">Отправить уведомление</button>
</div>
</form>
## Instruction:
Fix indentation in checkout form template
Closes #89.
## Code After:
<form class="f-default f-checkout">
<h2 class="f-default__title">Оформление заказа</h2>
<div class="f-checkout__order-info">
{{pr-order order=model.order}}
</div>
<div class="f-default__row">
<div class="portion-list">
{{#each model.portions as |portion|}}
<div class="portion-list__item">
<div class="portion-list__row">
<span class="portion-list__member">Вася</span>
<span class="portion-list__cost">{{portion.cost}}</span>
<div class="portion-list__paid">
<label class="f-default__checkbox-label">
<input type="checkbox" class="f-default__checkbox"/>
<span class="f-default__checkbox-text"></span>
</label>
</div>
</div>
<div class="portion-list__row">
<div class="list-member__text">{{portion.text}}</div>
</div>
</div>
{{/each}}
</div>
<button {{action "submit"}} type="submit" class="button _submit">Сформировать</button>
</div>
<div class="f-default__row">
<textarea class="f-default__textarea" rows="4" required></textarea>
<button {{action "submit"}} type="submit" class="button _submit">Отправить уведомление</button>
</div>
</form>
| <form class="f-default f-checkout">
<h2 class="f-default__title">Оформление заказа</h2>
<div class="f-checkout__order-info">
{{pr-order order=model.order}}
</div>
<div class="f-default__row">
<div class="portion-list">
{{#each model.portions as |portion|}}
- <div class="portion-list__item">
+ <div class="portion-list__item">
? ++
- <div class="portion-list__row">
+ <div class="portion-list__row">
? ++
- <span class="portion-list__member">Вася</span>
+ <span class="portion-list__member">Вася</span>
? ++
- <span class="portion-list__cost">{{portion.cost}}</span>
+ <span class="portion-list__cost">{{portion.cost}}</span>
? ++
- <div class="portion-list__paid">
+ <div class="portion-list__paid">
? ++
- <label class="f-default__checkbox-label">
+ <label class="f-default__checkbox-label">
? ++++
- <input type="checkbox" class="f-default__checkbox"/>
+ <input type="checkbox" class="f-default__checkbox"/>
? ++++
- <span class="f-default__checkbox-text"></span>
+ <span class="f-default__checkbox-text"></span>
? ++++
- </label>
+ </label>
? ++++
+ </div>
+ </div>
+
+ <div class="portion-list__row">
+ <div class="list-member__text">{{portion.text}}</div>
+ </div>
</div>
+ {{/each}}
- </div>
- <div class="portion-list__row">
- <div class="list-member__text">{{portion.text}}</div>
- </div>
</div>
- {{/each}}
+
+ <button {{action "submit"}} type="submit" class="button _submit">Сформировать</button>
</div>
- <button {{action "submit"}} type="submit" class="button _submit">Сформировать</button>
- </div>
-
- <div class="f-default__row">
+ <div class="f-default__row">
? ++
- <textarea class="f-default__textarea" rows="4" required></textarea>
+ <textarea class="f-default__textarea" rows="4" required></textarea>
? ++
- <button {{action "submit"}} type="submit" class="button _submit">Отправить уведомление</button>
+ <button {{action "submit"}} type="submit" class="button _submit">Отправить уведомление</button>
? ++
- </div>
+ </div>
? ++
</form> | 43 | 1.131579 | 22 | 21 |
5b96c5c248d6dd2b7b4168c0f8e9990ba316f9f1 | src/server/migrations/1580263521268-CreateUserTable.ts | src/server/migrations/1580263521268-CreateUserTable.ts | import {MigrationInterface, QueryRunner} from "typeorm";
export class CreateUserTable1580263521268 implements MigrationInterface {
readonly name = 'CreateUserTable1580263521268'
public async up(queryRunner: QueryRunner): Promise<any> {
await queryRunner.query(`CREATE TABLE "user" ("id" SERIAL NOT NULL, "email" character varying NOT NULL, CONSTRAINT "UQ_e12875dfb3b1d92d7d7c5377e22" UNIQUE ("email"), CONSTRAINT "PK_cace4a159ff9f2512dd42373760" PRIMARY KEY ("id"))`, undefined);
}
public async down(queryRunner: QueryRunner): Promise<any> {
await queryRunner.query(`DROP TABLE "user"`, undefined);
}
}
| import {MigrationInterface, QueryRunner} from "typeorm";
export class CreateUserTable1580263521268 implements MigrationInterface {
readonly name = 'CreateUserTable1580263521268'
public async up(queryRunner: QueryRunner): Promise<any> {
await queryRunner.query(`CREATE TABLE "user" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "email" character varying NOT NULL, CONSTRAINT "UQ_e12875dfb3b1d92d7d7c5377e22" UNIQUE ("email"), CONSTRAINT "PK_cace4a159ff9f2512dd42373760" PRIMARY KEY ("id"))`, undefined);
}
public async down(queryRunner: QueryRunner): Promise<any> {
await queryRunner.query(`DROP TABLE "user"`, undefined);
}
}
| Switch migration to use uuid for id column | Switch migration to use uuid for id column
| TypeScript | apache-2.0 | PublicMapping/DistrictBuilder,PublicMapping/DistrictBuilder,PublicMapping/DistrictBuilder,PublicMapping/DistrictBuilder,PublicMapping/DistrictBuilder | typescript | ## Code Before:
import {MigrationInterface, QueryRunner} from "typeorm";
export class CreateUserTable1580263521268 implements MigrationInterface {
readonly name = 'CreateUserTable1580263521268'
public async up(queryRunner: QueryRunner): Promise<any> {
await queryRunner.query(`CREATE TABLE "user" ("id" SERIAL NOT NULL, "email" character varying NOT NULL, CONSTRAINT "UQ_e12875dfb3b1d92d7d7c5377e22" UNIQUE ("email"), CONSTRAINT "PK_cace4a159ff9f2512dd42373760" PRIMARY KEY ("id"))`, undefined);
}
public async down(queryRunner: QueryRunner): Promise<any> {
await queryRunner.query(`DROP TABLE "user"`, undefined);
}
}
## Instruction:
Switch migration to use uuid for id column
## Code After:
import {MigrationInterface, QueryRunner} from "typeorm";
export class CreateUserTable1580263521268 implements MigrationInterface {
readonly name = 'CreateUserTable1580263521268'
public async up(queryRunner: QueryRunner): Promise<any> {
await queryRunner.query(`CREATE TABLE "user" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "email" character varying NOT NULL, CONSTRAINT "UQ_e12875dfb3b1d92d7d7c5377e22" UNIQUE ("email"), CONSTRAINT "PK_cace4a159ff9f2512dd42373760" PRIMARY KEY ("id"))`, undefined);
}
public async down(queryRunner: QueryRunner): Promise<any> {
await queryRunner.query(`DROP TABLE "user"`, undefined);
}
}
| import {MigrationInterface, QueryRunner} from "typeorm";
export class CreateUserTable1580263521268 implements MigrationInterface {
readonly name = 'CreateUserTable1580263521268'
public async up(queryRunner: QueryRunner): Promise<any> {
- await queryRunner.query(`CREATE TABLE "user" ("id" SERIAL NOT NULL, "email" character varying NOT NULL, CONSTRAINT "UQ_e12875dfb3b1d92d7d7c5377e22" UNIQUE ("email"), CONSTRAINT "PK_cace4a159ff9f2512dd42373760" PRIMARY KEY ("id"))`, undefined);
? ^^^^^^^^^^^^^^^
+ await queryRunner.query(`CREATE TABLE "user" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "email" character varying NOT NULL, CONSTRAINT "UQ_e12875dfb3b1d92d7d7c5377e22" UNIQUE ("email"), CONSTRAINT "PK_cace4a159ff9f2512dd42373760" PRIMARY KEY ("id"))`, undefined);
? ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
}
public async down(queryRunner: QueryRunner): Promise<any> {
await queryRunner.query(`DROP TABLE "user"`, undefined);
}
} | 2 | 0.142857 | 1 | 1 |
6324a642dae43cd087919ed0d1583d9a432871d8 | S02-magicals/pid.t | S02-magicals/pid.t | use v6;
use Test;
use lib 't/spec/packages';
use Test::Util;
=begin description
Test that C< $*PID > in this process is different from
C< $*PID > in the child process.
L<A05/"RFC 332: Regex: Make /\$/ equivalent to /\z/ under the '/s' modifier" /The current process id is now C<\$\*PID>/>
=end description
plan 1;
is_run 'say $*PID',
{
out => -> $p { $p > 0 && $p != $*PID },
err => '',
status => 0,
}, 'my $*PID is different from a child $*PID';
# vim: ft=perl6
| use v6;
use Test;
use lib 't/spec/packages';
use Test::Util;
=begin description
Test that C< $*PID > in this process is different from
C< $*PID > in the child process.
L<A05/"RFC 332: Regex: Make /\$/ equivalent to /\z/ under the '/s' modifier" /The current process id is now C<\$\*PID>/>
=end description
plan 2;
is_run 'say $*PID',
{
out => -> $p { $p > 0 && $p != $*PID },
err => '',
status => 0,
}, 'my $*PID is different from a child $*PID';
throws_like { $*PID = 42 }, X::Assignment::RO;
# vim: ft=perl6
| Add test for immutability of $*PID | Add test for immutability of $*PID
| Perl | artistic-2.0 | dogbert17/roast,skids/roast,bitrauser/roast,b2gills/roast,perl6/roast,bitrauser/roast,skids/roast,zostay/roast,dankogai/roast,cygx/roast,niner/roast,skids/roast,laben/roast,dankogai/roast,zostay/roast,niner/roast,zostay/roast,cygx/roast,dogbert17/roast,b2gills/roast,niner/roast,laben/roast,b2gills/roast,laben/roast,dankogai/roast,cygx/roast | perl | ## Code Before:
use v6;
use Test;
use lib 't/spec/packages';
use Test::Util;
=begin description
Test that C< $*PID > in this process is different from
C< $*PID > in the child process.
L<A05/"RFC 332: Regex: Make /\$/ equivalent to /\z/ under the '/s' modifier" /The current process id is now C<\$\*PID>/>
=end description
plan 1;
is_run 'say $*PID',
{
out => -> $p { $p > 0 && $p != $*PID },
err => '',
status => 0,
}, 'my $*PID is different from a child $*PID';
# vim: ft=perl6
## Instruction:
Add test for immutability of $*PID
## Code After:
use v6;
use Test;
use lib 't/spec/packages';
use Test::Util;
=begin description
Test that C< $*PID > in this process is different from
C< $*PID > in the child process.
L<A05/"RFC 332: Regex: Make /\$/ equivalent to /\z/ under the '/s' modifier" /The current process id is now C<\$\*PID>/>
=end description
plan 2;
is_run 'say $*PID',
{
out => -> $p { $p > 0 && $p != $*PID },
err => '',
status => 0,
}, 'my $*PID is different from a child $*PID';
throws_like { $*PID = 42 }, X::Assignment::RO;
# vim: ft=perl6
| use v6;
use Test;
use lib 't/spec/packages';
use Test::Util;
=begin description
Test that C< $*PID > in this process is different from
C< $*PID > in the child process.
L<A05/"RFC 332: Regex: Make /\$/ equivalent to /\z/ under the '/s' modifier" /The current process id is now C<\$\*PID>/>
=end description
- plan 1;
? ^
+ plan 2;
? ^
is_run 'say $*PID',
{
out => -> $p { $p > 0 && $p != $*PID },
err => '',
status => 0,
}, 'my $*PID is different from a child $*PID';
+ throws_like { $*PID = 42 }, X::Assignment::RO;
+
# vim: ft=perl6 | 4 | 0.173913 | 3 | 1 |
14bd2c0732b5871ac43991a237a8f12a334e982d | sirius/LI_V00/__init__.py | sirius/LI_V00/__init__.py | from . import lattice as _lattice
from . import accelerator as _accelerator
from . import record_names
create_accelerator = accelerator.create_accelerator
# -- default accelerator values for LI_V00 --
energy = _lattice._energy
single_bunch_charge = _lattice._single_bunch_charge
multi_bunch_charge = _lattice._multi_bunch_charge
pulse_duration_interval = _lattice._pulse_duration_interval
default_optics_mode = _lattice._default_optics_mode.label
lattice_version = 'LI_V00'
family_data = _lattice._family_data
emittance = _lattice._emittance
| from . import lattice as _lattice
from . import accelerator as _accelerator
from . import record_names
create_accelerator = accelerator.create_accelerator
# -- default accelerator values for LI_V00 --
energy = _lattice._energy
single_bunch_charge = _lattice._single_bunch_charge
multi_bunch_charge = _lattice._multi_bunch_charge
pulse_duration_interval = _lattice._pulse_duration_interval
default_optics_mode = _lattice._default_optics_mode.label
lattice_version = 'LI_V00'
family_data = _lattice._family_data
emittance = _lattice._emittance
global_coupling = 1.0 # "round" beam
| Add parameters of initial beam distribution at LI | Add parameters of initial beam distribution at LI
| Python | mit | lnls-fac/sirius | python | ## Code Before:
from . import lattice as _lattice
from . import accelerator as _accelerator
from . import record_names
create_accelerator = accelerator.create_accelerator
# -- default accelerator values for LI_V00 --
energy = _lattice._energy
single_bunch_charge = _lattice._single_bunch_charge
multi_bunch_charge = _lattice._multi_bunch_charge
pulse_duration_interval = _lattice._pulse_duration_interval
default_optics_mode = _lattice._default_optics_mode.label
lattice_version = 'LI_V00'
family_data = _lattice._family_data
emittance = _lattice._emittance
## Instruction:
Add parameters of initial beam distribution at LI
## Code After:
from . import lattice as _lattice
from . import accelerator as _accelerator
from . import record_names
create_accelerator = accelerator.create_accelerator
# -- default accelerator values for LI_V00 --
energy = _lattice._energy
single_bunch_charge = _lattice._single_bunch_charge
multi_bunch_charge = _lattice._multi_bunch_charge
pulse_duration_interval = _lattice._pulse_duration_interval
default_optics_mode = _lattice._default_optics_mode.label
lattice_version = 'LI_V00'
family_data = _lattice._family_data
emittance = _lattice._emittance
global_coupling = 1.0 # "round" beam
| from . import lattice as _lattice
from . import accelerator as _accelerator
from . import record_names
create_accelerator = accelerator.create_accelerator
# -- default accelerator values for LI_V00 --
energy = _lattice._energy
single_bunch_charge = _lattice._single_bunch_charge
multi_bunch_charge = _lattice._multi_bunch_charge
pulse_duration_interval = _lattice._pulse_duration_interval
default_optics_mode = _lattice._default_optics_mode.label
lattice_version = 'LI_V00'
family_data = _lattice._family_data
- emittance = _lattice._emittance
+ emittance = _lattice._emittance
? +++++++++++
+ global_coupling = 1.0 # "round" beam | 3 | 0.1875 | 2 | 1 |
10bb10a3bb1ec57e1688466f3e1e1afa19ff7504 | composer.json | composer.json | {
"name": "propa/tcpdi",
"type": "library",
"description": "TCPDI is a PHP class for importing PDF to use with TCPDF",
"keywords": ["PDF", "tcpdi", "tcpdi_parser", "tcpdf"],
"license": "Apache-2.0",
"authors": [
{
"name": "Nicola Asuni",
"email": "[email protected]",
"homepage": "http://nicolaasuni.tecnick.com"
}
],
"require": {
"php": ">=5.3.0",
"tecnickcom/tcpdf": "dev-master"
},
"autoload": {
"classmap": [
"fpdf_tpl.php",
"tcpdi.php",
"tcpdi_parser.php"
]
}
}
| {
"name": "propa/tcpdi",
"type": "library",
"description": "TCPDI is a PHP class for importing PDF to use with TCPDF",
"keywords": ["PDF", "tcpdi", "tcpdi_parser", "tcpdf"],
"license": "Apache-2.0",
"authors": [
{
"name": "Nicola Asuni",
"email": "[email protected]",
"homepage": "http://nicolaasuni.tecnick.com"
}
],
"require": {
"php": ">=5.3.0",
"tecnickcom/tcpdf": "^6.3"
},
"autoload": {
"classmap": [
"fpdf_tpl.php",
"tcpdi.php",
"tcpdi_parser.php"
]
}
}
| Set fixed tcpdf major version | Set fixed tcpdf major version | JSON | apache-2.0 | kulbakin/tcpdi | json | ## Code Before:
{
"name": "propa/tcpdi",
"type": "library",
"description": "TCPDI is a PHP class for importing PDF to use with TCPDF",
"keywords": ["PDF", "tcpdi", "tcpdi_parser", "tcpdf"],
"license": "Apache-2.0",
"authors": [
{
"name": "Nicola Asuni",
"email": "[email protected]",
"homepage": "http://nicolaasuni.tecnick.com"
}
],
"require": {
"php": ">=5.3.0",
"tecnickcom/tcpdf": "dev-master"
},
"autoload": {
"classmap": [
"fpdf_tpl.php",
"tcpdi.php",
"tcpdi_parser.php"
]
}
}
## Instruction:
Set fixed tcpdf major version
## Code After:
{
"name": "propa/tcpdi",
"type": "library",
"description": "TCPDI is a PHP class for importing PDF to use with TCPDF",
"keywords": ["PDF", "tcpdi", "tcpdi_parser", "tcpdf"],
"license": "Apache-2.0",
"authors": [
{
"name": "Nicola Asuni",
"email": "[email protected]",
"homepage": "http://nicolaasuni.tecnick.com"
}
],
"require": {
"php": ">=5.3.0",
"tecnickcom/tcpdf": "^6.3"
},
"autoload": {
"classmap": [
"fpdf_tpl.php",
"tcpdi.php",
"tcpdi_parser.php"
]
}
}
| {
"name": "propa/tcpdi",
"type": "library",
"description": "TCPDI is a PHP class for importing PDF to use with TCPDF",
"keywords": ["PDF", "tcpdi", "tcpdi_parser", "tcpdf"],
"license": "Apache-2.0",
"authors": [
{
"name": "Nicola Asuni",
"email": "[email protected]",
"homepage": "http://nicolaasuni.tecnick.com"
}
],
"require": {
"php": ">=5.3.0",
- "tecnickcom/tcpdf": "dev-master"
? ^^^^^^^^^^
+ "tecnickcom/tcpdf": "^6.3"
? ^^^^
},
"autoload": {
"classmap": [
"fpdf_tpl.php",
"tcpdi.php",
"tcpdi_parser.php"
]
}
} | 2 | 0.08 | 1 | 1 |
d7d61f7f8304e02ff49822bcb6b3bcf417c1fa20 | docs/Wallets/Core-Wallet.md | docs/Wallets/Core-Wallet.md |
!!!caution "Note"
The Vertcoin Core Wallet requires at least 2 GB of space on your computer to store the block chain.
### Windows
To install the Vertcoin Code wallet in Windows, navigate to the Vertcoin Core repository [Releases Page](https://github.com/vertcoin/vertcoin/releases) on GitHub and download the Windows binary file for the latest release. Both 64bit and 32 bit version are available.
Inside the zipped download you will find four applications;```vertcoin-qt```,```vertcoind```,```vertcoin-cli```, and ```vertcoin-tx```.
| Application | Description |
|--------------|------------------------------------------------------------------|
| vertcoin-qt | Core wallet with a GUI interface. |
| vertcoind | Headless daemon core wallet. |
| vertcoin-cli | Command line interface for interacting with vertcoind. |
| vertcoin-tx | Command line interface to create, parse, or modify transactions |
### Linux
Will update...
###MacOSX
das
|
!!!caution "Note"
The Vertcoin Core Wallet requires at least 2 GB of space on your computer to
store the block chain.
### Windows
To install the Vertcoin Code wallet in Windows, navigate to the Vertcoin Core
repository [Releases Page](https://github.com/vertcoin/vertcoin/releases) on
GitHub and download the Windows binary file for the latest release. Both 64bit
and 32 bit version are available.
Inside the zipped download you will find four applications; vertcoin-qt,
vertcoind, vertcoin-cli and vertcoin-tx.
| Application | Description |
|--------------|------------------------------------------------------------------|
| vertcoin-qt | Core wallet with a GUI interface. |
| vertcoind | Headless daemon core wallet. |
| vertcoin-cli | Command line interface for interacting with vertcoind. |
| vertcoin-tx | Command line interface to create, parse, or modify transactions |
### Linux
#### Ubuntu
You can install vertcoind (headless daemon) or the GUI wallet vertcoin-qt via
the Vertcoin ppa.
``` shell
$ sudo add-apt-repository ppa:vertcoin/ppa
$ sudo apt-get update
```
##### vertcoind
Then to install vertcoind run:
``` shell
$ sudo apt-get install vertcoind
```
Launch vertcoind from the command line with:
``` shell
$ vertcoind -daemon
```
And interact with vertcoind via:
``` shell
$ vertcoin-cli help
```
##### vertcoin-qt
To install the GUI wallet:
``` shell
$ sudo apt-get install vertcoin-qt
```
Then launch Vertcoin via the app icon installed the launcher.
#### Other Linux
Navigate to the Vertcoin Core repository
[Releases Page](https://github.com/vertcoin/vertcoin/releases) on GitHub and
download the Linux binary file for the latest release. Both 64bit and 32 bit
version are available.
###MacOSX
das
| Add Linux Core wallet install instructions | Add Linux Core wallet install instructions
| Markdown | mit | Bryangoodson/VertDocs,Bryangoodson/VertDocs,Bryangoodson/VertDocs,Bryangoodson/VertDocs | markdown | ## Code Before:
!!!caution "Note"
The Vertcoin Core Wallet requires at least 2 GB of space on your computer to store the block chain.
### Windows
To install the Vertcoin Code wallet in Windows, navigate to the Vertcoin Core repository [Releases Page](https://github.com/vertcoin/vertcoin/releases) on GitHub and download the Windows binary file for the latest release. Both 64bit and 32 bit version are available.
Inside the zipped download you will find four applications;```vertcoin-qt```,```vertcoind```,```vertcoin-cli```, and ```vertcoin-tx```.
| Application | Description |
|--------------|------------------------------------------------------------------|
| vertcoin-qt | Core wallet with a GUI interface. |
| vertcoind | Headless daemon core wallet. |
| vertcoin-cli | Command line interface for interacting with vertcoind. |
| vertcoin-tx | Command line interface to create, parse, or modify transactions |
### Linux
Will update...
###MacOSX
das
## Instruction:
Add Linux Core wallet install instructions
## Code After:
!!!caution "Note"
The Vertcoin Core Wallet requires at least 2 GB of space on your computer to
store the block chain.
### Windows
To install the Vertcoin Code wallet in Windows, navigate to the Vertcoin Core
repository [Releases Page](https://github.com/vertcoin/vertcoin/releases) on
GitHub and download the Windows binary file for the latest release. Both 64bit
and 32 bit version are available.
Inside the zipped download you will find four applications; vertcoin-qt,
vertcoind, vertcoin-cli and vertcoin-tx.
| Application | Description |
|--------------|------------------------------------------------------------------|
| vertcoin-qt | Core wallet with a GUI interface. |
| vertcoind | Headless daemon core wallet. |
| vertcoin-cli | Command line interface for interacting with vertcoind. |
| vertcoin-tx | Command line interface to create, parse, or modify transactions |
### Linux
#### Ubuntu
You can install vertcoind (headless daemon) or the GUI wallet vertcoin-qt via
the Vertcoin ppa.
``` shell
$ sudo add-apt-repository ppa:vertcoin/ppa
$ sudo apt-get update
```
##### vertcoind
Then to install vertcoind run:
``` shell
$ sudo apt-get install vertcoind
```
Launch vertcoind from the command line with:
``` shell
$ vertcoind -daemon
```
And interact with vertcoind via:
``` shell
$ vertcoin-cli help
```
##### vertcoin-qt
To install the GUI wallet:
``` shell
$ sudo apt-get install vertcoin-qt
```
Then launch Vertcoin via the app icon installed the launcher.
#### Other Linux
Navigate to the Vertcoin Core repository
[Releases Page](https://github.com/vertcoin/vertcoin/releases) on GitHub and
download the Linux binary file for the latest release. Both 64bit and 32 bit
version are available.
###MacOSX
das
|
!!!caution "Note"
- The Vertcoin Core Wallet requires at least 2 GB of space on your computer to store the block chain.
? -----------------------
+ The Vertcoin Core Wallet requires at least 2 GB of space on your computer to
+ store the block chain.
### Windows
- To install the Vertcoin Code wallet in Windows, navigate to the Vertcoin Core repository [Releases Page](https://github.com/vertcoin/vertcoin/releases) on GitHub and download the Windows binary file for the latest release. Both 64bit and 32 bit version are available.
+ To install the Vertcoin Code wallet in Windows, navigate to the Vertcoin Core
+ repository [Releases Page](https://github.com/vertcoin/vertcoin/releases) on
+ GitHub and download the Windows binary file for the latest release. Both 64bit
+ and 32 bit version are available.
- Inside the zipped download you will find four applications;```vertcoin-qt```,```vertcoind```,```vertcoin-cli```, and ```vertcoin-tx```.
+ Inside the zipped download you will find four applications; vertcoin-qt,
+ vertcoind, vertcoin-cli and vertcoin-tx.
| Application | Description |
|--------------|------------------------------------------------------------------|
| vertcoin-qt | Core wallet with a GUI interface. |
| vertcoind | Headless daemon core wallet. |
| vertcoin-cli | Command line interface for interacting with vertcoind. |
| vertcoin-tx | Command line interface to create, parse, or modify transactions |
### Linux
- Will update...
+ #### Ubuntu
+
+ You can install vertcoind (headless daemon) or the GUI wallet vertcoin-qt via
+ the Vertcoin ppa.
+
+ ``` shell
+ $ sudo add-apt-repository ppa:vertcoin/ppa
+ $ sudo apt-get update
+ ```
+
+ ##### vertcoind
+
+ Then to install vertcoind run:
+ ``` shell
+ $ sudo apt-get install vertcoind
+ ```
+
+ Launch vertcoind from the command line with:
+ ``` shell
+ $ vertcoind -daemon
+ ```
+
+ And interact with vertcoind via:
+ ``` shell
+ $ vertcoin-cli help
+ ```
+
+ ##### vertcoin-qt
+
+ To install the GUI wallet:
+ ``` shell
+ $ sudo apt-get install vertcoin-qt
+ ```
+
+ Then launch Vertcoin via the app icon installed the launcher.
+
+ #### Other Linux
+
+ Navigate to the Vertcoin Core repository
+ [Releases Page](https://github.com/vertcoin/vertcoin/releases) on GitHub and
+ download the Linux binary file for the latest release. Both 64bit and 32 bit
+ version are available.
###MacOSX
das
| 54 | 1.8 | 50 | 4 |
0ce833d662d1a315ec87a7cf1fdacf95bd011d69 | src/main/scala/eu/timepit/refined/generic.scala | src/main/scala/eu/timepit/refined/generic.scala | package eu.timepit.refined
import eu.timepit.refined.boolean.Not
import shapeless.Witness
object generic {
/** Predicate that checks if a value is equal to `U`. */
trait Equal[U]
/** Predicate that checks if a value is `null`. */
trait IsNull
/** Predicate that checks if a value is not `null`. */
type NonNull = Not[IsNull]
implicit def equalPredicate[T, U <: T](implicit wu: Witness.Aux[U]): Predicate[Equal[U], T] =
Predicate.instance(_ == wu.value, t => s"($t == ${wu.value})")
implicit def isNullPredicate[T <: AnyRef]: Predicate[IsNull, T] =
Predicate.instance(_ == null, t => s"($t == null)")
}
| package eu.timepit.refined
import eu.timepit.refined.boolean.Not
import eu.timepit.refined.internal.WeakWitness
object generic {
/** Predicate that checks if a value is equal to `U`. */
trait Equal[U]
/** Predicate that checks if a value is `null`. */
trait IsNull
/** Predicate that checks if a value is not `null`. */
type NonNull = Not[IsNull]
implicit def equalPredicate[T, U <: T](implicit wu: WeakWitness.Aux[U]): Predicate[Equal[U], T] =
Predicate.instance(_ == wu.value, t => s"($t == ${wu.value})")
implicit def isNullPredicate[T <: AnyRef]: Predicate[IsNull, T] =
Predicate.instance(_ == null, t => s"($t == null)")
}
| Change Equal to use WeakWitness | Change Equal to use WeakWitness
| Scala | mit | sh0hei/refined,fthomas/refined | scala | ## Code Before:
package eu.timepit.refined
import eu.timepit.refined.boolean.Not
import shapeless.Witness
object generic {
/** Predicate that checks if a value is equal to `U`. */
trait Equal[U]
/** Predicate that checks if a value is `null`. */
trait IsNull
/** Predicate that checks if a value is not `null`. */
type NonNull = Not[IsNull]
implicit def equalPredicate[T, U <: T](implicit wu: Witness.Aux[U]): Predicate[Equal[U], T] =
Predicate.instance(_ == wu.value, t => s"($t == ${wu.value})")
implicit def isNullPredicate[T <: AnyRef]: Predicate[IsNull, T] =
Predicate.instance(_ == null, t => s"($t == null)")
}
## Instruction:
Change Equal to use WeakWitness
## Code After:
package eu.timepit.refined
import eu.timepit.refined.boolean.Not
import eu.timepit.refined.internal.WeakWitness
object generic {
/** Predicate that checks if a value is equal to `U`. */
trait Equal[U]
/** Predicate that checks if a value is `null`. */
trait IsNull
/** Predicate that checks if a value is not `null`. */
type NonNull = Not[IsNull]
implicit def equalPredicate[T, U <: T](implicit wu: WeakWitness.Aux[U]): Predicate[Equal[U], T] =
Predicate.instance(_ == wu.value, t => s"($t == ${wu.value})")
implicit def isNullPredicate[T <: AnyRef]: Predicate[IsNull, T] =
Predicate.instance(_ == null, t => s"($t == null)")
}
| package eu.timepit.refined
import eu.timepit.refined.boolean.Not
- import shapeless.Witness
+ import eu.timepit.refined.internal.WeakWitness
object generic {
/** Predicate that checks if a value is equal to `U`. */
trait Equal[U]
/** Predicate that checks if a value is `null`. */
trait IsNull
/** Predicate that checks if a value is not `null`. */
type NonNull = Not[IsNull]
- implicit def equalPredicate[T, U <: T](implicit wu: Witness.Aux[U]): Predicate[Equal[U], T] =
+ implicit def equalPredicate[T, U <: T](implicit wu: WeakWitness.Aux[U]): Predicate[Equal[U], T] =
? ++++
Predicate.instance(_ == wu.value, t => s"($t == ${wu.value})")
implicit def isNullPredicate[T <: AnyRef]: Predicate[IsNull, T] =
Predicate.instance(_ == null, t => s"($t == null)")
} | 4 | 0.190476 | 2 | 2 |
0cbe16beff5fc3070972d33609d5fe5c47189ece | lib/amf_socket/policy_connection.rb | lib/amf_socket/policy_connection.rb | class AmfSocket::PolicyConnection < EM::Connection
def post_init
policy = <<-eos
<?xml version="1.0" encoding="UTF-8"?>
<cross-domain-policy xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://www.adobe.com/xml/schemas/PolicyFileSocket.xsd">
<allow-access-from domain="*" to-ports="*"/>
</cross-domain-policy>
eos
send_data(policy)
close_connection_after_writing
end
def receive_data(data)
end
end
| class AmfSocket::PolicyConnection < EM::Connection
def post_init
policy = <<-eos
<?xml version="1.0" encoding="UTF-8"?>
<cross-domain-policy xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://www.adobe.com/xml/schemas/PolicyFileSocket.xsd">
<allow-access-from domain="*" to-ports="*"/>
</cross-domain-policy>
eos
send_data(policy + "\0")
EM::Timer.new(5) do
close_connection_after_writing
end
end
def receive_data(data)
end
end
| Work around client side policy violations. | Work around client side policy violations.
| Ruby | mit | chadrem/amf_socket_ruby | ruby | ## Code Before:
class AmfSocket::PolicyConnection < EM::Connection
def post_init
policy = <<-eos
<?xml version="1.0" encoding="UTF-8"?>
<cross-domain-policy xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://www.adobe.com/xml/schemas/PolicyFileSocket.xsd">
<allow-access-from domain="*" to-ports="*"/>
</cross-domain-policy>
eos
send_data(policy)
close_connection_after_writing
end
def receive_data(data)
end
end
## Instruction:
Work around client side policy violations.
## Code After:
class AmfSocket::PolicyConnection < EM::Connection
def post_init
policy = <<-eos
<?xml version="1.0" encoding="UTF-8"?>
<cross-domain-policy xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://www.adobe.com/xml/schemas/PolicyFileSocket.xsd">
<allow-access-from domain="*" to-ports="*"/>
</cross-domain-policy>
eos
send_data(policy + "\0")
EM::Timer.new(5) do
close_connection_after_writing
end
end
def receive_data(data)
end
end
| class AmfSocket::PolicyConnection < EM::Connection
def post_init
policy = <<-eos
<?xml version="1.0" encoding="UTF-8"?>
<cross-domain-policy xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://www.adobe.com/xml/schemas/PolicyFileSocket.xsd">
<allow-access-from domain="*" to-ports="*"/>
</cross-domain-policy>
eos
- send_data(policy)
+ send_data(policy + "\0")
? +++++++
+ EM::Timer.new(5) do
- close_connection_after_writing
+ close_connection_after_writing
? ++
+ end
end
def receive_data(data)
end
end | 6 | 0.375 | 4 | 2 |
e5166df5f1d2b4ce11b52b601a19b7b036df562e | app/views/scroll/figure_components/_figure_component.html.erb | app/views/scroll/figure_components/_figure_component.html.erb | <figure id="<%= dom_id(component) %>"
class="FigureComponent <%= component.renderable.style %>"
data-selectable="Selected"
style="<%= "width: #{figure_component.width}px;" if figure_component.width.present? && !figure_component.width.zero? %>"
data-commands="#<%= dom_id(component) %>_commands"
data-reload="<%= scroll.component_path(component) %>">
<% if figure_component.image.present? %>
<% if figure_component.url.present? %>
<%= link_to image_tag(figure_component.image.url(:large)), figure_component.url %>
<% else %>
<%= image_tag figure_component.image.url(:large) %>
<% end %>
<% else %>
image missing
<% end %>
<% if figure_component.caption.present? %>
<figcaption>
<%= figure_component.caption %>
</figcaption>
<% end %>
</figure>
| <figure id="<%= dom_id(component) %>"
class="FigureComponent <%= component.renderable.style %>"
data-selectable="Selected"
data-commands="#<%= dom_id(component) %>_commands"
data-reload="<%= scroll.component_path(component) %>">
<% if figure_component.width.present? && !figure_component.width.zero? %>
<% width = figure_component.width %>
<% else %>
<% width = nil %>
<% end %>
<% if figure_component.image.present? %>
<% if figure_component.url.present? %>
<%= link_to image_tag(figure_component.image.url(:large), width: width), figure_component.url %>
<% else %>
<%= image_tag figure_component.image.url(:large), width: width %>
<% end %>
<% else %>
image missing
<% end %>
<% if figure_component.caption.present? %>
<figcaption>
<%= figure_component.caption %>
</figcaption>
<% end %>
</figure>
| Set image width instead of wrapper | Set image width instead of wrapper
| HTML+ERB | mit | robinclart/scroll,robinclart/scroll | html+erb | ## Code Before:
<figure id="<%= dom_id(component) %>"
class="FigureComponent <%= component.renderable.style %>"
data-selectable="Selected"
style="<%= "width: #{figure_component.width}px;" if figure_component.width.present? && !figure_component.width.zero? %>"
data-commands="#<%= dom_id(component) %>_commands"
data-reload="<%= scroll.component_path(component) %>">
<% if figure_component.image.present? %>
<% if figure_component.url.present? %>
<%= link_to image_tag(figure_component.image.url(:large)), figure_component.url %>
<% else %>
<%= image_tag figure_component.image.url(:large) %>
<% end %>
<% else %>
image missing
<% end %>
<% if figure_component.caption.present? %>
<figcaption>
<%= figure_component.caption %>
</figcaption>
<% end %>
</figure>
## Instruction:
Set image width instead of wrapper
## Code After:
<figure id="<%= dom_id(component) %>"
class="FigureComponent <%= component.renderable.style %>"
data-selectable="Selected"
data-commands="#<%= dom_id(component) %>_commands"
data-reload="<%= scroll.component_path(component) %>">
<% if figure_component.width.present? && !figure_component.width.zero? %>
<% width = figure_component.width %>
<% else %>
<% width = nil %>
<% end %>
<% if figure_component.image.present? %>
<% if figure_component.url.present? %>
<%= link_to image_tag(figure_component.image.url(:large), width: width), figure_component.url %>
<% else %>
<%= image_tag figure_component.image.url(:large), width: width %>
<% end %>
<% else %>
image missing
<% end %>
<% if figure_component.caption.present? %>
<figcaption>
<%= figure_component.caption %>
</figcaption>
<% end %>
</figure>
| <figure id="<%= dom_id(component) %>"
class="FigureComponent <%= component.renderable.style %>"
data-selectable="Selected"
- style="<%= "width: #{figure_component.width}px;" if figure_component.width.present? && !figure_component.width.zero? %>"
data-commands="#<%= dom_id(component) %>_commands"
data-reload="<%= scroll.component_path(component) %>">
+ <% if figure_component.width.present? && !figure_component.width.zero? %>
+ <% width = figure_component.width %>
+ <% else %>
+ <% width = nil %>
+ <% end %>
+
<% if figure_component.image.present? %>
<% if figure_component.url.present? %>
- <%= link_to image_tag(figure_component.image.url(:large)), figure_component.url %>
+ <%= link_to image_tag(figure_component.image.url(:large), width: width), figure_component.url %>
? ++++++++++++++
<% else %>
- <%= image_tag figure_component.image.url(:large) %>
+ <%= image_tag figure_component.image.url(:large), width: width %>
? ++++++++++++++
<% end %>
<% else %>
image missing
<% end %>
<% if figure_component.caption.present? %>
<figcaption>
<%= figure_component.caption %>
</figcaption>
<% end %>
</figure> | 11 | 0.458333 | 8 | 3 |
2969cd33bade3aa180e8bb23abdf86e3126d7d88 | _data/conference/venue.yml | _data/conference/venue.yml | location: The Port Workspaces
website: "http://portworkspaces.com"
address: Kaiser Mall
neighborhood: Lake Merritt
city: Oakland
state: CA
tickets: "http://www.eventbrite.com/tickets-external?eid=18063889580&ref=etckt"
start: 2015-10-16T18:00-07:00
end: 2015-10-18T18:00-07:00
| location: The Port Workspaces
website: "http://portworkspaces.com"
address: Kaiser Mall
neighborhood: Lake Merritt
city: Oakland
state: CA
| Move start/end dates into the schedule file and ticket info into the tickets file. | Move start/end dates into the schedule file and ticket info into the
tickets file.
| YAML | apache-2.0 | oaklandfinishup/oaklandfinishup.github.io,oaklandfinishup/oaklandfinishup.github.io | yaml | ## Code Before:
location: The Port Workspaces
website: "http://portworkspaces.com"
address: Kaiser Mall
neighborhood: Lake Merritt
city: Oakland
state: CA
tickets: "http://www.eventbrite.com/tickets-external?eid=18063889580&ref=etckt"
start: 2015-10-16T18:00-07:00
end: 2015-10-18T18:00-07:00
## Instruction:
Move start/end dates into the schedule file and ticket info into the
tickets file.
## Code After:
location: The Port Workspaces
website: "http://portworkspaces.com"
address: Kaiser Mall
neighborhood: Lake Merritt
city: Oakland
state: CA
| location: The Port Workspaces
website: "http://portworkspaces.com"
address: Kaiser Mall
neighborhood: Lake Merritt
city: Oakland
state: CA
- tickets: "http://www.eventbrite.com/tickets-external?eid=18063889580&ref=etckt"
- start: 2015-10-16T18:00-07:00
- end: 2015-10-18T18:00-07:00 | 3 | 0.333333 | 0 | 3 |
4f375374237f6433e04b7401666d02300beab00f | deployment_scripts/puppet/modules/lma_logging_analytics/spec/classes/lma_logging_analytics_kibana_spec.rb | deployment_scripts/puppet/modules/lma_logging_analytics/spec/classes/lma_logging_analytics_kibana_spec.rb | require 'spec_helper'
describe 'lma_logging_analytics::kibana' do
let(:facts) do
{:kernel => 'Linux', :operatingsystem => 'Ubuntu',
:concat_basedir => '/foo'}
end
it { should compile }
end
| require 'spec_helper'
describe 'lma_logging_analytics::kibana' do
let(:facts) do
{:kernel => 'Linux', :operatingsystem => 'Ubuntu',
:concat_basedir => '/foo'}
end
it { should create_file('/opt/kibana')}
it { should create_file('/opt/kibana/config.js')}
it { should create_file('/opt/kibana/app/dashboards/logs.json')}
it { should create_file('/opt/kibana/app/dashboards/notifications.json')}
it { should create_elasticsearch__template('kibana')}
it { should create_lma_logging_analytics__kibana_dashboard('logs')}
it { should create_lma_logging_analytics__kibana_dashboard('notifications')}
it { should create_class('nginx')}
end
| Update Puppet tests for Kibana class | Update Puppet tests for Kibana class
The 'should compile' directive is too dependant of local environment,
and failed with Facter option conflicts, let's replace it by more
specific tests.
Change-Id: I01156525c10f71e9b9f66481d15db2181ef99730
| Ruby | apache-2.0 | stackforge/fuel-plugin-elasticsearch-kibana,stackforge/fuel-plugin-elasticsearch-kibana,stackforge/fuel-plugin-elasticsearch-kibana | ruby | ## Code Before:
require 'spec_helper'
describe 'lma_logging_analytics::kibana' do
let(:facts) do
{:kernel => 'Linux', :operatingsystem => 'Ubuntu',
:concat_basedir => '/foo'}
end
it { should compile }
end
## Instruction:
Update Puppet tests for Kibana class
The 'should compile' directive is too dependant of local environment,
and failed with Facter option conflicts, let's replace it by more
specific tests.
Change-Id: I01156525c10f71e9b9f66481d15db2181ef99730
## Code After:
require 'spec_helper'
describe 'lma_logging_analytics::kibana' do
let(:facts) do
{:kernel => 'Linux', :operatingsystem => 'Ubuntu',
:concat_basedir => '/foo'}
end
it { should create_file('/opt/kibana')}
it { should create_file('/opt/kibana/config.js')}
it { should create_file('/opt/kibana/app/dashboards/logs.json')}
it { should create_file('/opt/kibana/app/dashboards/notifications.json')}
it { should create_elasticsearch__template('kibana')}
it { should create_lma_logging_analytics__kibana_dashboard('logs')}
it { should create_lma_logging_analytics__kibana_dashboard('notifications')}
it { should create_class('nginx')}
end
| require 'spec_helper'
describe 'lma_logging_analytics::kibana' do
let(:facts) do
{:kernel => 'Linux', :operatingsystem => 'Ubuntu',
:concat_basedir => '/foo'}
end
- it { should compile }
+ it { should create_file('/opt/kibana')}
+ it { should create_file('/opt/kibana/config.js')}
+ it { should create_file('/opt/kibana/app/dashboards/logs.json')}
+ it { should create_file('/opt/kibana/app/dashboards/notifications.json')}
+ it { should create_elasticsearch__template('kibana')}
+ it { should create_lma_logging_analytics__kibana_dashboard('logs')}
+ it { should create_lma_logging_analytics__kibana_dashboard('notifications')}
+ it { should create_class('nginx')}
end | 9 | 0.9 | 8 | 1 |
b635a774d98590b31a94a8e6c072911099ce0b2e | spec/metric_fu/reporter_spec.rb | spec/metric_fu/reporter_spec.rb | require "spec_helper"
describe MetricFu::Reporter do
context 'given a single formatter' do
before do
@formatter = double('formatter')
@reporter = Reporter.new(@formatter)
end
it 'notifies the formatter' do
@formatter.should_receive(:start)
@formatter.should_receive(:finish)
@reporter.start
@reporter.finish
end
it 'only sends notifications when supported by formatter' do
@formatter.stub(:respond_to?).with(:display_results).and_return(false)
@formatter.should_not_receive(:display_results)
@reporter.display_results
end
end
context 'given multiple formatters' do
before do
@formatters = [double('formatter'), double('formatter')]
@reporter = Reporter.new(@formatters)
end
it 'notifies all formatters' do
@formatters.each do |formatter|
formatter.should_receive(:start)
formatter.should_receive(:finish)
end
@reporter.start
@reporter.finish
end
end
end
| require "spec_helper"
describe MetricFu::Reporter do
context 'given a single formatter' do
before do
@formatter = double('formatter')
@formatter.stub(:to_a).and_return([@formatter])
@reporter = Reporter.new(@formatter)
end
it 'notifies the formatter' do
@formatter.should_receive(:start)
@formatter.should_receive(:finish)
@reporter.start
@reporter.finish
end
it 'only sends notifications when supported by formatter' do
@formatter.stub(:respond_to?).with(:display_results).and_return(false)
@formatter.should_not_receive(:display_results)
@reporter.display_results
end
end
context 'given multiple formatters' do
before do
@formatters = [double('formatter'), double('formatter')]
@reporter = Reporter.new(@formatters)
end
it 'notifies all formatters' do
@formatters.each do |formatter|
formatter.should_receive(:start)
formatter.should_receive(:finish)
end
@reporter.start
@reporter.finish
end
end
end
| Fix problem with rspec double on ruby192. | Fix problem with rspec double on ruby192.
| Ruby | mit | metricfu/metric_fu,metricfu/metric_fu,metricfu/metric_fu | ruby | ## Code Before:
require "spec_helper"
describe MetricFu::Reporter do
context 'given a single formatter' do
before do
@formatter = double('formatter')
@reporter = Reporter.new(@formatter)
end
it 'notifies the formatter' do
@formatter.should_receive(:start)
@formatter.should_receive(:finish)
@reporter.start
@reporter.finish
end
it 'only sends notifications when supported by formatter' do
@formatter.stub(:respond_to?).with(:display_results).and_return(false)
@formatter.should_not_receive(:display_results)
@reporter.display_results
end
end
context 'given multiple formatters' do
before do
@formatters = [double('formatter'), double('formatter')]
@reporter = Reporter.new(@formatters)
end
it 'notifies all formatters' do
@formatters.each do |formatter|
formatter.should_receive(:start)
formatter.should_receive(:finish)
end
@reporter.start
@reporter.finish
end
end
end
## Instruction:
Fix problem with rspec double on ruby192.
## Code After:
require "spec_helper"
describe MetricFu::Reporter do
context 'given a single formatter' do
before do
@formatter = double('formatter')
@formatter.stub(:to_a).and_return([@formatter])
@reporter = Reporter.new(@formatter)
end
it 'notifies the formatter' do
@formatter.should_receive(:start)
@formatter.should_receive(:finish)
@reporter.start
@reporter.finish
end
it 'only sends notifications when supported by formatter' do
@formatter.stub(:respond_to?).with(:display_results).and_return(false)
@formatter.should_not_receive(:display_results)
@reporter.display_results
end
end
context 'given multiple formatters' do
before do
@formatters = [double('formatter'), double('formatter')]
@reporter = Reporter.new(@formatters)
end
it 'notifies all formatters' do
@formatters.each do |formatter|
formatter.should_receive(:start)
formatter.should_receive(:finish)
end
@reporter.start
@reporter.finish
end
end
end
| require "spec_helper"
describe MetricFu::Reporter do
context 'given a single formatter' do
before do
@formatter = double('formatter')
+ @formatter.stub(:to_a).and_return([@formatter])
@reporter = Reporter.new(@formatter)
end
it 'notifies the formatter' do
@formatter.should_receive(:start)
@formatter.should_receive(:finish)
@reporter.start
@reporter.finish
end
it 'only sends notifications when supported by formatter' do
@formatter.stub(:respond_to?).with(:display_results).and_return(false)
@formatter.should_not_receive(:display_results)
@reporter.display_results
end
end
context 'given multiple formatters' do
before do
@formatters = [double('formatter'), double('formatter')]
@reporter = Reporter.new(@formatters)
end
it 'notifies all formatters' do
@formatters.each do |formatter|
formatter.should_receive(:start)
formatter.should_receive(:finish)
end
@reporter.start
@reporter.finish
end
end
end | 1 | 0.025 | 1 | 0 |
07999d1f24acbbfde50fe94897054e7c8df7fea1 | api/jsonstore.py | api/jsonstore.py | import json
import os
import tempfile
def store(data, directory="/var/www/luke/wikipedia/graphs/"):
try:
json.loads(data)
except ValueError:
return "not-json"
tf = tempfile.mkstemp(prefix="", dir=directory)[1]
with open(tf, "w") as f:
f.write(data)
return tf
if __name__ == "__main__":
print(store('{}'))
| import json
import os
import tempfile
def store(data, directory="/var/www/luke/wikipedia/graphs/"):
try:
json.loads(data)
except ValueError:
return "not-json"
tf = tempfile.mkstemp(prefix="", dir=directory)[1]
with open(tf, "w") as f:
f.write(data)
return os.path.split(tf)[1]
if __name__ == "__main__":
print(store('{}'))
| Tweak JSON api return value to be friendlier | Tweak JSON api return value to be friendlier
| Python | mit | controversial/wikipedia-map,controversial/wikipedia-map,controversial/wikipedia-map | python | ## Code Before:
import json
import os
import tempfile
def store(data, directory="/var/www/luke/wikipedia/graphs/"):
try:
json.loads(data)
except ValueError:
return "not-json"
tf = tempfile.mkstemp(prefix="", dir=directory)[1]
with open(tf, "w") as f:
f.write(data)
return tf
if __name__ == "__main__":
print(store('{}'))
## Instruction:
Tweak JSON api return value to be friendlier
## Code After:
import json
import os
import tempfile
def store(data, directory="/var/www/luke/wikipedia/graphs/"):
try:
json.loads(data)
except ValueError:
return "not-json"
tf = tempfile.mkstemp(prefix="", dir=directory)[1]
with open(tf, "w") as f:
f.write(data)
return os.path.split(tf)[1]
if __name__ == "__main__":
print(store('{}'))
| import json
import os
import tempfile
def store(data, directory="/var/www/luke/wikipedia/graphs/"):
try:
json.loads(data)
except ValueError:
return "not-json"
tf = tempfile.mkstemp(prefix="", dir=directory)[1]
with open(tf, "w") as f:
f.write(data)
- return tf
+ return os.path.split(tf)[1]
if __name__ == "__main__":
print(store('{}')) | 2 | 0.105263 | 1 | 1 |
04a45b0340a401bd251afce412ded04665d1ba2f | time_guarded_run.rb | time_guarded_run.rb |
time = Time.new
puts "time_guarded_run called at #{time}"
weekday_only = (ENV['KB_WEEKDAY_ONLY'] || 'true') == 'true'
start_hour = (ENV['KB_START_HOUR'] || 8).to_i
end_hour = (ENV['KB_END_HOUR'] || 17).to_i
max_tweets = (ENV['KB_MAX_TWEETS'] || 3).to_i
max_tweets_first_hour = (ENV['KB_MAX_TWEETS_FIRST_HOUR'] || 8).to_i
if weekday_only && (time.saturday? || time.sunday?)
exit
end
unless time.hour.between? start_hour, end_hour
exit
end
puts "Running Konstabot with #{max_tweets} max tweets"
system "ruby konstabot.rb -n #{max_tweets}"
|
time = Time.new
puts "time_guarded_run called at #{time}"
weekday_only = (ENV['KB_WEEKDAY_ONLY'] || 'true') == 'true'
start_hour = (ENV['KB_START_HOUR'] || 8).to_i
end_hour = (ENV['KB_END_HOUR'] || 17).to_i
max_tweets = (ENV['KB_MAX_TWEETS'] || 3).to_i
max_tweets_first_hour = (ENV['KB_MAX_TWEETS_FIRST_HOUR'] || 8).to_i
if weekday_only && (time.saturday? || time.sunday?)
exit
end
unless time.hour.between? start_hour, end_hour
exit
end
if time.hour == start_hour
max_tweets = max_tweets_first_hour
end
puts "Running Konstabot with #{max_tweets} max tweets"
system "ruby konstabot.rb -n #{max_tweets}"
| Allow for a larger number of tweets in the first hour | Allow for a larger number of tweets in the first hour
| Ruby | apache-2.0 | ktchernov/twitter-list-to-slack | ruby | ## Code Before:
time = Time.new
puts "time_guarded_run called at #{time}"
weekday_only = (ENV['KB_WEEKDAY_ONLY'] || 'true') == 'true'
start_hour = (ENV['KB_START_HOUR'] || 8).to_i
end_hour = (ENV['KB_END_HOUR'] || 17).to_i
max_tweets = (ENV['KB_MAX_TWEETS'] || 3).to_i
max_tweets_first_hour = (ENV['KB_MAX_TWEETS_FIRST_HOUR'] || 8).to_i
if weekday_only && (time.saturday? || time.sunday?)
exit
end
unless time.hour.between? start_hour, end_hour
exit
end
puts "Running Konstabot with #{max_tweets} max tweets"
system "ruby konstabot.rb -n #{max_tweets}"
## Instruction:
Allow for a larger number of tweets in the first hour
## Code After:
time = Time.new
puts "time_guarded_run called at #{time}"
weekday_only = (ENV['KB_WEEKDAY_ONLY'] || 'true') == 'true'
start_hour = (ENV['KB_START_HOUR'] || 8).to_i
end_hour = (ENV['KB_END_HOUR'] || 17).to_i
max_tweets = (ENV['KB_MAX_TWEETS'] || 3).to_i
max_tweets_first_hour = (ENV['KB_MAX_TWEETS_FIRST_HOUR'] || 8).to_i
if weekday_only && (time.saturday? || time.sunday?)
exit
end
unless time.hour.between? start_hour, end_hour
exit
end
if time.hour == start_hour
max_tweets = max_tweets_first_hour
end
puts "Running Konstabot with #{max_tweets} max tweets"
system "ruby konstabot.rb -n #{max_tweets}"
|
time = Time.new
puts "time_guarded_run called at #{time}"
weekday_only = (ENV['KB_WEEKDAY_ONLY'] || 'true') == 'true'
start_hour = (ENV['KB_START_HOUR'] || 8).to_i
end_hour = (ENV['KB_END_HOUR'] || 17).to_i
max_tweets = (ENV['KB_MAX_TWEETS'] || 3).to_i
max_tweets_first_hour = (ENV['KB_MAX_TWEETS_FIRST_HOUR'] || 8).to_i
if weekday_only && (time.saturday? || time.sunday?)
exit
end
unless time.hour.between? start_hour, end_hour
exit
end
+ if time.hour == start_hour
+ max_tweets = max_tweets_first_hour
+ end
+
puts "Running Konstabot with #{max_tweets} max tweets"
system "ruby konstabot.rb -n #{max_tweets}" | 4 | 0.2 | 4 | 0 |
f15a3047d333918d227f0dabcfebd8015669dc4a | README.md | README.md | Turnkey script to install ROOT from the current source pulled down from git
| Turnkey script to install ROOT from the current source pulled down from git
# Use
Letting your Unix shell (whatever is printed from `echo $SHELL | sed 's:.*/::'`) be called `shell`
## To install:
~~~bash
shell installROOTsource.sh
~~~
or
~~~bash
shell installROOTsource.sh install
~~~
## To update:
~~~bash
shell installROOTsource.sh rebuild
~~~
## To uninstall:
~~~bash
shell installROOTsource.sh uninstall
~~~
| Add use instructions with command line flags | Add use instructions with command line flags
| Markdown | mit | matthewfeickert/installROOTsource | markdown | ## Code Before:
Turnkey script to install ROOT from the current source pulled down from git
## Instruction:
Add use instructions with command line flags
## Code After:
Turnkey script to install ROOT from the current source pulled down from git
# Use
Letting your Unix shell (whatever is printed from `echo $SHELL | sed 's:.*/::'`) be called `shell`
## To install:
~~~bash
shell installROOTsource.sh
~~~
or
~~~bash
shell installROOTsource.sh install
~~~
## To update:
~~~bash
shell installROOTsource.sh rebuild
~~~
## To uninstall:
~~~bash
shell installROOTsource.sh uninstall
~~~
| Turnkey script to install ROOT from the current source pulled down from git
+
+ # Use
+ Letting your Unix shell (whatever is printed from `echo $SHELL | sed 's:.*/::'`) be called `shell`
+ ## To install:
+ ~~~bash
+ shell installROOTsource.sh
+ ~~~
+ or
+ ~~~bash
+ shell installROOTsource.sh install
+ ~~~
+ ## To update:
+ ~~~bash
+ shell installROOTsource.sh rebuild
+ ~~~
+ ## To uninstall:
+ ~~~bash
+ shell installROOTsource.sh uninstall
+ ~~~ | 19 | 19 | 19 | 0 |
972e7781ea75788872abe46ff390d24bfafb6f07 | docker-compose.yml | docker-compose.yml | version: '3'
services:
nginx:
image: nginx:alpine
ports:
- "127.0.0.1:${INTERNAL_NGINX_PORT}:8080"
restart: always
links:
- phpsv
- translate
depends_on:
- phpsv
volumes:
- ./config/nginx-site.conf:/etc/nginx/conf.d/default.conf
- ./src/subtitulamos/public:/code/public:cached
phpsv:
build:
context: .
dockerfile: Dockerfile.PHP
working_dir: /code
restart: always
links:
- redis
- sonic
depends_on:
- sonic
volumes:
- ./.env:/code/.env
- /var/run/mysqld/mysqld.sock:/var/run/mysqld/mysqld.sock
redis:
image: redis:6.0-alpine
volumes:
- ./data/redis:/data
sonic:
image: valeriansaliou/sonic:v1.3.0
restart: always
volumes:
- ./data/sonic:/var/lib/sonic/store/
- ./config/sonic.cfg:/etc/sonic.cfg
translate:
restart: always
build:
context: .
dockerfile: Dockerfile.Go
args:
REDIS_PUBSUB_ENV: ${ENVIRONMENT_NAME}
links:
- redis
| version: "3"
services:
nginx:
image: nginx:alpine
ports:
- "127.0.0.1:${INTERNAL_NGINX_PORT}:8080"
restart: always
links:
- phpsv
- translate
depends_on:
- phpsv
volumes:
- ./config/nginx-site.conf:/etc/nginx/conf.d/default.conf
- ./src/subtitulamos/public:/code/public:cached
logging:
options:
max-size: "600m"
max-file: "3"
phpsv:
build:
context: .
dockerfile: Dockerfile.PHP
working_dir: /code
restart: always
links:
- redis
- sonic
depends_on:
- sonic
volumes:
- ./.env:/code/.env
- /var/run/mysqld/mysqld.sock:/var/run/mysqld/mysqld.sock
logging:
options:
max-size: "400m"
max-file: "3"
redis:
image: redis:6.0-alpine
volumes:
- ./data/redis:/data
sonic:
image: valeriansaliou/sonic:v1.3.0
restart: always
volumes:
- ./data/sonic:/var/lib/sonic/store/
- ./config/sonic.cfg:/etc/sonic.cfg
translate:
restart: always
build:
context: .
dockerfile: Dockerfile.Go
args:
REDIS_PUBSUB_ENV: ${ENVIRONMENT_NAME}
links:
- redis
logging:
options:
max-size: "100m"
max-file: "3"
| Add restrictions to Docker log filesizes | Add restrictions to Docker log filesizes
| YAML | agpl-3.0 | subtitulamos/subtitulamos,subtitulamos/subtitulamos,subtitulamos/subtitulamos,subtitulamos/subtitulamos,subtitulamos/subtitulamos | yaml | ## Code Before:
version: '3'
services:
nginx:
image: nginx:alpine
ports:
- "127.0.0.1:${INTERNAL_NGINX_PORT}:8080"
restart: always
links:
- phpsv
- translate
depends_on:
- phpsv
volumes:
- ./config/nginx-site.conf:/etc/nginx/conf.d/default.conf
- ./src/subtitulamos/public:/code/public:cached
phpsv:
build:
context: .
dockerfile: Dockerfile.PHP
working_dir: /code
restart: always
links:
- redis
- sonic
depends_on:
- sonic
volumes:
- ./.env:/code/.env
- /var/run/mysqld/mysqld.sock:/var/run/mysqld/mysqld.sock
redis:
image: redis:6.0-alpine
volumes:
- ./data/redis:/data
sonic:
image: valeriansaliou/sonic:v1.3.0
restart: always
volumes:
- ./data/sonic:/var/lib/sonic/store/
- ./config/sonic.cfg:/etc/sonic.cfg
translate:
restart: always
build:
context: .
dockerfile: Dockerfile.Go
args:
REDIS_PUBSUB_ENV: ${ENVIRONMENT_NAME}
links:
- redis
## Instruction:
Add restrictions to Docker log filesizes
## Code After:
version: "3"
services:
nginx:
image: nginx:alpine
ports:
- "127.0.0.1:${INTERNAL_NGINX_PORT}:8080"
restart: always
links:
- phpsv
- translate
depends_on:
- phpsv
volumes:
- ./config/nginx-site.conf:/etc/nginx/conf.d/default.conf
- ./src/subtitulamos/public:/code/public:cached
logging:
options:
max-size: "600m"
max-file: "3"
phpsv:
build:
context: .
dockerfile: Dockerfile.PHP
working_dir: /code
restart: always
links:
- redis
- sonic
depends_on:
- sonic
volumes:
- ./.env:/code/.env
- /var/run/mysqld/mysqld.sock:/var/run/mysqld/mysqld.sock
logging:
options:
max-size: "400m"
max-file: "3"
redis:
image: redis:6.0-alpine
volumes:
- ./data/redis:/data
sonic:
image: valeriansaliou/sonic:v1.3.0
restart: always
volumes:
- ./data/sonic:/var/lib/sonic/store/
- ./config/sonic.cfg:/etc/sonic.cfg
translate:
restart: always
build:
context: .
dockerfile: Dockerfile.Go
args:
REDIS_PUBSUB_ENV: ${ENVIRONMENT_NAME}
links:
- redis
logging:
options:
max-size: "100m"
max-file: "3"
| - version: '3'
? ^ ^
+ version: "3"
? ^ ^
services:
nginx:
image: nginx:alpine
ports:
- "127.0.0.1:${INTERNAL_NGINX_PORT}:8080"
restart: always
links:
- phpsv
- translate
depends_on:
- phpsv
volumes:
- ./config/nginx-site.conf:/etc/nginx/conf.d/default.conf
- ./src/subtitulamos/public:/code/public:cached
+ logging:
+ options:
+ max-size: "600m"
+ max-file: "3"
phpsv:
build:
context: .
dockerfile: Dockerfile.PHP
working_dir: /code
restart: always
links:
- redis
- sonic
depends_on:
- sonic
volumes:
- ./.env:/code/.env
- /var/run/mysqld/mysqld.sock:/var/run/mysqld/mysqld.sock
+ logging:
+ options:
+ max-size: "400m"
+ max-file: "3"
redis:
image: redis:6.0-alpine
volumes:
- ./data/redis:/data
sonic:
image: valeriansaliou/sonic:v1.3.0
restart: always
volumes:
- ./data/sonic:/var/lib/sonic/store/
- ./config/sonic.cfg:/etc/sonic.cfg
translate:
restart: always
build:
context: .
dockerfile: Dockerfile.Go
args:
REDIS_PUBSUB_ENV: ${ENVIRONMENT_NAME}
links:
- redis
+ logging:
+ options:
+ max-size: "100m"
+ max-file: "3" | 14 | 0.264151 | 13 | 1 |
ca4c1ed33e6169a6f5462fe781e76bf58203d327 | scripts/build.js | scripts/build.js | const glob = require(`glob`);
const buildArticleHtml = require(`./build/article-html.js`);
const buildBaseCss = require(`./build/base-css.js`);
const buildBaseHtml = require(`./build/base-html.js`);
const extractArticleData = require(`./lib/extract-article-data.js`);
const articleFiles = glob.sync(`resources/articles/*.md`).reverse();
const defaultData = {
css: `<link rel="stylesheet" href="/base/css/global.css">`,
articles: [],
};
articleFiles.forEach((fileName) => {
const data = JSON.parse(JSON.stringify(defaultData));
Object.assign(data, extractArticleData(fileName));
defaultData.articles.push(data);
buildArticleHtml(fileName, data);
});
buildBaseHtml(defaultData);
buildBaseCss();
| const glob = require(`glob`);
const buildArticleHtml = require(`./build/article-html.js`);
const buildBaseCss = require(`./build/base-css.js`);
const buildBaseHtml = require(`./build/base-html.js`);
const extractArticleData = require(`./lib/extract-article-data.js`);
const articleFiles = glob.sync(`resources/articles/*.md`).reverse();
const defaultData = {
css: `<link rel="stylesheet" href="/base/css/global.css">`,
articles: [],
};
articleFiles.forEach((fileName) => {
const articleData = Object.assign({}, defaultData, extractArticleData(fileName));
defaultData.articles.push(articleData);
buildArticleHtml(fileName, articleData);
});
buildBaseHtml(defaultData);
buildBaseCss();
| Use Object.assign to dereference object. | Use Object.assign to dereference object.
| JavaScript | mit | maoberlehner/markus-oberlehner-net,maoberlehner/markus-oberlehner-net | javascript | ## Code Before:
const glob = require(`glob`);
const buildArticleHtml = require(`./build/article-html.js`);
const buildBaseCss = require(`./build/base-css.js`);
const buildBaseHtml = require(`./build/base-html.js`);
const extractArticleData = require(`./lib/extract-article-data.js`);
const articleFiles = glob.sync(`resources/articles/*.md`).reverse();
const defaultData = {
css: `<link rel="stylesheet" href="/base/css/global.css">`,
articles: [],
};
articleFiles.forEach((fileName) => {
const data = JSON.parse(JSON.stringify(defaultData));
Object.assign(data, extractArticleData(fileName));
defaultData.articles.push(data);
buildArticleHtml(fileName, data);
});
buildBaseHtml(defaultData);
buildBaseCss();
## Instruction:
Use Object.assign to dereference object.
## Code After:
const glob = require(`glob`);
const buildArticleHtml = require(`./build/article-html.js`);
const buildBaseCss = require(`./build/base-css.js`);
const buildBaseHtml = require(`./build/base-html.js`);
const extractArticleData = require(`./lib/extract-article-data.js`);
const articleFiles = glob.sync(`resources/articles/*.md`).reverse();
const defaultData = {
css: `<link rel="stylesheet" href="/base/css/global.css">`,
articles: [],
};
articleFiles.forEach((fileName) => {
const articleData = Object.assign({}, defaultData, extractArticleData(fileName));
defaultData.articles.push(articleData);
buildArticleHtml(fileName, articleData);
});
buildBaseHtml(defaultData);
buildBaseCss();
| const glob = require(`glob`);
const buildArticleHtml = require(`./build/article-html.js`);
const buildBaseCss = require(`./build/base-css.js`);
const buildBaseHtml = require(`./build/base-html.js`);
const extractArticleData = require(`./lib/extract-article-data.js`);
const articleFiles = glob.sync(`resources/articles/*.md`).reverse();
const defaultData = {
css: `<link rel="stylesheet" href="/base/css/global.css">`,
articles: [],
};
articleFiles.forEach((fileName) => {
- const data = JSON.parse(JSON.stringify(defaultData));
- Object.assign(data, extractArticleData(fileName));
+ const articleData = Object.assign({}, defaultData, extractArticleData(fileName));
? ++++++++++++++++++++ ++++ +++++++
- defaultData.articles.push(data);
? ^
+ defaultData.articles.push(articleData);
? ^^^^^^^^
- buildArticleHtml(fileName, data);
? ^
+ buildArticleHtml(fileName, articleData);
? ^^^^^^^^
});
buildBaseHtml(defaultData);
buildBaseCss(); | 7 | 0.291667 | 3 | 4 |
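
The commit above swaps a `JSON.parse(JSON.stringify(...))` round-trip for a single `Object.assign({}, defaultData, ...)` call. The property it relies on is that `Object.assign` shallow-merges its sources onto a fresh target, so the defaults object is never mutated — but, unlike the JSON round-trip, nested values are shared rather than cloned. A minimal standalone sketch of that behaviour, using made-up values rather than anything from the repository:

```javascript
// Shallow merge: later sources win and the original objects are not mutated.
const defaults = { css: `<link rel="stylesheet" href="/base/css/global.css">`, articles: [] };
const article = { title: `Hello`, slug: `hello` };

const merged = Object.assign({}, defaults, article);

console.log(merged.title);                            // "Hello"
console.log(defaults.title);                          // undefined -- defaults untouched
console.log(merged.articles === defaults.articles);   // true -- nested values are shared, not cloned
```
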
d5b63e0b784928696c6bb9e77776e93200272818 | protractor.conf.js | protractor.conf.js | // Protractor configuration file, see link for more information
// https://github.com/angular/protractor/blob/master/docs/referenceConf.js
/*global jasmine */
var SpecReporter = require('jasmine-spec-reporter');
exports.config = {
allScriptsTimeout: 11000,
specs: [
'./e2e/**/*.e2e-spec.ts'
],
capabilities: {
'browserName': 'chrome'
},
directConnect: true,
baseUrl: 'http://localhost:4200/',
framework: 'jasmine',
jasmineNodeOpts: {
showColors: true,
defaultTimeoutInterval: 30000,
print: function() {}
},
useAllAngular2AppRoots: true,
beforeLaunch: function() {
require('ts-node').register({
project: 'e2e'
});
},
onPrepare: function() {
jasmine.getEnv().addReporter(new SpecReporter());
}
};
| // Protractor configuration file, see link for more information
// https://github.com/angular/protractor/blob/master/docs/referenceConf.js
/*global jasmine */
var SpecReporter = require('jasmine-spec-reporter');
exports.config = {
allScriptsTimeout: 11000,
getPageTimeout: 60000,
specs: [
'./e2e/**/*.e2e-spec.ts'
],
capabilities: {
'browserName': 'chrome'
},
directConnect: true,
baseUrl: 'http://localhost:4200/',
framework: 'jasmine',
jasmineNodeOpts: {
showColors: true,
defaultTimeoutInterval: 30000,
print: function() {}
},
useAllAngular2AppRoots: true,
beforeLaunch: function() {
require('ts-node').register({
project: 'e2e'
});
},
onPrepare: function() {
jasmine.getEnv().addReporter(new SpecReporter());
}
};
| Set getPageTimeout to 60 seconds | Set getPageTimeout to 60 seconds
| JavaScript | mit | kendaleiv/angular-testing,kendaleiv/angular2-testing,kendaleiv/angular2-testing,kendaleiv/angular-testing,kendaleiv/angular-testing,kendaleiv/angular2-testing | javascript | ## Code Before:
// Protractor configuration file, see link for more information
// https://github.com/angular/protractor/blob/master/docs/referenceConf.js
/*global jasmine */
var SpecReporter = require('jasmine-spec-reporter');
exports.config = {
allScriptsTimeout: 11000,
specs: [
'./e2e/**/*.e2e-spec.ts'
],
capabilities: {
'browserName': 'chrome'
},
directConnect: true,
baseUrl: 'http://localhost:4200/',
framework: 'jasmine',
jasmineNodeOpts: {
showColors: true,
defaultTimeoutInterval: 30000,
print: function() {}
},
useAllAngular2AppRoots: true,
beforeLaunch: function() {
require('ts-node').register({
project: 'e2e'
});
},
onPrepare: function() {
jasmine.getEnv().addReporter(new SpecReporter());
}
};
## Instruction:
Set getPageTimeout to 60 seconds
## Code After:
// Protractor configuration file, see link for more information
// https://github.com/angular/protractor/blob/master/docs/referenceConf.js
/*global jasmine */
var SpecReporter = require('jasmine-spec-reporter');
exports.config = {
allScriptsTimeout: 11000,
getPageTimeout: 60000,
specs: [
'./e2e/**/*.e2e-spec.ts'
],
capabilities: {
'browserName': 'chrome'
},
directConnect: true,
baseUrl: 'http://localhost:4200/',
framework: 'jasmine',
jasmineNodeOpts: {
showColors: true,
defaultTimeoutInterval: 30000,
print: function() {}
},
useAllAngular2AppRoots: true,
beforeLaunch: function() {
require('ts-node').register({
project: 'e2e'
});
},
onPrepare: function() {
jasmine.getEnv().addReporter(new SpecReporter());
}
};
| // Protractor configuration file, see link for more information
// https://github.com/angular/protractor/blob/master/docs/referenceConf.js
/*global jasmine */
var SpecReporter = require('jasmine-spec-reporter');
exports.config = {
allScriptsTimeout: 11000,
+ getPageTimeout: 60000,
specs: [
'./e2e/**/*.e2e-spec.ts'
],
capabilities: {
'browserName': 'chrome'
},
directConnect: true,
baseUrl: 'http://localhost:4200/',
framework: 'jasmine',
jasmineNodeOpts: {
showColors: true,
defaultTimeoutInterval: 30000,
print: function() {}
},
useAllAngular2AppRoots: true,
beforeLaunch: function() {
require('ts-node').register({
project: 'e2e'
});
},
onPrepare: function() {
jasmine.getEnv().addReporter(new SpecReporter());
}
}; | 1 | 0.03125 | 1 | 0 |
fb3fbe1df40744ac42dfb79ad6117dce48322fc7 | tools/run-test.js | tools/run-test.js | const path = require("path");
const rootDir = path.normalize(path.join(__dirname, ".."));
const tester = path.join(rootDir, "node_modules", "vscode", "bin", "test");
const cp = require("child_process");
let tests = process.argv.slice(2);
let failed = 0;
tests.forEach((test) => {
let [ testName, wsName ] = test.split("@", 2);
if (wsName == null) {
wsName = testName;
}
let testRoot = path.join(rootDir, "out", "test", testName);
let workspace = path.join(rootDir, "test", "workspace", wsName);
console.log("#".repeat(100));
console.log(`# [${test}] Started at ${new Date().toString()}`);
console.log("");
let cmd = (process.platform === "win32") ? "node" : "env";
let args = [tester];
if (process.platform !== "win32") {
args.unshift("node");
}
let result = cp.spawnSync(cmd, args, {
env: {
CODE_TESTS_PATH: testRoot,
CODE_TESTS_WORKSPACE: workspace
},
stdio: "inherit"
});
console.log(`# [${test}] Finished at ${new Date().toString()} (result=${result.status})`);
if (result.status !== 0) {
console.error(`# ${result.error}`);
console.info(result.env);
++failed;
}
console.log("");
});
if (failed > 0) {
process.exitCode = 1;
}
| const path = require("path");
const rootDir = path.normalize(path.join(__dirname, ".."));
const tester = path.join(rootDir, "node_modules", "vscode", "bin", "test");
const cp = require("child_process");
let tests = process.argv.slice(2);
let failed = 0;
tests.forEach((test) => {
let [ testName, wsName ] = test.split("@", 2);
if (wsName == null) {
wsName = testName;
}
let testRoot = path.join(rootDir, "out", "test", testName);
let workspace = path.join(rootDir, "test", "workspace", wsName);
console.log("#".repeat(100));
console.log(`# [${test}] Started at ${new Date().toString()}`);
console.log("");
let result = cp.spawnSync("node", [tester], {
env: Object.assign({}, process.env, {
CODE_TESTS_PATH: testRoot,
CODE_TESTS_WORKSPACE: workspace
}),
stdio: "inherit"
});
console.log(`# [${test}] Finished at ${new Date().toString()} (result=${result.status})`);
if (result.status !== 0) {
console.error(`# ${result.error}`);
console.info(result.env);
++failed;
}
console.log("");
});
if (failed > 0) {
process.exitCode = 1;
}
| Fix env for test runner | Fix env for test runner
| JavaScript | mit | kimushu/rubic-vscode,kimushu/rubic-vscode,kimushu/rubic-vscode,kimushu/rubic-vscode | javascript | ## Code Before:
const path = require("path");
const rootDir = path.normalize(path.join(__dirname, ".."));
const tester = path.join(rootDir, "node_modules", "vscode", "bin", "test");
const cp = require("child_process");
let tests = process.argv.slice(2);
let failed = 0;
tests.forEach((test) => {
let [ testName, wsName ] = test.split("@", 2);
if (wsName == null) {
wsName = testName;
}
let testRoot = path.join(rootDir, "out", "test", testName);
let workspace = path.join(rootDir, "test", "workspace", wsName);
console.log("#".repeat(100));
console.log(`# [${test}] Started at ${new Date().toString()}`);
console.log("");
let cmd = (process.platform === "win32") ? "node" : "env";
let args = [tester];
if (process.platform !== "win32") {
args.unshift("node");
}
let result = cp.spawnSync(cmd, args, {
env: {
CODE_TESTS_PATH: testRoot,
CODE_TESTS_WORKSPACE: workspace
},
stdio: "inherit"
});
console.log(`# [${test}] Finished at ${new Date().toString()} (result=${result.status})`);
if (result.status !== 0) {
console.error(`# ${result.error}`);
console.info(result.env);
++failed;
}
console.log("");
});
if (failed > 0) {
process.exitCode = 1;
}
## Instruction:
Fix env for test runner
## Code After:
const path = require("path");
const rootDir = path.normalize(path.join(__dirname, ".."));
const tester = path.join(rootDir, "node_modules", "vscode", "bin", "test");
const cp = require("child_process");
let tests = process.argv.slice(2);
let failed = 0;
tests.forEach((test) => {
let [ testName, wsName ] = test.split("@", 2);
if (wsName == null) {
wsName = testName;
}
let testRoot = path.join(rootDir, "out", "test", testName);
let workspace = path.join(rootDir, "test", "workspace", wsName);
console.log("#".repeat(100));
console.log(`# [${test}] Started at ${new Date().toString()}`);
console.log("");
let result = cp.spawnSync("node", [tester], {
env: Object.assign({}, process.env, {
CODE_TESTS_PATH: testRoot,
CODE_TESTS_WORKSPACE: workspace
}),
stdio: "inherit"
});
console.log(`# [${test}] Finished at ${new Date().toString()} (result=${result.status})`);
if (result.status !== 0) {
console.error(`# ${result.error}`);
console.info(result.env);
++failed;
}
console.log("");
});
if (failed > 0) {
process.exitCode = 1;
}
| const path = require("path");
const rootDir = path.normalize(path.join(__dirname, ".."));
const tester = path.join(rootDir, "node_modules", "vscode", "bin", "test");
const cp = require("child_process");
let tests = process.argv.slice(2);
let failed = 0;
tests.forEach((test) => {
let [ testName, wsName ] = test.split("@", 2);
if (wsName == null) {
wsName = testName;
}
let testRoot = path.join(rootDir, "out", "test", testName);
let workspace = path.join(rootDir, "test", "workspace", wsName);
console.log("#".repeat(100));
console.log(`# [${test}] Started at ${new Date().toString()}`);
console.log("");
- let cmd = (process.platform === "win32") ? "node" : "env";
- let args = [tester];
- if (process.platform !== "win32") {
- args.unshift("node");
- }
- let result = cp.spawnSync(cmd, args, {
? ^^ ^ ^^
+ let result = cp.spawnSync("node", [tester], {
? ^^^ ++ ^^^^^^ ^
- env: {
+ env: Object.assign({}, process.env, {
CODE_TESTS_PATH: testRoot,
CODE_TESTS_WORKSPACE: workspace
- },
+ }),
? +
stdio: "inherit"
});
console.log(`# [${test}] Finished at ${new Date().toString()} (result=${result.status})`);
if (result.status !== 0) {
console.error(`# ${result.error}`);
console.info(result.env);
++failed;
}
console.log("");
});
if (failed > 0) {
process.exitCode = 1;
} | 11 | 0.268293 | 3 | 8 |
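
The fix above comes down to handing the spawned process an environment that extends `process.env` instead of replacing it — passing a bare `env: { ... }` object hides `PATH` and every other inherited variable from the child. A minimal sketch of the same pattern; the script name and the value assigned to `CODE_TESTS_PATH` are placeholders, not taken from the repository:

```javascript
const cp = require("child_process");

// Merge onto process.env so the child keeps PATH and friends,
// then layer the run-specific variables on top.
const result = cp.spawnSync("node", ["some-script.js"], {
  env: Object.assign({}, process.env, { CODE_TESTS_PATH: "/tmp/example" }),
  stdio: "inherit"
});

process.exitCode = (result.status === 0) ? 0 : 1;
```
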
39644fb737894d6f310f300dd9f984704051e9fe | README.md | README.md | react-backbone-data
===================
React.addons.update is an awesome tool for updating
objects in an immutable way. I wanted to build in
immutability to Backbone Models so that
shouldComponentUpdate can be fast.
Todo
----
- Add collection support
| react-backbone-data
===================
React.addons.update is an awesome tool for updating
objects in an immutable way. I wanted to build in
immutability to Backbone Models so that
shouldComponentUpdate can be fast.
Example
-------
Models
```javascript
var Article = ReactBackboneData.Model.extend({
defaults: {
title: "Default title"
}
});
```
```javascript
var Author = ReactBackboneData.Model.extend({
defaults: {
name: "John Doe"
}
});
```
Component
```javascript
var Component = React.createClass({
mixins: [ReactBackboneData.Mixin],
models: {
article: Article,
author: Author
},
componentWillMount: function() {
this._models.article.fetch();
this._models.author.fetch();
},
render: function() {
return (
<div>
<h1>{this.state.article.title}</h1>
<h2>by {this.state.author.name}</h2>
</div>
);
}
});
```
React
```javascript
React.renderComponent(<Component articleId={1} authorId={1}/>, document.getElementById("page"));
```
Todo
----
- Support componentWillReceiveProps
- Add collection support
| Add an example to the Readme | Add an example to the Readme
| Markdown | mit | sdemjanenko/react-backbone-data | markdown | ## Code Before:
react-backbone-data
===================
React.addons.update is an awesome tool for updating
objects in an immutable way. I wanted to build in
immutability to Backbone Models so that
shouldComponentUpdate can be fast.
Todo
----
- Add collection support
## Instruction:
Add an example to the Readme
## Code After:
react-backbone-data
===================
React.addons.update is an awesome tool for updating
objects in an immutable way. I wanted to build in
immutability to Backbone Models so that
shouldComponentUpdate can be fast.
Example
-------
Models
```javascript
var Article = ReactBackboneData.Model.extend({
defaults: {
title: "Default title"
}
});
```
```javascript
var Author = ReactBackboneData.Model.extend({
defaults: {
name: "John Doe"
}
});
```
Component
```javascript
var Component = React.createClass({
mixins: [ReactBackboneData.Mixin],
models: {
article: Article,
author: Author
},
componentWillMount: function() {
this._models.article.fetch();
this._models.author.fetch();
},
render: function() {
return (
<div>
<h1>{this.state.article.title}</h1>
<h2>by {this.state.author.name}</h2>
</div>
);
}
});
```
React
```javascript
React.renderComponent(<Component articleId={1} authorId={1}/>, document.getElementById("page"));
```
Todo
----
- Support componentWillReceiveProps
- Add collection support
| react-backbone-data
===================
React.addons.update is an awesome tool for updating
objects in an immutable way. I wanted to build in
immutability to Backbone Models so that
shouldComponentUpdate can be fast.
+ Example
+ -------
+
+ Models
+
+ ```javascript
+ var Article = ReactBackboneData.Model.extend({
+ defaults: {
+ title: "Default title"
+ }
+ });
+ ```
+
+ ```javascript
+ var Author = ReactBackboneData.Model.extend({
+ defaults: {
+ name: "John Doe"
+ }
+ });
+ ```
+
+ Component
+
+ ```javascript
+
+ var Component = React.createClass({
+ mixins: [ReactBackboneData.Mixin],
+ models: {
+ article: Article,
+ author: Author
+ },
+ componentWillMount: function() {
+ this._models.article.fetch();
+ this._models.author.fetch();
+ },
+ render: function() {
+ return (
+ <div>
+ <h1>{this.state.article.title}</h1>
+ <h2>by {this.state.author.name}</h2>
+ </div>
+ );
+ }
+ });
+ ```
+
+ React
+ ```javascript
+ React.renderComponent(<Component articleId={1} authorId={1}/>, document.getElementById("page"));
+ ```
+
+
Todo
----
+ - Support componentWillReceiveProps
- Add collection support | 53 | 4.416667 | 53 | 0 |
f1389b702666d0e47124c09443cb3d5db9d48a4f | scalafmt-tests/src/test/scala/org/scalafmt/cli/FileTestOps.scala | scalafmt-tests/src/test/scala/org/scalafmt/cli/FileTestOps.scala | package org.scalafmt.cli
import java.io.File
import org.scalafmt.util.AbsoluteFile
import org.scalafmt.util.FileOps
object FileTestOps {
/**
* The inverse of [[dir2string]]. Given a string representation creates the
* necessary files/directories with respective file contents.
*/
def string2dir(layout: String): AbsoluteFile = {
val root = File.createTempFile("root", "root")
root.delete()
root.mkdir()
layout.split("(?=\n/)").foreach { row =>
val path :: contents :: Nil =
row.stripPrefix("\n").split("\n", 2).toList
val file = new File(root, path)
file.getParentFile.mkdirs()
FileOps.writeFile(file, contents)
}
AbsoluteFile.fromPath(root.getAbsolutePath).get
}
/** Gives a string representation of a directory. For example
*
* /build.sbt
* val x = project
* /src/main/scala/Main.scala
* object A { def main = Unit }
* /target/scala-2.11/foo.class
* ^!*@#@!*#&@*!&#^
*/
def dir2string(file: AbsoluteFile): String = {
FileOps
.listFiles(file.jfile)
.sorted
.map { path =>
val contents = FileOps.readFile(path)
s"""|${path.stripPrefix(file.jfile.getPath)}
|$contents""".stripMargin
}
.mkString("\n")
.replace(File.separator, "/") // ensure original separators
}
}
| package org.scalafmt.cli
import java.io.File
import java.nio.file.Files
import org.scalafmt.util.AbsoluteFile
import org.scalafmt.util.FileOps
object FileTestOps {
/**
* The inverse of [[dir2string]]. Given a string representation creates the
* necessary files/directories with respective file contents.
*/
def string2dir(layout: String): AbsoluteFile = {
val root = Files.createTempDirectory("root").toFile
layout.split("(?=\n/)").foreach { row =>
val path :: contents :: Nil =
row.stripPrefix("\n").split("\n", 2).toList
val file = new File(root, path)
file.getParentFile.mkdirs()
FileOps.writeFile(file, contents)
}
AbsoluteFile.fromPath(root.getAbsolutePath).get
}
/** Gives a string representation of a directory. For example
*
* /build.sbt
* val x = project
* /src/main/scala/Main.scala
* object A { def main = Unit }
* /target/scala-2.11/foo.class
* ^!*@#@!*#&@*!&#^
*/
def dir2string(file: AbsoluteFile): String = {
FileOps
.listFiles(file.jfile)
.sorted
.map { path =>
val contents = FileOps.readFile(path)
s"""|${path.stripPrefix(file.jfile.getPath)}
|$contents""".stripMargin
}
.mkString("\n")
.replace(File.separator, "/") // ensure original separators
}
}
| Fix minor inefficiency in test infrastructure | Fix minor inefficiency in test infrastructure
| Scala | apache-2.0 | scalameta/scalafmt,scalameta/scalafmt,olafurpg/scalafmt,scalameta/scalafmt,olafurpg/scalafmt,scalameta/scalafmt,olafurpg/scalafmt,olafurpg/scalafmt | scala | ## Code Before:
package org.scalafmt.cli
import java.io.File
import org.scalafmt.util.AbsoluteFile
import org.scalafmt.util.FileOps
object FileTestOps {
/**
* The inverse of [[dir2string]]. Given a string representation creates the
* necessary files/directories with respective file contents.
*/
def string2dir(layout: String): AbsoluteFile = {
val root = File.createTempFile("root", "root")
root.delete()
root.mkdir()
layout.split("(?=\n/)").foreach { row =>
val path :: contents :: Nil =
row.stripPrefix("\n").split("\n", 2).toList
val file = new File(root, path)
file.getParentFile.mkdirs()
FileOps.writeFile(file, contents)
}
AbsoluteFile.fromPath(root.getAbsolutePath).get
}
/** Gives a string representation of a directory. For example
*
* /build.sbt
* val x = project
* /src/main/scala/Main.scala
* object A { def main = Unit }
* /target/scala-2.11/foo.class
* ^!*@#@!*#&@*!&#^
*/
def dir2string(file: AbsoluteFile): String = {
FileOps
.listFiles(file.jfile)
.sorted
.map { path =>
val contents = FileOps.readFile(path)
s"""|${path.stripPrefix(file.jfile.getPath)}
|$contents""".stripMargin
}
.mkString("\n")
.replace(File.separator, "/") // ensure original separators
}
}
## Instruction:
Fix minor inefficiency in test infrastructure
## Code After:
package org.scalafmt.cli
import java.io.File
import java.nio.file.Files
import org.scalafmt.util.AbsoluteFile
import org.scalafmt.util.FileOps
object FileTestOps {
/**
* The inverse of [[dir2string]]. Given a string representation creates the
* necessary files/directories with respective file contents.
*/
def string2dir(layout: String): AbsoluteFile = {
val root = Files.createTempDirectory("root").toFile
layout.split("(?=\n/)").foreach { row =>
val path :: contents :: Nil =
row.stripPrefix("\n").split("\n", 2).toList
val file = new File(root, path)
file.getParentFile.mkdirs()
FileOps.writeFile(file, contents)
}
AbsoluteFile.fromPath(root.getAbsolutePath).get
}
/** Gives a string representation of a directory. For example
*
* /build.sbt
* val x = project
* /src/main/scala/Main.scala
* object A { def main = Unit }
* /target/scala-2.11/foo.class
* ^!*@#@!*#&@*!&#^
*/
def dir2string(file: AbsoluteFile): String = {
FileOps
.listFiles(file.jfile)
.sorted
.map { path =>
val contents = FileOps.readFile(path)
s"""|${path.stripPrefix(file.jfile.getPath)}
|$contents""".stripMargin
}
.mkString("\n")
.replace(File.separator, "/") // ensure original separators
}
}
| package org.scalafmt.cli
import java.io.File
+ import java.nio.file.Files
import org.scalafmt.util.AbsoluteFile
import org.scalafmt.util.FileOps
object FileTestOps {
/**
* The inverse of [[dir2string]]. Given a string representation creates the
* necessary files/directories with respective file contents.
*/
def string2dir(layout: String): AbsoluteFile = {
- val root = File.createTempFile("root", "root")
? ^ ^ ^^^^ ^^^^
+ val root = Files.createTempDirectory("root").toFile
? + ^ ^ +++++ ^^^ ^^^^
- root.delete()
- root.mkdir()
layout.split("(?=\n/)").foreach { row =>
val path :: contents :: Nil =
row.stripPrefix("\n").split("\n", 2).toList
val file = new File(root, path)
file.getParentFile.mkdirs()
FileOps.writeFile(file, contents)
}
AbsoluteFile.fromPath(root.getAbsolutePath).get
}
/** Gives a string representation of a directory. For example
*
* /build.sbt
* val x = project
* /src/main/scala/Main.scala
* object A { def main = Unit }
* /target/scala-2.11/foo.class
* ^!*@#@!*#&@*!&#^
*/
def dir2string(file: AbsoluteFile): String = {
FileOps
.listFiles(file.jfile)
.sorted
.map { path =>
val contents = FileOps.readFile(path)
s"""|${path.stripPrefix(file.jfile.getPath)}
|$contents""".stripMargin
}
.mkString("\n")
.replace(File.separator, "/") // ensure original separators
}
} | 5 | 0.1 | 2 | 3 |
d0e31a409fc39ed827e380c1a9626d9639b376c1 | ForensicDemoServer/package.json | ForensicDemoServer/package.json | {
"name": "ForensicDemoServer",
"version": "1.0.0",
"description": "Demo server for Forensic 2.0",
"main": "server.js",
"dependencies": {
"express": "^4.10.6",
"passport": "^0.2.1"
},
"devDependencies": {
"express": "^4.10.7",
"http-auth": "^2.2.5",
"passport": "^0.2.1",
"passport-local": "^1.0.0"
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "node server.js"
},
"author": "Chris Dickson"
}
| {
"name": "ForensicDemoServer",
"version": "1.0.0",
"description": "Demo server for Forensic 2.0",
"main": "server.js",
"dependencies": {
"express": "^4.10.6",
"passport": "^0.2.1"
},
"devDependencies": {
"express": "^4.10.7",
"http-auth": "^2.2.5"
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "node server.js"
},
"author": "Chris Dickson"
}
 | Remove unnecessary dependencies in demo server | Remove unnecessary dependencies in demo server
| JSON | apache-2.0 | TeamHG-Memex/Datawake,Sotera/Datawake-Legacy,Sotera/Datawake-Legacy,TeamHG-Memex/Datawake,TeamHG-Memex/Datawake,Sotera/Datawake-Legacy,Sotera/Datawake-Legacy,Sotera/Datawake-Legacy | json | ## Code Before:
{
"name": "ForensicDemoServer",
"version": "1.0.0",
"description": "Demo server for Forensic 2.0",
"main": "server.js",
"dependencies": {
"express": "^4.10.6",
"passport": "^0.2.1"
},
"devDependencies": {
"express": "^4.10.7",
"http-auth": "^2.2.5",
"passport": "^0.2.1",
"passport-local": "^1.0.0"
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "node server.js"
},
"author": "Chris Dickson"
}
## Instruction:
Remove unnecessary dependencies in demo server
## Code After:
{
"name": "ForensicDemoServer",
"version": "1.0.0",
"description": "Demo server for Forensic 2.0",
"main": "server.js",
"dependencies": {
"express": "^4.10.6",
"passport": "^0.2.1"
},
"devDependencies": {
"express": "^4.10.7",
"http-auth": "^2.2.5"
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "node server.js"
},
"author": "Chris Dickson"
}
| {
"name": "ForensicDemoServer",
"version": "1.0.0",
"description": "Demo server for Forensic 2.0",
"main": "server.js",
"dependencies": {
"express": "^4.10.6",
"passport": "^0.2.1"
},
"devDependencies": {
"express": "^4.10.7",
- "http-auth": "^2.2.5",
? -
+ "http-auth": "^2.2.5"
- "passport": "^0.2.1",
- "passport-local": "^1.0.0"
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "node server.js"
},
"author": "Chris Dickson"
} | 4 | 0.190476 | 1 | 3 |
e6ff1de8260bb2fad672eed49524b9f9e5ef3b54 | src/apps/omis/apps/edit/controllers/edit-lead-assignee.js | src/apps/omis/apps/edit/controllers/edit-lead-assignee.js | const { find, get } = require('lodash')
const { Order } = require('../../../models')
async function editLeadAssignee (req, res, next) {
const adviserId = req.body.adviserId
const orderId = req.body.orderId
const returnUrl = req.body.returnUrl || req.header('Referer')
if (!adviserId || !orderId) {
return res.redirect(returnUrl)
}
try {
const allAssignees = await Order.getAssignees(req.session.token, orderId)
const assignees = allAssignees.map(assignee => {
return Object.assign(assignee, {
is_lead: assignee.adviser.id === adviserId,
})
})
const leadAdviser = find(assignees, { adviser: { id: adviserId } })
await Order.saveAssignees(req.session.token, orderId, assignees)
req.flash('success', `Lead post adviser set to ${get(leadAdviser, 'adviser.name')}`)
res.redirect(returnUrl)
} catch (error) {
next(error)
}
}
module.exports = editLeadAssignee
| const { find, get } = require('lodash')
const { Order } = require('../../../models')
async function editLeadAssignee (req, res, next) {
const adviserId = req.body.adviserId
const orderId = req.body.orderId
const returnUrl = req.body.returnUrl || req.header('Referer')
if (!adviserId || !orderId) {
return res.redirect(returnUrl)
}
try {
const allAssignees = await Order.getAssignees(req.session.token, orderId)
const assignees = allAssignees.map(assignee => {
return Object.assign(assignee, {
is_lead: assignee.adviser.id === adviserId,
})
})
const leadAdviser = find(assignees, { adviser: { id: adviserId } })
await Order.saveAssignees(req.session.token, orderId, assignees)
req.flash('success', `Lead adviser in the market set to ${get(leadAdviser, 'adviser.name')}`)
res.redirect(returnUrl)
} catch (error) {
next(error)
}
}
module.exports = editLeadAssignee
| Update lead adviser success message | Update lead adviser success message
| JavaScript | mit | uktrade/data-hub-frontend,uktrade/data-hub-fe-beta2,uktrade/data-hub-frontend,uktrade/data-hub-fe-beta2,uktrade/data-hub-frontend | javascript | ## Code Before:
const { find, get } = require('lodash')
const { Order } = require('../../../models')
async function editLeadAssignee (req, res, next) {
const adviserId = req.body.adviserId
const orderId = req.body.orderId
const returnUrl = req.body.returnUrl || req.header('Referer')
if (!adviserId || !orderId) {
return res.redirect(returnUrl)
}
try {
const allAssignees = await Order.getAssignees(req.session.token, orderId)
const assignees = allAssignees.map(assignee => {
return Object.assign(assignee, {
is_lead: assignee.adviser.id === adviserId,
})
})
const leadAdviser = find(assignees, { adviser: { id: adviserId } })
await Order.saveAssignees(req.session.token, orderId, assignees)
req.flash('success', `Lead post adviser set to ${get(leadAdviser, 'adviser.name')}`)
res.redirect(returnUrl)
} catch (error) {
next(error)
}
}
module.exports = editLeadAssignee
## Instruction:
Update lead adviser success message
## Code After:
const { find, get } = require('lodash')
const { Order } = require('../../../models')
async function editLeadAssignee (req, res, next) {
const adviserId = req.body.adviserId
const orderId = req.body.orderId
const returnUrl = req.body.returnUrl || req.header('Referer')
if (!adviserId || !orderId) {
return res.redirect(returnUrl)
}
try {
const allAssignees = await Order.getAssignees(req.session.token, orderId)
const assignees = allAssignees.map(assignee => {
return Object.assign(assignee, {
is_lead: assignee.adviser.id === adviserId,
})
})
const leadAdviser = find(assignees, { adviser: { id: adviserId } })
await Order.saveAssignees(req.session.token, orderId, assignees)
req.flash('success', `Lead adviser in the market set to ${get(leadAdviser, 'adviser.name')}`)
res.redirect(returnUrl)
} catch (error) {
next(error)
}
}
module.exports = editLeadAssignee
| const { find, get } = require('lodash')
const { Order } = require('../../../models')
async function editLeadAssignee (req, res, next) {
const adviserId = req.body.adviserId
const orderId = req.body.orderId
const returnUrl = req.body.returnUrl || req.header('Referer')
if (!adviserId || !orderId) {
return res.redirect(returnUrl)
}
try {
const allAssignees = await Order.getAssignees(req.session.token, orderId)
const assignees = allAssignees.map(assignee => {
return Object.assign(assignee, {
is_lead: assignee.adviser.id === adviserId,
})
})
const leadAdviser = find(assignees, { adviser: { id: adviserId } })
await Order.saveAssignees(req.session.token, orderId, assignees)
- req.flash('success', `Lead post adviser set to ${get(leadAdviser, 'adviser.name')}`)
? -----
+ req.flash('success', `Lead adviser in the market set to ${get(leadAdviser, 'adviser.name')}`)
? ++++++++++++++
res.redirect(returnUrl)
} catch (error) {
next(error)
}
}
module.exports = editLeadAssignee | 2 | 0.0625 | 1 | 1 |
468497c561a49d754616c775a6fb84f1e3fbaeab | src/modules/youtube.rb | src/modules/youtube.rb | require 'uri'
require 'net/http'
def fetch(uri_str, limit = 10)
# You should choose better exception.
raise ArgumentError, 'HTTP redirect too deep' if limit == 0
response = Net::HTTP.get_response(URI.parse(uri_str))
case response
when Net::HTTPSuccess then response
when Net::HTTPRedirection then fetch(response['location'], limit - 1)
else
response.error!
end
end
def parseYoutube(url)
if URI.parse(url).host =~ /.*youtube\.com/
video = URI.parse(url).query.match(/v=([^ &]*)/)[0][2,15]
api = "http://gdata.youtube.com/feeds/api/videos/" + video
print api + "\n"
reply = fetch(api)
return "" if (reply.code != "200")
title = "?"
rating = "?"
views = "?"
title = $1 if reply.body =~ /.*<title type='text'>(.*)<\/title>.*/
rating = $1 if reply.body =~ /.*<gd:rating average='([0-9.]*)'.*/
ratingcount = $1 if reply.body =~ /.*numRaters='([0-9]*)'.*/
views = $1 if reply.body =~ /.*<yt:statistics favoriteCount='[0-9]*' viewCount='([0-9]*)'\/>.*/
average = ((rating.to_f * 100).round).to_f / 100
"#{title} (rating: #{average} (#{ratingcount}), views: #{views})"
else
""
end
end
| require 'uri'
require 'net/http'
Kernel.load('fetch_uri.rb')
def parseYoutube(url)
if URI.parse(url).host =~ /.*youtube\.com/
video = URI.parse(url).query.match(/v=([^ &]*)/)[0][2,15]
api = "http://gdata.youtube.com/feeds/api/videos/" + video
print api + "\n"
reply = fetch_uri(api)
return "" if (reply.code != "200")
title = "?"
rating = "?"
views = "?"
title = $1 if reply.body =~ /.*<title type='text'>(.*)<\/title>.*/
rating = $1 if reply.body =~ /.*<gd:rating average='([0-9.]*)'.*/
ratingcount = $1 if reply.body =~ /.*numRaters='([0-9]*)'.*/
views = $1 if reply.body =~ /.*<yt:statistics favoriteCount='[0-9]*' viewCount='([0-9]*)'\/>.*/
average = ((rating.to_f * 100).round).to_f / 100
"#{title} (rating: #{average} (#{ratingcount}), views: #{views})"
else
""
end
end
| Split fetching uris to own file | Split fetching uris to own file
Signed-off-by: Aki Saarinen <[email protected]>
| Ruby | mit | akisaarinen/lullizio,akisaarinen/lullizio | ruby | ## Code Before:
require 'uri'
require 'net/http'
def fetch(uri_str, limit = 10)
# You should choose better exception.
raise ArgumentError, 'HTTP redirect too deep' if limit == 0
response = Net::HTTP.get_response(URI.parse(uri_str))
case response
when Net::HTTPSuccess then response
when Net::HTTPRedirection then fetch(response['location'], limit - 1)
else
response.error!
end
end
def parseYoutube(url)
if URI.parse(url).host =~ /.*youtube\.com/
video = URI.parse(url).query.match(/v=([^ &]*)/)[0][2,15]
api = "http://gdata.youtube.com/feeds/api/videos/" + video
print api + "\n"
reply = fetch(api)
return "" if (reply.code != "200")
title = "?"
rating = "?"
views = "?"
title = $1 if reply.body =~ /.*<title type='text'>(.*)<\/title>.*/
rating = $1 if reply.body =~ /.*<gd:rating average='([0-9.]*)'.*/
ratingcount = $1 if reply.body =~ /.*numRaters='([0-9]*)'.*/
views = $1 if reply.body =~ /.*<yt:statistics favoriteCount='[0-9]*' viewCount='([0-9]*)'\/>.*/
average = ((rating.to_f * 100).round).to_f / 100
"#{title} (rating: #{average} (#{ratingcount}), views: #{views})"
else
""
end
end
## Instruction:
Split fetching uris to own file
Signed-off-by: Aki Saarinen <[email protected]>
## Code After:
require 'uri'
require 'net/http'
Kernel.load('fetch_uri.rb')
def parseYoutube(url)
if URI.parse(url).host =~ /.*youtube\.com/
video = URI.parse(url).query.match(/v=([^ &]*)/)[0][2,15]
api = "http://gdata.youtube.com/feeds/api/videos/" + video
print api + "\n"
reply = fetch_uri(api)
return "" if (reply.code != "200")
title = "?"
rating = "?"
views = "?"
title = $1 if reply.body =~ /.*<title type='text'>(.*)<\/title>.*/
rating = $1 if reply.body =~ /.*<gd:rating average='([0-9.]*)'.*/
ratingcount = $1 if reply.body =~ /.*numRaters='([0-9]*)'.*/
views = $1 if reply.body =~ /.*<yt:statistics favoriteCount='[0-9]*' viewCount='([0-9]*)'\/>.*/
average = ((rating.to_f * 100).round).to_f / 100
"#{title} (rating: #{average} (#{ratingcount}), views: #{views})"
else
""
end
end
| require 'uri'
require 'net/http'
+ Kernel.load('fetch_uri.rb')
- def fetch(uri_str, limit = 10)
- # You should choose better exception.
- raise ArgumentError, 'HTTP redirect too deep' if limit == 0
-
- response = Net::HTTP.get_response(URI.parse(uri_str))
- case response
- when Net::HTTPSuccess then response
- when Net::HTTPRedirection then fetch(response['location'], limit - 1)
- else
- response.error!
- end
- end
def parseYoutube(url)
if URI.parse(url).host =~ /.*youtube\.com/
video = URI.parse(url).query.match(/v=([^ &]*)/)[0][2,15]
api = "http://gdata.youtube.com/feeds/api/videos/" + video
print api + "\n"
- reply = fetch(api)
+ reply = fetch_uri(api)
? ++ ++++
return "" if (reply.code != "200")
title = "?"
rating = "?"
views = "?"
title = $1 if reply.body =~ /.*<title type='text'>(.*)<\/title>.*/
rating = $1 if reply.body =~ /.*<gd:rating average='([0-9.]*)'.*/
ratingcount = $1 if reply.body =~ /.*numRaters='([0-9]*)'.*/
views = $1 if reply.body =~ /.*<yt:statistics favoriteCount='[0-9]*' viewCount='([0-9]*)'\/>.*/
average = ((rating.to_f * 100).round).to_f / 100
"#{title} (rating: #{average} (#{ratingcount}), views: #{views})"
else
""
end
end
| 15 | 0.365854 | 2 | 13 |
c13ec7b5002f7b90e353305eb52b76a63d3f3e32 | src/main/java/fr/insee/pogues/webservice/rest/PoguesException.java | src/main/java/fr/insee/pogues/webservice/rest/PoguesException.java | package fr.insee.pogues.webservice.rest;
/**
* Created by acordier on 04/07/17.
*/
public class PoguesException extends Exception {
private int status;
private String details;
/**
*
* @param status
* @param message
* @param details
*/
public PoguesException(int status, String message, String details) {
super(message);
this.status = status;
this.details = details;
}
public RestMessage toRestMessage(){
return new RestMessage(this.status, this.getMessage(), this.details);
}
}
| package fr.insee.pogues.webservice.rest;
import java.io.IOException;
/**
* Created by acordier on 04/07/17.
*/
public class PoguesException extends IOException {
private int status;
private String details;
/**
*
* @param status
* @param message
* @param details
*/
public PoguesException(int status, String message, String details) {
super(message);
this.status = status;
this.details = details;
}
public RestMessage toRestMessage(){
return new RestMessage(this.status, this.getMessage(), this.details);
}
}
| Make pogues exception usable in authorization filter | Make pogues exception usable in authorization filter
| Java | mit | InseeFr/Pogues-Back-Office,InseeFr/Pogues-Back-Office | java | ## Code Before:
package fr.insee.pogues.webservice.rest;
/**
* Created by acordier on 04/07/17.
*/
public class PoguesException extends Exception {
private int status;
private String details;
/**
*
* @param status
* @param message
* @param details
*/
public PoguesException(int status, String message, String details) {
super(message);
this.status = status;
this.details = details;
}
public RestMessage toRestMessage(){
return new RestMessage(this.status, this.getMessage(), this.details);
}
}
## Instruction:
Make pogues exception usable in authorization filter
## Code After:
package fr.insee.pogues.webservice.rest;
import java.io.IOException;
/**
* Created by acordier on 04/07/17.
*/
public class PoguesException extends IOException {
private int status;
private String details;
/**
*
* @param status
* @param message
* @param details
*/
public PoguesException(int status, String message, String details) {
super(message);
this.status = status;
this.details = details;
}
public RestMessage toRestMessage(){
return new RestMessage(this.status, this.getMessage(), this.details);
}
}
| package fr.insee.pogues.webservice.rest;
+
+ import java.io.IOException;
/**
* Created by acordier on 04/07/17.
*/
- public class PoguesException extends Exception {
+ public class PoguesException extends IOException {
? ++
private int status;
private String details;
/**
*
* @param status
* @param message
* @param details
*/
public PoguesException(int status, String message, String details) {
super(message);
this.status = status;
this.details = details;
}
public RestMessage toRestMessage(){
return new RestMessage(this.status, this.getMessage(), this.details);
}
} | 4 | 0.153846 | 3 | 1 |
7cb5401659acba985024eb10fd7b48ca6cd8faee | view/frontend/layout/default.xml | view/frontend/layout/default.xml | <?xml version="1.0"?>
<!--
/**
* NewRelic2 plugin for Magento
*
* @package Yireo_NewRelic2
* @author Yireo (https://www.yireo.com/)
* @copyright Copyright 2015 Yireo (https://www.yireo.com/)
* @license Simplified BSD License
*/
-->
<page xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:framework:View/Layout/etc/page_configuration.xsd">
<body>
<container name="newrelic-footer" after="-">
<block class="Yireo\NewRelic2\Block\Rum\Timing\Footer" name="newrelic-rum-timing-footer" after="-" />
</container>
<referenceBlock name="head.additional" after="-">
<block class="Yireo\NewRelic2\Block\Rum\Timing\Header" name="newrelic-rum-timing-header" after="-" />
</referenceBlock>
</body>
</page> | <?xml version="1.0"?>
<!--
/**
* NewRelic2 plugin for Magento
*
* @package Yireo_NewRelic2
* @author Yireo (https://www.yireo.com/)
* @copyright Copyright 2015 Yireo (https://www.yireo.com/)
* @license Simplified BSD License
*/
-->
<page xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:framework:View/Layout/etc/page_configuration.xsd">
<body>
<container name="newrelic-footer" after="-">
<block class="Yireo\NewRelic2\Block\Rum\Timing\Footer" name="newrelic-rum-timing-footer" after="-" />
</container>
<referenceBlock name="head.additional">
<block class="Yireo\NewRelic2\Block\Rum\Timing\Header" name="newrelic-rum-timing-header" after="-" />
</referenceBlock>
</body>
</page> | Fix invalid M2.2 XML Layout with after-argument in referenceBlock | Fix invalid M2.2 XML Layout with after-argument in referenceBlock
| XML | bsd-2-clause | yireo/Yireo_NewRelic2 | xml | ## Code Before:
<?xml version="1.0"?>
<!--
/**
* NewRelic2 plugin for Magento
*
* @package Yireo_NewRelic2
* @author Yireo (https://www.yireo.com/)
* @copyright Copyright 2015 Yireo (https://www.yireo.com/)
* @license Simplified BSD License
*/
-->
<page xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:framework:View/Layout/etc/page_configuration.xsd">
<body>
<container name="newrelic-footer" after="-">
<block class="Yireo\NewRelic2\Block\Rum\Timing\Footer" name="newrelic-rum-timing-footer" after="-" />
</container>
<referenceBlock name="head.additional" after="-">
<block class="Yireo\NewRelic2\Block\Rum\Timing\Header" name="newrelic-rum-timing-header" after="-" />
</referenceBlock>
</body>
</page>
## Instruction:
Fix invalid M2.2 XML Layout with after-argument in referenceBlock
## Code After:
<?xml version="1.0"?>
<!--
/**
* NewRelic2 plugin for Magento
*
* @package Yireo_NewRelic2
* @author Yireo (https://www.yireo.com/)
* @copyright Copyright 2015 Yireo (https://www.yireo.com/)
* @license Simplified BSD License
*/
-->
<page xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:framework:View/Layout/etc/page_configuration.xsd">
<body>
<container name="newrelic-footer" after="-">
<block class="Yireo\NewRelic2\Block\Rum\Timing\Footer" name="newrelic-rum-timing-footer" after="-" />
</container>
<referenceBlock name="head.additional">
<block class="Yireo\NewRelic2\Block\Rum\Timing\Header" name="newrelic-rum-timing-header" after="-" />
</referenceBlock>
</body>
</page> | <?xml version="1.0"?>
<!--
/**
* NewRelic2 plugin for Magento
*
* @package Yireo_NewRelic2
* @author Yireo (https://www.yireo.com/)
* @copyright Copyright 2015 Yireo (https://www.yireo.com/)
* @license Simplified BSD License
*/
-->
<page xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:framework:View/Layout/etc/page_configuration.xsd">
<body>
<container name="newrelic-footer" after="-">
<block class="Yireo\NewRelic2\Block\Rum\Timing\Footer" name="newrelic-rum-timing-footer" after="-" />
</container>
- <referenceBlock name="head.additional" after="-">
? ----------
+ <referenceBlock name="head.additional">
<block class="Yireo\NewRelic2\Block\Rum\Timing\Header" name="newrelic-rum-timing-header" after="-" />
</referenceBlock>
</body>
</page> | 2 | 0.090909 | 1 | 1 |
ecd8b6b41044c7e94115b433c20ed421611d9800 | app/soc/templates/v2/modules/gsoc/proposal/_assign_mentor_form.html | app/soc/templates/v2/modules/gsoc/proposal/_assign_mentor_form.html | <form action="{{ action }}" method="post" id="form" class="form-assign-mentor">
<div id="form_fields_assign_mentor">
<select name="assign_mentor" id="id_assign_mentor">
<option value="">Assign a mentor</option>
{% if possible_mentors %}
<optgroup label="Possible Mentors">
{% for m in possible_mentors %}
<option value="{{ m.key }}" {% if m.selected %}selected="selected"{% endif %}>{{ m.name }}</option>
{% endfor %}
</optgroup>
{% endif %}
{% if all_mentors %}
<optgroup label="All Mentors">
{% for m in all_mentors %}
<option value="{{ m.key }}" {% if m.selected %}selected="selected"{% endif %}>{{ m.name }}</option>
{% endfor %}
</optgroup>
{% endif %}
</select>
<input id="form-project-assign-mentor-submit" type="submit" value="Assign"/>
<span class="note">Select a mentor from the drop-down list to assign to this proposal. <br/>
{% if not all_mentors %}
Note that only mentors that have clicked the "I wish to mentor this project" button can be selected.</span>
{% endif %}
</div>
</form>
| <form action="{{ action }}" method="post" id="form" class="form-assign-mentor">
<div id="form_fields_assign_mentor">
<select name="assign_mentor" id="id_assign_mentor">
<option value="">Assign a mentor</option>
{% if possible_mentors %}
<optgroup label="Possible Mentors">
{% for m in possible_mentors %}
<option value="{{ m.key }}" {% if m.selected %}selected="selected"{% endif %}>{{ m.name }}</option>
{% endfor %}
</optgroup>
{% endif %}
{% if all_mentors %}
<optgroup label="All Mentors">
{% for m in all_mentors %}
<option value="{{ m.key }}" {% if m.selected %}selected="selected"{% endif %}>{{ m.name }}</option>
{% endfor %}
</optgroup>
{% endif %}
</select>
<input id="form-project-assign-mentor-submit" type="submit" value="Assign"/>
<span class="note">Select a mentor from the drop-down list to assign to this proposal. <br/>
{% if not all_mentors %}
Note that only mentors that have clicked the "I wish to mentor this project" button can be selected. <br/>
This can be changed in the organization profile page under "Organization preferences".
{% endif %}
</span>
</div>
</form>
| Move /span outside endif, add note about org preferences | Move /span outside endif, add note about org preferences
| HTML | apache-2.0 | rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son | html | ## Code Before:
<form action="{{ action }}" method="post" id="form" class="form-assign-mentor">
<div id="form_fields_assign_mentor">
<select name="assign_mentor" id="id_assign_mentor">
<option value="">Assign a mentor</option>
{% if possible_mentors %}
<optgroup label="Possible Mentors">
{% for m in possible_mentors %}
<option value="{{ m.key }}" {% if m.selected %}selected="selected"{% endif %}>{{ m.name }}</option>
{% endfor %}
</optgroup>
{% endif %}
{% if all_mentors %}
<optgroup label="All Mentors">
{% for m in all_mentors %}
<option value="{{ m.key }}" {% if m.selected %}selected="selected"{% endif %}>{{ m.name }}</option>
{% endfor %}
</optgroup>
{% endif %}
</select>
<input id="form-project-assign-mentor-submit" type="submit" value="Assign"/>
<span class="note">Select a mentor from the drop-down list to assign to this proposal. <br/>
{% if not all_mentors %}
Note that only mentors that have clicked the "I wish to mentor this project" button can be selected.</span>
{% endif %}
</div>
</form>
## Instruction:
Move /span outside endif, add note about org preferences
## Code After:
<form action="{{ action }}" method="post" id="form" class="form-assign-mentor">
<div id="form_fields_assign_mentor">
<select name="assign_mentor" id="id_assign_mentor">
<option value="">Assign a mentor</option>
{% if possible_mentors %}
<optgroup label="Possible Mentors">
{% for m in possible_mentors %}
<option value="{{ m.key }}" {% if m.selected %}selected="selected"{% endif %}>{{ m.name }}</option>
{% endfor %}
</optgroup>
{% endif %}
{% if all_mentors %}
<optgroup label="All Mentors">
{% for m in all_mentors %}
<option value="{{ m.key }}" {% if m.selected %}selected="selected"{% endif %}>{{ m.name }}</option>
{% endfor %}
</optgroup>
{% endif %}
</select>
<input id="form-project-assign-mentor-submit" type="submit" value="Assign"/>
<span class="note">Select a mentor from the drop-down list to assign to this proposal. <br/>
{% if not all_mentors %}
Note that only mentors that have clicked the "I wish to mentor this project" button can be selected. <br/>
This can be changed in the organization profile page under "Organization preferences".
{% endif %}
</span>
</div>
</form>
| <form action="{{ action }}" method="post" id="form" class="form-assign-mentor">
<div id="form_fields_assign_mentor">
<select name="assign_mentor" id="id_assign_mentor">
<option value="">Assign a mentor</option>
{% if possible_mentors %}
<optgroup label="Possible Mentors">
{% for m in possible_mentors %}
<option value="{{ m.key }}" {% if m.selected %}selected="selected"{% endif %}>{{ m.name }}</option>
{% endfor %}
</optgroup>
{% endif %}
{% if all_mentors %}
<optgroup label="All Mentors">
{% for m in all_mentors %}
<option value="{{ m.key }}" {% if m.selected %}selected="selected"{% endif %}>{{ m.name }}</option>
{% endfor %}
</optgroup>
{% endif %}
</select>
<input id="form-project-assign-mentor-submit" type="submit" value="Assign"/>
<span class="note">Select a mentor from the drop-down list to assign to this proposal. <br/>
{% if not all_mentors %}
- Note that only mentors that have clicked the "I wish to mentor this project" button can be selected.</span>
? ----
+ Note that only mentors that have clicked the "I wish to mentor this project" button can be selected. <br/>
? + ++
+ This can be changed in the organization profile page under "Organization preferences".
{% endif %}
+ </span>
</div>
</form> | 4 | 0.153846 | 3 | 1 |
aa8b06e32bc8fc48155dcd4a9eab9f94df72eabb | Casks/widelands.rb | Casks/widelands.rb | class Widelands < Cask
url 'https://launchpad.net/widelands/build17/build-17/+download/widelands-build17-mac.dmg'
homepage 'https://wl.widelands.org/'
version 'Build 17'
sha256 '3812ae9f95697269a94970fc83c2c16ab962f450b5a114057046fde3bcfc5a2c'
link 'Widelands.app'
end
| class Widelands < Cask
url 'https://launchpad.net/widelands/build18/build-18/+download/widelands-build18-mac.dmg'
homepage 'https://wl.widelands.org/'
version 'Build 18'
sha256 '1d209dcf653942788120c6f1abbe6f421fdefe6776f4feed48c58eddeb4c3722'
link 'Widelands.app'
end
| Update Widelands to Build 18 | Update Widelands to Build 18
| Ruby | bsd-2-clause | stevehedrick/homebrew-cask,mwilmer/homebrew-cask,hyuna917/homebrew-cask,otzy007/homebrew-cask,tan9/homebrew-cask,tan9/homebrew-cask,af/homebrew-cask,askl56/homebrew-cask,mindriot101/homebrew-cask,bgandon/homebrew-cask,zeusdeux/homebrew-cask,jamesmlees/homebrew-cask,kievechua/homebrew-cask,adelinofaria/homebrew-cask,ajbw/homebrew-cask,mishari/homebrew-cask,dlovitch/homebrew-cask,ahundt/homebrew-cask,bcaceiro/homebrew-cask,lcasey001/homebrew-cask,andyli/homebrew-cask,Amorymeltzer/homebrew-cask,pinut/homebrew-cask,gurghet/homebrew-cask,schneidmaster/homebrew-cask,mariusbutuc/homebrew-cask,kesara/homebrew-cask,theoriginalgri/homebrew-cask,ctrevino/homebrew-cask,zmwangx/homebrew-cask,0xadada/homebrew-cask,catap/homebrew-cask,MisumiRize/homebrew-cask,forevergenin/homebrew-cask,robbiethegeek/homebrew-cask,koenrh/homebrew-cask,ahbeng/homebrew-cask,lieuwex/homebrew-cask,deiga/homebrew-cask,gyndav/homebrew-cask,wastrachan/homebrew-cask,leonmachadowilcox/homebrew-cask,sachin21/homebrew-cask,jacobdam/homebrew-cask,crzrcn/homebrew-cask,aki77/homebrew-cask,exherb/homebrew-cask,sirodoht/homebrew-cask,L2G/homebrew-cask,lukeadams/homebrew-cask,sscotth/homebrew-cask,phpwutz/homebrew-cask,barravi/homebrew-cask,jiashuw/homebrew-cask,nathanielvarona/homebrew-cask,shishi/homebrew-cask,elyscape/homebrew-cask,jtriley/homebrew-cask,feigaochn/homebrew-cask,sysbot/homebrew-cask,perfide/homebrew-cask,taherio/homebrew-cask,jedahan/homebrew-cask,malford/homebrew-cask,Ngrd/homebrew-cask,ftiff/homebrew-cask,zerrot/homebrew-cask,underyx/homebrew-cask,jeroenj/homebrew-cask,moimikey/homebrew-cask,bcaceiro/homebrew-cask,gwaldo/homebrew-cask,athrunsun/homebrew-cask,pinut/homebrew-cask,xiongchiamiov/homebrew-cask,andrewdisley/homebrew-cask,y00rb/homebrew-cask,cedwardsmedia/homebrew-cask,hswong3i/homebrew-cask,josa42/homebrew-cask,aguynamedryan/homebrew-cask,kronicd/homebrew-cask,esebastian/homebrew-cask,gyugyu/homebrew-cask,caskroom/homebrew-cask,howie/homebrew-cask,hovancik/homebrew-cask,linc01n/homebrew-cask,devmynd/homebrew-cask,thehunmonkgroup/homebrew-cask,amatos/homebrew-cask,kevyau/homebrew-cask,royalwang/homebrew-cask,bdhess/homebrew-cask,neverfox/homebrew-cask,ohammersmith/homebrew-cask,wastrachan/homebrew-cask,kuno/homebrew-cask,asbachb/homebrew-cask,sohtsuka/homebrew-cask,norio-nomura/homebrew-cask,reelsense/homebrew-cask,elnappo/homebrew-cask,hvisage/homebrew-cask,Whoaa512/homebrew-cask,mishari/homebrew-cask,tarwich/homebrew-cask,sscotth/homebrew-cask,3van/homebrew-cask,tolbkni/homebrew-cask,AdamCmiel/homebrew-cask,franklouwers/homebrew-cask,ahbeng/homebrew-cask,rcuza/homebrew-cask,miccal/homebrew-cask,a1russell/homebrew-cask,MircoT/homebrew-cask,helloIAmPau/homebrew-cask,xyb/homebrew-cask,Ephemera/homebrew-cask,nshemonsky/homebrew-cask,13k/homebrew-cask,ninjahoahong/homebrew-cask,kamilboratynski/homebrew-cask,kiliankoe/homebrew-cask,d/homebrew-cask,jalaziz/homebrew-cask,gwaldo/homebrew-cask,xakraz/homebrew-cask,mlocher/homebrew-cask,yuhki50/homebrew-cask,shonjir/homebrew-cask,gguillotte/homebrew-cask,wKovacs64/homebrew-cask,yutarody/homebrew-cask,hyuna917/homebrew-cask,donbobka/homebrew-cask,mjdescy/homebrew-cask,mchlrmrz/homebrew-cask,ponychicken/homebrew-customcask,moonboots/homebrew-cask,kievechua/homebrew-cask,samshadwell/homebrew-cask,gerrypower/homebrew-cask,vuquoctuan/homebrew-cask,joaocc/homebrew-cask,y00rb/homebrew-cask,mkozjak/homebrew-cask,stigkj/homebrew-caskroom-cask,a-x-/homebrew-cask,rajiv/homebrew-cask,nelsonjchen/homebrew-cask,anbotero/homebrew-cask,gilesdring/homebrew-cask,
wayou/homebrew-cask,wmorin/homebrew-cask,BahtiyarB/homebrew-cask,mwek/homebrew-cask,chuanxd/homebrew-cask,colindean/homebrew-cask,gguillotte/homebrew-cask,klane/homebrew-cask,rkJun/homebrew-cask,dezon/homebrew-cask,ericbn/homebrew-cask,lolgear/homebrew-cask,d/homebrew-cask,mkozjak/homebrew-cask,epardee/homebrew-cask,afdnlw/homebrew-cask,okket/homebrew-cask,Cottser/homebrew-cask,xalep/homebrew-cask,neil-ca-moore/homebrew-cask,jgarber623/homebrew-cask,andersonba/homebrew-cask,segiddins/homebrew-cask,bendoerr/homebrew-cask,samdoran/homebrew-cask,tarwich/homebrew-cask,johan/homebrew-cask,kei-yamazaki/homebrew-cask,carlmod/homebrew-cask,tyage/homebrew-cask,scottsuch/homebrew-cask,Dremora/homebrew-cask,bosr/homebrew-cask,Bombenleger/homebrew-cask,miku/homebrew-cask,tranc99/homebrew-cask,jbeagley52/homebrew-cask,xcezx/homebrew-cask,Ngrd/homebrew-cask,ksylvan/homebrew-cask,brianshumate/homebrew-cask,adrianchia/homebrew-cask,jhowtan/homebrew-cask,FinalDes/homebrew-cask,remko/homebrew-cask,julionc/homebrew-cask,markhuber/homebrew-cask,ctrevino/homebrew-cask,tmoreira2020/homebrew,fly19890211/homebrew-cask,mwean/homebrew-cask,rednoah/homebrew-cask,jangalinski/homebrew-cask,miguelfrde/homebrew-cask,nathanielvarona/homebrew-cask,daften/homebrew-cask,toonetown/homebrew-cask,hovancik/homebrew-cask,dictcp/homebrew-cask,mrmachine/homebrew-cask,djmonta/homebrew-cask,catap/homebrew-cask,rubenerd/homebrew-cask,lumaxis/homebrew-cask,wmorin/homebrew-cask,anbotero/homebrew-cask,Ketouem/homebrew-cask,githubutilities/homebrew-cask,cblecker/homebrew-cask,wesen/homebrew-cask,6uclz1/homebrew-cask,a1russell/homebrew-cask,paour/homebrew-cask,muan/homebrew-cask,cfillion/homebrew-cask,blainesch/homebrew-cask,hakamadare/homebrew-cask,arronmabrey/homebrew-cask,imgarylai/homebrew-cask,greg5green/homebrew-cask,gyndav/homebrew-cask,0rax/homebrew-cask,illusionfield/homebrew-cask,cclauss/homebrew-cask,franklouwers/homebrew-cask,fazo96/homebrew-cask,elseym/homebrew-cask,scottsuch/homebrew-cask,garborg/homebrew-cask,riyad/homebrew-cask,unasuke/homebrew-cask,arranubels/homebrew-cask,malob/homebrew-cask,reelsense/homebrew-cask,3van/homebrew-cask,ddm/homebrew-cask,Hywan/homebrew-cask,englishm/homebrew-cask,kostasdizas/homebrew-cask,shoichiaizawa/homebrew-cask,renaudguerin/homebrew-cask,wizonesolutions/homebrew-cask,wickles/homebrew-cask,alexg0/homebrew-cask,bkono/homebrew-cask,markhuber/homebrew-cask,retrography/homebrew-cask,hanxue/caskroom,jamesmlees/homebrew-cask,antogg/homebrew-cask,optikfluffel/homebrew-cask,jellyfishcoder/homebrew-cask,ericbn/homebrew-cask,djakarta-trap/homebrew-myCask,hvisage/homebrew-cask,ingorichter/homebrew-cask,paour/homebrew-cask,williamboman/homebrew-cask,n8henrie/homebrew-cask,exherb/homebrew-cask,BenjaminHCCarr/homebrew-cask,garborg/homebrew-cask,bcomnes/homebrew-cask,wayou/homebrew-cask,RickWong/homebrew-cask,muan/homebrew-cask,diogodamiani/homebrew-cask,winkelsdorf/homebrew-cask,Keloran/homebrew-cask,MerelyAPseudonym/homebrew-cask,qnm/homebrew-cask,mfpierre/homebrew-cask,joshka/homebrew-cask,nrlquaker/homebrew-cask,kpearson/homebrew-cask,hakamadare/homebrew-cask,retbrown/homebrew-cask,dwkns/homebrew-cask,fly19890211/homebrew-cask,0xadada/homebrew-cask,Gasol/homebrew-cask,joshka/homebrew-cask,jbeagley52/homebrew-cask,gabrielizaias/homebrew-cask,onlynone/homebrew-cask,otaran/homebrew-cask,tolbkni/homebrew-cask,andyshinn/homebrew-cask,stephenwade/homebrew-cask,Nitecon/homebrew-cask,hanxue/caskroom,neverfox/homebrew-cask,yutarody/homebrew-cask,lauantai/homebrew-cask,jonathanwiesel/homebrew-cask,ky0615/hom
ebrew-cask-1,shonjir/homebrew-cask,donbobka/homebrew-cask,coeligena/homebrew-customized,malob/homebrew-cask,corbt/homebrew-cask,huanzhang/homebrew-cask,paulbreslin/homebrew-cask,lalyos/homebrew-cask,jasmas/homebrew-cask,ldong/homebrew-cask,epardee/homebrew-cask,jawshooah/homebrew-cask,rhendric/homebrew-cask,boecko/homebrew-cask,djmonta/homebrew-cask,tangestani/homebrew-cask,gustavoavellar/homebrew-cask,diguage/homebrew-cask,ebraminio/homebrew-cask,vin047/homebrew-cask,scribblemaniac/homebrew-cask,mhubig/homebrew-cask,gilesdring/homebrew-cask,gmkey/homebrew-cask,csmith-palantir/homebrew-cask,joaoponceleao/homebrew-cask,artdevjs/homebrew-cask,jmeridth/homebrew-cask,schneidmaster/homebrew-cask,mhubig/homebrew-cask,scw/homebrew-cask,chino/homebrew-cask,puffdad/homebrew-cask,dwkns/homebrew-cask,kpearson/homebrew-cask,uetchy/homebrew-cask,dunn/homebrew-cask,giannitm/homebrew-cask,gerrypower/homebrew-cask,sanchezm/homebrew-cask,nysthee/homebrew-cask,rubenerd/homebrew-cask,ahvigil/homebrew-cask,n0ts/homebrew-cask,Labutin/homebrew-cask,tranc99/homebrew-cask,kolomiichenko/homebrew-cask,jellyfishcoder/homebrew-cask,tedbundyjr/homebrew-cask,My2ndAngelic/homebrew-cask,mathbunnyru/homebrew-cask,santoshsahoo/homebrew-cask,jconley/homebrew-cask,andersonba/homebrew-cask,usami-k/homebrew-cask,zeusdeux/homebrew-cask,wickedsp1d3r/homebrew-cask,morsdyce/homebrew-cask,gurghet/homebrew-cask,shonjir/homebrew-cask,miccal/homebrew-cask,askl56/homebrew-cask,coneman/homebrew-cask,hackhandslabs/homebrew-cask,cobyism/homebrew-cask,xight/homebrew-cask,bric3/homebrew-cask,feniix/homebrew-cask,kTitan/homebrew-cask,victorpopkov/homebrew-cask,napaxton/homebrew-cask,gmkey/homebrew-cask,cprecioso/homebrew-cask,m3nu/homebrew-cask,robertgzr/homebrew-cask,iAmGhost/homebrew-cask,artdevjs/homebrew-cask,mwean/homebrew-cask,pablote/homebrew-cask,kassi/homebrew-cask,ayohrling/homebrew-cask,jmeridth/homebrew-cask,johnjelinek/homebrew-cask,nanoxd/homebrew-cask,shanonvl/homebrew-cask,faun/homebrew-cask,dieterdemeyer/homebrew-cask,chadcatlett/caskroom-homebrew-cask,singingwolfboy/homebrew-cask,kingthorin/homebrew-cask,sachin21/homebrew-cask,lauantai/homebrew-cask,lifepillar/homebrew-cask,blogabe/homebrew-cask,dwihn0r/homebrew-cask,kongslund/homebrew-cask,wuman/homebrew-cask,daften/homebrew-cask,cblecker/homebrew-cask,fkrone/homebrew-cask,winkelsdorf/homebrew-cask,spruceb/homebrew-cask,dlovitch/homebrew-cask,nicolas-brousse/homebrew-cask,stonehippo/homebrew-cask,lukasbestle/homebrew-cask,amatos/homebrew-cask,feigaochn/homebrew-cask,phpwutz/homebrew-cask,blainesch/homebrew-cask,stevenmaguire/homebrew-cask,jeroenseegers/homebrew-cask,retbrown/homebrew-cask,lantrix/homebrew-cask,deiga/homebrew-cask,hellosky806/homebrew-cask,j13k/homebrew-cask,sjackman/homebrew-cask,jrwesolo/homebrew-cask,FranklinChen/homebrew-cask,fharbe/homebrew-cask,xtian/homebrew-cask,thomanq/homebrew-cask,sebcode/homebrew-cask,dwihn0r/homebrew-cask,rogeriopradoj/homebrew-cask,SentinelWarren/homebrew-cask,fharbe/homebrew-cask,MircoT/homebrew-cask,huanzhang/homebrew-cask,ch3n2k/homebrew-cask,danielbayley/homebrew-cask,buo/homebrew-cask,jangalinski/homebrew-cask,rajiv/homebrew-cask,kingthorin/homebrew-cask,axodys/homebrew-cask,coeligena/homebrew-customized,lieuwex/homebrew-cask,reitermarkus/homebrew-cask,vin047/homebrew-cask,AndreTheHunter/homebrew-cask,joschi/homebrew-cask,napaxton/homebrew-cask,Ephemera/homebrew-cask,crmne/homebrew-cask,kryhear/homebrew-cask,bchatard/homebrew-cask,renard/homebrew-cask,alloy/homebrew-cask,a-x-/homebrew-cask,kkdd/homebrew-cask,markthetech/ho
mebrew-cask,ianyh/homebrew-cask,ayohrling/homebrew-cask,aktau/homebrew-cask,norio-nomura/homebrew-cask,xyb/homebrew-cask,guylabs/homebrew-cask,ch3n2k/homebrew-cask,sparrc/homebrew-cask,rednoah/homebrew-cask,nivanchikov/homebrew-cask,englishm/homebrew-cask,yumitsu/homebrew-cask,kamilboratynski/homebrew-cask,andyli/homebrew-cask,otaran/homebrew-cask,vitorgalvao/homebrew-cask,opsdev-ws/homebrew-cask,wickedsp1d3r/homebrew-cask,vigosan/homebrew-cask,lifepillar/homebrew-cask,mjgardner/homebrew-cask,aktau/homebrew-cask,slack4u/homebrew-cask,tjnycum/homebrew-cask,christophermanning/homebrew-cask,atsuyim/homebrew-cask,leonmachadowilcox/homebrew-cask,MerelyAPseudonym/homebrew-cask,thomanq/homebrew-cask,arronmabrey/homebrew-cask,jtriley/homebrew-cask,ywfwj2008/homebrew-cask,tedski/homebrew-cask,tonyseek/homebrew-cask,iamso/homebrew-cask,MoOx/homebrew-cask,deanmorin/homebrew-cask,stephenwade/homebrew-cask,mingzhi22/homebrew-cask,doits/homebrew-cask,giannitm/homebrew-cask,neverfox/homebrew-cask,pkq/homebrew-cask,adrianchia/homebrew-cask,nshemonsky/homebrew-cask,mahori/homebrew-cask,tjt263/homebrew-cask,mattfelsen/homebrew-cask,miguelfrde/homebrew-cask,Cottser/homebrew-cask,ptb/homebrew-cask,mazehall/homebrew-cask,jppelteret/homebrew-cask,deiga/homebrew-cask,kteru/homebrew-cask,johnjelinek/homebrew-cask,enriclluelles/homebrew-cask,jeroenseegers/homebrew-cask,jrwesolo/homebrew-cask,remko/homebrew-cask,mAAdhaTTah/homebrew-cask,shishi/homebrew-cask,vuquoctuan/homebrew-cask,seanzxx/homebrew-cask,troyxmccall/homebrew-cask,thehunmonkgroup/homebrew-cask,fkrone/homebrew-cask,LaurentFough/homebrew-cask,lolgear/homebrew-cask,jeanregisser/homebrew-cask,inz/homebrew-cask,julienlavergne/homebrew-cask,illusionfield/homebrew-cask,ky0615/homebrew-cask-1,reitermarkus/homebrew-cask,jacobbednarz/homebrew-cask,fanquake/homebrew-cask,andrewdisley/homebrew-cask,skyyuan/homebrew-cask,markthetech/homebrew-cask,jacobbednarz/homebrew-cask,mjdescy/homebrew-cask,leipert/homebrew-cask,mrmachine/homebrew-cask,zchee/homebrew-cask,dspeckhard/homebrew-cask,stevehedrick/homebrew-cask,FredLackeyOfficial/homebrew-cask,pacav69/homebrew-cask,adriweb/homebrew-cask,thii/homebrew-cask,yumitsu/homebrew-cask,esebastian/homebrew-cask,gibsjose/homebrew-cask,lvicentesanchez/homebrew-cask,xakraz/homebrew-cask,seanzxx/homebrew-cask,vmrob/homebrew-cask,mattrobenolt/homebrew-cask,syscrusher/homebrew-cask,bcomnes/homebrew-cask,mindriot101/homebrew-cask,deanmorin/homebrew-cask,moogar0880/homebrew-cask,farmerchris/homebrew-cask,jgarber623/homebrew-cask,Ibuprofen/homebrew-cask,katoquro/homebrew-cask,asins/homebrew-cask,puffdad/homebrew-cask,bendoerr/homebrew-cask,seanorama/homebrew-cask,stephenwade/homebrew-cask,bkono/homebrew-cask,L2G/homebrew-cask,MatzFan/homebrew-cask,RogerThiede/homebrew-cask,AnastasiaSulyagina/homebrew-cask,skatsuta/homebrew-cask,jpmat296/homebrew-cask,gregkare/homebrew-cask,crzrcn/homebrew-cask,dustinblackman/homebrew-cask,kuno/homebrew-cask,troyxmccall/homebrew-cask,haha1903/homebrew-cask,afh/homebrew-cask,LaurentFough/homebrew-cask,axodys/homebrew-cask,frapposelli/homebrew-cask,stonehippo/homebrew-cask,casidiablo/homebrew-cask,brianshumate/homebrew-cask,zorosteven/homebrew-cask,singingwolfboy/homebrew-cask,klane/homebrew-cask,nicolas-brousse/homebrew-cask,imgarylai/homebrew-cask,SentinelWarren/homebrew-cask,epmatsw/homebrew-cask,johnste/homebrew-cask,faun/homebrew-cask,xcezx/homebrew-cask,nicholsn/homebrew-cask,onlynone/homebrew-cask,cohei/homebrew-cask,flaviocamilo/homebrew-cask,stonehippo/homebrew-cask,RJHsiao/homebrew-cask,leiper
t/homebrew-cask,dictcp/homebrew-cask,xight/homebrew-cask,mokagio/homebrew-cask,mattrobenolt/homebrew-cask,cliffcotino/homebrew-cask,kesara/homebrew-cask,ywfwj2008/homebrew-cask,prime8/homebrew-cask,m3nu/homebrew-cask,a1russell/homebrew-cask,yurikoles/homebrew-cask,tdsmith/homebrew-cask,jiashuw/homebrew-cask,santoshsahoo/homebrew-cask,tyage/homebrew-cask,theoriginalgri/homebrew-cask,kTitan/homebrew-cask,nanoxd/homebrew-cask,iamso/homebrew-cask,farmerchris/homebrew-cask,CameronGarrett/homebrew-cask,KosherBacon/homebrew-cask,sanyer/homebrew-cask,shorshe/homebrew-cask,mchlrmrz/homebrew-cask,wesen/homebrew-cask,pacav69/homebrew-cask,rogeriopradoj/homebrew-cask,fanquake/homebrew-cask,cliffcotino/homebrew-cask,bsiddiqui/homebrew-cask,ftiff/homebrew-cask,rickychilcott/homebrew-cask,stevenmaguire/homebrew-cask,elseym/homebrew-cask,robbiethegeek/homebrew-cask,mariusbutuc/homebrew-cask,lucasmezencio/homebrew-cask,moimikey/homebrew-cask,Keloran/homebrew-cask,yurrriq/homebrew-cask,arranubels/homebrew-cask,psibre/homebrew-cask,tdsmith/homebrew-cask,jawshooah/homebrew-cask,chuanxd/homebrew-cask,blogabe/homebrew-cask,kingthorin/homebrew-cask,yutarody/homebrew-cask,astorije/homebrew-cask,hanxue/caskroom,sgnh/homebrew-cask,albertico/homebrew-cask,danielgomezrico/homebrew-cask,julionc/homebrew-cask,sosedoff/homebrew-cask,andrewschleifer/homebrew-cask,seanorama/homebrew-cask,sparrc/homebrew-cask,nickpellant/homebrew-cask,danielbayley/homebrew-cask,moimikey/homebrew-cask,colindunn/homebrew-cask,dunn/homebrew-cask,vmrob/homebrew-cask,diogodamiani/homebrew-cask,gibsjose/homebrew-cask,inz/homebrew-cask,alebcay/homebrew-cask,colindean/homebrew-cask,JosephViolago/homebrew-cask,mjgardner/homebrew-cask,nelsonjchen/homebrew-cask,alexg0/homebrew-cask,ponychicken/homebrew-customcask,skyyuan/homebrew-cask,josa42/homebrew-cask,CameronGarrett/homebrew-cask,yurikoles/homebrew-cask,valepert/homebrew-cask,gord1anknot/homebrew-cask,AnastasiaSulyagina/homebrew-cask,mgryszko/homebrew-cask,hswong3i/homebrew-cask,qbmiller/homebrew-cask,tangestani/homebrew-cask,kongslund/homebrew-cask,0rax/homebrew-cask,pkq/homebrew-cask,sebcode/homebrew-cask,nickpellant/homebrew-cask,supriyantomaftuh/homebrew-cask,MisumiRize/homebrew-cask,flada-auxv/homebrew-cask,nathansgreen/homebrew-cask,xiongchiamiov/homebrew-cask,uetchy/homebrew-cask,goxberry/homebrew-cask,elyscape/homebrew-cask,RJHsiao/homebrew-cask,wolflee/homebrew-cask,iAmGhost/homebrew-cask,maxnordlund/homebrew-cask,xight/homebrew-cask,rajiv/homebrew-cask,timsutton/homebrew-cask,ianyh/homebrew-cask,BenjaminHCCarr/homebrew-cask,ksato9700/homebrew-cask,rcuza/homebrew-cask,adelinofaria/homebrew-cask,miku/homebrew-cask,inta/homebrew-cask,Fedalto/homebrew-cask,drostron/homebrew-cask,otzy007/homebrew-cask,JosephViolago/homebrew-cask,mjgardner/homebrew-cask,Hywan/homebrew-cask,asbachb/homebrew-cask,gord1anknot/homebrew-cask,linc01n/homebrew-cask,freeslugs/homebrew-cask,sosedoff/homebrew-cask,lukasbestle/homebrew-cask,albertico/homebrew-cask,codeurge/homebrew-cask,sirodoht/homebrew-cask,christer155/homebrew-cask,jspahrsummers/homebrew-cask,boydj/homebrew-cask,johntrandall/homebrew-cask,koenrh/homebrew-cask,cobyism/homebrew-cask,Philosoft/homebrew-cask,kevyau/homebrew-cask,claui/homebrew-cask,paour/homebrew-cask,athrunsun/homebrew-cask,boecko/homebrew-cask,ninjahoahong/homebrew-cask,jacobdam/homebrew-cask,opsdev-ws/homebrew-cask,hristozov/homebrew-cask,renaudguerin/homebrew-cask,corbt/homebrew-cask,My2ndAngelic/homebrew-cask,epmatsw/homebrew-cask,bosr/homebrew-cask,mauricerkelly/homebrew-cask,michel
egera/homebrew-cask,nysthee/homebrew-cask,bric3/homebrew-cask,mattfelsen/homebrew-cask,spruceb/homebrew-cask,j13k/homebrew-cask,gustavoavellar/homebrew-cask,Ephemera/homebrew-cask,Dremora/homebrew-cask,okket/homebrew-cask,fwiesel/homebrew-cask,Amorymeltzer/homebrew-cask,devmynd/homebrew-cask,mazehall/homebrew-cask,jonathanwiesel/homebrew-cask,jasmas/homebrew-cask,blogabe/homebrew-cask,ohammersmith/homebrew-cask,ebraminio/homebrew-cask,antogg/homebrew-cask,joschi/homebrew-cask,cedwardsmedia/homebrew-cask,morganestes/homebrew-cask,pgr0ss/homebrew-cask,jen20/homebrew-cask,mikem/homebrew-cask,robertgzr/homebrew-cask,Fedalto/homebrew-cask,diguage/homebrew-cask,paulbreslin/homebrew-cask,jpmat296/homebrew-cask,squid314/homebrew-cask,wKovacs64/homebrew-cask,FranklinChen/homebrew-cask,gregkare/homebrew-cask,dvdoliveira/homebrew-cask,JikkuJose/homebrew-cask,kteru/homebrew-cask,danielgomezrico/homebrew-cask,julienlavergne/homebrew-cask,kryhear/homebrew-cask,sjackman/homebrew-cask,githubutilities/homebrew-cask,sanyer/homebrew-cask,josa42/homebrew-cask,slack4u/homebrew-cask,ajbw/homebrew-cask,chino/homebrew-cask,samnung/homebrew-cask,dieterdemeyer/homebrew-cask,christophermanning/homebrew-cask,dlackty/homebrew-cask,rhendric/homebrew-cask,lcasey001/homebrew-cask,scribblemaniac/homebrew-cask,mingzhi22/homebrew-cask,caskroom/homebrew-cask,patresi/homebrew-cask,andyshinn/homebrew-cask,jaredsampson/homebrew-cask,nathancahill/homebrew-cask,samdoran/homebrew-cask,dlackty/homebrew-cask,JoelLarson/homebrew-cask,cprecioso/homebrew-cask,lvicentesanchez/homebrew-cask,michelegera/homebrew-cask,tangestani/homebrew-cask,dcondrey/homebrew-cask,jhowtan/homebrew-cask,singingwolfboy/homebrew-cask,chrisfinazzo/homebrew-cask,JacopKane/homebrew-cask,perfide/homebrew-cask,rkJun/homebrew-cask,BahtiyarB/homebrew-cask,deizel/homebrew-cask,taherio/homebrew-cask,inta/homebrew-cask,bgandon/homebrew-cask,chrisRidgers/homebrew-cask,zorosteven/homebrew-cask,unasuke/homebrew-cask,Philosoft/homebrew-cask,afdnlw/homebrew-cask,akiomik/homebrew-cask,larseggert/homebrew-cask,asins/homebrew-cask,jgarber623/homebrew-cask,mahori/homebrew-cask,Ibuprofen/homebrew-cask,cblecker/homebrew-cask,FinalDes/homebrew-cask,optikfluffel/homebrew-cask,moonboots/homebrew-cask,bric3/homebrew-cask,csmith-palantir/homebrew-cask,dspeckhard/homebrew-cask,shanonvl/homebrew-cask,chrisfinazzo/homebrew-cask,timsutton/homebrew-cask,JacopKane/homebrew-cask,nightscape/homebrew-cask,tjnycum/homebrew-cask,MicTech/homebrew-cask,AdamCmiel/homebrew-cask,kirikiriyamama/homebrew-cask,Gasol/homebrew-cask,petmoo/homebrew-cask,ptb/homebrew-cask,wizonesolutions/homebrew-cask,zchee/homebrew-cask,greg5green/homebrew-cask,ashishb/homebrew-cask,moogar0880/homebrew-cask,chadcatlett/caskroom-homebrew-cask,malob/homebrew-cask,gabrielizaias/homebrew-cask,usami-k/homebrew-cask,syscrusher/homebrew-cask,mattrobenolt/homebrew-cask,mikem/homebrew-cask,mwek/homebrew-cask,bsiddiqui/homebrew-cask,6uclz1/homebrew-cask,mathbunnyru/homebrew-cask,BenjaminHCCarr/homebrew-cask,freeslugs/homebrew-cask,crmne/homebrew-cask,claui/homebrew-cask,alloy/homebrew-cask,joaocc/homebrew-cask,chrisfinazzo/homebrew-cask,janlugt/homebrew-cask,bdhess/homebrew-cask,rickychilcott/homebrew-cask,stigkj/homebrew-caskroom-cask,ericbn/homebrew-cask,winkelsdorf/homebrew-cask,yuhki50/homebrew-cask,gerrymiller/homebrew-cask,adrianchia/homebrew-cask,andrewschleifer/homebrew-cask,RickWong/homebrew-cask,kirikiriyamama/homebrew-cask,goxberry/homebrew-cask,sgnh/homebrew-cask,wickles/homebrew-cask,jpodlech/homebrew-cask,ashishb/homeb
rew-cask,jeanregisser/homebrew-cask,tjt263/homebrew-cask,lucasmezencio/homebrew-cask,zhuzihhhh/homebrew-cask,MicTech/homebrew-cask,akiomik/homebrew-cask,janlugt/homebrew-cask,tonyseek/homebrew-cask,yurrriq/homebrew-cask,flaviocamilo/homebrew-cask,wolflee/homebrew-cask,malford/homebrew-cask,gyndav/homebrew-cask,Bombenleger/homebrew-cask,paulombcosta/homebrew-cask,joshka/homebrew-cask,jedahan/homebrew-cask,sscotth/homebrew-cask,SamiHiltunen/homebrew-cask,guylabs/homebrew-cask,ksylvan/homebrew-cask,slnovak/homebrew-cask,imgarylai/homebrew-cask,esebastian/homebrew-cask,gerrymiller/homebrew-cask,ingorichter/homebrew-cask,jppelteret/homebrew-cask,squid314/homebrew-cask,Labutin/homebrew-cask,buo/homebrew-cask,jconley/homebrew-cask,cobyism/homebrew-cask,tsparber/homebrew-cask,lalyos/homebrew-cask,johnste/homebrew-cask,atsuyim/homebrew-cask,kolomiichenko/homebrew-cask,djakarta-trap/homebrew-myCask,mfpierre/homebrew-cask,ahvigil/homebrew-cask,JoelLarson/homebrew-cask,kiliankoe/homebrew-cask,MoOx/homebrew-cask,pgr0ss/homebrew-cask,williamboman/homebrew-cask,alebcay/homebrew-cask,sanchezm/homebrew-cask,cohei/homebrew-cask,n0ts/homebrew-cask,nicholsn/homebrew-cask,m3nu/homebrew-cask,gyugyu/homebrew-cask,feniix/homebrew-cask,forevergenin/homebrew-cask,chrisRidgers/homebrew-cask,xtian/homebrew-cask,alexg0/homebrew-cask,qnm/homebrew-cask,jalaziz/homebrew-cask,decrement/homebrew-cask,mauricerkelly/homebrew-cask,alebcay/homebrew-cask,shoichiaizawa/homebrew-cask,decrement/homebrew-cask,MichaelPei/homebrew-cask,kronicd/homebrew-cask,SamiHiltunen/homebrew-cask,qbmiller/homebrew-cask,kesara/homebrew-cask,hackhandslabs/homebrew-cask,jspahrsummers/homebrew-cask,sideci-sample/sideci-sample-homebrew-cask,andrewdisley/homebrew-cask,JikkuJose/homebrew-cask,RogerThiede/homebrew-cask,lantrix/homebrew-cask,mathbunnyru/homebrew-cask,ldong/homebrew-cask,scottsuch/homebrew-cask,supriyantomaftuh/homebrew-cask,jayshao/homebrew-cask,JacopKane/homebrew-cask,nathancahill/homebrew-cask,Whoaa512/homebrew-cask,jaredsampson/homebrew-cask,shorshe/homebrew-cask,scribblemaniac/homebrew-cask,nrlquaker/homebrew-cask,aki77/homebrew-cask,victorpopkov/homebrew-cask,underyx/homebrew-cask,paulombcosta/homebrew-cask,af/homebrew-cask,reitermarkus/homebrew-cask,ddm/homebrew-cask,guerrero/homebrew-cask,retrography/homebrew-cask,Ketouem/homebrew-cask,pablote/homebrew-cask,nathansgreen/homebrew-cask,howie/homebrew-cask,hristozov/homebrew-cask,dictcp/homebrew-cask,julionc/homebrew-cask,tmoreira2020/homebrew,sideci-sample/sideci-sample-homebrew-cask,aguynamedryan/homebrew-cask,deizel/homebrew-cask,astorije/homebrew-cask,lukeadams/homebrew-cask,adriweb/homebrew-cask,delphinus35/homebrew-cask,joaoponceleao/homebrew-cask,larseggert/homebrew-cask,FredLackeyOfficial/homebrew-cask,dustinblackman/homebrew-cask,jayshao/homebrew-cask,boydj/homebrew-cask,toonetown/homebrew-cask,colindunn/homebrew-cask,pkq/homebrew-cask,jalaziz/homebrew-cask,wmorin/homebrew-cask,coneman/homebrew-cask,johndbritton/homebrew-cask,patresi/homebrew-cask,doits/homebrew-cask,nightscape/homebrew-cask,casidiablo/homebrew-cask,fazo96/homebrew-cask,cfillion/homebrew-cask,shoichiaizawa/homebrew-cask,scw/homebrew-cask,Saklad5/homebrew-cask,Amorymeltzer/homebrew-cask,danielbayley/homebrew-cask,n8henrie/homebrew-cask,MichaelPei/homebrew-cask,AndreTheHunter/homebrew-cask,samnung/homebrew-cask,skatsuta/homebrew-cask,riyad/homebrew-cask,barravi/homebrew-cask,yurikoles/homebrew-cask,samshadwell/homebrew-cask,guerrero/homebrew-cask,codeurge/homebrew-cask,antogg/homebrew-cask,slnovak/homebrew-cask,
genewoo/homebrew-cask,kei-yamazaki/homebrew-cask,bchatard/homebrew-cask,helloIAmPau/homebrew-cask,drostron/homebrew-cask,elnappo/homebrew-cask,JosephViolago/homebrew-cask,tedbundyjr/homebrew-cask,MatzFan/homebrew-cask,segiddins/homebrew-cask,nathanielvarona/homebrew-cask,neil-ca-moore/homebrew-cask,carlmod/homebrew-cask,xalep/homebrew-cask,vitorgalvao/homebrew-cask,13k/homebrew-cask,mchlrmrz/homebrew-cask,prime8/homebrew-cask,nrlquaker/homebrew-cask,jen20/homebrew-cask,tsparber/homebrew-cask,maxnordlund/homebrew-cask,afh/homebrew-cask,timsutton/homebrew-cask,petmoo/homebrew-cask,psibre/homebrew-cask,lumaxis/homebrew-cask,vigosan/homebrew-cask,uetchy/homebrew-cask,morsdyce/homebrew-cask,kkdd/homebrew-cask,zhuzihhhh/homebrew-cask,xyb/homebrew-cask,claui/homebrew-cask,christer155/homebrew-cask,mlocher/homebrew-cask,genewoo/homebrew-cask,coeligena/homebrew-customized,enriclluelles/homebrew-cask,dvdoliveira/homebrew-cask,mahori/homebrew-cask,KosherBacon/homebrew-cask,johndbritton/homebrew-cask,zmwangx/homebrew-cask,katoquro/homebrew-cask,mokagio/homebrew-cask,rogeriopradoj/homebrew-cask,miccal/homebrew-cask,delphinus35/homebrew-cask,optikfluffel/homebrew-cask,sohtsuka/homebrew-cask,Nitecon/homebrew-cask,tedski/homebrew-cask,sysbot/homebrew-cask,flada-auxv/homebrew-cask,wuman/homebrew-cask,mAAdhaTTah/homebrew-cask,jpodlech/homebrew-cask,dcondrey/homebrew-cask,dezon/homebrew-cask,nivanchikov/homebrew-cask,haha1903/homebrew-cask,kassi/homebrew-cask,hellosky806/homebrew-cask,mwilmer/homebrew-cask,ahundt/homebrew-cask,renard/homebrew-cask,jeroenj/homebrew-cask,cclauss/homebrew-cask,ksato9700/homebrew-cask,johan/homebrew-cask,joschi/homebrew-cask,sanyer/homebrew-cask,Saklad5/homebrew-cask,royalwang/homebrew-cask,kostasdizas/homebrew-cask,frapposelli/homebrew-cask,valepert/homebrew-cask,fwiesel/homebrew-cask,tjnycum/homebrew-cask,zerrot/homebrew-cask,johntrandall/homebrew-cask,thii/homebrew-cask,morganestes/homebrew-cask,mgryszko/homebrew-cask | ruby | ## Code Before:
class Widelands < Cask
url 'https://launchpad.net/widelands/build17/build-17/+download/widelands-build17-mac.dmg'
homepage 'https://wl.widelands.org/'
version 'Build 17'
sha256 '3812ae9f95697269a94970fc83c2c16ab962f450b5a114057046fde3bcfc5a2c'
link 'Widelands.app'
end
## Instruction:
Update Widelands to Build 18
## Code After:
class Widelands < Cask
url 'https://launchpad.net/widelands/build18/build-18/+download/widelands-build18-mac.dmg'
homepage 'https://wl.widelands.org/'
version 'Build 18'
sha256 '1d209dcf653942788120c6f1abbe6f421fdefe6776f4feed48c58eddeb4c3722'
link 'Widelands.app'
end
| class Widelands < Cask
- url 'https://launchpad.net/widelands/build17/build-17/+download/widelands-build17-mac.dmg'
? ^ ^ ^
+ url 'https://launchpad.net/widelands/build18/build-18/+download/widelands-build18-mac.dmg'
? ^ ^ ^
homepage 'https://wl.widelands.org/'
- version 'Build 17'
? ^
+ version 'Build 18'
? ^
- sha256 '3812ae9f95697269a94970fc83c2c16ab962f450b5a114057046fde3bcfc5a2c'
+ sha256 '1d209dcf653942788120c6f1abbe6f421fdefe6776f4feed48c58eddeb4c3722'
link 'Widelands.app'
end | 6 | 0.857143 | 3 | 3 |
fa73d9b029eede11c297abd701aa0174f7c0c8ad | src/braid/client/gateway/subs.cljs | src/braid/client/gateway/subs.cljs | (ns braid.client.gateway.subs
(:require
[re-frame.core :refer [reg-sub]]
[braid.client.gateway.user-auth.subs]
[braid.client.gateway.create-group.subs]))
(reg-sub
:gateway/field-value
(fn [state [_ field]]
(get-in state [:fields field :value])))
(reg-sub
:gateway/field-errors
(fn [state [_ field]]
(get-in state [:fields field :errors])))
(reg-sub
:gateway/field-status
(fn [state [_ field]]
(cond
(get-in state [:fields field :typing?]) :typing
(get-in state [:fields field :untouched?]) :untouched
(not (empty? (get-in state [:fields field :errors]))) :invalid
(< 0 (get-in state [:fields field :validations-left])) :loading
:else :valid)))
(reg-sub
:gateway/fields-valid?
(fn [state [_ fields]]
(->> fields
(map (fn [field]
(empty? (get-in state [:fields field :errors]))))
(every? true?))))
(reg-sub
:gateway/action-mode
(fn [state _]
(get-in state [:action :mode])))
| (ns braid.client.gateway.subs
(:require
[re-frame.core :refer [reg-sub]]
[braid.client.gateway.user-auth.subs]
[braid.client.gateway.create-group.subs]))
(reg-sub
:gateway/field-value
(fn [state [_ field]]
(get-in state [:fields field :value])))
(reg-sub
:gateway/field-errors
(fn [state [_ field]]
(get-in state [:fields field :errors])))
(reg-sub
:gateway/field-status
(fn [state [_ field]]
(cond
(get-in state [:fields field :typing?]) :typing
(get-in state [:fields field :untouched?]) :untouched
(not (empty? (get-in state [:fields field :errors]))) :invalid
(< 0 (get-in state [:fields field :validations-left])) :loading
:else :valid)))
(reg-sub
:gateway/fields-valid?
(fn [state [_ fields]]
(->> fields
(map (fn [field]
(and
(empty? (get-in state [:fields field :errors]))
(= 0 (get-in state [:fields field :validations-left]))
(not (get-in state [:fields field :typing?])))))
(every? true?))))
(reg-sub
:gateway/action-mode
(fn [state _]
(get-in state [:action :mode])))
| Disable button while typing and doing remote validations | Disable button while typing and doing remote validations
| Clojure | agpl-3.0 | rafd/braid,braidchat/braid,rafd/braid,braidchat/braid | clojure | ## Code Before:
(ns braid.client.gateway.subs
(:require
[re-frame.core :refer [reg-sub]]
[braid.client.gateway.user-auth.subs]
[braid.client.gateway.create-group.subs]))
(reg-sub
:gateway/field-value
(fn [state [_ field]]
(get-in state [:fields field :value])))
(reg-sub
:gateway/field-errors
(fn [state [_ field]]
(get-in state [:fields field :errors])))
(reg-sub
:gateway/field-status
(fn [state [_ field]]
(cond
(get-in state [:fields field :typing?]) :typing
(get-in state [:fields field :untouched?]) :untouched
(not (empty? (get-in state [:fields field :errors]))) :invalid
(< 0 (get-in state [:fields field :validations-left])) :loading
:else :valid)))
(reg-sub
:gateway/fields-valid?
(fn [state [_ fields]]
(->> fields
(map (fn [field]
(empty? (get-in state [:fields field :errors]))))
(every? true?))))
(reg-sub
:gateway/action-mode
(fn [state _]
(get-in state [:action :mode])))
## Instruction:
Disable button while typing and doing remote validations
## Code After:
(ns braid.client.gateway.subs
(:require
[re-frame.core :refer [reg-sub]]
[braid.client.gateway.user-auth.subs]
[braid.client.gateway.create-group.subs]))
(reg-sub
:gateway/field-value
(fn [state [_ field]]
(get-in state [:fields field :value])))
(reg-sub
:gateway/field-errors
(fn [state [_ field]]
(get-in state [:fields field :errors])))
(reg-sub
:gateway/field-status
(fn [state [_ field]]
(cond
(get-in state [:fields field :typing?]) :typing
(get-in state [:fields field :untouched?]) :untouched
(not (empty? (get-in state [:fields field :errors]))) :invalid
(< 0 (get-in state [:fields field :validations-left])) :loading
:else :valid)))
(reg-sub
:gateway/fields-valid?
(fn [state [_ fields]]
(->> fields
(map (fn [field]
(and
(empty? (get-in state [:fields field :errors]))
(= 0 (get-in state [:fields field :validations-left]))
(not (get-in state [:fields field :typing?])))))
(every? true?))))
(reg-sub
:gateway/action-mode
(fn [state _]
(get-in state [:action :mode])))
| (ns braid.client.gateway.subs
(:require
[re-frame.core :refer [reg-sub]]
[braid.client.gateway.user-auth.subs]
[braid.client.gateway.create-group.subs]))
(reg-sub
:gateway/field-value
(fn [state [_ field]]
(get-in state [:fields field :value])))
(reg-sub
:gateway/field-errors
(fn [state [_ field]]
(get-in state [:fields field :errors])))
(reg-sub
:gateway/field-status
(fn [state [_ field]]
(cond
(get-in state [:fields field :typing?]) :typing
(get-in state [:fields field :untouched?]) :untouched
(not (empty? (get-in state [:fields field :errors]))) :invalid
(< 0 (get-in state [:fields field :validations-left])) :loading
:else :valid)))
(reg-sub
:gateway/fields-valid?
(fn [state [_ fields]]
(->> fields
(map (fn [field]
+ (and
- (empty? (get-in state [:fields field :errors]))))
? --
+ (empty? (get-in state [:fields field :errors]))
? ++
+ (= 0 (get-in state [:fields field :validations-left]))
+ (not (get-in state [:fields field :typing?])))))
(every? true?))))
(reg-sub
:gateway/action-mode
(fn [state _]
(get-in state [:action :mode]))) | 5 | 0.131579 | 4 | 1 |
6a92d8d6e6896533ea59a0a7580e79a3e027daf9 | module/manifest.json | module/manifest.json | {
"changes": "* Updated Parse Android SDK to 1.15.8\n* Updated Parse iOS SDK to 1.15.1",
"dependencies": {
"bolts": {
"minimum_version": "1.6.0",
"maximum_version": "1.6.0"
}
},
"description": "Parse.com configuration to enable push notifications.\n\nThe parse module uses the Parse.com SDK to allow for native push notifications.",
"min_platform_version": "v2.5.5",
"namespace": "parse",
"platform_version": "feature_ios11",
"version": "2.16"
}
| {
"changes": "* Updated Parse Android SDK to 1.15.8\n* Updated Parse iOS SDK to 1.15.1",
"dependencies": {
"bolts": {
"minimum_version": "1.6.0",
"maximum_version": "1.6.0"
}
},
"description": "Parse.com configuration to enable push notifications.\n\nThe parse module uses the Parse.com SDK to allow for native push notifications.",
"min_platform_version": "v2.6.1",
"namespace": "parse",
"platform_version": "v2.6.1",
"version": "2.16"
}
| Set platform version to 2.6 | Set platform version to 2.6
| JSON | bsd-2-clause | scthi/trigger.io-parse,trigger-corp/trigger.io-parse,trigger-corp/trigger.io-parse,scthi/trigger.io-parse | json | ## Code Before:
{
"changes": "* Updated Parse Android SDK to 1.15.8\n* Updated Parse iOS SDK to 1.15.1",
"dependencies": {
"bolts": {
"minimum_version": "1.6.0",
"maximum_version": "1.6.0"
}
},
"description": "Parse.com configuration to enable push notifications.\n\nThe parse module uses the Parse.com SDK to allow for native push notifications.",
"min_platform_version": "v2.5.5",
"namespace": "parse",
"platform_version": "feature_ios11",
"version": "2.16"
}
## Instruction:
Set platform version to 2.6
## Code After:
{
"changes": "* Updated Parse Android SDK to 1.15.8\n* Updated Parse iOS SDK to 1.15.1",
"dependencies": {
"bolts": {
"minimum_version": "1.6.0",
"maximum_version": "1.6.0"
}
},
"description": "Parse.com configuration to enable push notifications.\n\nThe parse module uses the Parse.com SDK to allow for native push notifications.",
"min_platform_version": "v2.6.1",
"namespace": "parse",
"platform_version": "v2.6.1",
"version": "2.16"
}
| {
"changes": "* Updated Parse Android SDK to 1.15.8\n* Updated Parse iOS SDK to 1.15.1",
"dependencies": {
"bolts": {
"minimum_version": "1.6.0",
"maximum_version": "1.6.0"
}
},
"description": "Parse.com configuration to enable push notifications.\n\nThe parse module uses the Parse.com SDK to allow for native push notifications.",
- "min_platform_version": "v2.5.5",
? ^ ^
+ "min_platform_version": "v2.6.1",
? ^ ^
"namespace": "parse",
- "platform_version": "feature_ios11",
? ^^^^^^^^^^^^
+ "platform_version": "v2.6.1",
? ^^^^^
"version": "2.16"
} | 4 | 0.285714 | 2 | 2 |
b7b4509191aa240ae893c22588ffdb9db2d0e4ba | app/views/users/passed_courses/show.html.slim | app/views/users/passed_courses/show.html.slim | section
.container
h1.sr-only 自訂
aside.col-md-2
= render 'users/aside_tabs'
article.col-md-10
.jumbotron
h2.m-title.text-center 已修過課程
= form_for :passed_courses do |f|
h4 登記已修過課程
.input-group
input.form-control name='passed_course' value="#{params[:passed_course]}" placeholder='課程名稱'
span.input-group-btn
= f.submit '登記', class: 'btn btn-default'
hr
table.table.table-condensed.table-hover.break-table
h4 已修過課程列表
thead
tr
th 課程名稱
th 取消登記
tbody
- @list.each do |item|
tr
td data-title='課程名稱' = item
td data-title='取消登記'
button.btn.btn-danger.btn-sm type='submit'
span.fa.fa-times
| 取消 | section
.container
h1.sr-only 自訂
aside.col-md-2
= render 'users/aside_tabs'
article.col-md-10
.jumbotron
h2.m-title.text-center 已修過課程
= form_for :passed_courses do |f|
h4 登記已修過課程
.input-group
input.form-control name='passed_course' value="#{params[:passed_course]}" placeholder='課程名稱'
span.input-group-btn
= f.submit '登記', class: 'btn btn-default'
hr
table.table.table-condensed.table-hover.break-table
h4 已修過課程列表
button.pull-right.btn.btn-danger type='submit'
span.fa.fa-times
| 全部取消登記
thead
tr
th 課程名稱
th 取消登記
tbody
- @list.each do |item|
tr
td data-title='課程名稱' = item
td data-title='取消登記'
button.btn.btn-danger.btn-sm type='submit'
span.fa.fa-times
| 取消 | Add destory_all btn to passed_courses | Add destory_all btn to passed_courses
| Slim | mit | atitan/coursenote,atitan/coursenote,atitan/coursenote | slim | ## Code Before:
section
.container
h1.sr-only 自訂
aside.col-md-2
= render 'users/aside_tabs'
article.col-md-10
.jumbotron
h2.m-title.text-center 已修過課程
= form_for :passed_courses do |f|
h4 登記已修過課程
.input-group
input.form-control name='passed_course' value="#{params[:passed_course]}" placeholder='課程名稱'
span.input-group-btn
= f.submit '登記', class: 'btn btn-default'
hr
table.table.table-condensed.table-hover.break-table
h4 已修過課程列表
thead
tr
th 課程名稱
th 取消登記
tbody
- @list.each do |item|
tr
td data-title='課程名稱' = item
td data-title='取消登記'
button.btn.btn-danger.btn-sm type='submit'
span.fa.fa-times
| 取消
## Instruction:
Add destory_all btn to passed_courses
## Code After:
section
.container
h1.sr-only 自訂
aside.col-md-2
= render 'users/aside_tabs'
article.col-md-10
.jumbotron
h2.m-title.text-center 已修過課程
= form_for :passed_courses do |f|
h4 登記已修過課程
.input-group
input.form-control name='passed_course' value="#{params[:passed_course]}" placeholder='課程名稱'
span.input-group-btn
= f.submit '登記', class: 'btn btn-default'
hr
table.table.table-condensed.table-hover.break-table
h4 已修過課程列表
button.pull-right.btn.btn-danger type='submit'
span.fa.fa-times
| 全部取消登記
thead
tr
th 課程名稱
th 取消登記
tbody
- @list.each do |item|
tr
td data-title='課程名稱' = item
td data-title='取消登記'
button.btn.btn-danger.btn-sm type='submit'
span.fa.fa-times
| 取消 | section
.container
h1.sr-only 自訂
aside.col-md-2
= render 'users/aside_tabs'
article.col-md-10
.jumbotron
h2.m-title.text-center 已修過課程
= form_for :passed_courses do |f|
h4 登記已修過課程
.input-group
input.form-control name='passed_course' value="#{params[:passed_course]}" placeholder='課程名稱'
span.input-group-btn
= f.submit '登記', class: 'btn btn-default'
hr
table.table.table-condensed.table-hover.break-table
h4 已修過課程列表
+ button.pull-right.btn.btn-danger type='submit'
+ span.fa.fa-times
+ | 全部取消登記
thead
tr
th 課程名稱
th 取消登記
tbody
- @list.each do |item|
tr
td data-title='課程名稱' = item
td data-title='取消登記'
button.btn.btn-danger.btn-sm type='submit'
span.fa.fa-times
| 取消 | 3 | 0.103448 | 3 | 0 |
ade960c76de6773a176d2cd982ac9a26a2d072ae | tests/unit/network/CubicTemplateTest.py | tests/unit/network/CubicTemplateTest.py | import openpnm as op
from skimage.morphology import ball, disk
class CubicTemplateTest:
def setup_class(self):
pass
def teardown_class(self):
pass
def test_2D_template(self):
net = op.network.CubicTemplate(template=disk(10), spacing=1)
assert net.Np == 317
assert net.Nt == 592
def test_3D_template(self):
net = op.network.CubicTemplate(template=ball(5), spacing=1)
assert net.Np == 515
assert net.Nt == 1302
if __name__ == '__main__':
t = CubicTemplateTest()
t.setup_class()
self = t
for item in t.__dir__():
if item.startswith('test'):
print('running test: '+item)
t.__getattribute__(item)()
| import numpy as np
import openpnm as op
from skimage.morphology import ball, disk
class CubicTemplateTest:
def setup_class(self):
pass
def teardown_class(self):
pass
def test_2D_template(self):
net = op.network.CubicTemplate(template=disk(10), spacing=1)
assert net.Np == 317
assert net.Nt == 592
def test_3D_template(self):
net = op.network.CubicTemplate(template=ball(5), spacing=1)
assert net.Np == 515
assert net.Nt == 1302
def test_labels(self):
template = np.array(
[[1, 1, 1, 1, 1],
[1, 1, 0, 1, 1],
[1, 1, 0, 0, 1],
[1, 0, 0, 0, 1],
[1, 1, 0, 1, 1]]
)
net = op.network.CubicTemplate(template=template)
# Test "surface" label
Ps_surf_desired = np.array([0, 1, 2, 3, 4, 5, 8, 9, 11, 12, 13, 14, 15, 16, 17])
Ps_surf = net.pores("surface")
np.testing.assert_allclose(Ps_surf, Ps_surf_desired)
# Test "internal_surface" label
Ps_int_surf_desired = np.array([6, 7, 10])
Ps_int_surf = net.pores("internal_surface")
np.testing.assert_allclose(Ps_int_surf, Ps_int_surf_desired)
if __name__ == '__main__':
t = CubicTemplateTest()
t.setup_class()
self = t
for item in t.__dir__():
if item.startswith('test'):
print('running test: '+item)
t.__getattribute__(item)()
| Add test for CubicTemplate to ensure proper labeling | Add test for CubicTemplate to ensure proper labeling
| Python | mit | TomTranter/OpenPNM,PMEAL/OpenPNM | python | ## Code Before:
import openpnm as op
from skimage.morphology import ball, disk
class CubicTemplateTest:
def setup_class(self):
pass
def teardown_class(self):
pass
def test_2D_template(self):
net = op.network.CubicTemplate(template=disk(10), spacing=1)
assert net.Np == 317
assert net.Nt == 592
def test_3D_template(self):
net = op.network.CubicTemplate(template=ball(5), spacing=1)
assert net.Np == 515
assert net.Nt == 1302
if __name__ == '__main__':
t = CubicTemplateTest()
t.setup_class()
self = t
for item in t.__dir__():
if item.startswith('test'):
print('running test: '+item)
t.__getattribute__(item)()
## Instruction:
Add test for CubicTemplate to ensure proper labeling
## Code After:
import numpy as np
import openpnm as op
from skimage.morphology import ball, disk
class CubicTemplateTest:
def setup_class(self):
pass
def teardown_class(self):
pass
def test_2D_template(self):
net = op.network.CubicTemplate(template=disk(10), spacing=1)
assert net.Np == 317
assert net.Nt == 592
def test_3D_template(self):
net = op.network.CubicTemplate(template=ball(5), spacing=1)
assert net.Np == 515
assert net.Nt == 1302
def test_labels(self):
template = np.array(
[[1, 1, 1, 1, 1],
[1, 1, 0, 1, 1],
[1, 1, 0, 0, 1],
[1, 0, 0, 0, 1],
[1, 1, 0, 1, 1]]
)
net = op.network.CubicTemplate(template=template)
# Test "surface" label
Ps_surf_desired = np.array([0, 1, 2, 3, 4, 5, 8, 9, 11, 12, 13, 14, 15, 16, 17])
Ps_surf = net.pores("surface")
np.testing.assert_allclose(Ps_surf, Ps_surf_desired)
# Test "internal_surface" label
Ps_int_surf_desired = np.array([6, 7, 10])
Ps_int_surf = net.pores("internal_surface")
np.testing.assert_allclose(Ps_int_surf, Ps_int_surf_desired)
if __name__ == '__main__':
t = CubicTemplateTest()
t.setup_class()
self = t
for item in t.__dir__():
if item.startswith('test'):
print('running test: '+item)
t.__getattribute__(item)()
| + import numpy as np
import openpnm as op
from skimage.morphology import ball, disk
class CubicTemplateTest:
def setup_class(self):
pass
def teardown_class(self):
pass
def test_2D_template(self):
net = op.network.CubicTemplate(template=disk(10), spacing=1)
assert net.Np == 317
assert net.Nt == 592
def test_3D_template(self):
net = op.network.CubicTemplate(template=ball(5), spacing=1)
assert net.Np == 515
assert net.Nt == 1302
+ def test_labels(self):
+ template = np.array(
+ [[1, 1, 1, 1, 1],
+ [1, 1, 0, 1, 1],
+ [1, 1, 0, 0, 1],
+ [1, 0, 0, 0, 1],
+ [1, 1, 0, 1, 1]]
+ )
+ net = op.network.CubicTemplate(template=template)
+ # Test "surface" label
+ Ps_surf_desired = np.array([0, 1, 2, 3, 4, 5, 8, 9, 11, 12, 13, 14, 15, 16, 17])
+ Ps_surf = net.pores("surface")
+ np.testing.assert_allclose(Ps_surf, Ps_surf_desired)
+ # Test "internal_surface" label
+ Ps_int_surf_desired = np.array([6, 7, 10])
+ Ps_int_surf = net.pores("internal_surface")
+ np.testing.assert_allclose(Ps_int_surf, Ps_int_surf_desired)
+
if __name__ == '__main__':
t = CubicTemplateTest()
t.setup_class()
self = t
for item in t.__dir__():
if item.startswith('test'):
print('running test: '+item)
t.__getattribute__(item)() | 19 | 0.612903 | 19 | 0 |
2310360645b74c96625c3e49f97d2ed8d244584c | CodeReader/Helper/Extensions.swift | CodeReader/Helper/Extensions.swift | //
// Extensions.swift
// CodeReader
//
// Created by vulgur on 16/5/11.
// Copyright © 2016年 MAD. All rights reserved.
//
import UIKit
extension UITableView {
func reloadDataWithAutoSizingCells() {
self.reloadData()
self.setNeedsDisplay()
self.layoutIfNeeded()
self.reloadData()
}
}
extension UIScrollView {
func resizeContentSize() {
let contentWidth = self.frame.width
var contentHeight: CGFloat = 0
for subview in subviews {
contentHeight += subview.frame.height
}
self.contentSize = CGSize(width: contentWidth, height: contentHeight)
}
}
extension Date {
var dateString: String {
let formatter = DateFormatter()
formatter.dateFormat = "yyyy-MM-dd HH:mm:ss"
return formatter.string(from: self)
}
}
| //
// Extensions.swift
// CodeReader
//
// Created by vulgur on 16/5/11.
// Copyright © 2016年 MAD. All rights reserved.
//
import UIKit
extension UITableView {
func reloadDataWithAutoSizingCells() {
self.reloadData()
self.setNeedsDisplay()
self.layoutIfNeeded()
self.reloadData()
}
}
extension UIScrollView {
func resizeContentSize() {
let contentWidth = self.frame.width
var contentHeight: CGFloat = 0
for subview in subviews {
contentHeight += subview.frame.height
}
self.contentSize = CGSize(width: contentWidth, height: contentHeight)
}
}
extension Date {
var dateString: String {
let formatter = DateFormatter()
formatter.dateFormat = "yyyy-MM-dd HH:mm:ss"
return formatter.string(from: self)
}
}
extension UITableView {
func hideEmptyCells() {
self.tableFooterView = UIView(frame: .zero)
}
}
| Add extension to UITableView to hide empty cells | Add extension to UITableView to hide empty cells
| Swift | mit | vulgur/Sources,vulgur/Sources,vulgur/Sources | swift | ## Code Before:
//
// Extensions.swift
// CodeReader
//
// Created by vulgur on 16/5/11.
// Copyright © 2016年 MAD. All rights reserved.
//
import UIKit
extension UITableView {
func reloadDataWithAutoSizingCells() {
self.reloadData()
self.setNeedsDisplay()
self.layoutIfNeeded()
self.reloadData()
}
}
extension UIScrollView {
func resizeContentSize() {
let contentWidth = self.frame.width
var contentHeight: CGFloat = 0
for subview in subviews {
contentHeight += subview.frame.height
}
self.contentSize = CGSize(width: contentWidth, height: contentHeight)
}
}
extension Date {
var dateString: String {
let formatter = DateFormatter()
formatter.dateFormat = "yyyy-MM-dd HH:mm:ss"
return formatter.string(from: self)
}
}
## Instruction:
Add extension to UITableView to hide empty cells
## Code After:
//
// Extensions.swift
// CodeReader
//
// Created by vulgur on 16/5/11.
// Copyright © 2016年 MAD. All rights reserved.
//
import UIKit
extension UITableView {
func reloadDataWithAutoSizingCells() {
self.reloadData()
self.setNeedsDisplay()
self.layoutIfNeeded()
self.reloadData()
}
}
extension UIScrollView {
func resizeContentSize() {
let contentWidth = self.frame.width
var contentHeight: CGFloat = 0
for subview in subviews {
contentHeight += subview.frame.height
}
self.contentSize = CGSize(width: contentWidth, height: contentHeight)
}
}
extension Date {
var dateString: String {
let formatter = DateFormatter()
formatter.dateFormat = "yyyy-MM-dd HH:mm:ss"
return formatter.string(from: self)
}
}
extension UITableView {
func hideEmptyCells() {
self.tableFooterView = UIView(frame: .zero)
}
}
| //
// Extensions.swift
// CodeReader
//
// Created by vulgur on 16/5/11.
// Copyright © 2016年 MAD. All rights reserved.
//
import UIKit
extension UITableView {
func reloadDataWithAutoSizingCells() {
self.reloadData()
self.setNeedsDisplay()
self.layoutIfNeeded()
self.reloadData()
}
}
extension UIScrollView {
func resizeContentSize() {
let contentWidth = self.frame.width
var contentHeight: CGFloat = 0
for subview in subviews {
contentHeight += subview.frame.height
}
self.contentSize = CGSize(width: contentWidth, height: contentHeight)
}
}
extension Date {
var dateString: String {
let formatter = DateFormatter()
formatter.dateFormat = "yyyy-MM-dd HH:mm:ss"
return formatter.string(from: self)
}
}
+
+ extension UITableView {
+ func hideEmptyCells() {
+ self.tableFooterView = UIView(frame: .zero)
+ }
+ } | 6 | 0.162162 | 6 | 0 |
edab226942fbab75aa66e16d5814b1c38c0e8507 | 2048/policy.py | 2048/policy.py | import tensorflow as tf
class EpsilonGreedyPolicy:
def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay):
self.env = env
self.dqn = dqn
self.epsilon_max = epsilon_max
self.epsilon_min = epsilon_min
self.epsilon_decay = epsilon_decay
def take_action(self, state, step):
explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp(
-self.epsilon_decay * tf.cast(step, tf.float32))
if explore_probability > tf.random.uniform(shape=()):
return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability
state = tf.reshape(state, (1, *state.shape, -1))
q_preds = self.dqn(state)[0]
return tf.argmax(q_preds, output_type=tf.int32), explore_probability
| import tensorflow as tf
class EpsilonGreedyPolicy:
def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay):
self.env = env
self.dqn = dqn
self.epsilon_max = epsilon_max
self.epsilon_min = epsilon_min
self.epsilon_decay = epsilon_decay
def take_action(self, state, step):
explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp(
-self.epsilon_decay * tf.cast(step, tf.float32))
if explore_probability > tf.random.uniform(shape=()):
return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability
state = tf.expand_dims(state, axis=0)
q_preds = self.dqn(state)[0]
return tf.argmax(q_preds, output_type=tf.int32), explore_probability
| Fix error in state shape in EGP | [2048] Fix error in state shape in EGP
| Python | mit | akshaykurmi/reinforcement-learning | python | ## Code Before:
import tensorflow as tf
class EpsilonGreedyPolicy:
def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay):
self.env = env
self.dqn = dqn
self.epsilon_max = epsilon_max
self.epsilon_min = epsilon_min
self.epsilon_decay = epsilon_decay
def take_action(self, state, step):
explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp(
-self.epsilon_decay * tf.cast(step, tf.float32))
if explore_probability > tf.random.uniform(shape=()):
return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability
state = tf.reshape(state, (1, *state.shape, -1))
q_preds = self.dqn(state)[0]
return tf.argmax(q_preds, output_type=tf.int32), explore_probability
## Instruction:
[2048] Fix error in state shape in EGP
## Code After:
import tensorflow as tf
class EpsilonGreedyPolicy:
def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay):
self.env = env
self.dqn = dqn
self.epsilon_max = epsilon_max
self.epsilon_min = epsilon_min
self.epsilon_decay = epsilon_decay
def take_action(self, state, step):
explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp(
-self.epsilon_decay * tf.cast(step, tf.float32))
if explore_probability > tf.random.uniform(shape=()):
return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability
state = tf.expand_dims(state, axis=0)
q_preds = self.dqn(state)[0]
return tf.argmax(q_preds, output_type=tf.int32), explore_probability
| import tensorflow as tf
class EpsilonGreedyPolicy:
def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay):
self.env = env
self.dqn = dqn
self.epsilon_max = epsilon_max
self.epsilon_min = epsilon_min
self.epsilon_decay = epsilon_decay
def take_action(self, state, step):
explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp(
-self.epsilon_decay * tf.cast(step, tf.float32))
if explore_probability > tf.random.uniform(shape=()):
return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability
- state = tf.reshape(state, (1, *state.shape, -1))
+ state = tf.expand_dims(state, axis=0)
q_preds = self.dqn(state)[0]
return tf.argmax(q_preds, output_type=tf.int32), explore_probability | 2 | 0.105263 | 1 | 1 |
f8c1f47ba847eadf84da6fdf2060f77a6ba030c0 | .circleci/copy_recursive_regex.sh | .circleci/copy_recursive_regex.sh |
REGEX="$1"
DEST_FOLDER=$2
rm -rf ${DEST_FOLDER}
mkdir -p ${DEST_FOLDER}
for testXml in $(find . -type f -regex ${REGEX}); do
echo ${testXml}
cp ${testXml} ${DEST_FOLDER}
done
echo
|
REGEX="$1"
DEST_FOLDER=$2
rm -rf ${DEST_FOLDER}
mkdir -p ${DEST_FOLDER}
for matching_file in $(find . -type f -regex ${REGEX}); do
echo ${matching_file}
cp ${matching_file} ${DEST_FOLDER}
done
echo
| Generalize recursive regex copy script | Generalize recursive regex copy script
| Shell | apache-2.0 | Tristan971/EasyFXML,Tristan971/EasyFXML | shell | ## Code Before:
REGEX="$1"
DEST_FOLDER=$2
rm -rf ${DEST_FOLDER}
mkdir -p ${DEST_FOLDER}
for testXml in $(find . -type f -regex ${REGEX}); do
echo ${testXml}
cp ${testXml} ${DEST_FOLDER}
done
echo
## Instruction:
Generalize recursive regex copy script
## Code After:
REGEX="$1"
DEST_FOLDER=$2
rm -rf ${DEST_FOLDER}
mkdir -p ${DEST_FOLDER}
for matching_file in $(find . -type f -regex ${REGEX}); do
echo ${matching_file}
cp ${matching_file} ${DEST_FOLDER}
done
echo
|
REGEX="$1"
DEST_FOLDER=$2
rm -rf ${DEST_FOLDER}
mkdir -p ${DEST_FOLDER}
- for testXml in $(find . -type f -regex ${REGEX}); do
? -----
+ for matching_file in $(find . -type f -regex ${REGEX}); do
? ++ +++++++++
- echo ${testXml}
+ echo ${matching_file}
- cp ${testXml} ${DEST_FOLDER}
? -----
+ cp ${matching_file} ${DEST_FOLDER}
? ++ +++++++++
done
echo | 6 | 0.461538 | 3 | 3 |
843494e81a29ec2698ad91e1d0f3675d9c0f52eb | core/user/users.php | core/user/users.php | <?php
namespace Phpws\Core;
class WebSocketUser implements \Phpws\Interfaces\WebsocketUser {
public $socket;
public $id;
public $watcher;
public $headers = NULL;
public $handshaked = false;
public $handlingPartialPacket = false;
public $readBuffer = "";
public $writeNeeded = false;
public $writeBuffer = "";
public $partialMessage = "";
public $hasSentClose = false;
function __construct($id, $socket) {
$this->id = $id;
$this->socket = $socket;
}
}
| <?php
namespace Phpws\Core;
class WebSocketUser implements \Phpws\Interfaces\WebsocketUser {
public $socket;
public $id;
public $watcher;
public $headers = NULL;
public $handshaked = false;
public $handlingPartialPacket = false;
public $readBuffer = "";
public $writeNeeded = false;
public $writeBuffer = "";
public $partialMessage = "";
public $hasSentClose = false;
function __construct($id, $socket) {
$this->id = $id;
$this->socket = $socket;
}
public function getTlsStatus()
{
return false;
}
public function getNetHandle()
{
return $id;
}
}
| Implement the promises of the user interface... | Implement the promises of the user interface...
| PHP | bsd-3-clause | philek/PHP-Websockets,philek/PHP-Websockets | php | ## Code Before:
<?php
namespace Phpws\Core;
class WebSocketUser implements \Phpws\Interfaces\WebsocketUser {
public $socket;
public $id;
public $watcher;
public $headers = NULL;
public $handshaked = false;
public $handlingPartialPacket = false;
public $readBuffer = "";
public $writeNeeded = false;
public $writeBuffer = "";
public $partialMessage = "";
public $hasSentClose = false;
function __construct($id, $socket) {
$this->id = $id;
$this->socket = $socket;
}
}
## Instruction:
Implement the promises of the user interface...
## Code After:
<?php
namespace Phpws\Core;
class WebSocketUser implements \Phpws\Interfaces\WebsocketUser {
public $socket;
public $id;
public $watcher;
public $headers = NULL;
public $handshaked = false;
public $handlingPartialPacket = false;
public $readBuffer = "";
public $writeNeeded = false;
public $writeBuffer = "";
public $partialMessage = "";
public $hasSentClose = false;
function __construct($id, $socket) {
$this->id = $id;
$this->socket = $socket;
}
public function getTlsStatus()
{
return false;
}
public function getNetHandle()
{
return $id;
}
}
| <?php
namespace Phpws\Core;
class WebSocketUser implements \Phpws\Interfaces\WebsocketUser {
public $socket;
public $id;
public $watcher;
public $headers = NULL;
public $handshaked = false;
public $handlingPartialPacket = false;
public $readBuffer = "";
public $writeNeeded = false;
public $writeBuffer = "";
public $partialMessage = "";
public $hasSentClose = false;
function __construct($id, $socket) {
$this->id = $id;
$this->socket = $socket;
}
+
+ public function getTlsStatus()
+ {
+ return false;
+ }
+
+ public function getNetHandle()
+ {
+ return $id;
+ }
} | 10 | 0.4 | 10 | 0 |
4d9f6fd67c5434ffc6de9b5a7dbd5f92447962bd | test/index.js | test/index.js | /**
* Dependencies
*/
var test = require('tape')
var isEmptyObject = require('../')
/**
* Tests
*/
test('isEmptyObject(obj)', function(assert) {
assert.equal(isEmptyObject({}), true, 'returns true if empty')
assert.equal(isEmptyObject({ one: 1 }), false, 'returns false if not empty')
assert.equal(isEmptyObject([]), true, 'returns false if not an object')
assert.equal(isEmptyObject(), undefined, 'returns undefined for undefined')
assert.equal(isEmptyObject(null), undefined, 'returns undefined for null')
assert.equal(isEmptyObject(1), undefined, 'returns undefined for number')
assert.end()
})
| /**
* Dependencies
*/
var test = require('tape')
var isEmptyObject = require('../')
/**
* Tests
*/
test('isEmptyObject(obj)', function(assert) {
assert.equal(isEmptyObject({}), true, 'returns true if empty')
assert.equal(isEmptyObject({ one: 1 }), false, 'returns false if not empty')
assert.equal(isEmptyObject([]), true, 'returns true for empty array')
assert.equal(isEmptyObject([,]), true, 'returns true for empty sparse array')
assert.equal(isEmptyObject([1]), false, 'returns false for array with value')
assert.equal(isEmptyObject([,1]), false, 'returns false for sparse array with value')
assert.equal(isEmptyObject(), undefined, 'returns undefined for undefined')
assert.equal(isEmptyObject(null), undefined, 'returns undefined for null')
assert.equal(isEmptyObject(1), undefined, 'returns undefined for number')
assert.end()
})
| Test more cases for array | Test more cases for array
| JavaScript | mit | kriskowal/is-empty-object | javascript | ## Code Before:
/**
* Dependencies
*/
var test = require('tape')
var isEmptyObject = require('../')
/**
* Tests
*/
test('isEmptyObject(obj)', function(assert) {
assert.equal(isEmptyObject({}), true, 'returns true if empty')
assert.equal(isEmptyObject({ one: 1 }), false, 'returns false if not empty')
assert.equal(isEmptyObject([]), true, 'returns false if not an object')
assert.equal(isEmptyObject(), undefined, 'returns undefined for undefined')
assert.equal(isEmptyObject(null), undefined, 'returns undefined for null')
assert.equal(isEmptyObject(1), undefined, 'returns undefined for number')
assert.end()
})
## Instruction:
Test more cases for array
## Code After:
/**
* Dependencies
*/
var test = require('tape')
var isEmptyObject = require('../')
/**
* Tests
*/
test('isEmptyObject(obj)', function(assert) {
assert.equal(isEmptyObject({}), true, 'returns true if empty')
assert.equal(isEmptyObject({ one: 1 }), false, 'returns false if not empty')
assert.equal(isEmptyObject([]), true, 'returns true for empty array')
assert.equal(isEmptyObject([,]), true, 'returns true for empty sparse array')
assert.equal(isEmptyObject([1]), false, 'returns false for array with value')
assert.equal(isEmptyObject([,1]), false, 'returns false for sparse array with value')
assert.equal(isEmptyObject(), undefined, 'returns undefined for undefined')
assert.equal(isEmptyObject(null), undefined, 'returns undefined for null')
assert.equal(isEmptyObject(1), undefined, 'returns undefined for number')
assert.end()
})
| /**
* Dependencies
*/
var test = require('tape')
var isEmptyObject = require('../')
/**
* Tests
*/
test('isEmptyObject(obj)', function(assert) {
assert.equal(isEmptyObject({}), true, 'returns true if empty')
assert.equal(isEmptyObject({ one: 1 }), false, 'returns false if not empty')
- assert.equal(isEmptyObject([]), true, 'returns false if not an object')
? ^^^^ - ^^ ^^^^^^^^
+ assert.equal(isEmptyObject([]), true, 'returns true for empty array')
? ^^^ ++ ^^^ + ^^^^
+ assert.equal(isEmptyObject([,]), true, 'returns true for empty sparse array')
+ assert.equal(isEmptyObject([1]), false, 'returns false for array with value')
+ assert.equal(isEmptyObject([,1]), false, 'returns false for sparse array with value')
assert.equal(isEmptyObject(), undefined, 'returns undefined for undefined')
assert.equal(isEmptyObject(null), undefined, 'returns undefined for null')
assert.equal(isEmptyObject(1), undefined, 'returns undefined for number')
assert.end()
}) | 5 | 0.25 | 4 | 1 |
d9c16a5304f77848f4b8f2c9344e2f08699ee38e | swgohBotShard.js | swgohBotShard.js | // At max, each shard can have 2500 servers
const config = require("./config.js");
const Discord = require("discord.js");
const Manager = new Discord.ShardingManager("./swgohBot.js",{
totalShards: config.shardCount // Tell it how many shards we want (Approx. 1100 servers per shard)
});
Manager.spawn();
Manager.on("shardCreate", (shard) => {
console.log(`Creating Shard ${shard.id + 1}/${Manager.totalShards}`);
});
| // At max, each shard can have 2500 servers
const config = require("./config.js");
const Discord = require("discord.js");
const Manager = new Discord.ShardingManager("./swgohBot.js",{
totalShards: config.shardCount // Tell it how many shards we want (Approx. 1100 servers per shard)
});
// Give it a large timeout since it refuses to work otherwise
Manager.spawn({timeout: 60000});
Manager.on("shardCreate", (shard) => {
shard.on("reconnecting", () => {
console.log(`Reconnecting shard: [${shard.id}]`);
});
shard.on("spawn", () => {
console.log(`Spawned shard: [${shard.id}]`);
});
// shard.on("ready", () => {
// console.log(`Shard [${shard.id}] is ready`);
// });
shard.on("death", () => {
console.log(`Shard Died: [${shard.id}]`);
});
shard.on("error", (err) => {
console.log("ERROR: Shard had issues starting: \n" + err);
});
});
| Update the sharding file to log more stuff | Update the sharding file to log more stuff
| JavaScript | mit | jmiln/SWGoHBot | javascript | ## Code Before:
// At max, each shard can have 2500 servers
const config = require("./config.js");
const Discord = require("discord.js");
const Manager = new Discord.ShardingManager("./swgohBot.js",{
totalShards: config.shardCount // Tell it how many shards we want (Approx. 1100 servers per shard)
});
Manager.spawn();
Manager.on("shardCreate", (shard) => {
console.log(`Creating Shard ${shard.id + 1}/${Manager.totalShards}`);
});
## Instruction:
Update the sharding file to log more stuff
## Code After:
// At max, each shard can have 2500 servers
const config = require("./config.js");
const Discord = require("discord.js");
const Manager = new Discord.ShardingManager("./swgohBot.js",{
totalShards: config.shardCount // Tell it how many shards we want (Approx. 1100 servers per shard)
});
// Give it a large timeout since it refuses to work otherwise
Manager.spawn({timeout: 60000});
Manager.on("shardCreate", (shard) => {
shard.on("reconnecting", () => {
console.log(`Reconnecting shard: [${shard.id}]`);
});
shard.on("spawn", () => {
console.log(`Spawned shard: [${shard.id}]`);
});
// shard.on("ready", () => {
// console.log(`Shard [${shard.id}] is ready`);
// });
shard.on("death", () => {
console.log(`Shard Died: [${shard.id}]`);
});
shard.on("error", (err) => {
console.log("ERROR: Shard had issues starting: \n" + err);
});
});
| // At max, each shard can have 2500 servers
const config = require("./config.js");
const Discord = require("discord.js");
const Manager = new Discord.ShardingManager("./swgohBot.js",{
totalShards: config.shardCount // Tell it how many shards we want (Approx. 1100 servers per shard)
});
- Manager.spawn();
+
+ // Give it a large timeout since it refuses to work otherwise
+ Manager.spawn({timeout: 60000});
Manager.on("shardCreate", (shard) => {
- console.log(`Creating Shard ${shard.id + 1}/${Manager.totalShards}`);
+ shard.on("reconnecting", () => {
+ console.log(`Reconnecting shard: [${shard.id}]`);
+ });
+ shard.on("spawn", () => {
+ console.log(`Spawned shard: [${shard.id}]`);
+ });
+ // shard.on("ready", () => {
+ // console.log(`Shard [${shard.id}] is ready`);
+ // });
+ shard.on("death", () => {
+ console.log(`Shard Died: [${shard.id}]`);
+ });
+ shard.on("error", (err) => {
+ console.log("ERROR: Shard had issues starting: \n" + err);
+ });
}); | 20 | 1.666667 | 18 | 2 |
7ea7c8dda76cc03c10056230711c74e112cd3a13 | .rubocop.yml | .rubocop.yml | AllCops:
TargetRubyVersion: 2.3
Layout/EmptyLinesAroundClassBody:
EnforcedStyle: empty_lines
Layout/MultilineMethodCallIndentation:
EnforcedStyle: indented
Layout/SpaceInsideHashLiteralBraces:
EnforcedStyle: no_space
Metrics/AbcSize:
Enabled: false
Metrics/BlockLength:
Enabled: false
Metrics/ClassLength:
Enabled: false
Metrics/MethodLength:
Enabled: false
Style/AndOr:
EnforcedStyle: conditionals
Style/BlockDelimiters:
EnforcedStyle: semantic
Style/Documentation:
Enabled: false
Style/PercentLiteralDelimiters:
PreferredDelimiters:
default: '[]'
Style/SignalException:
EnforcedStyle: semantic
| AllCops:
TargetRubyVersion: 2.3
Layout/EmptyLinesAroundClassBody:
EnforcedStyle: empty_lines
Layout/MultilineMethodCallIndentation:
EnforcedStyle: indented
Layout/SpaceInsideHashLiteralBraces:
EnforcedStyle: no_space
Metrics/AbcSize:
Enabled: false
Metrics/BlockLength:
Enabled: false
Metrics/ClassLength:
Enabled: false
Metrics/MethodLength:
Enabled: false
Style/AndOr:
EnforcedStyle: conditionals
Style/BlockDelimiters:
EnforcedStyle: semantic
Style/Documentation:
Enabled: false
Style/SignalException:
EnforcedStyle: semantic
| Remove percent literal RuboCop configuration | Remove percent literal RuboCop configuration
The default RuboCop configuration has been changed to `[]` for both `%i`
and `%w`.
Reference
* https://github.com/brandonweiss/rubocop/commit/148ccf1e4c077b99a26b38ebbdad02a357a6d464
| YAML | apache-2.0 | zendesk/biz,zendesk/biz | yaml | ## Code Before:
AllCops:
TargetRubyVersion: 2.3
Layout/EmptyLinesAroundClassBody:
EnforcedStyle: empty_lines
Layout/MultilineMethodCallIndentation:
EnforcedStyle: indented
Layout/SpaceInsideHashLiteralBraces:
EnforcedStyle: no_space
Metrics/AbcSize:
Enabled: false
Metrics/BlockLength:
Enabled: false
Metrics/ClassLength:
Enabled: false
Metrics/MethodLength:
Enabled: false
Style/AndOr:
EnforcedStyle: conditionals
Style/BlockDelimiters:
EnforcedStyle: semantic
Style/Documentation:
Enabled: false
Style/PercentLiteralDelimiters:
PreferredDelimiters:
default: '[]'
Style/SignalException:
EnforcedStyle: semantic
## Instruction:
Remove percent literal RuboCop configuration
The default RuboCop configuration has been changed to `[]` for both `%i`
and `%w`.
Reference
* https://github.com/brandonweiss/rubocop/commit/148ccf1e4c077b99a26b38ebbdad02a357a6d464
## Code After:
AllCops:
TargetRubyVersion: 2.3
Layout/EmptyLinesAroundClassBody:
EnforcedStyle: empty_lines
Layout/MultilineMethodCallIndentation:
EnforcedStyle: indented
Layout/SpaceInsideHashLiteralBraces:
EnforcedStyle: no_space
Metrics/AbcSize:
Enabled: false
Metrics/BlockLength:
Enabled: false
Metrics/ClassLength:
Enabled: false
Metrics/MethodLength:
Enabled: false
Style/AndOr:
EnforcedStyle: conditionals
Style/BlockDelimiters:
EnforcedStyle: semantic
Style/Documentation:
Enabled: false
Style/SignalException:
EnforcedStyle: semantic
| AllCops:
TargetRubyVersion: 2.3
Layout/EmptyLinesAroundClassBody:
EnforcedStyle: empty_lines
Layout/MultilineMethodCallIndentation:
EnforcedStyle: indented
Layout/SpaceInsideHashLiteralBraces:
EnforcedStyle: no_space
Metrics/AbcSize:
Enabled: false
Metrics/BlockLength:
Enabled: false
Metrics/ClassLength:
Enabled: false
Metrics/MethodLength:
Enabled: false
Style/AndOr:
EnforcedStyle: conditionals
Style/BlockDelimiters:
EnforcedStyle: semantic
Style/Documentation:
Enabled: false
- Style/PercentLiteralDelimiters:
- PreferredDelimiters:
- default: '[]'
-
Style/SignalException:
EnforcedStyle: semantic | 4 | 0.102564 | 0 | 4 |
69af4cc9b2f489c7a98590864bb517a3a45c63fc | lib/firefoxos.js | lib/firefoxos.js | (function(context) {
var event = new Event('network-ready');
context.ajax = function(url, data, callback) {
if (typeof data === 'function') {
callback = data;
data = null;
}
var xhr = new XMLHttpRequest(),
ajaxCallback = function() {
callback(xhr.response, xhr.status, xhr);
if (xhr.status != null) {
window.dispatchEvent(event);
}
};
xhr.onload = ajaxCallback;
xhr.onerror = ajaxCallback;
xhr.startedAt = new Date();
xhr.startedAt.setTime(xhr.startedAt.getTime());
saveTimeStamp(xhr.startedAt);
xhr.open(data !== null ? 'POST' : 'GET', url);
xhr.send(data || null);
};
})(this);
function saveTimeStamp(date){
var expTime = new Date();
expTime.setTime(expTime.getTime() + (14*24*60*60*1000)); //14 days in ms.
document.cookie = "LastAccess" + "=" + date.toUTCString() + "; " + "expires=" + expTime.toUTCString();;
}
function getLatestAccessTimeStamp() {
var sKey = "LastAccess";
return document.cookie.replace(new RegExp("(?:(?:^|.*;)\\s*" + sKey + "\\s*\\=\\s*([^;]*).*$)|^.*$"), "$1") || null;;
}
| (function(context) {
var event = new Event('network-ready');
context.ajax = function(url, data, callback) {
if (typeof data === 'function') {
callback = data;
data = null;
}
var xhr = new XMLHttpRequest(),
ajaxCallback = function() {
callback(xhr.response, xhr.status, xhr);
if (xhr.status !== null) {
context.dispatchEvent(event);
}
};
xhr.onload = ajaxCallback;
xhr.onerror = ajaxCallback;
xhr.startedAt = new Date();
xhr.startedAt.setTime(xhr.startedAt.getTime());
saveTimeStamp(xhr.startedAt);
xhr.open(data !== null ? 'POST' : 'GET', url);
xhr.send(data || null);
};
function saveTimeStamp (date){
var expTime = new Date();
expTime.setTime(expTime.getTime() + (14*24*60*60*1000)); //14 days in ms.
document.cookie = "LastAccess" + "=" + date.toUTCString() + "; " + "expires=" + expTime.toUTCString();;
}
context.getLatestAccessTimeStamp = function() {
var sKey = "LastAccess";
return document.cookie.replace(new RegExp("(?:(?:^|.*;)\\s*" + sKey + "\\s*\\=\\s*([^;]*).*$)|^.*$"), "$1") || null;;
};
})(this);
| Reformat library to pull functions in from global scope. | Reformat library to pull functions in from global scope.
| JavaScript | mpl-2.0 | askore/capstone2014-firefoxOS,askore/capstone2014-firefoxOS | javascript | ## Code Before:
(function(context) {
var event = new Event('network-ready');
context.ajax = function(url, data, callback) {
if (typeof data === 'function') {
callback = data;
data = null;
}
var xhr = new XMLHttpRequest(),
ajaxCallback = function() {
callback(xhr.response, xhr.status, xhr);
if (xhr.status != null) {
window.dispatchEvent(event);
}
};
xhr.onload = ajaxCallback;
xhr.onerror = ajaxCallback;
xhr.startedAt = new Date();
xhr.startedAt.setTime(xhr.startedAt.getTime());
saveTimeStamp(xhr.startedAt);
xhr.open(data !== null ? 'POST' : 'GET', url);
xhr.send(data || null);
};
})(this);
function saveTimeStamp(date){
var expTime = new Date();
expTime.setTime(expTime.getTime() + (14*24*60*60*1000)); //14 days in ms.
document.cookie = "LastAccess" + "=" + date.toUTCString() + "; " + "expires=" + expTime.toUTCString();;
}
function getLatestAccessTimeStamp() {
var sKey = "LastAccess";
return document.cookie.replace(new RegExp("(?:(?:^|.*;)\\s*" + sKey + "\\s*\\=\\s*([^;]*).*$)|^.*$"), "$1") || null;;
}
## Instruction:
Reformat library to pull functions in from global scope.
## Code After:
(function(context) {
var event = new Event('network-ready');
context.ajax = function(url, data, callback) {
if (typeof data === 'function') {
callback = data;
data = null;
}
var xhr = new XMLHttpRequest(),
ajaxCallback = function() {
callback(xhr.response, xhr.status, xhr);
if (xhr.status !== null) {
context.dispatchEvent(event);
}
};
xhr.onload = ajaxCallback;
xhr.onerror = ajaxCallback;
xhr.startedAt = new Date();
xhr.startedAt.setTime(xhr.startedAt.getTime());
saveTimeStamp(xhr.startedAt);
xhr.open(data !== null ? 'POST' : 'GET', url);
xhr.send(data || null);
};
function saveTimeStamp (date){
var expTime = new Date();
expTime.setTime(expTime.getTime() + (14*24*60*60*1000)); //14 days in ms.
document.cookie = "LastAccess" + "=" + date.toUTCString() + "; " + "expires=" + expTime.toUTCString();;
}
context.getLatestAccessTimeStamp = function() {
var sKey = "LastAccess";
return document.cookie.replace(new RegExp("(?:(?:^|.*;)\\s*" + sKey + "\\s*\\=\\s*([^;]*).*$)|^.*$"), "$1") || null;;
};
})(this);
| (function(context) {
var event = new Event('network-ready');
context.ajax = function(url, data, callback) {
if (typeof data === 'function') {
callback = data;
data = null;
}
var xhr = new XMLHttpRequest(),
ajaxCallback = function() {
callback(xhr.response, xhr.status, xhr);
- if (xhr.status != null) {
+ if (xhr.status !== null) {
? +
- window.dispatchEvent(event);
? ^^ ^^^
+ context.dispatchEvent(event);
? ^^ ^^^^
}
};
xhr.onload = ajaxCallback;
xhr.onerror = ajaxCallback;
xhr.startedAt = new Date();
- xhr.startedAt.setTime(xhr.startedAt.getTime());
? ^^^^^^^^^^^^^^^^
+ xhr.startedAt.setTime(xhr.startedAt.getTime());
? ^^
- saveTimeStamp(xhr.startedAt);
? ^^^^^^^^^^^^^^^^
+ saveTimeStamp(xhr.startedAt);
? ^^
xhr.open(data !== null ? 'POST' : 'GET', url);
xhr.send(data || null);
};
+
+ function saveTimeStamp (date){
+ var expTime = new Date();
+ expTime.setTime(expTime.getTime() + (14*24*60*60*1000)); //14 days in ms.
+ document.cookie = "LastAccess" + "=" + date.toUTCString() + "; " + "expires=" + expTime.toUTCString();;
+ }
+
+ context.getLatestAccessTimeStamp = function() {
+ var sKey = "LastAccess";
+ return document.cookie.replace(new RegExp("(?:(?:^|.*;)\\s*" + sKey + "\\s*\\=\\s*([^;]*).*$)|^.*$"), "$1") || null;;
+ };
})(this);
- function saveTimeStamp(date){
- var expTime = new Date();
- expTime.setTime(expTime.getTime() + (14*24*60*60*1000)); //14 days in ms.
- document.cookie = "LastAccess" + "=" + date.toUTCString() + "; " + "expires=" + expTime.toUTCString();;
- }
- function getLatestAccessTimeStamp() {
- var sKey = "LastAccess";
- return document.cookie.replace(new RegExp("(?:(?:^|.*;)\\s*" + sKey + "\\s*\\=\\s*([^;]*).*$)|^.*$"), "$1") || null;;
- } | 28 | 0.717949 | 15 | 13 |
f336558589bab1b3416e4c327e31850e94a26ec4 | app/views/projects/snippets/_snippet.html.haml | app/views/projects/snippets/_snippet.html.haml | %li
%h4.snippet-title
= link_to reliable_snippet_path(snippet) do
= truncate(snippet.title, length: 60)
%span.cgray.monospace.tiny.pull-right
= snippet.file_name
%small.pull-right.cgray
Expires:
- if snippet.expires_at
= snippet.expires_at.to_date.to_s(:short)
- else
Never
.snippet-info
= "##{snippet.id}"
%span
by
= image_tag avatar_icon(snippet.author_email), class: "avatar avatar-inline s16"
= snippet.author_name
%span.light
#{time_ago_with_tooltip(snippet.created_at)}
| %li
%h4.snippet-title
= link_to reliable_snippet_path(snippet) do
= truncate(snippet.title, length: 60)
%span.cgray.monospace.tiny.pull-right
= snippet.file_name
.snippet-info
= "##{snippet.id}"
%span
by
= image_tag avatar_icon(snippet.author_email), class: "avatar avatar-inline s16"
= snippet.author_name
%span.light
#{time_ago_with_tooltip(snippet.created_at)}
| Remove snippet expiration date from UI | Remove snippet expiration date from UI
| Haml | mit | 8thcolor/eurucamp2014-htdsadr,8thcolor/rubyconfau2015-sadr,8thcolor/rubyconfau2015-sadr,8thcolor/eurucamp2014-htdsadr,8thcolor/rubyconfau2015-sadr,8thcolor/eurucamp2014-htdsadr | haml | ## Code Before:
%li
%h4.snippet-title
= link_to reliable_snippet_path(snippet) do
= truncate(snippet.title, length: 60)
%span.cgray.monospace.tiny.pull-right
= snippet.file_name
%small.pull-right.cgray
Expires:
- if snippet.expires_at
= snippet.expires_at.to_date.to_s(:short)
- else
Never
.snippet-info
= "##{snippet.id}"
%span
by
= image_tag avatar_icon(snippet.author_email), class: "avatar avatar-inline s16"
= snippet.author_name
%span.light
#{time_ago_with_tooltip(snippet.created_at)}
## Instruction:
Remove snippet expiration date from UI
## Code After:
%li
%h4.snippet-title
= link_to reliable_snippet_path(snippet) do
= truncate(snippet.title, length: 60)
%span.cgray.monospace.tiny.pull-right
= snippet.file_name
.snippet-info
= "##{snippet.id}"
%span
by
= image_tag avatar_icon(snippet.author_email), class: "avatar avatar-inline s16"
= snippet.author_name
%span.light
#{time_ago_with_tooltip(snippet.created_at)}
| %li
%h4.snippet-title
= link_to reliable_snippet_path(snippet) do
= truncate(snippet.title, length: 60)
%span.cgray.monospace.tiny.pull-right
= snippet.file_name
-
- %small.pull-right.cgray
- Expires:
- - if snippet.expires_at
- = snippet.expires_at.to_date.to_s(:short)
- - else
- Never
.snippet-info
= "##{snippet.id}"
%span
by
= image_tag avatar_icon(snippet.author_email), class: "avatar avatar-inline s16"
= snippet.author_name
%span.light
#{time_ago_with_tooltip(snippet.created_at)} | 7 | 0.318182 | 0 | 7 |
3dad21b32419e1e565eae543258d3b65252139b1 | vmfiles/set_up_site.sh | vmfiles/set_up_site.sh | HOSTNAME=$1
IP=$2
echo 'Run me as user, please!'
sudo ./createvm.sh $HOSTNAME $IP
knife bootstrap $IP -x brain -N $HOSTNAME-toad -P password -r 'role[simple_webserver]' --sudo | HOSTNAME=$1
IP=$2
echo 'Run me as user, please!'
sudo ./createvm.sh $HOSTNAME $IP
sleep 30
knife bootstrap $IP -x brain -N $HOSTNAME-toad -P password -r 'role[simple_webserver]' --sudo | Add some timeout after VM creation before bootstrapping | Add some timeout after VM creation before bootstrapping
| Shell | apache-2.0 | brain-geek/toad-chef-repo,brain-geek/toad-chef-repo | shell | ## Code Before:
HOSTNAME=$1
IP=$2
echo 'Run me as user, please!'
sudo ./createvm.sh $HOSTNAME $IP
knife bootstrap $IP -x brain -N $HOSTNAME-toad -P password -r 'role[simple_webserver]' --sudo
## Instruction:
Add some timeout after VM creation before bootstrapping
## Code After:
HOSTNAME=$1
IP=$2
echo 'Run me as user, please!'
sudo ./createvm.sh $HOSTNAME $IP
sleep 30
knife bootstrap $IP -x brain -N $HOSTNAME-toad -P password -r 'role[simple_webserver]' --sudo | HOSTNAME=$1
IP=$2
echo 'Run me as user, please!'
sudo ./createvm.sh $HOSTNAME $IP
+
+ sleep 30
+
knife bootstrap $IP -x brain -N $HOSTNAME-toad -P password -r 'role[simple_webserver]' --sudo | 3 | 0.428571 | 3 | 0 |
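A note on the design choice, offered as an aside: the fixed `sleep 30` covers the time the new VM needs before `knife bootstrap` can reach it over SSH. A common alternative is to poll the port instead of sleeping a fixed amount; a sketch under that assumption (host, port and timeouts are illustrative):

```python
import socket
import time

def wait_for_port(host, port=22, timeout=300, interval=5):
    """Return True once host:port accepts TCP connections, False after `timeout` seconds."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            with socket.create_connection((host, port), timeout=interval):
                return True
        except OSError:
            time.sleep(interval)
    return False
```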
8e58a7d88b5ad19e772f4fb6c912901e4e5f07af | lib/islay/active_record.rb | lib/islay/active_record.rb | class ActiveRecord::Base
private
# Provides access to the user model provided by Devise.
def current_user
Thread.current[:current_user]
end
# A callback handler which updates the user ID columns before save
def update_user_ids
if current_user
self.creator_id = current_user.id if new_record?
self.updater_id = current_user.id
end
end
# Installs a before_save hook for updating the user IDs against a record.
# This requires the creator_id and updater_id columns to be in the table.
#
# This method also installs to associations; creator, updater
def self.track_user_edits
before_save :update_user_ids
belongs_to :creator, :class_name => 'User'
belongs_to :updater, :class_name => 'User'
end
end
| module ActiveRecord
module ConnectionAdapters
class TableDefinition
# This is a migration helper for adding columns used when tracking user
# edits to records. It works in conjunction with the extensions to AR:Base.
def user_tracking(*args)
column(:creator_id, :integer, :null => false, :references => :users)
column(:updater_id, :integer, :null => false, :references => :users)
end
end
end
class Base
private
# Provides access to the user model provided by Devise.
def current_user
Thread.current[:current_user]
end
# A callback handler which updates the user ID columns before save
def update_user_ids
if current_user
self.creator_id = current_user.id if new_record?
self.updater_id = current_user.id
end
end
# Installs a before_save hook for updating the user IDs against a record.
# This requires the creator_id and updater_id columns to be in the table.
#
# This method also installs to associations; creator, updater
def self.track_user_edits
before_save :update_user_ids
belongs_to :creator, :class_name => 'User'
belongs_to :updater, :class_name => 'User'
end
end
end
| Add migration helper for generating user edit columns. | Add migration helper for generating user edit columns.
| Ruby | mit | spookandpuff/islay,spookandpuff/islay,spookandpuff/islay | ruby | ## Code Before:
class ActiveRecord::Base
private
# Provides access to the user model provided by Devise.
def current_user
Thread.current[:current_user]
end
# A callback handler which updates the user ID columns before save
def update_user_ids
if current_user
self.creator_id = current_user.id if new_record?
self.updater_id = current_user.id
end
end
# Installs a before_save hook for updating the user IDs against a record.
# This requires the creator_id and updater_id columns to be in the table.
#
# This method also installs to associations; creator, updater
def self.track_user_edits
before_save :update_user_ids
belongs_to :creator, :class_name => 'User'
belongs_to :updater, :class_name => 'User'
end
end
## Instruction:
Add migration helper for generating user edit columns.
## Code After:
module ActiveRecord
module ConnectionAdapters
class TableDefinition
# This is a migration helper for adding columns used when tracking user
# edits to records. It works in conjunction with the extensions to AR:Base.
def user_tracking(*args)
column(:creator_id, :integer, :null => false, :references => :users)
column(:updater_id, :integer, :null => false, :references => :users)
end
end
end
class Base
private
# Provides access to the user model provided by Devise.
def current_user
Thread.current[:current_user]
end
# A callback handler which updates the user ID columns before save
def update_user_ids
if current_user
self.creator_id = current_user.id if new_record?
self.updater_id = current_user.id
end
end
# Installs a before_save hook for updating the user IDs against a record.
# This requires the creator_id and updater_id columns to be in the table.
#
# This method also installs to associations; creator, updater
def self.track_user_edits
before_save :update_user_ids
belongs_to :creator, :class_name => 'User'
belongs_to :updater, :class_name => 'User'
end
end
end
| + module ActiveRecord
+ module ConnectionAdapters
+ class TableDefinition
+ # This is a migration helper for adding columns used when tracking user
+ # edits to records. It works in conjunction with the extensions to AR:Base.
+ def user_tracking(*args)
+ column(:creator_id, :integer, :null => false, :references => :users)
+ column(:updater_id, :integer, :null => false, :references => :users)
+ end
- class ActiveRecord::Base
- private
-
- # Provides access to the user model provided by Devise.
- def current_user
- Thread.current[:current_user]
- end
-
- # A callback handler which updates the user ID columns before save
- def update_user_ids
- if current_user
- self.creator_id = current_user.id if new_record?
- self.updater_id = current_user.id
end
end
+ class Base
+ private
+
+ # Provides access to the user model provided by Devise.
+ def current_user
+ Thread.current[:current_user]
+ end
+
+ # A callback handler which updates the user ID columns before save
+ def update_user_ids
+ if current_user
+ self.creator_id = current_user.id if new_record?
+ self.updater_id = current_user.id
+ end
+ end
+
- # Installs a before_save hook for updating the user IDs against a record.
+ # Installs a before_save hook for updating the user IDs against a record.
? ++
- # This requires the creator_id and updater_id columns to be in the table.
+ # This requires the creator_id and updater_id columns to be in the table.
? ++
- #
+ #
? ++
- # This method also installs to associations; creator, updater
+ # This method also installs to associations; creator, updater
? ++
- def self.track_user_edits
+ def self.track_user_edits
? ++
- before_save :update_user_ids
+ before_save :update_user_ids
? ++
- belongs_to :creator, :class_name => 'User'
+ belongs_to :creator, :class_name => 'User'
? ++
- belongs_to :updater, :class_name => 'User'
+ belongs_to :updater, :class_name => 'User'
? ++
+ end
end
end | 55 | 2.115385 | 34 | 21 |
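The pattern above (a thread-local current user plus a before-save hook stamping creator/updater columns) is not framework-specific. A minimal, framework-agnostic sketch of the same idea in Python; every name here is illustrative and none comes from the gem:

```python
import threading

_context = threading.local()

def set_current_user(user_id):
    _context.user_id = user_id

class TrackedRecord:
    def __init__(self):
        self.creator_id = None
        self.updater_id = None
        self._persisted = False

    def save(self):
        user_id = getattr(_context, "user_id", None)
        if user_id is not None:
            if not self._persisted:      # mirrors the "new record" check
                self.creator_id = user_id
            self.updater_id = user_id
        self._persisted = True
```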
bdc6cd397d11d0a116945dc876cd3eacfeeb0832 | services/chat/config/settings.defaults.coffee | services/chat/config/settings.defaults.coffee | module.exports =
internal:
chat:
host: process.env['LISTEN_ADDRESS'] or "localhost"
port: 3010
apis:
web:
url: "http://#{process.env['WEB_HOST'] || "localhost"}:3000"
user: "sharelatex"
pass: "password"
mongo:
url : "mongodb://#{process.env['MONGO_HOST'] || "localhost"}/sharelatex"
redis:
web:
host: process.env['REDIS_HOST'] || "localhost"
port: "6379"
password: "" | module.exports =
internal:
chat:
host: process.env['LISTEN_ADDRESS'] or "localhost"
port: 3010
apis:
web:
url: "http://#{process.env['WEB_HOST'] || "localhost"}:#{process.env['WEB_PORT'] or 3000}"
user: "sharelatex"
pass: "password"
mongo:
url : "mongodb://#{process.env['MONGO_HOST'] || "localhost"}/sharelatex"
redis:
web:
host: process.env['REDIS_HOST'] || "localhost"
port: "6379"
password: "" | Use setting instead of hard-coding port | Use setting instead of hard-coding port
| CoffeeScript | agpl-3.0 | sharelatex/sharelatex | coffeescript | ## Code Before:
module.exports =
internal:
chat:
host: process.env['LISTEN_ADDRESS'] or "localhost"
port: 3010
apis:
web:
url: "http://#{process.env['WEB_HOST'] || "localhost"}:3000"
user: "sharelatex"
pass: "password"
mongo:
url : "mongodb://#{process.env['MONGO_HOST'] || "localhost"}/sharelatex"
redis:
web:
host: process.env['REDIS_HOST'] || "localhost"
port: "6379"
password: ""
## Instruction:
Use setting instead of hard-coding port
## Code After:
module.exports =
internal:
chat:
host: process.env['LISTEN_ADDRESS'] or "localhost"
port: 3010
apis:
web:
url: "http://#{process.env['WEB_HOST'] || "localhost"}:#{process.env['WEB_PORT'] or 3000}"
user: "sharelatex"
pass: "password"
mongo:
url : "mongodb://#{process.env['MONGO_HOST'] || "localhost"}/sharelatex"
redis:
web:
host: process.env['REDIS_HOST'] || "localhost"
port: "6379"
password: "" | module.exports =
internal:
chat:
host: process.env['LISTEN_ADDRESS'] or "localhost"
port: 3010
apis:
web:
- url: "http://#{process.env['WEB_HOST'] || "localhost"}:3000"
+ url: "http://#{process.env['WEB_HOST'] || "localhost"}:#{process.env['WEB_PORT'] or 3000}"
? +++++++++++++++++++++++++++++ +
user: "sharelatex"
pass: "password"
mongo:
url : "mongodb://#{process.env['MONGO_HOST'] || "localhost"}/sharelatex"
redis:
web:
host: process.env['REDIS_HOST'] || "localhost"
port: "6379"
password: "" | 2 | 0.1 | 1 | 1 |
411a91ff3218adc870cee4d962ed296fa6498b36 | docu/build.bat | docu/build.bat | doxygen
cd latex
pdflatex -shell-escape refman.tex
makeindex -s ../refman.ist refman.idx
pdflatex -shell-escape refman.tex
pdflatex -shell-escape refman.tex
cd ..
copy latex\refman.pdf .
| doxygen
cd latex
pdflatex -interaction=nonstopmode -shell-escape refman.tex
makeindex -s ../refman.ist refman.idx
pdflatex -interaction=nonstopmode -shell-escape refman.tex
pdflatex -interaction=nonstopmode -shell-escape refman.tex
cd ..
copy latex\refman.pdf .
| Call pdflatex in non stop mode | Call pdflatex in non stop mode
| Batchfile | bsd-3-clause | t-b/igor-unit-testing-framework,t-b/igor-unit-testing-framework | batchfile | ## Code Before:
doxygen
cd latex
pdflatex -shell-escape refman.tex
makeindex -s ../refman.ist refman.idx
pdflatex -shell-escape refman.tex
pdflatex -shell-escape refman.tex
cd ..
copy latex\refman.pdf .
## Instruction:
Call pdflatex in non stop mode
## Code After:
doxygen
cd latex
pdflatex -interaction=nonstopmode -shell-escape refman.tex
makeindex -s ../refman.ist refman.idx
pdflatex -interaction=nonstopmode -shell-escape refman.tex
pdflatex -interaction=nonstopmode -shell-escape refman.tex
cd ..
copy latex\refman.pdf .
| doxygen
cd latex
- pdflatex -shell-escape refman.tex
+ pdflatex -interaction=nonstopmode -shell-escape refman.tex
makeindex -s ../refman.ist refman.idx
- pdflatex -shell-escape refman.tex
- pdflatex -shell-escape refman.tex
+ pdflatex -interaction=nonstopmode -shell-escape refman.tex
+ pdflatex -interaction=nonstopmode -shell-escape refman.tex
cd ..
copy latex\refman.pdf . | 6 | 0.75 | 3 | 3 |
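For context, `-interaction=nonstopmode` keeps pdflatex from halting at an interactive prompt when it hits an error, which is what an unattended build needs. A sketch of the same sequence driven from Python (paths mirror the batch file; `check=False` because pdflatex can exit non-zero on recoverable warnings):

```python
import subprocess

def run(cmd, cwd="latex"):
    return subprocess.run(cmd, cwd=cwd, check=False).returncode

pdflatex = ["pdflatex", "-interaction=nonstopmode", "-shell-escape", "refman.tex"]
run(pdflatex)
run(["makeindex", "-s", "../refman.ist", "refman.idx"])
run(pdflatex)
run(pdflatex)
```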
e23b775b19934cbdf731b59556e752a4643226a6 | src/dir-exists.js | src/dir-exists.js | const fs = require('fs');
const dirExists = (directory) => {
return new Promise((resolve, reject) => {
fs.stat(directory, function(err, stats) {
if (err) {
if (err.errno === 34) {
// Directory does not exist.
resolve(false);
} else {
// Some other error occurred.
reject(err);
}
} else {
// Directory exists.
resolve(true);
}
});
});
};
module.exports = dirExists;
| const fs = require('fs');
const dirExists = (directory) => {
return new Promise((resolve, reject) => {
fs.stat(directory, function(err, stats) {
if (err) {
if (err.code === 'ENOENT') {
// Directory does not exist.
resolve(false);
} else {
// Some other error occurred.
reject(err);
}
} else {
// Directory exists.
resolve(true);
}
});
});
};
module.exports = dirExists;
| Use err.code ENOENT to check for existence | Use err.code ENOENT to check for existence
| JavaScript | mit | life-corp/ymp-utilities | javascript | ## Code Before:
const fs = require('fs');
const dirExists = (directory) => {
return new Promise((resolve, reject) => {
fs.stat(directory, function(err, stats) {
if (err) {
if (err.errno === 34) {
// Directory does not exist.
resolve(false);
} else {
// Some other error occurred.
reject(err);
}
} else {
// Directory exists.
resolve(true);
}
});
});
};
module.exports = dirExists;
## Instruction:
Use err.code ENOENT to check for existence
## Code After:
const fs = require('fs');
const dirExists = (directory) => {
return new Promise((resolve, reject) => {
fs.stat(directory, function(err, stats) {
if (err) {
if (err.code === 'ENOENT') {
// Directory does not exist.
resolve(false);
} else {
// Some other error occurred.
reject(err);
}
} else {
// Directory exists.
resolve(true);
}
});
});
};
module.exports = dirExists;
| const fs = require('fs');
const dirExists = (directory) => {
return new Promise((resolve, reject) => {
fs.stat(directory, function(err, stats) {
if (err) {
- if (err.errno === 34) {
+ if (err.code === 'ENOENT') {
// Directory does not exist.
resolve(false);
} else {
// Some other error occurred.
reject(err);
}
} else {
// Directory exists.
resolve(true);
}
});
});
};
module.exports = dirExists; | 2 | 0.090909 | 1 | 1 |
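The change above swaps a magic errno number for the symbolic `ENOENT` code, the portable way to say "no such file or directory". The same check in Python, as an illustration of the pattern rather than of the package:

```python
import errno
import os
import stat

def dir_exists(path):
    try:
        st = os.stat(path)
    except OSError as err:
        if err.errno == errno.ENOENT:    # symbolic code instead of a magic number
            return False
        raise                            # any other error propagates, as in the JS promise
    return stat.S_ISDIR(st.st_mode)
```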
9dfc9af59fd43b9ddb5c2365108c4107ee7c2bf5 | server/static/global.css | server/static/global.css | .result-img {
height: 200px;
}
#footer {
padding-top: 50px;
}
| .result-img {
height: 200px;
min-width:200px;
background-color: #CCC;
}
#footer {
padding-top: 50px;
}
| Add grey background before loading image | Add grey background before loading image
| CSS | mit | albertyw/devops-reactions-index,albertyw/devops-reactions-index,albertyw/reaction-pics,albertyw/devops-reactions-index,albertyw/reaction-pics,albertyw/reaction-pics,albertyw/devops-reactions-index,albertyw/reaction-pics | css | ## Code Before:
.result-img {
height: 200px;
}
#footer {
padding-top: 50px;
}
## Instruction:
Add grey background before loading image
## Code After:
.result-img {
height: 200px;
min-width:200px;
background-color: #CCC;
}
#footer {
padding-top: 50px;
}
| .result-img {
height: 200px;
+ min-width:200px;
+ background-color: #CCC;
}
#footer {
padding-top: 50px;
} | 2 | 0.333333 | 2 | 0 |
0bce306943df133640215f16ab292061db28393a | README.md | README.md |
A simple Appnexus API client
#Install
Via [composer](https://getcomposer.org):
`$ composer require "f3ath/appnexus"`
#Use
```php
$storage = new F3\AppNexusClient\ArrayTokenStorage(); // Memcached or Apc storage is also available
$appnexus = new F3\AppNexusClient\AppNexusClient('username', 'password', "http://sand.api.appnexus.com", $storage);
var_dump($appnexus->call(F3\AppNexusClient\HttpMethod::GET, '/user'));
```
|
A simple Appnexus API client
#Install
Via [composer](https://getcomposer.org):
`$ composer require "f3ath/appnexus"`
#Use
```php
$storage = new F3\AppNexusClient\ArrayTokenStorage(); // Memcached or Apc storage is also available
$appnexus = new F3\AppNexusClient\AppNexusClient('username', 'password', "http://api-console.client-testing.adnxs.net/", $storage);
var_dump($appnexus->call(F3\AppNexusClient\HttpMethod::GET, '/user'));
```
| Change in documentation to use the API endpoint for the Client-Testing environment | Change in documentation to use the API endpoint for the Client-Testing environment
| Markdown | mit | f3ath/appnexusclient | markdown | ## Code Before:
A simple Appnexus API client
#Install
Via [composer](https://getcomposer.org):
`$ composer require "f3ath/appnexus"`
#Use
```php
$storage = new F3\AppNexusClient\ArrayTokenStorage(); // Memcached or Apc storage is also available
$appnexus = new F3\AppNexusClient\AppNexusClient('username', 'password', "http://sand.api.appnexus.com", $storage);
var_dump($appnexus->call(F3\AppNexusClient\HttpMethod::GET, '/user'));
```
## Instruction:
Change in documentation to use the API endpoint for the Client-Testing environment
## Code After:
A simple Appnexus API client
#Install
Via [composer](https://getcomposer.org):
`$ composer require "f3ath/appnexus"`
#Use
```php
$storage = new F3\AppNexusClient\ArrayTokenStorage(); // Memcached or Apc storage is also available
$appnexus = new F3\AppNexusClient\AppNexusClient('username', 'password', "http://api-console.client-testing.adnxs.net/", $storage);
var_dump($appnexus->call(F3\AppNexusClient\HttpMethod::GET, '/user'));
```
|
A simple Appnexus API client
#Install
Via [composer](https://getcomposer.org):
`$ composer require "f3ath/appnexus"`
#Use
```php
$storage = new F3\AppNexusClient\ArrayTokenStorage(); // Memcached or Apc storage is also available
- $appnexus = new F3\AppNexusClient\AppNexusClient('username', 'password', "http://sand.api.appnexus.com", $storage);
? ----- ^^ ^^^^^^^
+ $appnexus = new F3\AppNexusClient\AppNexusClient('username', 'password', "http://api-console.client-testing.adnxs.net/", $storage);
? +++++++++++++++++++++++ ^^^^^ ^^
var_dump($appnexus->call(F3\AppNexusClient\HttpMethod::GET, '/user'));
``` | 2 | 0.153846 | 1 | 1 |
5ed709189c3c8b5f80716d1df5ad579063a400f8 | client/views/index/index.html | client/views/index/index.html | <head>
<title>Fraction</title>
<link rel="icon" type="image/png" href="/img/favicon.png" />
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no">
<link href='http://fonts.googleapis.com/css?family=Open+Sans:400,700' rel='stylesheet' type='text/css'>
</head>
<body>
{{yield}}
</body>
<template name="index">
{{>header}}
{{>wrapper}}
</template>
| <head>
<title>Fraction</title>
<link rel="icon" type="image/png" href="/img/favicon.png" />
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no, minimal-ui">
<link href='http://fonts.googleapis.com/css?family=Open+Sans:400,700' rel='stylesheet' type='text/css'>
</head>
<body>
{{yield}}
</body>
<template name="index">
{{>header}}
{{>wrapper}}
</template>
| Add minimal-ui for mobile safari for iOS | Add minimal-ui for mobile safari for iOS
| HTML | mit | rrevanth/news,rrevanth/news | html | ## Code Before:
<head>
<title>Fraction</title>
<link rel="icon" type="image/png" href="/img/favicon.png" />
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no">
<link href='http://fonts.googleapis.com/css?family=Open+Sans:400,700' rel='stylesheet' type='text/css'>
</head>
<body>
{{yield}}
</body>
<template name="index">
{{>header}}
{{>wrapper}}
</template>
## Instruction:
Add minimal-ui for mobile safari for iOS
## Code After:
<head>
<title>Fraction</title>
<link rel="icon" type="image/png" href="/img/favicon.png" />
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no, minimal-ui">
<link href='http://fonts.googleapis.com/css?family=Open+Sans:400,700' rel='stylesheet' type='text/css'>
</head>
<body>
{{yield}}
</body>
<template name="index">
{{>header}}
{{>wrapper}}
</template>
| <head>
<title>Fraction</title>
<link rel="icon" type="image/png" href="/img/favicon.png" />
- <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no">
+ <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no, minimal-ui">
? ++++++++++++
<link href='http://fonts.googleapis.com/css?family=Open+Sans:400,700' rel='stylesheet' type='text/css'>
</head>
<body>
{{yield}}
</body>
<template name="index">
{{>header}}
{{>wrapper}}
</template> | 2 | 0.133333 | 1 | 1 |
63d2478dd04fb1287232a59225cb11fe567e3dcd | test_apps/test_app/test/simple_storage_deploy_spec.js | test_apps/test_app/test/simple_storage_deploy_spec.js | /*global contract, it, embark, assert, before*/
const SimpleStorage = embark.require('Embark/contracts/SimpleStorage');
contract("SimpleStorage Deploy", function () {
let SimpleStorageInstance;
before(async function() {
SimpleStorageInstance = await SimpleStorage.deploy({arguments: [150]}).send();
});
it("should set constructor value", async function () {
let result = await SimpleStorageInstance.methods.storedData().call();
assert.strictEqual(parseInt(result, 10), 150);
});
it("set storage value", async function () {
await SimpleStorageInstance.methods.set(150).send();
let result = await SimpleStorageInstance.methods.get().call();
assert.strictEqual(parseInt(result, 10), 150);
});
});
| /*global contract, it, embark, assert, before, web3*/
const SimpleStorage = embark.require('Embark/contracts/SimpleStorage');
const Utils = require('embarkjs').Utils;
contract("SimpleStorage Deploy", function () {
let simpleStorageInstance;
before(function(done) {
Utils.secureSend(web3, SimpleStorage.deploy({arguments: [150]}), {}, true, function(err, receipt) {
if(err) {
return done(err);
}
simpleStorageInstance = SimpleStorage;
simpleStorageInstance.options.address = receipt.contractAddress;
done();
});
});
it("should set constructor value", async function () {
let result = await simpleStorageInstance.methods.storedData().call();
assert.strictEqual(parseInt(result, 10), 150);
});
it("set storage value", function (done) {
Utils.secureSend(web3, simpleStorageInstance.methods.set(200), {}, false, async function(err) {
if (err) {
return done(err);
}
let result = await simpleStorageInstance.methods.get().call();
assert.strictEqual(parseInt(result, 10), 200);
done();
});
});
});
| Fix embark test using node option | Fix embark test using node option
| JavaScript | mit | iurimatias/embark-framework,iurimatias/embark-framework | javascript | ## Code Before:
/*global contract, it, embark, assert, before*/
const SimpleStorage = embark.require('Embark/contracts/SimpleStorage');
contract("SimpleStorage Deploy", function () {
let SimpleStorageInstance;
before(async function() {
SimpleStorageInstance = await SimpleStorage.deploy({arguments: [150]}).send();
});
it("should set constructor value", async function () {
let result = await SimpleStorageInstance.methods.storedData().call();
assert.strictEqual(parseInt(result, 10), 150);
});
it("set storage value", async function () {
await SimpleStorageInstance.methods.set(150).send();
let result = await SimpleStorageInstance.methods.get().call();
assert.strictEqual(parseInt(result, 10), 150);
});
});
## Instruction:
Fix embark test using node option
## Code After:
/*global contract, it, embark, assert, before, web3*/
const SimpleStorage = embark.require('Embark/contracts/SimpleStorage');
const Utils = require('embarkjs').Utils;
contract("SimpleStorage Deploy", function () {
let simpleStorageInstance;
before(function(done) {
Utils.secureSend(web3, SimpleStorage.deploy({arguments: [150]}), {}, true, function(err, receipt) {
if(err) {
return done(err);
}
simpleStorageInstance = SimpleStorage;
simpleStorageInstance.options.address = receipt.contractAddress;
done();
});
});
it("should set constructor value", async function () {
let result = await simpleStorageInstance.methods.storedData().call();
assert.strictEqual(parseInt(result, 10), 150);
});
it("set storage value", function (done) {
Utils.secureSend(web3, simpleStorageInstance.methods.set(200), {}, false, async function(err) {
if (err) {
return done(err);
}
let result = await simpleStorageInstance.methods.get().call();
assert.strictEqual(parseInt(result, 10), 200);
done();
});
});
});
| - /*global contract, it, embark, assert, before*/
+ /*global contract, it, embark, assert, before, web3*/
? ++++++
const SimpleStorage = embark.require('Embark/contracts/SimpleStorage');
+ const Utils = require('embarkjs').Utils;
contract("SimpleStorage Deploy", function () {
- let SimpleStorageInstance;
? ^
+ let simpleStorageInstance;
? ^
-
- before(async function() {
? ------
+ before(function(done) {
? ++++
- SimpleStorageInstance = await SimpleStorage.deploy({arguments: [150]}).send();
+ Utils.secureSend(web3, SimpleStorage.deploy({arguments: [150]}), {}, true, function(err, receipt) {
+ if(err) {
+ return done(err);
+ }
+ simpleStorageInstance = SimpleStorage;
+ simpleStorageInstance.options.address = receipt.contractAddress;
+ done();
+ });
});
it("should set constructor value", async function () {
- let result = await SimpleStorageInstance.methods.storedData().call();
? ^
+ let result = await simpleStorageInstance.methods.storedData().call();
? ^
assert.strictEqual(parseInt(result, 10), 150);
});
- it("set storage value", async function () {
? ------
+ it("set storage value", function (done) {
? ++++
- await SimpleStorageInstance.methods.set(150).send();
+ Utils.secureSend(web3, simpleStorageInstance.methods.set(200), {}, false, async function(err) {
+ if (err) {
+ return done(err);
+ }
- let result = await SimpleStorageInstance.methods.get().call();
? ^
+ let result = await simpleStorageInstance.methods.get().call();
? ++ ^
- assert.strictEqual(parseInt(result, 10), 150);
? ^^
+ assert.strictEqual(parseInt(result, 10), 200);
? ++ ^^
+ done();
+ });
});
}); | 32 | 1.454545 | 22 | 10 |
447212b43ba06f7cf7fa87b05e40d3fd081d1908 | config/application.rb | config/application.rb | require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module SCSAppraisals
class Application < Rails::Application
# Custom Logging
config.log_level = :info
config.logstasher.enabled = true
config.logstasher.suppress_app_log = false
config.logstasher.log_level = Logger::INFO
config.logstasher.logger_path = "#{Rails.root}/log/logstash_#{Rails.env}.json"
config.logstasher.source = 'logstasher'
def self.env_integer(key, default)
ENV.fetch(key, default).to_i
end
# For the MOJ internal template
config.app_title = 'SCS 360° Appraisals'
config.phase = 'alpha'
config.generators do |g|
g.test_framework :rspec, fixture: true, views: false
g.integration_tool :rspec, fixture: true, views: true
g.fixture_replacement :factory_girl, dir: "spec/support/factories"
end
config.rack_timeout = env_integer('RACK_TIMEOUT', 14)
config.token_timeout = env_integer('TOKEN_TIMEOUT_IN_MONTHS', 6).months
end
end
| require File.expand_path('../boot', __FILE__)
require 'rails/all'
require 'action_mailer/log_subscriber'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module SCSAppraisals
class Application < Rails::Application
# Custom Logging
config.log_level = :info
config.logstasher.enabled = true
config.logstasher.suppress_app_log = true
config.logstasher.log_level = Logger::INFO
config.logstasher.logger_path = "#{Rails.root}/log/logstash_#{Rails.env}.json"
config.logstasher.source = 'logstasher'
def self.env_integer(key, default)
ENV.fetch(key, default).to_i
end
# For the MOJ internal template
config.app_title = 'SCS 360° Appraisals'
config.phase = 'alpha'
config.generators do |g|
g.test_framework :rspec, fixture: true, views: false
g.integration_tool :rspec, fixture: true, views: true
g.fixture_replacement :factory_girl, dir: "spec/support/factories"
end
config.rack_timeout = env_integer('RACK_TIMEOUT', 14)
config.token_timeout = env_integer('TOKEN_TIMEOUT_IN_MONTHS', 6).months
end
end
| Disable default request logging - use logstasher instead | Disable default request logging - use logstasher instead
| Ruby | mit | ministryofjustice/scs_appraisals,ministryofjustice/scs_appraisals,ministryofjustice/scs_appraisals | ruby | ## Code Before:
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module SCSAppraisals
class Application < Rails::Application
# Custom Logging
config.log_level = :info
config.logstasher.enabled = true
config.logstasher.suppress_app_log = false
config.logstasher.log_level = Logger::INFO
config.logstasher.logger_path = "#{Rails.root}/log/logstash_#{Rails.env}.json"
config.logstasher.source = 'logstasher'
def self.env_integer(key, default)
ENV.fetch(key, default).to_i
end
# For the MOJ internal template
config.app_title = 'SCS 360° Appraisals'
config.phase = 'alpha'
config.generators do |g|
g.test_framework :rspec, fixture: true, views: false
g.integration_tool :rspec, fixture: true, views: true
g.fixture_replacement :factory_girl, dir: "spec/support/factories"
end
config.rack_timeout = env_integer('RACK_TIMEOUT', 14)
config.token_timeout = env_integer('TOKEN_TIMEOUT_IN_MONTHS', 6).months
end
end
## Instruction:
Disable default request logging - use logstasher instead
## Code After:
require File.expand_path('../boot', __FILE__)
require 'rails/all'
require 'action_mailer/log_subscriber'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module SCSAppraisals
class Application < Rails::Application
# Custom Logging
config.log_level = :info
config.logstasher.enabled = true
config.logstasher.suppress_app_log = true
config.logstasher.log_level = Logger::INFO
config.logstasher.logger_path = "#{Rails.root}/log/logstash_#{Rails.env}.json"
config.logstasher.source = 'logstasher'
def self.env_integer(key, default)
ENV.fetch(key, default).to_i
end
# For the MOJ internal template
config.app_title = 'SCS 360° Appraisals'
config.phase = 'alpha'
config.generators do |g|
g.test_framework :rspec, fixture: true, views: false
g.integration_tool :rspec, fixture: true, views: true
g.fixture_replacement :factory_girl, dir: "spec/support/factories"
end
config.rack_timeout = env_integer('RACK_TIMEOUT', 14)
config.token_timeout = env_integer('TOKEN_TIMEOUT_IN_MONTHS', 6).months
end
end
| require File.expand_path('../boot', __FILE__)
require 'rails/all'
+ require 'action_mailer/log_subscriber'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module SCSAppraisals
class Application < Rails::Application
# Custom Logging
config.log_level = :info
config.logstasher.enabled = true
- config.logstasher.suppress_app_log = false
? ^^^^
+ config.logstasher.suppress_app_log = true
? ^^^
config.logstasher.log_level = Logger::INFO
config.logstasher.logger_path = "#{Rails.root}/log/logstash_#{Rails.env}.json"
config.logstasher.source = 'logstasher'
def self.env_integer(key, default)
ENV.fetch(key, default).to_i
end
# For the MOJ internal template
config.app_title = 'SCS 360° Appraisals'
config.phase = 'alpha'
config.generators do |g|
g.test_framework :rspec, fixture: true, views: false
g.integration_tool :rspec, fixture: true, views: true
g.fixture_replacement :factory_girl, dir: "spec/support/factories"
end
config.rack_timeout = env_integer('RACK_TIMEOUT', 14)
config.token_timeout = env_integer('TOKEN_TIMEOUT_IN_MONTHS', 6).months
end
end | 3 | 0.081081 | 2 | 1 |
8d7efd65a3c9e82c4ae101c5be3c2cb01e001a02 | README.md | README.md | libpirt
=======
multithreaded runtime environment for the pi compiler
Authors
=======
- Mickaël Menu ([email protected])
- Maxence WO
- Dany Siriphol
- Loïc Girault ([email protected])
- Joël Hing ([email protected]) | libpirt
=======
multithreaded runtime environment for the pi compiler
Authors
=======
- Mickaël Menu ([email protected])
- Maxence WO
- Dany Siriphol
- Loïc Girault ([email protected])
- Joël Hing ([email protected])
- Florian Thibord ([email protected])
| Test of a commit from the university | Test of a commit from the university
| Markdown | mit | fredokun/libpiccolort,fredokun/libpiccolort | markdown | ## Code Before:
libpirt
=======
multithreaded runtime environment for the pi compiler
Authors
=======
- Mickaël Menu ([email protected])
- Maxence WO
- Dany Siriphol
- Loïc Girault ([email protected])
- Joël Hing ([email protected])
## Instruction:
Test of a commit from the university
## Code After:
libpirt
=======
multithreaded runtime environment for the pi compiler
Authors
=======
- Mickaël Menu ([email protected])
- Maxence WO
- Dany Siriphol
- Loïc Girault ([email protected])
- Joël Hing ([email protected])
- Florian Thibord ([email protected])
| libpirt
=======
multithreaded runtime environment for the pi compiler
Authors
=======
- Mickaël Menu ([email protected])
- Maxence WO
- Dany Siriphol
- Loïc Girault ([email protected])
- Joël Hing ([email protected])
+ - Florian Thibord ([email protected]) | 1 | 0.083333 | 1 | 0 |
b38424b6f1c08cbec2416918169f2b3323cb78c7 | signature/signature_test.go | signature/signature_test.go | package signature
| package signature
import (
"bytes"
"encoding/base64"
"testing"
)
const testTs = "1544544948"
const testQp = "abc=foo&def=bar"
const testBody = `{"a key":"some value"}`
const testSignature = "orb0adPhRCYND1WCAvPBr+qjm4STGtyvNDIDNBZ4Ir4="
func TestCalculateSignature(t *testing.T) {
v := NewValidator("other-secret", 2, nil, nil)
s, err := v.CalculateSignature(testTs, testQp, []byte(testBody))
if err != nil {
t.Errorf("Error calculating signature: %s, expected: orb0adPhRCYND1WCAvPBr+qjm4STGtyvNDIDNBZ4Ir4=", s)
}
drs, _ := base64.StdEncoding.DecodeString(testSignature)
if bytes.Compare(s, drs) != 0 {
t.Errorf("Unexpected signature: %s, expected: orb0adPhRCYND1WCAvPBr+qjm4STGtyvNDIDNBZ4Ir4=", s)
}
}
| Add basic test for calculating signature | Func: Add basic test for calculating signature
| Go | bsd-2-clause | messagebird/go-rest-api | go | ## Code Before:
package signature
## Instruction:
Func: Add basic test for calculating signature
## Code After:
package signature
import (
"bytes"
"encoding/base64"
"testing"
)
const testTs = "1544544948"
const testQp = "abc=foo&def=bar"
const testBody = `{"a key":"some value"}`
const testSignature = "orb0adPhRCYND1WCAvPBr+qjm4STGtyvNDIDNBZ4Ir4="
func TestCalculateSignature(t *testing.T) {
v := NewValidator("other-secret", 2, nil, nil)
s, err := v.CalculateSignature(testTs, testQp, []byte(testBody))
if err != nil {
t.Errorf("Error calculating signature: %s, expected: orb0adPhRCYND1WCAvPBr+qjm4STGtyvNDIDNBZ4Ir4=", s)
}
drs, _ := base64.StdEncoding.DecodeString(testSignature)
if bytes.Compare(s, drs) != 0 {
t.Errorf("Unexpected signature: %s, expected: orb0adPhRCYND1WCAvPBr+qjm4STGtyvNDIDNBZ4Ir4=", s)
}
}
| package signature
+
+ import (
+ "bytes"
+ "encoding/base64"
+ "testing"
+ )
+
+ const testTs = "1544544948"
+ const testQp = "abc=foo&def=bar"
+ const testBody = `{"a key":"some value"}`
+ const testSignature = "orb0adPhRCYND1WCAvPBr+qjm4STGtyvNDIDNBZ4Ir4="
+
+ func TestCalculateSignature(t *testing.T) {
+ v := NewValidator("other-secret", 2, nil, nil)
+ s, err := v.CalculateSignature(testTs, testQp, []byte(testBody))
+ if err != nil {
+ t.Errorf("Error calculating signature: %s, expected: orb0adPhRCYND1WCAvPBr+qjm4STGtyvNDIDNBZ4Ir4=", s)
+ }
+ drs, _ := base64.StdEncoding.DecodeString(testSignature)
+ if bytes.Compare(s, drs) != 0 {
+ t.Errorf("Unexpected signature: %s, expected: orb0adPhRCYND1WCAvPBr+qjm4STGtyvNDIDNBZ4Ir4=", s)
+ }
+ } | 23 | 23 | 23 | 0 |
3653f0a114cebc88b7ff5ea7ec82e0ec06100bee | src/_includes/feed/footer.xml | src/_includes/feed/footer.xml | {% capture feed_footer %}
<p><a href="{{ site.url }}{{ post.url }}" rel="nofollow">{{ post.title }}</a> was originally published by <a href="{{ site.url }}/about/" rel="nofollow">{{ site.owner.name }}</a> on <a href="{{ site.url }}" rel="nofollow">{{ site.title }}</a></p>
{% endcapture %}
{{ feed_footer | markdownify | xml_escape }}
| {% capture feed_footer %}
<p><a href="{{ site.url }}{{ post.url }}" rel="nofollow">{{ post.title }}</a> was originally published {% if site.author.name %}by <a href="{{ site.author.url | default: site.url }}" rel="nofollow">{{ site.author.name }}</a>{% endif %} on <a href="{{ site.url }}" rel="nofollow">{{ site.title }}</a></p>
{% endcapture %}
{{ feed_footer | markdownify | xml_escape }}
| Update author variable name and make conditional | Update author variable name and make conditional
| XML | mit | mmistakes/made-mistakes-jekyll,mmistakes/made-mistakes-jekyll,mmistakes/made-mistakes-jekyll | xml | ## Code Before:
{% capture feed_footer %}
<p><a href="{{ site.url }}{{ post.url }}" rel="nofollow">{{ post.title }}</a> was originally published by <a href="{{ site.url }}/about/" rel="nofollow">{{ site.owner.name }}</a> on <a href="{{ site.url }}" rel="nofollow">{{ site.title }}</a></p>
{% endcapture %}
{{ feed_footer | markdownify | xml_escape }}
## Instruction:
Update author variable name and make conditional
## Code After:
{% capture feed_footer %}
<p><a href="{{ site.url }}{{ post.url }}" rel="nofollow">{{ post.title }}</a> was originally published {% if site.author.name %}by <a href="{{ site.author.url | default: site.url }}" rel="nofollow">{{ site.author.name }}</a>{% endif %} on <a href="{{ site.url }}" rel="nofollow">{{ site.title }}</a></p>
{% endcapture %}
{{ feed_footer | markdownify | xml_escape }}
| {% capture feed_footer %}
- <p><a href="{{ site.url }}{{ post.url }}" rel="nofollow">{{ post.title }}</a> was originally published by <a href="{{ site.url }}/about/" rel="nofollow">{{ site.owner.name }}</a> on <a href="{{ site.url }}" rel="nofollow">{{ site.title }}</a></p>
+ <p><a href="{{ site.url }}{{ post.url }}" rel="nofollow">{{ post.title }}</a> was originally published {% if site.author.name %}by <a href="{{ site.author.url | default: site.url }}" rel="nofollow">{{ site.author.name }}</a>{% endif %} on <a href="{{ site.url }}" rel="nofollow">{{ site.title }}</a></p>
{% endcapture %}
{{ feed_footer | markdownify | xml_escape }} | 2 | 0.5 | 1 | 1 |