hexsha
stringlengths 40
40
| size
int64 140
1.03M
| ext
stringclasses 94
values | lang
stringclasses 21
values | max_stars_repo_path
stringlengths 3
663
| max_stars_repo_name
stringlengths 4
120
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
368k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
663
| max_issues_repo_name
stringlengths 4
120
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
663
| max_forks_repo_name
stringlengths 4
135
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 140
1.03M
| avg_line_length
float64 2.32
23.1k
| max_line_length
int64 11
938k
| alphanum_fraction
float64 0.01
1
| score
float32 3
4.25
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a000b9813b740e22ba48a6895f809fcd7e62c73f
| 3,496 |
tsx
|
TypeScript
|
src/containers/StatDisplay/StatDisplayList.tsx
|
vegerot/slippi-stats
|
fb9ec0c750b96f9dc78f614f8cc90d3719f749b8
|
[
"MIT"
] | 9 |
2020-11-14T17:26:35.000Z
|
2022-02-01T19:04:02.000Z
|
src/containers/StatDisplay/StatDisplayList.tsx
|
vegerot/slippi-stats
|
fb9ec0c750b96f9dc78f614f8cc90d3719f749b8
|
[
"MIT"
] | null | null | null |
src/containers/StatDisplay/StatDisplayList.tsx
|
vegerot/slippi-stats
|
fb9ec0c750b96f9dc78f614f8cc90d3719f749b8
|
[
"MIT"
] | 4 |
2020-11-14T17:47:20.000Z
|
2021-11-09T02:56:09.000Z
|
/** @jsx jsx */
import { css, jsx } from "@emotion/core";
import { reorder } from "lib/util";
import React from "react";
import { DragDropContext, Draggable, Droppable } from "react-beautiful-dnd";
import { Theme } from "styles/theme";
import { Divider } from "./Divider";
import { StatDisplayItem } from "./StatDisplayItem";
import { Statistic } from "./Statistic";
/** Props for the StatDisplayList drag-and-drop stat list. */
interface StatDisplayListProps {
  theme: Theme;
  /** Comma-separated stat ids; an empty segment renders as a divider. */
  stats: string;
  /** Pushes the re-serialized, reordered id list back to the owner. */
  setStats: (s: string) => void;
}
/**
 * Sortable list of stat panels backed by react-beautiful-dnd.
 * The order lives in two places: local state (`items`, for immediate UI
 * feedback) and the `stats` prop string (the source of truth), serialized
 * as comma-separated stat ids. An empty id segment renders a <Divider />.
 */
export const StatDisplayList: React.FC<StatDisplayListProps> = (props) => {
  const { theme, stats, setStats } = props;
  const [items, setItems] = React.useState<string[]>(stats.split(","));
  // Re-sync the local order whenever the serialized prop changes upstream.
  React.useEffect(() => {
    setItems(stats.split(","));
  }, [stats]);
  const updateStats = (statIds: string[]) => {
    // First update the local state
    setItems(statIds);
    // Then update the URL state
    setStats(statIds.join(","));
  };
  // Drag handler from react-beautiful-dnd (result is left untyped here).
  const onDragEnd = (result: any) => {
    // dropped outside the list
    if (!result.destination) {
      return;
    }
    const newItems = reorder(items, result.source.index, result.destination.index);
    updateStats(newItems);
  };
  const onRemove = (statId: string) => {
    const newItems = items.filter((s) => s !== statId);
    updateStats(newItems);
  };
  // NOTE: the prop-spread order on <StatDisplayItem> matters. For a stat
  // item the drag handle is attached to <Statistic> below; for a divider
  // (empty id) `additionalStyles` puts the handle on the whole row.
  return (
    <DragDropContext onDragEnd={onDragEnd}>
      <Droppable droppableId="droppable">
        {(dropProvided, dropSnapshot) => (
          <div
            {...dropProvided.droppableProps}
            ref={dropProvided.innerRef}
            css={css`
              margin: -1rem 0;
            `}
          >
            {items.map((item, index) => {
              const key = item ? item : "divider";
              return (
                <Draggable key={key} draggableId={key} index={index}>
                  {(dragProvided, dragSnapshot) => {
                    const additionalStyles = item ? null : dragProvided.dragHandleProps;
                    return (
                      <StatDisplayItem
                        ref={dragProvided.innerRef}
                        hasItem={Boolean(item)}
                        isDraggingOver={dropSnapshot.isDraggingOver}
                        {...dragProvided.draggableProps}
                        {...additionalStyles}
                        style={dragProvided.draggableProps.style}
                      >
                        {item ? (
                          <div
                            css={css`
                              position: relative;
                            `}
                          >
                            <Statistic statId={item} theme={theme} {...dragProvided.dragHandleProps} />
                            <div className="remove" onClick={() => onRemove(item)}>
                              ✕
                              <span
                                css={css`
                                  margin-left: 1rem;
                                `}
                              >
                                REMOVE
                              </span>
                            </div>
                          </div>
                        ) : (
                          <Divider />
                        )}
                      </StatDisplayItem>
                    );
                  }}
                </Draggable>
              );
            })}
            {dropProvided.placeholder}
          </div>
        )}
      </Droppable>
    </DragDropContext>
  );
};
| 32.981132 | 103 | 0.451659 | 3.09375 |
6b7c6f0c0cedd0b9bec0880af64c3bde5cadbcdf
| 9,717 |
js
|
JavaScript
|
js/site.js
|
SuperEnoki/amani
|
a8ce46bf276aa7e1d626c23eb3a37088f64d0a53
|
[
"MIT"
] | 1 |
2021-08-25T11:13:04.000Z
|
2021-08-25T11:13:04.000Z
|
js/site.js
|
SuperEnoki/amani
|
a8ce46bf276aa7e1d626c23eb3a37088f64d0a53
|
[
"MIT"
] | null | null | null |
js/site.js
|
SuperEnoki/amani
|
a8ce46bf276aa7e1d626c23eb3a37088f64d0a53
|
[
"MIT"
] | 1 |
2021-08-31T09:53:49.000Z
|
2021-08-31T09:53:49.000Z
|
jQuery(document).ready(function($) {
"use strict";
// Initializing scripts
var amani_grid = '.blog-feed';      // grid container used by masonry + infinite scroll
var amani_grid_item = '.grid-item'; // individual post cells inside the grid
// Re-runs layout work that depends on loaded images; called on ready,
// resize, focus and again after infinite scroll appends new posts.
function amani_init() {
    amani_magic_masonry();
    // Instagram image width/height fix: cap each thumbnail's height at its
    // rendered width (square cells), then reveal the strip.
    $('.header-instagram').imagesLoaded(function() {
        var header_instagram_width = $('.header-instagram li').width();
        $('.header-instagram li').css('max-height', header_instagram_width);
        $('.header-instagram').addClass('visible');
    });
    $('.footer-instagram').imagesLoaded(function() {
        var footer_instagram_width = $('.footer-instagram li').width();
        $('.footer-instagram li').css('max-height', footer_instagram_width);
        $('.footer-instagram').addClass('visible');
    });
}
/*
BEGIN
*/
// Menu dividing to fist - last half
var main_nav_length = Math.floor($('.main-nav div > ul > li').length / 2) + 1;
$('.main-nav div > ul > li:nth-child(n + ' + main_nav_length + ')').addClass('last-half');
// Search form click
$(document).on('click', '.search-trigger', function(e) {
    $('body').addClass('search-active');
    // Delay focus until the search overlay's CSS transition has started.
    setTimeout(function() {
        $('.search-wrap input').focus();
    }, 300);
});
// Clicking anywhere in the overlay except the input closes the search.
$('.search-wrap').on('click', function(e) {
    var target = $(e.target);
    if($(target).is('input') === false) {
        $('body').removeClass('search-active');
    }
});
// Escape Key
$(document).keyup(function(e) {
    if (e.keyCode == 27) { // escape key maps to keycode `27`
        $('body').removeClass('search-active');
        $('body').removeClass('menu-active');
    }
});
// Responsive hamburger click
$(document).on('click', '.responsive-menu-trigger', function() {
    if($('body').hasClass('menu-active')) {
        $('body').removeClass('menu-active');
    } else {
        // Push a history entry so the device back button can close the menu.
        history.pushState({id: 'menu'}, '', '');
        $('body').addClass('menu-active');
    }
});
window.addEventListener("popstate", function(e) {
    // BUG FIX: history.state is null when popping back to the initial page
    // entry (the common case after the pushState above), so dereferencing
    // history.state.id threw a TypeError. Guard before reading.
    // NOTE(review): closing the menu may also be desired when state is
    // null — confirm against the intended back-button behavior.
    if(history.state && history.state.id == 'menu') {
        $('body').removeClass('menu-active');
    }
});
// Clicking outside a link inside the responsive wrap closes the menu.
$('.responsive-wrap').on('click', function(e) {
    var target = $(e.target);
    if($(target).is('a') === false) {
        $('body').removeClass('menu-active');
    }
});
// Scrolltop click
$(document).on('click', '.scrolltop', function() {
    $('html, body').animate({ scrollTop: 0 }, 300);
});
// Wrap Calendar in Divs for better styling
$('.widget_calendar td:not(:has(>a))').wrapInner('<div></div>');
// Responsive submenu click
$(document).on('click', '.responsive-nav .menu-item-has-children > a', function(e) {
    e.preventDefault();
    var curmenu = $(this).parent();
    var submenu = $(this).parent().find('> ul');
    if(submenu.is(':visible')) {
        // Collapse: hide the submenu and bring back the sibling items.
        submenu.hide();
        curmenu.removeClass('active');
        curmenu.parent().find('> li').show();
    } else {
        // Expand: show the submenu and hide every other sibling item.
        submenu.show();
        curmenu.addClass('active');
        curmenu.parent().find('> li:not(.active)').hide();
    }
});
// Dropdown menu
$('nav ul.menu li').hover(function() {
    // On enter: cancel any pending hide scheduled by mouseleave, then reveal.
    var timeout = $(this).data('timeout');
    var $currentUl = $(this).find('> ul');
    if(timeout) clearTimeout(timeout);
    if($currentUl.hasClass('visible') === false && $currentUl.length > 0) {
        $(this).find('> ul').addClass('visible');
    }
}, function() {
    // On leave: hide 200ms later so brief mouse-outs do not close the menu.
    $(this).data('timeout', setTimeout($.proxy(function() {
        $(this).find('> ul').removeClass('visible');
    }, this), 200));
});
// Infinite Scroll
// Minimal jQuery infinite-scroll helper: watches window scroll and, when
// the last post nears the viewport, fetches the next page (URL extracted
// from the pagination links) and appends its posts to the container.
$.bktis = {
    containerSelector: '.blog-feed',
    postSelector: '.grid-item',
    paginationSelector: '.navigation',
    nextSelector: '.next',
    loadingHtml: '',
    show: function(elems) { elems.show(); },
    nextPageUrl: null,
    init: function(options) {
        // Shallow-merge caller options over the defaults above.
        for (var key in options) {
            $.bktis[key] = options[key];
        }
        $(function() {
            $.bktis.extractNextPageUrl($('body'));
            $(window).bind('scroll', $.bktis.scroll);
            $.bktis.scroll();
        });
    },
    scroll: function() {
        // Wait for images so offsets are measured after layout settles.
        $($.bktis.containerSelector).imagesLoaded(function() {
            if ($.bktis.nearBottom() && $.bktis.shouldLoadNextPage()) {
                $.bktis.loadNextPage();
            }
        });
    },
    nearBottom: function() {
        // True once the last post's top edge is within one viewport height.
        var scrollTop = $(window).scrollTop(),
            windowHeight = $(window).height(),
            lastPostOffset = $($.bktis.containerSelector).find($.bktis.postSelector).last().offset();
        if (!lastPostOffset) return;
        return (scrollTop > (lastPostOffset.top - windowHeight));
    },
    shouldLoadNextPage: function() {
        return !!$.bktis.nextPageUrl;
    },
    loadNextPage: function() {
        var nextPageUrl = $.bktis.nextPageUrl,
            loading = $($.bktis.loadingHtml);
        // Clear immediately so overlapping scroll events cannot double-fetch.
        $.bktis.nextPageUrl = null;
        loading.insertAfter($.bktis.containerSelector);
        $.get(nextPageUrl, function(html) {
            var dom = $(html),
                posts = dom.find($.bktis.containerSelector).find($.bktis.postSelector);
            $.bktis.show(posts.hide().appendTo($.bktis.containerSelector));
            $.bktis.extractNextPageUrl(dom);
            $.bktis.scroll();
        });
    },
    extractNextPageUrl: function(dom) {
        // Remember the "next" link's href, then drop the pagination markup.
        var pagination = dom.find($.bktis.paginationSelector);
        $.bktis.nextPageUrl = pagination.find($.bktis.nextSelector).attr('href');
        pagination.remove();
    }
}
// Enable infinite scroll only when the theme option class is present.
if($('.theme-body').hasClass('infinite_scroll') == true) {
    $.bktis.init({
        containerSelector: amani_grid,
        postSelector: amani_grid_item,
        paginationSelector: '.navigation',
        nextSelector: '.next',
        loadingHtml: '<div class="infinite-scroll-spinner"></div>',
        show: function(elems) {
            elems.show();
            amani_init();
        }
    });
}
// Magic Masonry
// Emulates masonry with CSS grid: measures each item's natural height and
// makes it span the matching number of implicit grid rows.
function amani_magic_masonry() {
    const grid = document.querySelector('.blog_layout-masonry' + ' ' + amani_grid);
    // checking if grid container exist
    if(typeof(grid) != 'undefined' && grid != null) {
        $(amani_grid).append("<div class='infinite-scroll-spinner'></div>");
        $(amani_grid).imagesLoaded(function() {
            const rowHeight = parseInt($(grid).css("grid-auto-rows"));
            const rowGap = parseInt($(grid).css("grid-row-gap"));
            // Temporarily let items take their natural height for measuring.
            grid.style.gridAutoRows = "auto";
            grid.style.alignItems = "self-start";
            grid.querySelectorAll(amani_grid_item).forEach(item => {
                // Span enough rows to cover the measured height.
                item.style.gridRowEnd = `span ${Math.ceil(
                    (item.clientHeight + rowGap) / (rowHeight + rowGap)
                )}`;
                if($(item).hasClass('visible') == false) {
                    $(item).addClass('visible');
                }
            });
            // Drop the temporary measuring overrides set above.
            grid.removeAttribute("style");
            $('.infinite-scroll-spinner').fadeOut('normal', function() { $(this).remove(); });
        });
    } else {
        // No masonry layout on this page: just reveal items once loaded.
        $(amani_grid).imagesLoaded(function() {
            $(amani_grid_item).addClass('visible');
        });
        $('.infinite-scroll-spinner').fadeOut('normal', function() { $(this).remove(); });
    }
}
// When images loaded we show items
$('.featured-posts').imagesLoaded(function() {
    $('.featured-posts .grid-item').addClass('visible');
});
// Slideshow
// Small jQuery slideshow plugin: builds a dot paginator, auto-advances
// every `interval` ms and pauses while the pointer hovers the element.
var amani_slideshow = (function() {
    function amani_slideshow(element, options) {
        var _ = this;
        _.settings = $.extend($.fn.amani_slideshow.defaults, options);
        _.el = element;
        _.$element = $(element);
        _.$photos = _.$element.children();
        _.count = _.$photos.length;
        _.init();
    }
    amani_slideshow.prototype.init = function() {
        var _ = this;
        // Guard: only build the paginator (and start the timer) once.
        if(_.$element.find('.slideshow-paginator').length < 1) {
            _.$element.append('<nav class="slideshow-paginator" />');
            for (var i = 0; i < _.count; i++) {
                _.$element.find('.slideshow-paginator').append('<span/>');
            }
            _.$element.find('.slideshow-paginator span:first-child').addClass('current');
            _.$element.find('.grid-item:first-child').addClass('current');
            // Custom 'slide_switch' event keeps dot and slide in sync.
            _.$element.find('.slideshow-paginator').on('slide_switch', 'span', function() {
                _.$element.find('.slideshow-paginator span').removeClass('current');
                $(this).addClass('current');
                var slide_switch = $(this).index();
                _.$photos.removeClass('current');
                _.$photos.eq(slide_switch).addClass('current');
            });
            _.$element.find('.slideshow-paginator').on('click', 'span', function() {
                $(this).trigger('slide_switch');
            });
            _.$element.data('interval', _.settings.interval);
            _.play();
            _.autoPlayPause();
        }
    }
    amani_slideshow.prototype.play = function() {
        var _ = this;
        // Advance to the next dot (wrapping to the first) unless hover
        // paused us; either way, re-arm the timer to keep the loop alive.
        if(_.$element.data('stopped') != 1) {
            var $paginator_current = _.$element.find('.slideshow-paginator span.current');
            var $paginator_next = $paginator_current.next();
            if($paginator_next.length > 0) {
                $paginator_next.trigger('slide_switch');
            } else {
                _.$element.find('.slideshow-paginator span:first-child').trigger('slide_switch');
            }
            setTimeout(function() { _.play(); }, _.$element.data('interval'));
        } else {
            setTimeout(function() { _.play(); }, _.$element.data('interval'));
        }
    };
    amani_slideshow.prototype.autoPlayPause = function() {
        var _ = this;
        // Pause auto-advance while the pointer is over the slideshow.
        _.$element.on({
            mouseenter: function(){
                _.$element.data('stopped', 1);
            },
            mouseleave: function(){
                _.$element.data('stopped', 0);
            }
        });
    };
    // jQuery plugin wrapper: instantiates once per element; passing `true`
    // returns the instance, a string proxies to a method call.
    $.fn.amani_slideshow = function(options) {
        var instance;
        instance = this.data('amani_slideshow');
        if (!instance) {
            return this.each(function() {
                return $(this).data('amani_slideshow', new amani_slideshow(this,options));
            });
        }
        if (options === true) return instance;
        if ($.type(options) === 'string') instance[options]();
        return this;
    };
    $.fn.amani_slideshow.defaults = {
        interval: 5000,
    };
}).call(this);
// Init
amani_init();
$('.top_featured_layout-slideshow .featured-top').amani_slideshow({
    interval: 5000
});
// Re-run setup when the theme fires its custom re-init event.
document.addEventListener('theme-reinit', function() {
    amani_init();
    $('.top_featured_layout-slideshow .featured-top').amani_slideshow({
        interval: 5000
    });
});
$(window).resize(function() {
    amani_init();
});
$(window).focus(function() {
    amani_init();
});
});
| 27.218487 | 94 | 0.617063 | 3.109375 |
1a4357d52dee977cfc6596753d1ab58374e8af64
| 1,817 |
py
|
Python
|
cypher.py
|
JCode1986/ceasar-cipher
|
bd6259ae8ce51ae8a9e8a7bbbaebf46d9d60c0e6
|
[
"MIT"
] | null | null | null |
cypher.py
|
JCode1986/ceasar-cipher
|
bd6259ae8ce51ae8a9e8a7bbbaebf46d9d60c0e6
|
[
"MIT"
] | null | null | null |
cypher.py
|
JCode1986/ceasar-cipher
|
bd6259ae8ce51ae8a9e8a7bbbaebf46d9d60c0e6
|
[
"MIT"
] | null | null | null |
def encrypt(message, key):
    """Caesar-cipher `message` by shifting each letter `key` positions.

    Non-alphabetic characters pass through unchanged. Case is preserved
    and shifts wrap around within 'A'-'Z' / 'a'-'z'. `key` may be
    negative (decrypt() relies on this).
    """
    shifted = []
    for ch in message:
        if not ch.isalpha():
            # Punctuation, digits and spaces are copied verbatim.
            shifted.append(ch)
            continue
        # ord() gives the Unicode code point; shift it by the key.
        code = ord(ch) + key
        if ch.isupper():
            # Wrap within the uppercase alphabet.
            if code > ord('Z'):
                code -= 26
            elif code < ord('A'):
                code += 26
        elif ch.islower():
            # Wrap within the lowercase alphabet.
            if code > ord('z'):
                code -= 26
            elif code < ord('a'):
                code += 26
        shifted.append(chr(code))
    return ''.join(shifted)
def decrypt(encoded, key):
    """Undo a Caesar shift: decrypting with `key` is encrypting with its negation."""
    return encrypt(encoded, 0 - key)
def encrypt_input():
    """Prompt for a message and a key, returning the encrypted result string."""
    e_message = input('\nEnter message to encrypt: ')
    e_key = int(input('\nEnter key number from 1 - 26: '))
    # FIX: re-prompt until the key is inside the advertised 1-26 range;
    # the original only rejected keys > 26 and accepted 0 and negatives.
    while e_key > 26 or e_key < 1:
        e_key = int(input('\nEnter key number from 1 - 26: '))
    return f'\nYour encrypted message is =====> {encrypt(e_message, e_key)}'
def decrypt_input():
    """Prompt for an encoded message and a key, returning the decrypted string."""
    d_message = input('\nEnter message to decrypt: ')
    d_key = int(input('\nEnter key number from 1 - 26: '))
    # FIX: re-prompt until the key is inside the advertised 1-26 range;
    # the original only rejected keys > 26 and accepted 0 and negatives.
    while d_key > 26 or d_key < 1:
        d_key = int(input('\nEnter key number from 1 - 26: '))
    return f'\nYour decrypted message is =====> {decrypt(d_message, d_key)}'
def start():
    """Ask which operation to run and return its result string.

    Returns None (which the caller then prints) when the choice is
    neither 'e' nor 'd'.
    """
    question = input('\nEncrpyt (e) or Decrypt (d) a message? ')
    if question == 'e':
        return encrypt_input()
    if question == 'd':
        return decrypt_input()
    # else:
    #     start()
if __name__ == "__main__":
    # Loop forever, printing the result of each round.
    while True:
        print(start())
| 27.953846 | 90 | 0.545405 | 3.53125 |
a351b57d89baf640edb8ca533f3a52dc59fb3324
| 1,095 |
c
|
C
|
Alura/Avançando na Linguagem/fogefoge.c
|
Evaldo-comp/C
|
63128d8193575a0af8af435bdf990c6dd629e746
|
[
"MIT"
] | null | null | null |
Alura/Avançando na Linguagem/fogefoge.c
|
Evaldo-comp/C
|
63128d8193575a0af8af435bdf990c6dd629e746
|
[
"MIT"
] | null | null | null |
Alura/Avançando na Linguagem/fogefoge.c
|
Evaldo-comp/C
|
63128d8193575a0af8af435bdf990c6dd629e746
|
[
"MIT"
] | 1 |
2020-10-07T08:06:29.000Z
|
2020-10-07T08:06:29.000Z
|
#include<stdio.h>
#include<stdlib.h>
#include "fogefoge.h"
char** mapa;   /* dynamically allocated map matrix (linhas rows) */
int linhas;    /* number of rows, read from the map file header */
int colunas;   /* number of columns, read from the map file header */
/* Reads the map from "mapa.txt": the first line holds the dimensions,
 * the remaining lines hold one map row each. Exits on open failure. */
void lemapa(){
    FILE* f;
    f = fopen("mapa.txt", "r"); /* open the map text file read-only */
    if (f == 0) { /* abort if the file could not be opened */
        printf("Erro na leitura do mapa");
        exit(1);
    }
    /* read the row and column counts from the map header */
    fscanf(f, "%d %d", &linhas, &colunas);
    alocamapa(); /* allocate the matrix now that the size is known */
    /* BUG FIX: the original loop read a hard-coded 5 rows instead of the
     * `linhas` value just read from the file. */
    for(int i = 0; i < linhas; i++){
        fscanf(f, "%s", mapa[i]);
    }
    fclose(f);
}
/* Dynamically allocates the map matrix: `linhas` rows of `colunas`
 * characters, plus one byte per row for the NUL written by fscanf "%s". */
void alocamapa(){
    mapa = malloc(sizeof(char*) * linhas);
    for(int i = 0; i < linhas; i++){
        /* FIX (clarity): parenthesize (colunas + 1). The original
         * `sizeof(char) * colunas + 1` allocated the same number of bytes
         * only because sizeof(char) == 1; the intent is colunas+1 chars. */
        mapa[i] = malloc(sizeof(char) * (colunas + 1));
    }
}
/* Releases every row of the map, then the row-pointer array itself. */
void liberamapa(){
    int i = 0;
    while(i < linhas){
        free(mapa[i]);
        i++;
    }
    free(mapa);
}
/* Entry point: load the map from disk, print every row, free memory. */
int main(){
    lemapa();
    for(int i = 0; i < linhas; i++){
        printf("%s\n", mapa[i]);
    }
    liberamapa();
}
| 18.559322 | 82 | 0.552511 | 3.015625 |
962ff91e7d1a252eec8d680879245a200ff4e7f7
| 1,602 |
ps1
|
PowerShell
|
Public/Get-RedditComment.ps1
|
1RedOne/PSReddit
|
5d887c04525409d3ff069ca66e29dea39cba9a55
|
[
"Apache-2.0"
] | 19 |
2015-09-16T19:00:33.000Z
|
2020-03-11T18:03:57.000Z
|
Public/Get-RedditComment.ps1
|
1RedOne/PSReddit
|
5d887c04525409d3ff069ca66e29dea39cba9a55
|
[
"Apache-2.0"
] | 3 |
2016-06-13T08:43:20.000Z
|
2020-02-09T02:59:48.000Z
|
Public/Get-RedditComment.ps1
|
1RedOne/PSReddit
|
5d887c04525409d3ff069ca66e29dea39cba9a55
|
[
"Apache-2.0"
] | 9 |
2015-09-16T19:03:15.000Z
|
2020-03-11T18:04:00.000Z
|
<#
.Synopsis
Gets the comments of a Reddit link, or several.
.DESCRIPTION
Uses the Reddit API to get comments made on a given link, collection of posts or the id.
.EXAMPLE
Get-RedditComment -id "3i9psm"
.EXAMPLE
"https://www.reddit.com/r/redditdev/comments/3i9psm/how_can_i_find_the_id_of_the_original_post_in_a/" | Get-RedditComment
.EXAMPLE
Get-RedditPost -Name PowerShell | Select-Object -First 1 | Get-RedditComment
#>
function Get-RedditComment
{
    [CmdletBinding()]
    Param (
        # Accepts a bare post id, a "t3_"-prefixed fullname, or a full
        # comment-thread URL (also via pipeline or property name).
        [Parameter(
            Position = 1,
            Mandatory = $true,
            ValueFromPipeline = $true,
            ValueFromPipelineByPropertyName = $true
        )]
        [Alias("Link", "Name")]
        [string]
        $id
    )
    Process
    {
        ## Depending on how we passed the id to the function, we need to
        ## strip some characters.
        switch ($id)
        {
            {($id -like "t3_*")}
            {
                # Fullname form: drop the "t3_" prefix.
                $id = $id -replace "t3_", ""
                break
            }
            {($id -like "http*")}
            {
                # URL form: the post id is the 7th path segment.
                $id = $id.Split("/")[6]
                break
            }
        }
        $uri = 'http://www.reddit.com/comments/{0}.json' -f $id
        Write-Verbose "Sending request to $uri"
        $listings = (Invoke-RestMethod $uri) | Where kind -eq 'Listing'
        # Comments have a type 't1' in Reddit API
        $comments = $listings | ForEach-Object { $_.data.children } | Where-Object kind -eq 't1' | Select-Object -Expand data
        # Tag each comment with a custom type name (used for formatting),
        # then emit it to the pipeline.
        $comments | ForEach-Object { $_.PSObject.TypeNames.Insert(0,'PowerReddit.Comment'); $_ }
    }
}
| 28.607143 | 125 | 0.567416 | 3.140625 |
c385eb76d4bd5255dee10f84abba9f6a04a083ac
| 1,150 |
cs
|
C#
|
Assets/Scripts/Sounds/SoundManager.cs
|
KristopherMoore/Midnight-The-Final-Sun
|
a6846369717ecea929a625a5b31e0500e34d978e
|
[
"Unlicense"
] | 3 |
2019-05-15T15:42:55.000Z
|
2019-10-10T19:32:58.000Z
|
Assets/Scripts/Sounds/SoundManager.cs
|
KristopherMoore/Midnight-The-Final-Sun
|
a6846369717ecea929a625a5b31e0500e34d978e
|
[
"Unlicense"
] | 1 |
2019-05-18T21:23:43.000Z
|
2019-05-18T21:23:43.000Z
|
Assets/Scripts/Sounds/SoundManager.cs
|
KristopherMoore/Midnight-The-Final-Sun
|
a6846369717ecea929a625a5b31e0500e34d978e
|
[
"Unlicense"
] | null | null | null |
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class SoundManager : MonoBehaviour {

    // Scene-wide access point, assigned in Start().
    public static SoundManager Instance;

    // Cached reference to the equipped weapon object and its audio source.
    private GameObject playerWeapon;
    private AudioSource weaponAudioSource;

    // Unity lifecycle hook: publish the singleton and cache the weapon audio.
    void Start()
    {
        Instance = this;
        findEquippedWeapon();
        weaponAudioSource = playerWeapon.GetComponent<AudioSource>();
    }

    // Plays the audio clip that matches the requested sound name.
    public void PlaySound(string soundName)
    {
        switch (soundName)
        {
            case "Fire":
                weaponAudioSource.Play();
                break;
        }
    }

    // Resolves the currently equipped weapon without knowing its name:
    // camera -> WEAPON bone -> first child (whatever is equipped now).
    private void findEquippedWeapon()
    {
        GameObject current = GameObject.Find("Main Camera");
        current = HelperK.FindSearchAllChildren(current.transform, "WEAPON").gameObject;
        playerWeapon = current.transform.GetChild(0).gameObject;
    }
}
| 26.136364 | 178 | 0.68087 | 3 |
1a8f48a7766b74f41bbae07e905b92e629940ae6
| 1,512 |
py
|
Python
|
inference.py
|
Jia-Wei-Liao/Set14_Dataset_Super-Resolution
|
a24098d8dc52ea463b2e1dca838ad60da019a720
|
[
"MIT"
] | null | null | null |
inference.py
|
Jia-Wei-Liao/Set14_Dataset_Super-Resolution
|
a24098d8dc52ea463b2e1dca838ad60da019a720
|
[
"MIT"
] | null | null | null |
inference.py
|
Jia-Wei-Liao/Set14_Dataset_Super-Resolution
|
a24098d8dc52ea463b2e1dca838ad60da019a720
|
[
"MIT"
] | null | null | null |
import os
import tqdm
import imageio
import argparse
import options.options as option
from solvers import create_solver
from data import create_dataset, create_dataloader
from utils import util
def main(args):
    """Run super-resolution inference for every configured test dataset.

    Loads solver/dataset options from ``args.opt``, runs the pretrained
    solver over each test loader and writes ``*_pred.png`` images into a
    ``result`` directory two levels above the checkpoint file.
    """
    opt = option.parse(args.opt)
    opt = option.dict_to_nonedict(opt)
    solver = create_solver(opt)
    bm_names = []
    test_loaders = []
    for _, dataset_opt in sorted(opt['datasets'].items()):
        test_set = create_dataset(dataset_opt)
        test_loader = create_dataloader(test_set, dataset_opt)
        test_loaders.append(test_loader)
        bm_names.append(test_set.name())
    for bm, test_loader in zip(bm_names, test_loaders):
        # FIX: the original split on os.sep but re-joined with '/', which
        # breaks on Windows paths; two dirname() calls drop the last two
        # components portably and are equivalent on POSIX.
        pretrained = opt['solver']['pretrained_path']
        save_path = os.path.dirname(os.path.dirname(pretrained))
        save_img_path = os.path.join(save_path, 'result')
        os.makedirs(save_img_path, exist_ok=True)
        for batch in tqdm.tqdm(test_loader):
            solver.feed_data(batch, need_HR=False)
            solver.test()
            visuals = solver.get_current_visual(need_HR=False)
            # Output name: input basename without extension + '_pred.png'.
            out_name = os.path.basename(batch['LR_path'][0])[:-4] + '_pred.png'
            imageio.imwrite(os.path.join(save_img_path, out_name), visuals['SR'])
    print("finish!")
if __name__ == '__main__':
    # CLI entry point: the only argument is the path to the options JSON.
    parser = argparse.ArgumentParser()
    parser.add_argument('-opt', type=str, required=True,
                        help='path to options json file.')
    args = parser.parse_args()
    main(args)
| 30.857143 | 76 | 0.647487 | 3.171875 |
f4af96701c441a0950125d7b80f203c18f017bdf
| 2,777 |
tsx
|
TypeScript
|
src/components/addpeer/TabSteps.tsx
|
mastermind88/wiretrustee-dashboard
|
6724e5bfaabdb5662ef48de2c965a1260e41a1a3
|
[
"BSD-3-Clause"
] | 3 |
2022-03-27T19:33:56.000Z
|
2022-03-28T15:03:23.000Z
|
src/components/addpeer/TabSteps.tsx
|
mastermind88/wiretrustee-dashboard
|
6724e5bfaabdb5662ef48de2c965a1260e41a1a3
|
[
"BSD-3-Clause"
] | 3 |
2022-03-25T16:33:52.000Z
|
2022-03-26T11:51:07.000Z
|
src/components/addpeer/TabSteps.tsx
|
mastermind88/wiretrustee-dashboard
|
6724e5bfaabdb5662ef48de2c965a1260e41a1a3
|
[
"BSD-3-Clause"
] | 2 |
2022-03-26T08:17:09.000Z
|
2022-03-26T11:29:53.000Z
|
import {useDispatch, useSelector} from "react-redux";
import Highlight from 'react-highlight';
import "highlight.js/styles/mono-blue.css";
import "highlight.js/lib/languages/bash";
import { StepCommand } from './types'
import {
Typography,
Space,
Steps, Button
} from "antd";
import {copyToClipboard} from "../../utils/common";
import {CheckOutlined, CopyOutlined} from "@ant-design/icons";
import React, {useEffect, useState} from "react";
const { Title, Text } = Typography;
const { Step } = Steps;
/** Props for TabSteps: the ordered list of setup-command steps to render. */
type Props = {
  stepsItems: Array<StepCommand>
};
/**
 * Vertical step list of setup commands. Each step can render a copy
 * button; clicking it copies the command text and flashes a check icon
 * for two seconds.
 */
const TabSteps: React.FC<Props> = ({stepsItems}) => {
  // Local copy of the steps so the transient `copied` flag can be toggled
  // without involving the parent; re-synced when the prop changes.
  const [steps, setSteps] = useState(stepsItems)
  useEffect(() => setSteps(stepsItems), [stepsItems])
  // Copy `commands` to the clipboard and mark the step as copied.
  // FIX 1: the original mutated the step object held in state and then
  // called setSteps([...steps]); state is now updated immutably.
  // FIX 2: the original 2s timeout re-invoked onCopyClick, which called
  // copyToClipboard again and clobbered anything the user had copied in
  // the meantime; the timeout now only clears the `copied` flag.
  const onCopyClick = (key: string | number, commands: React.ReactNode | string, copied: boolean) => {
    if (!(typeof commands === 'string')) return
    copyToClipboard(commands)
    setSteps(prev => prev.map(s => (s.key === key ? {...s, copied} : s)))
    if (copied) {
      setTimeout(() => {
        setSteps(prev => prev.map(s => (s.key === key ? {...s, copied: false} : s)))
      }, 2000)
    }
  }
  return (
    <Steps direction="vertical" current={0}>
      {steps.map(c =>
        <Step
          key={c.key}
          title={c.title}
          description={
            <Space className="nb-code" direction="vertical" size="small" style={{display: "flex"}}>
              { (c.commands && (typeof c.commands === 'string' || c.commands instanceof String)) ? (
                <Highlight className='bash'>
                  {c.commands}
                </Highlight>
              ) : (
                c.commands
              )}
              { c.showCopyButton &&
                <>
                  { !c.copied ? (
                    <Button type="text" size="large" className="btn-copy-code" icon={<CopyOutlined/>}
                            style={{color: "rgb(107, 114, 128)"}}
                            onClick={() => onCopyClick(c.key, c.commands, true)}/>
                  ): (
                    <Button type="text" size="large" className="btn-copy-code" icon={<CheckOutlined/>}
                            style={{color: "green"}}/>
                  )}
                </>
              }
            </Space>
          }
        />
      )}
    </Steps>
  )
}
export default TabSteps;
| 35.151899 | 118 | 0.443644 | 3.15625 |
c679f84c442722ea50c6ed1d56b59b685f35c94f
| 2,333 |
py
|
Python
|
models.py
|
nas3444/capstoneproject
|
6653591ecf26fc466a3429969c514563b00f17cd
|
[
"PostgreSQL",
"MIT"
] | null | null | null |
models.py
|
nas3444/capstoneproject
|
6653591ecf26fc466a3429969c514563b00f17cd
|
[
"PostgreSQL",
"MIT"
] | null | null | null |
models.py
|
nas3444/capstoneproject
|
6653591ecf26fc466a3429969c514563b00f17cd
|
[
"PostgreSQL",
"MIT"
] | null | null | null |
from datetime import datetime
import os
from sqlalchemy import Column, String, Integer, create_engine, ForeignKey
from flask_sqlalchemy import SQLAlchemy
import json
from config import DatabaseURI
from flask_migrate import Migrate
# Creating DB
database_path = DatabaseURI.SQLALCHEMY_DATABASE_URI  # connection string from config
db = SQLAlchemy()  # shared SQLAlchemy instance; bound to the app in setup_db()
def setup_db(app, database_path=database_path):
    """Bind the shared SQLAlchemy instance to `app` and create all tables."""
    app.config["SQLALCHEMY_DATABASE_URI"] = database_path
    app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
    db.app = app
    db.init_app(app)
    db.create_all()
    # Register flask-migrate commands; the handle itself is not needed.
    Migrate(app, db)
# Movies Model
class Movie(db.Model):
    """SQLAlchemy model for a movie; one movie has many actors."""
    __tablename__ = 'movies'
    id = Column(Integer, primary_key=True)
    title = Column(String, nullable=False, unique=True)
    image = Column(String)  # presumably a poster URL/path -- TODO confirm
    release_date = Column(db.DateTime)
    # One-to-many: Actor rows point back via Actor.movie_id.
    actors = db.relationship('Actor', backref='Movie',
                             lazy='dynamic')

    def __init__(self, title, image, release_date):
        self.title = title
        self.image = image
        self.release_date = release_date

    def insert(self):
        """Add this row to the session and commit."""
        db.session.add(self)
        db.session.commit()

    def update(self):
        """Commit pending attribute changes on this row."""
        db.session.commit()

    def delete(self):
        """Delete this row and commit."""
        db.session.delete(self)
        db.session.commit()

    def format(self):
        """Return a JSON-serializable dict view of this movie."""
        return {
            'id': self.id,
            'title': self.title,
            'image': self.image,
            'release_date': self.release_date
        }
# Actors Model
class Actor(db.Model):
    """SQLAlchemy model for an actor, optionally linked to a movie."""
    # BUG FIX: the original wrote ___tablename___ (triple underscores),
    # which SQLAlchemy ignores, so the table was auto-named from the class
    # instead of being called 'actors' as intended.
    __tablename__ = 'actors'
    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False, unique=True)
    age = Column(Integer)
    gender = Column(String)
    # Foreign key to the owning movie (see Movie.actors relationship).
    movie_id = db.Column(db.Integer, db.ForeignKey('movies.id'))

    def __init__(self, name, age, gender, movie_id):
        self.name = name
        self.age = age
        self.gender = gender
        self.movie_id = movie_id

    def insert(self):
        """Add this row to the session and commit."""
        db.session.add(self)
        db.session.commit()

    def update(self):
        """Commit pending attribute changes on this row."""
        db.session.commit()

    def delete(self):
        """Delete this row and commit."""
        db.session.delete(self)
        db.session.commit()

    def format(self):
        """Return a JSON-serializable dict view of this actor."""
        return {
            'id': self.id,
            'name': self.name,
            'age': self.age,
            'gender': self.gender,
            'movie_id': self.movie_id
        }
| 24.819149 | 73 | 0.621089 | 3.28125 |
4b68327de115fb63425fb394ef35d2566477cad3
| 4,240 |
cpp
|
C++
|
2 Year/2015 Pattern/DSL/Group A/Assignment 9.cpp
|
bhushanasati25/College
|
638ab4f038a783beae297652623e8c6679465fef
|
[
"MIT"
] | 4 |
2020-10-22T15:37:09.000Z
|
2022-02-17T17:30:03.000Z
|
2 Year/2015 Pattern/DSL/Group A/Assignment 9.cpp
|
mohitkhedkar/College
|
f713949827d69f13b1bf8fb082e86e8bead7ac6e
|
[
"MIT"
] | null | null | null |
2 Year/2015 Pattern/DSL/Group A/Assignment 9.cpp
|
mohitkhedkar/College
|
f713949827d69f13b1bf8fb082e86e8bead7ac6e
|
[
"MIT"
] | 5 |
2021-06-19T01:23:18.000Z
|
2022-02-26T14:47:15.000Z
|
// Write C/C++ program for storing matrix. Write functions for
// 1. Check whether given matrix is upper triangular or not
// 2. Compute summation of diagonal elements
// 3. Compute transpose of matrix
// 4. Add, subtract and multiply two matrices
// Author: Mohit Khedkar
#include<iostream>
using namespace std;
// Forward declarations for the menu operations implemented below.
void diagonal(int[3][3]);
void triangular(int[3][3]);
void transpose(int[3][3]);
void arithmatic(int[3][3]);
// Entry point: reads a 3x3 matrix, echoes it back, then dispatches to
// the operation the user picks from the menu.
int main()
{
    int mat[3][3], choice;
    cout<<"\nEnter the elements in matrix";
    for(int i=0;i<3;i++)
    {
        for(int j=0;j<3;j++)
        {
            cin>>mat[i][j];
        }
    }
    // Echo the matrix back, row by row.
    for(int i=0;i<3;i++)
    {
        for(int j=0;j<3;j++)
        {
            cout<<mat[i][j]<<"\t";
        }
        cout<<"\n";
    }
    cout<<"\nMENU\n 1) To check for diagonal elements \n 2) To check for upper triangular matrix \n 3) Transpose \n 4) Arithmatic operations\n";
    cin>>choice;
    switch(choice)
    {
        case 1 :diagonal(mat);
            break;
        case 2 :triangular(mat);
            break;
        case 3 :transpose(mat);
            break;
        case 4 :arithmatic(mat);
            break;
        default : cout<<"\nEnter the valid option!!!";
            break;
    }
    return 0;
}
// Reports whether `mat` is a diagonal matrix.
// BUG FIX: the original counted *zero* entries ON the main diagonal
// (i == j && mat[i][j] == 0), which is the opposite of the definition —
// a diagonal matrix requires every OFF-diagonal entry to be zero (the
// diagonal itself may hold any value).
void diagonal(int mat[3][3])
{
    bool offDiagonalAllZero = true;
    for(int i = 0; i < 3; i++)
    {
        for(int j = 0; j < 3; j++)
        {
            if(i != j && mat[i][j] != 0)
            {
                offDiagonalAllZero = false;
            }
        }
    }
    if(offDiagonalAllZero){
        cout<<"\nIt is a diagonal matrix";}
    else
        cout<<"\nIt is not a diagonal matrix";
}
// Checks whether `mat` is upper triangular: counts the below-diagonal
// entries (i > j) that are zero. A 3x3 matrix has exactly 3 such
// entries, so b == 3 means all of them are zero.
void triangular(int mat[3][3])
{
    int b=0;
    for(int i=0;i<3;i++)
    {
        for(int j=0;j<3;j++)
        {
            if(i>j&&mat[i][j]==0)
            {
                b++;
            }
        }
    }
    if(b==3)
    {cout<<"\nIt is an upper triangular matrix\n";
    }
    else
        cout<<"It is not an upper traingular matrix";
}
// Prints the transpose of `mat`: columns of the source become rows.
void transpose(int mat[3][3])
{
    for (int row = 0; row < 3; row++)
    {
        for (int col = 0; col < 3; col++)
        {
            // Element (col,row) of the source appears at (row,col) here.
            cout << mat[col][row] << "\t";
        }
        cout << "\n";
    }
}
// Reads a second 3x3 matrix `art`, then adds, subtracts or multiplies it
// with `mat` according to the user's choice, printing the result.
void arithmatic(int mat[3][3])
{
    int art[3][3],choice, mut[3][3],sum[3][3],sub[3][3];
    cout<<"\nEnter the values in another matrix\n";
    for(int k=0;k<3;k++)
    {
        for(int l=0;l<3;l++)
        {
            cin>>art[k][l];
            cout<<" ";
        }
    }
    cout<<"1)Addition \n 2) Subtraction \n 3) Multiplication";
    cout<<"\nChoose the operation you want to perform : ";
    cin>>choice;
    switch(choice)
    {
        // Element-wise sum: sum[i][j] = mat[i][j] + art[i][j].
        case 1 : for(int i=0;i<3;i++)
        {
            for(int j=0;j<3;j++)
            {
                sum[i][j]=mat[i][j]+art[i][j];
            }
        }
        for(int i=0;i<3;i++)
        {
            for(int j=0;j<3;j++)
            {
                cout<<sum[i][j]<<"\t";
            }
            cout<<"\n";
        }
        break;
        // Element-wise difference: sub[i][j] = mat[i][j] - art[i][j].
        case 2 :for(int i=0;i<3;i++)
        {
            for(int j=0;j<3;j++)
            {
                sub[i][j]=mat[i][j]-art[i][j];
            }
        }
        for(int i=0;i<3;i++)
        {
            for(int j=0;j<3;j++)
            {
                cout<<sub[i][j]<<"\t";
            }
            cout<<"\n";
        }
        break;
        // Product. Note the index gymnastics: mut[k][l] accumulates
        // mat[l][a]*art[a][k], i.e. mut holds (mat x art) TRANSPOSED;
        // the print loop below indexes mut[j][i], transposing again, so
        // the printed matrix is the conventional product mat x art.
        case 3 :for(int i=0;i<3;i++)
        {
            for(int j=0;j<3;j++)
            {
                mut[i][j]=0;
            }
        }
        for(int k=0;k<3;k++){
            for(int l=0;l<3;l++){
                for(int a=0;a<3;a++){
                    mut[k][l]=mut[k][l]+(mat[l][a]*art[a][k]);
                }
            }
        }
        for(int i=0;i<3;i++)
        {
            for(int j=0;j<3;j++)
            {
                cout<<mut[j][i]<<"\t";
            }
            cout<<"\n";
        }
        break;
        default : cout<<"\nEnter the valid option!!!";
        break;
    }
}
| 20.582524 | 143 | 0.365094 | 3.15625 |
1a6b424be4bcf4853505ce2d9e68268f8bc06dd5
| 59,076 |
py
|
Python
|
src/models/infection_model.py
|
MOCOS-COVID19/modelling-ncov2019
|
c47a2c980713da970b67db851a95eb68727517a9
|
[
"MIT"
] | 7 |
2020-04-02T15:23:28.000Z
|
2021-11-28T16:32:46.000Z
|
src/models/infection_model.py
|
MOCOS-COVID19/modelling-ncov2019
|
c47a2c980713da970b67db851a95eb68727517a9
|
[
"MIT"
] | 19 |
2020-03-17T08:46:06.000Z
|
2020-10-13T07:58:27.000Z
|
src/models/infection_model.py
|
eMaerthin/modelling-ncov2019
|
c47a2c980713da970b67db851a95eb68727517a9
|
[
"MIT"
] | 5 |
2020-03-13T20:32:39.000Z
|
2020-08-24T18:40:01.000Z
|
"""
This is mostly based on references/infection_alg.pdf
"""
import ast
from functools import (lru_cache, partial)
import json
import logging
import mocos_helper
#import random
import time
from collections import defaultdict
import pickle
import psutil
from shutil import copyfile
from math import log
from git import Repo
import pandas as pd
import scipy.optimize
import scipy.stats
from src.models.schemas import *
from src.models.defaults import *
from src.models.states_and_functions import *
from src.visualization.visualize import Visualize
import click
from dotenv import find_dotenv, load_dotenv
from queue import (PriorityQueue)
q = PriorityQueue()
class InfectionModel:
def __init__(self, params_path: str, df_individuals_path: str, df_households_path: str = '') -> None:
    """Load experiment parameters and population data, and reset all simulation state.

    :param params_path: path to the JSON file with experiment parameters
    :param df_individuals_path: path to the population CSV
    :param df_households_path: optional precomputed households CSV; when empty,
        a default path under the experiment output directory is used
    """
    self.params_path = params_path
    self.df_individuals_path = df_individuals_path
    self.df_households_path = df_households_path
    logger.info('Loading params...')
    self._params = dict()
    with open(params_path, 'r') as params_file:
        params = json.loads(
            params_file.read()
        )  # TODO: check whether this should be moved to different place
    logger.info('Parsing params...')
    # Validate every known key against its schema, falling back to defaults.
    for key, schema in infection_model_schemas.items():
        self._params[key] = schema.validate(params.get(key, defaults[key]))
    default_household_input_path = os.path.join(self._params[OUTPUT_ROOT_DIR], self._params[EXPERIMENT_ID],
                                                'input_df_households.csv')  # TODO: ensure households are valid!
    if df_households_path == '':
        self.df_households_path = default_household_input_path
    # Simulation clock / bookkeeping state.
    self._global_time = None
    self._max_time = None
    self._vis = None
    self._max_time_offset = 0.0
    self._expected_case_severity = None
    self._df_individuals = None
    self._df_households = None
    #self._individuals_gender = None
    self._individuals_age = None
    self._individuals_household_id = None
    self._individuals_indices = None
    self._households_capacities = None
    self._households_inhabitants = None
    self._init_for_stats = None
    # Aggregate epidemic counters, updated as events are processed.
    self._affected_people = 0
    self._active_people = 0
    self._quarantined_people = 0
    self._detected_people = 0
    self._immune_people = 0
    self._deaths = 0
    self._icu_needed = 0
    self._disable_friendship_kernel = False
    self._set_up_data_frames()
    self._infection_status = None
    self._detection_status = None
    self._quarantine_status = None
    self._expected_case_severity = None
    if self._params[REUSE_EXPECTED_CASE_SEVERITIES]:
        self._expected_case_severity = self.draw_expected_case_severity()
    self._infections_dict = None
    self._progression_times_dict = None
    # Pre-bind samplers for the disease-progression random variables
    # (t0/t1/t2/tdeath) so each draw is a plain zero-arg call.
    t0_f, t0_args, t0_kwargs = self.setup_random_distribution(T0)
    self.rv_t0 = lambda: t0_f(*t0_args, **t0_kwargs)
    t1_f, t1_args, t1_kwargs = self.setup_random_distribution(T1)
    self.rv_t1 = lambda: t1_f(*t1_args, **t1_kwargs)
    t2_f, t2_args, t2_kwargs = self.setup_random_distribution(T2)
    self.rv_t2 = lambda: t2_f(*t2_args, **t2_kwargs)
    tdeath_f, tdeath_args, tdeath_kwargs = self.setup_random_distribution(TDEATH)
    self.rv_tdeath = lambda: tdeath_f(*tdeath_args, **tdeath_kwargs)
    # TODO: This should be refactored
    # Per-kernel fear-model components, filled lazily by set_up_internal_fear().
    self.fear_fun = dict()
    self.fear_weights_detected = dict()
    self.fear_weights_deaths = dict()
    self.fear_scale = dict()
    self.fear_loc = dict()
    self.fear_limit_value = dict()
    self.serial_intervals = []
    self.band_time = None
    self._last_affected = None
    self._per_day_increases = {}
    self._disable_constant_age_kernel = False
    self._constant_age_helper_age_dict = {}
    self._constant_age_individuals = defaultdict(list)
    self._setup_constant_age_kernel()
def _setup_constant_age_kernel(self):
    """Build the age -> individuals lookup used by the constant-age kernel.

    Maps each configured age to a bucket index and lists the individuals of
    that age per bucket. When CONSTANT_AGE_SETUP is absent the kernel is
    disabled entirely.
    """
    if self._params[CONSTANT_AGE_SETUP] is None:
        self._disable_constant_age_kernel = True
        return
    if isinstance(self._params[CONSTANT_AGE_SETUP][AGE], int):
        # Single age configured: everything goes into bucket 0.
        self._constant_age_helper_age_dict[self._params[CONSTANT_AGE_SETUP][AGE]] = 0
    else:
        if self._params[CONSTANT_AGE_SETUP][INTER_AGE_CONTACTS]:
            # so all ages specified can be mixed
            for age in self._params[CONSTANT_AGE_SETUP][AGE]:
                self._constant_age_helper_age_dict[age] = 0
        else:
            # Each age gets its own bucket, so contacts stay within the age group.
            for i, age in enumerate(self._params[CONSTANT_AGE_SETUP][AGE]):
                self._constant_age_helper_age_dict[age] = i
    for age, individual_list_key in self._constant_age_helper_age_dict.items():
        self._constant_age_individuals[individual_list_key].extend([
            k for k, v in self._individuals_age_dct.items() if v==age
        ])
def get_detection_status_(self, person_id):
    """Detection status of `person_id`; untracked people get the default status."""
    if person_id in self._detection_status:
        return self._detection_status[person_id]
    return default_detection_status
def get_quarantine_status_(self, person_id):
    """Quarantine status of `person_id`; untracked people get the default status."""
    if person_id in self._quarantine_status:
        return self._quarantine_status[person_id]
    return default_quarantine_status
def get_infection_status(self, person_id):
    """Infection status of `person_id`; anyone not yet tracked counts as Healthy."""
    if person_id in self._infection_status:
        return self._infection_status[person_id]
    return InfectionStatus.Healthy.value
@staticmethod
def parse_random_seed(random_seed):
    # Seed the shared mocos_helper RNG so runs are reproducible.
    mocos_helper.seed(random_seed)
def _set_up_data_frames(self) -> None:
    """
    The purpose of this method is to set up two dataframes.
    One is self._df_individuals that stores features for the population
    Second is self._df_households that stores list of people idx per household
    building df_households is time consuming, therefore we try to reuse previously computed df_households
    :return:
    """
    logger.info('Set up data frames: Reading population csv...')
    self._df_individuals = pd.read_csv(self.df_individuals_path)
    self._df_individuals.index = self._df_individuals.idx
    # Cache frequently used per-individual columns as arrays/dicts for fast lookup.
    self._individuals_age = self._df_individuals[AGE].values
    self._individuals_age_dct = self._df_individuals[AGE].to_dict()
    self._individuals_gender_dct = self._df_individuals[GENDER].to_dict()
    self._individuals_household_id = self._df_individuals[HOUSEHOLD_ID].to_dict()
    self._individuals_indices = self._df_individuals.index.values
    if SOCIAL_COMPETENCE in self._df_individuals.columns:
        if self._params[TRANSMISSION_PROBABILITIES][FRIENDSHIP] == 0:
            # Zero transmission probability makes the kernel a no-op; skip its setup.
            logger.info('Friendship = 0.0 - Disable friendship kernel...')
            self._disable_friendship_kernel = True
        else:
            logger.info('Set up data frames: Social competence and loading social activity sampler...')
            self._social_activity_scores = self._df_individuals[SOCIAL_COMPETENCE].to_dict()
            self._social_activity_sampler = mocos_helper.AgeDependentFriendSampler(
                self._individuals_indices,
                self._individuals_age,
                self._df_individuals[GENDER].values,
                self._df_individuals[SOCIAL_COMPETENCE].values
            )
            self._disable_friendship_kernel = False
    else:
        logger.info('Social competence missing - Disable friendship kernel...')
        self._disable_friendship_kernel = True
    logger.info('Set up data frames: Building households df...')
    if os.path.exists(self.df_households_path):
        # Reuse the cached households CSV (ID column holds Python lists serialized as text).
        self._df_households = pd.read_csv(self.df_households_path, index_col=HOUSEHOLD_ID,
                                          converters={ID: ast.literal_eval})
    else:
        self._df_households = pd.DataFrame({ID: self._df_individuals.groupby(HOUSEHOLD_ID)[ID].apply(list)})
        os.makedirs(os.path.dirname(self.df_households_path), exist_ok=True)
        self._df_households.to_csv(self.df_households_path)
    self._df_households[CAPACITY] = self._df_households[ID].apply(lambda x: len(x))
    d = self._df_households.to_dict()
    self._households_inhabitants = d[ID] #self._df_households[ID]
    self._households_capacities = d[CAPACITY] #self._df_households[CAPACITY]
    if not self._params[LOG_OUTPUTS]:
        # Drop the raw dataframes to save memory when they will never be logged.
        self._df_households = None
        self._df_individuals = None
@staticmethod
def append_event(event: Event) -> None:
    # Enqueue onto the module-level priority queue `q`; events are ordered by time.
    q.put(event)
def _fill_queue_based_on_auxiliary_functions(self) -> None:
    # TODO: THIS IS NOT WORKING WHEN CAP = INF, let's fix it
    #  (one possible way to fix it: generate say first N events and a followup "filling EVENT"
    #  on time T(N) of N-th event - at T(N) generate N more events and enqueue next portion.
    #  Alternatively add just one event of type AUXILIARY_FUNCTION/IMPORT_INTENSITY
    #  that will then draw time of next event of that type
    """
    The purpose of this method is to mark some people of the population as sick according to provided function.
    Possible functions: see possible values of ImportIntensityFunctions enum
    Outcome of the function can be adjusted by overriding default parameters:
    multiplier, rate, cap, infectious_probability.
    :return:
    """
    def _generate_event_times(func, rate, multiplier, cap, root_buffer=100, root_guess=0) -> list:
        """
        Here a naive way of generating event times is proposed.
        The idea is to generate N events
        :param func: currently two functions are supported: exponential a*exp(r*t) and polynomial a*r^t
        :param rate: a parameter that is making the slope more steep
        :param multiplier: a parameter that scales the time down
        :param cap: the maximum amount of cases generated and added to queue list
        :param root_buffer: one-directional range to find roots in
        :param root_guess: guess on first solution (i=1)
        :return:
        """
        root_min = root_guess - root_buffer
        root_max = root_guess + root_buffer
        time_events_ = []

        def bisect_fun(x, integer):
            return func(x, rate=rate, multiplier=multiplier) - integer

        for i in range(1, 1 + cap):
            # Bug fix: the original rebound `bisect_fun = partial(bisect_fun, integer=i)`
            # inside the loop, wrapping the previous partial on each iteration (it only
            # worked because call-site kwargs override partial kwargs). Build a fresh
            # single-layer partial per root instead.
            root = scipy.optimize.bisect(partial(bisect_fun, integer=i), root_min, root_max)
            time_events_.append(root)
            # Roots are monotonically increasing, so search above the last one found.
            root_min = root
            root_max = root + root_buffer
        return time_events_
    import_intensity = self._params[IMPORT_INTENSITY]
    f_choice = ImportIntensityFunctions(import_intensity[FUNCTION])
    if f_choice == ImportIntensityFunctions.NoImport:
        return
    func = import_intensity_functions[f_choice]
    multiplier = import_intensity[MULTIPLIER]
    rate = import_intensity[RATE]
    cap = import_intensity[CAP]
    infectious_prob = import_intensity[INFECTIOUS]
    event_times = _generate_event_times(func=func, rate=rate, multiplier=multiplier, cap=cap)
    for event_time in event_times:
        # Pick a uniformly random individual as an imported case; with probability
        # `infectious_prob` they enter already infectious (T0) rather than contracted.
        person_id = self._individuals_indices[mocos_helper.randint(0, len(self._individuals_indices))]
        t_state = TMINUS1
        if mocos_helper.rand() < infectious_prob:
            t_state = T0
        self.append_event(Event(event_time, person_id, t_state, None, IMPORT_INTENSITY, self.global_time))
def _fill_queue_based_on_initial_conditions(self):
    """
    The purpose of this method is to mark some people of the population as sick according to provided
    initial conditions.
    Conditions can be provided using one of two supported schemas.
    schema v1 is list with details per person, while schema v2 is dictionary specifying selection algorithm
    and cardinalities of each group of patients (per symptom).
    :return:
    """
    def _assign_t_state(status):
        # Map an initial infection-status label to the event type that seeds it.
        if status == CONTRACTION:
            return TMINUS1
        if status == INFECTIOUS:
            return T0
        if status == IMMUNE:
            return TRECOVERY
        raise ValueError(f'invalid initial infection status {status}')
    initial_conditions = self._params[INITIAL_CONDITIONS]
    if isinstance(initial_conditions, list):  # schema v1
        for initial_condition in initial_conditions:
            person_idx = initial_condition[PERSON_INDEX]
            t_state = _assign_t_state(initial_condition[INFECTION_STATUS])
            if EXPECTED_CASE_SEVERITY in initial_condition:
                # Explicit severity overrides the randomly drawn one.
                self._expected_case_severity[person_idx] = initial_condition[EXPECTED_CASE_SEVERITY]
            self.append_event(Event(initial_condition[CONTRACTION_TIME], person_idx, t_state, None,
                                    INITIAL_CONDITIONS, self.global_time))
    elif isinstance(initial_conditions, dict):  # schema v2
        if initial_conditions[SELECTION_ALGORITHM] == InitialConditionSelectionAlgorithms.RandomSelection.value:
            # initially all indices can be drawn
            #choice_set = self._individuals_indices# self._df_individuals.index.values
            choice_set = list(self._individuals_indices)
            for infection_status, cardinality in initial_conditions[CARDINALITIES].items():
                if cardinality > 0:
                    if cardinality < 1:
                        # Fractional cardinality: interpret as a fraction of the remaining pool.
                        c = cardinality
                        cardinality = int(cardinality * len(choice_set))
                        if cardinality == 0:
                            # NOTE(review): `cardinality` is 0 here, so the log shows 0 rather
                            # than the requested fraction (that one is in `c`).
                            logger.info(f"too small cardinality provided {cardinality} ({c})")
                            continue
                    else:
                        cardinality = int(cardinality)
                    #selected_rows = np.random.choice(choice_set, cardinality, replace=False)
                    # now only previously unselected indices can be drawn in next steps
                    #choice_set = np.array(list(set(choice_set) - set(selected_rows)))
                    choice_set, selected_rows = mocos_helper.randomly_split_list(choice_set, howmuch=cardinality)
                    t_state = _assign_t_state(infection_status)
                    for row in selected_rows:
                        whom = None
                        if t_state == TRECOVERY:
                            # Recovery events carry the recovered person as "whom".
                            whom = row
                        self.append_event(Event(self.global_time, row, t_state, whom, INITIAL_CONDITIONS,
                                                self.global_time))
        else:
            err_msg = f'Unsupported selection algorithm provided {initial_conditions[SELECTION_ALGORITHM]}'
            logger.error(err_msg)
            raise ValueError(err_msg)
    else:
        err_msg = f'invalid schema provided {initial_conditions}'
        logger.error(err_msg)
        raise ValueError(err_msg)
@property
def global_time(self):
    # Current simulation clock (time of the most recently processed event).
    return self._global_time
@property
def df_individuals(self):
    # Raw population dataframe (None when LOG_OUTPUTS is off).
    return self._df_individuals
@property
def stop_simulation_threshold(self):
    # Affected-people count at which the simulation stops.
    return self._params[STOP_SIMULATION_THRESHOLD]
@property
def case_severity_distribution(self):
    # Configured severity-class probabilities.
    return self._params[CASE_SEVERITY_DISTRIBUTION]
@property
def disease_progression(self):
    # Default disease-progression parameter set (per random variable t0/t1/t2/...).
    return self._params[DISEASE_PROGRESSION][DEFAULT]
@property
def affected_people(self):
    # Cumulative number of people ever infected.
    return self._affected_people
@property
def detected_people(self):
    # Cumulative number of detected cases.
    return self._detected_people
@property
def quarantined_people(self):
    # Current number of quarantined people.
    return self._quarantined_people
@property
def active_people(self):
    # Currently infectious (active) people.
    return self._active_people
@property
def deaths(self):
    # Cumulative death count.
    return self._deaths
def draw_expected_case_severity(self):
    """Draw an expected case severity for every individual, adjusted by age.

    For each age band the Critical probability is derived from the band's
    fatality rate, and the remaining severity classes are rescaled so the
    distribution still sums to one. Returns a dict person_id -> severity key.
    """
    case_severity_dict = self.case_severity_distribution
    keys = list(case_severity_dict.keys())
    d = {}
    for age_min, age_max, fatality_prob in default_age_induced_fatality_rates:
        # Select individuals in [age_min, age_max).
        cond_lb = self._individuals_age >= age_min
        cond_ub = self._individuals_age < age_max
        cond = np.logical_and(cond_lb, cond_ub)
        if np.count_nonzero(cond) == 0:
            continue
        age_induced_severity_distribution = dict()
        # P(Critical) chosen so that P(death) = fatality_prob given P(death | Critical).
        age_induced_severity_distribution[CRITICAL] = fatality_prob/self._params[DEATH_PROBABILITY][CRITICAL]
        for x in case_severity_dict:
            if x != CRITICAL:
                # Rescale the non-critical classes to fill the remaining mass.
                age_induced_severity_distribution[x] = case_severity_dict[x] / (1 - case_severity_dict[CRITICAL]) * (1 - age_induced_severity_distribution[CRITICAL])
        realizations = mocos_helper.sample_with_replacement_shuffled((age_induced_severity_distribution[x] for x in case_severity_dict), len(self._individuals_indices[cond]))
        values = [keys[r] for r in realizations]
        df = pd.DataFrame(values, index=self._individuals_indices[cond])
        d = {**d, **df.to_dict()[0]}
    return d
def setup_random_distribution(self, t):
    """Resolve the sampler for disease-progression variable `t` (T0/T1/T2/TDEATH).

    Returns a (callable, args, kwargs) triple so callers can draw with
    `callable(*args, **kwargs)`. FROM_FILE distributions are fitted to the
    stored empirical sample and replaced by the requested parametric form.
    """
    params = self.disease_progression[t]
    distribution = params.get(DISTRIBUTION, default_distribution[DISTRIBUTION])
    if distribution == FROM_FILE:
        filepath = params.get('filepath', None).replace('$ROOT_DIR', config.ROOT_DIR)
        Schema(lambda x: os.path.exists(x)).validate(filepath)
        array = np.load(filepath)
        approximate_distribution = params.get('approximate_distribution', None)
        if approximate_distribution == LOGNORMAL:
            # floc=0 pins the location so the fit stays two-parameter.
            shape, loc, scale = scipy.stats.lognorm.fit(array, floc=0)
            return mocos_helper.lognormal, [], {'mean': log(scale), 'sigma': shape}

        if approximate_distribution == GAMMA:
            shape, loc, scale = scipy.stats.gamma.fit(array, floc=0)
            return mocos_helper.gamma, [], {'alpha': shape, 'beta': scale}

        if approximate_distribution:
            raise NotImplementedError(f'Approximating to this distribution {approximate_distribution}'
                                      f'is not yet supported but we can quickly add it if needed')
        raise NotImplementedError(f'Currently not supporting empirical distribution'
                                  f' without approximating it')
    if distribution == LOGNORMAL:
        mean = params.get('mean', 0.0)
        sigma = params.get('sigma', 1.0)
        return mocos_helper.lognormal, [], {'mean': mean, 'sigma': sigma}
    if distribution == EXPONENTIAL:
        lambda_ = params.get('lambda', 1.0)
        # mocos_helper.exponential takes a scale (= 1/rate).
        return mocos_helper.exponential, [], {'scale': 1/lambda_}
    if distribution == POISSON:
        lambda_ = params.get('lambda', 1.0)
        return mocos_helper.poisson, [], {'lam': lambda_}
    raise ValueError(f'Sampling from distribution {distribution} is not yet supported but we can quickly add it')
def add_potential_contractions_from_transport_kernel(self, person_id):
    # Transport kernel is intentionally a no-op (not implemented yet).
    pass
def set_up_internal_fear(self, kernel_id):
    """Resolve the fear-model configuration for one kernel.

    Falls back to the DEFAULT fear-factor entry when the kernel has none;
    returns (function, detected_weight, deaths_weight, scale, loc, limit).
    A missing/empty config disables fear (all-zero parameters).
    """
    fear_factors = self._params[FEAR_FACTORS]
    fear_factor = fear_factor_schema.validate(fear_factors.get(kernel_id, fear_factors.get(DEFAULT, None)))
    if not fear_factor:
        return fear_functions[FearFunctions.FearDisabled], 0, 0, 0, 0, 0
    f = fear_functions[FearFunctions(fear_factor[FEAR_FUNCTION])]
    limit_value = fear_factor[LIMIT_VALUE]
    scale = fear_factor[SCALE_FACTOR]
    loc = fear_factor[LOC_FACTOR]
    weights_deaths = fear_factor[DEATHS_MULTIPLIER]
    weights_detected = fear_factor[DETECTED_MULTIPLIER]
    return f, weights_detected, weights_deaths, scale, loc, limit_value
def fear(self, kernel_id) -> float:
    """Current fear multiplier for `kernel_id`, based on detections/deaths/time.

    Lazily initializes the per-kernel fear configuration on first use.
    """
    if kernel_id not in self.fear_fun:
        res = self.set_up_internal_fear(kernel_id)
        (self.fear_fun[kernel_id], self.fear_weights_detected[kernel_id],
         self.fear_weights_deaths[kernel_id], self.fear_scale[kernel_id],
         self.fear_loc[kernel_id], self.fear_limit_value[kernel_id]) = res
    detected = self.detected_people
    deaths = self.deaths
    time = self._global_time
    if self._params[MOVE_ZERO_TIME_ACCORDING_TO_DETECTED]:
        # Shift time so t=0 corresponds to the detection threshold being reached;
        # before that happens (_max_time_offset == inf) use -inf.
        if self._max_time_offset != np.inf:
            time -= self._max_time_offset
        else:
            time = -np.inf

    return self.fear_fun[kernel_id](detected, deaths, time, self.fear_weights_detected[kernel_id],
                                    self.fear_weights_deaths[kernel_id], self.fear_loc[kernel_id],
                                    self.fear_scale[kernel_id], self.fear_limit_value[kernel_id])
def gamma(self, kernel_id):
    """Base transmission probability configured for the given kernel."""
    transmission_probabilities = self._params[TRANSMISSION_PROBABILITIES]
    return transmission_probabilities[kernel_id]
def household_kernel_old_implementation(self, person_id):
    """Legacy household kernel: Poisson number of contacts, uniform times.

    Draws the infection count from the total household infection rate over the
    infectious window, then assigns each contact a uniform time in [t0, end).
    Sampling is with replacement, so the same inhabitant may be drawn twice.
    """
    prog_times = self._progression_times_dict[person_id]
    start = prog_times[T0]
    # Infectious window ends at hospitalization (T2) or recovery, whichever is set.
    end = prog_times[T2] or prog_times[TRECOVERY]
    total_infection_rate = (end - start) * self.gamma('household')
    infected = mocos_helper.poisson(total_infection_rate)
    if infected == 0:
        return
    household_id = self._individuals_household_id[person_id]
    inhabitants = self._households_inhabitants[household_id]
    possible_choices = [i for i in inhabitants if i != person_id]
    for choice_idx in mocos_helper.sample_idxes_with_replacement_uniform(len(possible_choices), infected):
        person_idx = possible_choices[choice_idx]
        if self.get_infection_status(person_idx) == InfectionStatus.Healthy:
            contraction_time = mocos_helper.uniform(low=start, high=end)
            self.append_event(Event(contraction_time, person_idx, TMINUS1, person_id, HOUSEHOLD, self.global_time))
def add_potential_contractions_from_household_kernel(self, person_id):
    """Enqueue potential household infections caused by `person_id`.

    Each healthy co-inhabitant independently gets an exponential waiting time;
    contacts past the end of the infectious window are discarded. Delegates to
    the legacy implementation when the corresponding flag is set.
    """
    if self._params[OLD_IMPLEMENTATION_FOR_HOUSEHOLD_KERNEL]:
        self.household_kernel_old_implementation(person_id)
        return
    prog_times = self._progression_times_dict[person_id]
    start = prog_times[T0]
    # Infectious window ends at hospitalization (T2) or recovery, whichever is set.
    end = prog_times[T2] or prog_times[TRECOVERY]
    household_id = self._individuals_household_id[person_id]
    inhabitants = self._households_inhabitants[household_id]
    possible_choices = [i for i in inhabitants if i != person_id]
    if not possible_choices:
        return
    # Hoisted loop-invariant: the exponential scale does not depend on the inhabitant.
    scale = len(possible_choices) / self.gamma('household')
    for person_idx in possible_choices:
        if self.get_infection_status(person_idx) == InfectionStatus.Healthy:
            contraction_time = start + mocos_helper.exponential(scale=scale)
            if contraction_time >= end:
                continue
            self.append_event(Event(contraction_time, person_idx, TMINUS1, person_id, HOUSEHOLD, self.global_time))
def add_potential_contractions_from_constant_kernel(self, person_id):
    """ Constant kernel draws a number of infections based on base gamma and enqueue randomly selected events """
    prog_times = self._progression_times_dict[person_id]
    start = prog_times[T0]
    end = prog_times[T1]
    if end is None:
        # No stay-home time recorded; person stays "out" until hospitalization.
        end = prog_times[T2]
    total_infection_rate = (end - start) * self.gamma('constant')
    infected = mocos_helper.poisson(total_infection_rate)
    if infected == 0:
        return
    # Sample targets uniformly from the whole population, excluding the source.
    selected_rows = mocos_helper.nonreplace_sample_few(self._individuals_indices,
                                                       infected, person_id)
    for person_idx in selected_rows:
        if self.get_infection_status(person_idx) == InfectionStatus.Healthy:
            contraction_time = mocos_helper.uniform(low=start, high=end)
            self.append_event(Event(contraction_time, person_idx, TMINUS1, person_id, CONSTANT, self.global_time))
def add_potential_contractions_from_constant_age_kernel(self, person_id):
    """Constant kernel restricted to the source's configured age bucket.

    No-op when the kernel is disabled or the person's age is not configured.
    """
    if self._disable_constant_age_kernel is True:
        return
    age = self._individuals_age_dct[person_id]
    if age not in self._constant_age_helper_age_dict:
        return
    prog_times = self._progression_times_dict[person_id]
    start = prog_times[T0]
    end = prog_times[T1]
    if end is None:
        end = prog_times[T2]
    total_infection_rate = (end - start) * self.gamma('constant_age')
    infected = mocos_helper.poisson(total_infection_rate)
    if infected == 0:
        return
    # Targets are drawn only from the same age bucket, excluding the source.
    selected_rows = mocos_helper.nonreplace_sample_few(
        self._constant_age_individuals[self._constant_age_helper_age_dict[age]],
        infected,
        person_id
    )
    for person_idx in selected_rows:
        if self.get_infection_status(person_idx) == InfectionStatus.Healthy:
            contraction_time = mocos_helper.uniform(low=start, high=end)
            self.append_event(Event(contraction_time, person_idx, TMINUS1, person_id, CONSTANT_AGE, self.global_time))
def add_potential_contractions_from_friendship_kernel(self, person_id):
    """Enqueue infections through the social-activity (friendship) kernel.

    The expected contact count scales with the source's social-activity score;
    targets are drawn from the age/gender-dependent friend sampler.
    """
    if self._disable_friendship_kernel is True:
        return
    prog_times = self._progression_times_dict[person_id]
    start = prog_times[T0]
    end = prog_times[T1]
    if end is None:
        end = prog_times[T2]
    total_infection_rate = (end - start) * self.gamma('friendship')
    no_infected = mocos_helper.poisson(total_infection_rate * self._social_activity_scores[person_id])
    # Add a constant multiplicand above?

    age = self._individuals_age_dct[person_id]
    gender = self._individuals_gender_dct[person_id]
    for _ in range(no_infected):
        infected_idx = self._social_activity_sampler.gen(age, gender)
        if self.get_infection_status(infected_idx) == InfectionStatus.Healthy:
            contraction_time = mocos_helper.uniform(low=start, high=end)
            self.append_event(Event(contraction_time, infected_idx, TMINUS1, person_id, FRIENDSHIP, self.global_time))
def handle_t0(self, person_id):
    """Transition `person_id` to Infectious and enqueue their outgoing contacts.

    Raises AssertionError if the person is in a state that cannot become
    infectious (anything other than Healthy or Contraction).
    """
    self._active_people += 1
    if self.get_infection_status(person_id) in [
        InfectionStatus.Healthy,
        InfectionStatus.Contraction
    ]:
        self._infection_status[person_id] = InfectionStatus.Infectious.value
    else:
        raise AssertionError(f'Unexpected state detected: {self.get_infection_status(person_id)}'
                             f'person_id: {person_id}')
    household_id = self._individuals_household_id[person_id]  # self._df_individuals.loc[person_id, HOUSEHOLD_ID]
    capacity = self._households_capacities[household_id]  # self._df_households.loc[household_id][ID]
    if capacity > 1:
        # Household kernel only matters when the person does not live alone.
        self.add_potential_contractions_from_household_kernel(person_id)
    self.add_potential_contractions_from_constant_kernel(person_id)
    self.add_potential_contractions_from_friendship_kernel(person_id)
    self.add_potential_contractions_from_constant_age_kernel(person_id)
def generate_disease_progression(self, person_id, event_time: float,
                                 initial_infection_status: str) -> None:
    """Returns list of disease progression events
    "future" disease_progression should be recalculated when the disease will be recognised at the state level
    t0 - time when individual becomes infectious (Mild symptoms)
    t1 - time when individual stay home/visit doctor due to Mild/Serious? symptoms
    t2 - time when individual goes to hospital due to Serious symptoms
    tdeath - time when individual dies (depending on death probability)
    trecovery - time when individual is recovered (in case the patient will not die from covid19)

    If person is Infected:
    A - tminus1 is known (event time),
    B - t0 is calculated as tminus1 + rv_t0,

    If person is added to population as Infectious:
    A - t0 is known (event time),
    B - tminus 1 is calculated as t0 - rv_t0

    For all infected:
    A - t1 is calculated as t0 + rv_t1

    If person will develop Severe or Critical symptoms:
    A - t2 is calculated as t0 + rv_t2
    B - if t1 is larger than t2, discard t1
    C - calculate trecovery time as t0 + 6 weeks <- these 6 weeks are from WHO report, in python we use uniform[4w,8w]
    D - calculate tdetection as t2

    If person will develop Asymptomatic or Mild symptoms:
    A - calculate trecovery time as t0 + 2 weeks <- these 2 weeks are from WHO report, in python we use uniform[11d,17d]
    B - draw a random number uniform[0,1] and if less than detection_mild_proba, calculate tdetection as t0 + 2

    Draw a random number uniform[0,1] and if less than death_probability[expected_case(person_id)]:
    A - calculate tdeath time as t0 + rv_tdeath,
    B - discard all times that are larger than tdeath
    """
    if initial_infection_status == InfectionStatus.Contraction:
        tminus1 = event_time
        t0 = tminus1 + self.rv_t0()
        self.append_event(Event(t0, person_id, T0, person_id, DISEASE_PROGRESSION, tminus1))
        self._infection_status[person_id] = initial_infection_status
    elif initial_infection_status == InfectionStatus.Infectious:
        t0 = event_time
        # tminus1 does not to be defined, but for completeness let's calculate it
        tminus1 = t0 - self.rv_t0()
    else:
        raise ValueError(f'invalid initial infection status {initial_infection_status}')
    t2 = None
    if self._expected_case_severity[person_id] in [
        ExpectedCaseSeverity.Severe,
        ExpectedCaseSeverity.Critical
    ]:
        t2 = t0 + self.rv_t2()
        self.append_event(Event(t2, person_id, T2, person_id, DISEASE_PROGRESSION, t0))

    t1 = t0 + self.rv_t1()
    if not t2 or t1 < t2:
        self.append_event(Event(t1, person_id, T1, person_id, DISEASE_PROGRESSION, t0))
    else:
        # if t2 < t1 then we reset t1 to avoid misleading in data exported from the simulation
        t1 = None

    tdetection = None
    trecovery = None
    tdeath = None
    if mocos_helper.rand() <= self._params[DEATH_PROBABILITY][self._expected_case_severity[person_id]]:
        tdeath = t0 + self.rv_tdeath()
        self.append_event(Event(tdeath, person_id, TDEATH, person_id, DISEASE_PROGRESSION, t0))
    else:
        # Survivor: recovery window depends on severity (see docstring re WHO-based spans).
        if self._expected_case_severity[person_id] in [
            ExpectedCaseSeverity.Mild,
            ExpectedCaseSeverity.Asymptomatic
        ]:
            trecovery = t0 + mocos_helper.uniform(14.0 - 3.0, 14.0 + 3.0) # TODO: this should not be hardcoded!
        else:
            trecovery = t0 + mocos_helper.uniform(42.0 - 14.0, 42.0 + 14.0)
        self.append_event(Event(trecovery, person_id, TRECOVERY, person_id, DISEASE_PROGRESSION, t0))

    """ Following is for checking whther tdetection should be picked up"""
    calculate_tdetection = self._params[TURN_ON_DETECTION]
    if self._expected_case_severity[person_id] in [
        ExpectedCaseSeverity.Mild,
        ExpectedCaseSeverity.Asymptomatic
    ]:
        # Mild/asymptomatic cases are detected only with DETECTION_MILD_PROBA.
        if mocos_helper.rand() > self._params[DETECTION_MILD_PROBA]:
            calculate_tdetection = False
    if calculate_tdetection:
        """ If t2 is defined (severe/critical), then use this time; if not; use some offset from t0 """
        tdetection = t2 or t0 + 2  # TODO: this should not be hardcoded
        ev = Event(tdetection, person_id, TDETECTION, person_id, DETECTION, t0)
        self.append_event(ev)
    self._progression_times_dict[person_id] = {ID: person_id, TMINUS1: tminus1, T0: t0, T1: t1, T2: t2,
                                               TDEATH: tdeath, TRECOVERY: trecovery, TDETECTION: tdetection}
    if initial_infection_status == InfectionStatus.Infectious:
        # Person is infectious immediately - process t0 right away.
        self.handle_t0(person_id)
@property
def df_infections(self):
    # Infection-edge log (source, target, time, kernel) as a dataframe.
    return pd.DataFrame.from_dict(self._infections_dict, orient='index')
@property
def df_progression_times(self):
    # Per-person progression milestones (tminus1/t0/t1/t2/...) as a dataframe.
    return pd.DataFrame.from_dict(self._progression_times_dict, orient='index')
def save_progression_times(self, path):
    """Write per-person disease-progression milestones as CSV to `path`.

    Missing milestones are written as the literal string 'None'.
    """
    with open(path, "w") as f:
        f.write('idx,tminus1,t0,t1,t2,tdeath,trecovery,tdetection,quarantine\n')
        for elem in self._progression_times_dict.values():
            # Renamed from `str`, which shadowed the builtin.
            row = f'{elem.get(ID, None)},{elem.get(TMINUS1, None)},{elem.get(T0, None)},'\
                  f'{elem.get(T1, None)},{elem.get(T2, None)},{elem.get(TDEATH, None)},'\
                  f'{elem.get(TRECOVERY, None)},{elem.get(TDETECTION, None)},{elem.get(QUARANTINE, None)}\n'
            f.write(row)
def save_potential_contractions(self, path):
    """Write realized infection edges (source, target, time, kernel) as CSV.

    Events with a contraction time after the current global time are skipped;
    a missing contraction time counts as +inf (consistent with prevalance_at)
    instead of raising TypeError on `None <= float`.
    """
    with open(path, "w") as f:
        f.write('source_id,target_id,contraction_time,kernel\n')
        for elem in self._infections_dict.values():
            if elem.get(CONTRACTION_TIME, np.inf) <= self._global_time:  # skiping events that were not realized yet
                # Renamed from `str`, which shadowed the builtin.
                row = f'{elem.get(SOURCE, None)},{elem.get(TARGET, None)},{elem.get(CONTRACTION_TIME, None)},'\
                      f'{elem.get(KERNEL, None)}\n'
                f.write(row)
def prevalance_at(self, time):
    """Number of infections whose contraction time is at or before `time`."""
    count = 0
    for infection in self._infections_dict.values():
        if infection.get(CONTRACTION_TIME, np.inf) <= time:
            count += 1
    return count
def mean_day_increase_until(self, time):
mean_increase = 0.0
i = 0
for k, v in self._per_day_increases.items():
if k <= time:
mean_increase = (mean_increase * i + v) / (i + 1)
return mean_increase
def detected_cases(self, df_r1):
    """Return detection times of valid detected cases, sorted ascending.

    A detection is valid when tdetection is set and does not fall after the
    person's recovery or death (detections past those events are artifacts).
    :param df_r1: progression-times dataframe (see df_progression_times)
    """
    cond1 = ~df_r1.tdetection.isna()
    # Drop detections that happen after recovery.
    cond2a = ~df_r1.trecovery.isna()
    cond2b = df_r1.tdetection > df_r1.trecovery
    cond2 = ~np.logical_and(cond2a, cond2b)
    if len(df_r1[~df_r1.tdeath.isna()]) > 0:
        # Drop detections that happen after death (only when any deaths exist).
        cond3a = ~df_r1.tdeath.isna()
        cond3b = df_r1.tdetection > df_r1.tdeath
        cond3 = ~np.logical_and(cond3a, cond3b)
        cond23 = np.logical_and(cond2, cond3)
    else:
        cond23 = cond2
    cond = np.logical_and(cond1, cond23)
    df = df_r1[cond]
    detected_cases = df.sort_values(by='tdetection').tdetection
    return detected_cases
@staticmethod
def store_parameter(simulation_output_dir, parameter, filename):
save_path = os.path.join(simulation_output_dir, filename)
with open(save_path, 'wb') as f:
pickle.dump(parameter, f)
def _save_population_parameters(self, simulation_output_dir):
    """Pickle per-person simulation state into the output directory.

    Expected case severities are saved only when SAVE_EXPECTED_SEVERITY is on.
    (Removed an unused local `run_id` that was computed but never referenced.)
    """
    if self._params[SAVE_EXPECTED_SEVERITY]:
        self.store_parameter(simulation_output_dir, self._expected_case_severity, 'expected_case_severity.pkl')
    self.store_parameter(simulation_output_dir, self._infection_status, 'infection_status.pkl')
    self.store_parameter(simulation_output_dir, self._detection_status, 'detection_status.pkl')
    self.store_parameter(simulation_output_dir, self._quarantine_status, 'quarantine_status.pkl')
def _save_dir(self, prefix=''):
    """Create and return a unique output directory for this run.

    The run id combines the optional prefix, the params-file basename, a
    monotonic nanosecond timestamp (uniqueness) and the random seed.
    """
    underscore_if_prefix = '_' if len(prefix) > 0 else ''
    json_name = os.path.splitext(os.path.basename(self.params_path))[0]
    run_id = f'{prefix}{underscore_if_prefix}{json_name}_{int(time.monotonic() * 1e9)}_{self._params[RANDOM_SEED]}'
    simulation_output_dir = os.path.join(self._params[OUTPUT_ROOT_DIR],
                                         self._params[EXPERIMENT_ID],
                                         run_id)
    os.makedirs(simulation_output_dir)
    return simulation_output_dir
def save_serial_interval(self, simulation_output_dir):
    """Persist measured serial intervals and their stats; return the median.

    Returns NaN when no serial intervals were recorded.
    """
    if len(self.serial_intervals) == 0:
        return np.nan
    np_intervals = np.array(self.serial_intervals)
    serial_interval_median = np.median(np_intervals)
    description = scipy.stats.describe(np_intervals)
    serial_interval_str = f'serial interval: measured from {self._params[SERIAL_INTERVAL][MIN_TIME]}'\
                          f' to {self._params[SERIAL_INTERVAL][MAX_TIME]};'\
                          f' median={serial_interval_median}, stats describe: {description}'
    logger.info(serial_interval_str)
    np.save(os.path.join(simulation_output_dir, 'serial_intervals.npy'), np_intervals)
    output_log_file = os.path.join(simulation_output_dir, 'serial_interval_stats.txt')
    with open(output_log_file, "w") as out:
        out.write(serial_interval_str)
    return serial_interval_median
def log_outputs(self, simulation_output_dir):
    """Write all run artifacts (state, inputs, git info, plots) to the output dir."""
    self._save_population_parameters(simulation_output_dir)
    # Keep a copy of the exact input params used for this run.
    copyfile(self.params_path, os.path.join(simulation_output_dir,
                                            f'input_{os.path.basename(self.params_path)}'))

    if self._params[SAVE_INPUT_DATA]:
        copyfile(self.df_individuals_path, os.path.join(simulation_output_dir,
                                                        f'input_{os.path.basename(self.df_individuals_path)}'))
        household_input_path = os.path.join(self._params[OUTPUT_ROOT_DIR], self._params[EXPERIMENT_ID],
                                            'input_df_households.csv')
        if not os.path.exists(household_input_path):
            self._df_households.to_csv(household_input_path)
    # Record the git state for reproducibility.
    repo = Repo(config.ROOT_DIR)
    git_active_branch_log = os.path.join(simulation_output_dir, 'git_active_branch_log.txt')
    with open(git_active_branch_log, 'w') as f:
        f.write(f'Active branch name {repo.active_branch.name}\n')
        f.write(str(repo.active_branch.log()))

    git_status = os.path.join(simulation_output_dir, 'git_status.txt')
    with open(git_status, 'w') as f:
        f.write(repo.git.status())

    serial_interval = self.save_serial_interval(simulation_output_dir)
    if self._params[ENABLE_VISUALIZATION]:
        self._vis.visualize_simulation(simulation_output_dir, serial_interval, self.fear,
                                       self.active_people, self._max_time_offset, self.detected_cases,
                                       self.df_progression_times,
                                       self.df_infections
                                       )
def update_max_time_offset(self):
    """Latch the time offset once the detected-case threshold is first reached.

    Only relevant when MOVE_ZERO_TIME_ACCORDING_TO_DETECTED is on; the offset
    starts at inf and is set exactly once (first time the threshold is met).
    """
    if self._params[MOVE_ZERO_TIME_ACCORDING_TO_DETECTED]:
        if self._max_time_offset == np.inf:
            if self._params[NUMBER_OF_DETECTED_AT_ZERO_TIME] <= self._detected_people:
                self._max_time_offset = self._global_time
                self._init_for_stats = self._active_people
def quick_return_condition(self, initiated_through):
    """ Checks if event of type 'initiated_through' should be abandoned given current situation """
    if initiated_through == HOUSEHOLD:
        # Household contacts are never thinned by fear or schedules.
        return False
    r = mocos_helper.rand()
    if initiated_through == CONSTANT and len(self._params[R_OUT_SCHEDULE]) > 0:
        t = self._global_time - self._max_time_offset
        for s in self._params[R_OUT_SCHEDULE]:
            if s[MIN_TIME] <= t <= s[MAX_TIME]:
                # Inside a schedule window the override fraction replaces the
                # fear-based thinning entirely.
                if r > s[OVERRIDE_R_FRACTION]:
                    return True
                else:
                    return False

    # Thin the event with probability 1 - fear(kernel).
    if r > self.fear(initiated_through):
        return True
    return False
def add_new_infection(self, person_id, infection_status,
                      initiated_by, initiated_through):
    """Register a new infection and kick off its disease progression.

    Records the infection edge, updates serial-interval statistics when the
    event falls inside the measurement window, and bumps the affected count.
    :param initiated_by: source person id, or None for imported/initial cases
    :param initiated_through: kernel (or condition) that caused the infection
    """
    self._detection_status[person_id] = DetectionStatus.NotDetected.value

    self._infections_dict[len(self._infections_dict)] = {
        SOURCE: initiated_by,
        TARGET: person_id,
        CONTRACTION_TIME: self.global_time,
        KERNEL: initiated_through
    }
    if self.global_time >= self._params[SERIAL_INTERVAL][MIN_TIME]:
        if self.global_time < self._params[SERIAL_INTERVAL][MAX_TIME]:
            if initiated_by is not None:
                # Serial interval = contraction time of target minus tminus1 of source.
                serial_interval = self.global_time - self._progression_times_dict[initiated_by][TMINUS1]
                self.serial_intervals.append(serial_interval)

    self._affected_people += 1

    self.generate_disease_progression(person_id,
                                      self.global_time,
                                      infection_status)
# 'Event', [TIME, PERSON_INDEX, TYPE, INITIATED_BY, INITIATED_THROUGH, ISSUED_TIME])
def process_event(self, event) -> bool:
type_ = getattr(event, TYPE)
time = getattr(event, TIME)
if int(time / self._params[LOG_TIME_FREQ]) != int(self._global_time / self._params[LOG_TIME_FREQ]):
memory_use = ps.memory_info().rss / 1024 / 1024
fearC = self.fear(CONSTANT)
fearH = self.fear(HOUSEHOLD)
per_day_increase = 0
if self._last_affected:
per_day_increase = (self.affected_people - self._last_affected)/self._last_affected*100
self._last_affected = self.affected_people
self._per_day_increases[int(self._global_time)] = per_day_increase
logger.info(f'Time: {time:.2f}'
f'\tAffected: {self.affected_people}'
f'\tDetected: {self.detected_people}'
f'\tQuarantined: {self.quarantined_people}'
f'\tPer-day-increase: {per_day_increase:.2f} %'
f'\tActive: {self.active_people}'
f'\tDeaths: {self.deaths}'
f'\tFearC: {fearC}'
f'\tFearH: {fearH}'
f'\tPhysical memory use: {memory_use:.2f} MB')
self._global_time = time
if self._global_time > self._max_time + self._max_time_offset:
return False
person_id = getattr(event, PERSON_INDEX)
initiated_by = getattr(event, INITIATED_BY)
initiated_through = getattr(event, INITIATED_THROUGH)
# TODO the remaining attribute will be useful when we will take into account for backtracing
# issued_time = getattr(event, ISSUED_TIME)
if initiated_by is None and initiated_through != DISEASE_PROGRESSION:
if self.get_infection_status(person_id) == InfectionStatus.Healthy:
if type_ == TMINUS1:
self.add_new_infection(person_id, InfectionStatus.Contraction.value,
initiated_by, initiated_through)
elif type_ == T0:
self.add_new_infection(person_id, InfectionStatus.Infectious.value,
initiated_by, initiated_through)
elif type_ == TMINUS1:
# check if this action is still valid first
try:
initiated_inf_status = self._infection_status[initiated_by]
except KeyError:
logging.error(f'infection status should not be blank for infection! key: {initiated_by}')
if initiated_inf_status in active_states:
if self.quick_return_condition(initiated_through):
return True
current_status = self.get_infection_status(person_id)
if current_status == InfectionStatus.Healthy:
new_infection = False
# TODO below is a spaghetti code that should be sorted out! SORRY!
if initiated_through != HOUSEHOLD:
if initiated_inf_status != InfectionStatus.StayHome:
new_infection = True
if self.get_quarantine_status_(initiated_by) == QuarantineStatus.Quarantine:
new_infection = False
if self.get_quarantine_status_(person_id) == QuarantineStatus.Quarantine:
new_infection = False
else: # HOUSEHOLD kernel:
new_infection = True
if new_infection:
self.add_new_infection(person_id, InfectionStatus.Contraction.value,
initiated_by, initiated_through)
elif type_ == T0:
if self.get_infection_status(person_id) == InfectionStatus.Contraction:
self.handle_t0(person_id)
elif type_ == T1:
if self.get_infection_status(person_id) == InfectionStatus.Infectious:
self._infection_status[person_id] = InfectionStatus.StayHome.value
elif type_ == T2:
if self.get_infection_status(person_id) in [
InfectionStatus.StayHome,
InfectionStatus.Infectious
]:
self._infection_status[person_id] = InfectionStatus.Hospital.value
if self._expected_case_severity[person_id] == ExpectedCaseSeverity.Critical:
self._icu_needed += 1
elif type_ == TDEATH:
if self.get_infection_status(person_id) not in [
InfectionStatus.Death,
InfectionStatus.Recovered
]:
self._deaths += 1
if self._expected_case_severity[person_id] == ExpectedCaseSeverity.Critical:
if self._progression_times_dict[person_id][T2] < self.global_time:
self._icu_needed -= 1
self._active_people -= 1
self._infection_status[person_id] = InfectionStatus.Death.value
elif type_ == TRECOVERY: # TRECOVERY is exclusive with regards to TDEATH (when this comment was added)
if self.get_infection_status(person_id) not in [
InfectionStatus.Recovered,
InfectionStatus.Death
]:
if initiated_through != INITIAL_CONDITIONS:
self._active_people -= 1
if self._expected_case_severity[person_id] == ExpectedCaseSeverity.Critical:
if self._progression_times_dict[person_id][T2] < self.global_time:
self._icu_needed -= 1
self._infection_status[person_id] = InfectionStatus.Recovered
self._immune_people += 1
elif type_ == TDETECTION:
if self.get_infection_status(person_id) not in [
InfectionStatus.Recovered,
InfectionStatus.Healthy
]:
if self.get_detection_status_(person_id) == DetectionStatus.NotDetected:
self._detection_status[person_id] = DetectionStatus.Detected.value
self._detected_people += 1
self.update_max_time_offset()
household_id = self._individuals_household_id[person_id]
for inhabitant in self._households_inhabitants[household_id]:
if self.get_quarantine_status_(inhabitant) == QuarantineStatus.NoQuarantine:
if self.get_infection_status(inhabitant) != InfectionStatus.Death:
self._quarantine_status[inhabitant] = QuarantineStatus.Quarantine.value
self._quarantined_people += 1
if inhabitant not in self._progression_times_dict:
self._progression_times_dict[inhabitant] = {}
self._progression_times_dict[inhabitant][QUARANTINE] = self.global_time
if self.get_infection_status(inhabitant) in [InfectionStatus.Infectious,
InfectionStatus.StayHome]:
# TODO: this has to be implemented better, just a temporary solution:
if self._progression_times_dict[inhabitant].get(TDETECTION, None) is None:
new_detection_time = self.global_time + 2.0
self._progression_times_dict[inhabitant][TDETECTION] = new_detection_time
ev = Event(new_detection_time, inhabitant, TDETECTION,
person_id, 'quarantine_followed_detection',
self.global_time)
self.append_event(ev)
else:
raise ValueError(f'unexpected status of event: {event}')
return True
    def run_simulation(self):
        """Run one simulation per configured random seed and aggregate results.

        For each seed: reseed, reset state, fill the event queue, drain it via
        `_inner_loop`, then append a semicolon-separated summary row. The
        combined log is written to `aggregated_results/results.txt`.
        """
        def _inner_loop(iter):
            # Drains the global event queue `q` until it empties, the stop
            # threshold is hit, or the time horizon is exceeded.
            # Returns True when the run ended "badly" (ICU over capacity or
            # threshold reached), False otherwise.
            threshold_type = self._params[STOP_SIMULATION_THRESHOLD_TYPE]
            value_to_be_checked = None
            start = time.time()
            times_mean = 0.0
            i = 0
            while not q.empty():
                event_start = time.time()
                if threshold_type == PREVALENCE:
                    value_to_be_checked = self.affected_people
                elif threshold_type == DETECTIONS:
                    value_to_be_checked = self.detected_people
                if value_to_be_checked is None:
                    # unrecognized threshold type; the comparison below will raise
                    logging.error(f"we have an error here")
                if value_to_be_checked >= self.stop_simulation_threshold:
                    logging.info(
                        f"The outbreak reached a high number {self.stop_simulation_threshold} ({threshold_type})")
                    break
                event = q.get()
                if not self.process_event(event):
                    # process_event returns False past the time horizon
                    logging.info(f"Processing event {event} returned False")
                    q.task_done()
                    break
                q.task_done()
                # Running mean of per-event processing time (for the log below).
                event_end = time.time()
                elapsed = event_end - event_start
                times_mean = ( times_mean * i + elapsed ) / (i + 1)
                i += 1
            end = time.time()
            print(f'Sim runtime {end - start}, event proc. avg time: {times_mean}')
            # cleaning up priority queue:
            while not q.empty():
                q.get_nowait()
                q.task_done()
            simulation_output_dir = self._save_dir()
            self.save_progression_times(os.path.join(simulation_output_dir, 'output_df_progression_times.csv'))
            self.save_potential_contractions(os.path.join(simulation_output_dir, 'output_df_potential_contractions.csv'))
            if self._params[LOG_OUTPUTS]:
                logger.info('Log outputs')
                self.log_outputs(simulation_output_dir)
            if self._icu_needed >= self._params[ICU_AVAILABILITY]:
                return True
            if value_to_be_checked >= self.stop_simulation_threshold:
                return True
            return False

        seeds = None
        if isinstance(self._params[RANDOM_SEED], str):
            # NOTE(review): eval on a config string is unsafe with untrusted
            # input -- already flagged by the original author below.
            seeds = eval(self._params[RANDOM_SEED])  # TODO: warning, this is unsafe! not use in production
        elif isinstance(self._params[RANDOM_SEED], int):
            seeds = [self._params[RANDOM_SEED]]
        runs = 0
        # Semicolon-separated header for the aggregated results file.
        output_log = 'Last_processed_time;Total_#Affected;Total_#Detected;Total_#Deceased;Total_#Quarantined;'\
                     'c;c_norm;Init_#people;Band_hit_time;Subcritical;runs;fear;detection_rate;'\
                     'incidents_per_last_day;over_icu;hospitalized;zero_time_offset;total_#immune'
        if self._params[ENABLE_ADDITIONAL_LOGS]:
            output_log += ';Prevalence_30days;Prevalence_60days;Prevalence_90days;Prevalence_120days;'\
                          'Prevalence_150days;Prevalence_180days;Prevalence_360days;'\
                          'increase_10;increase_20;increase_30;increase_40;increase_50;increase_100;increase_150'
        output_log += '\n'
        for i, seed in enumerate(seeds):
            runs += 1
            self.parse_random_seed(seed)
            self.setup_simulation()
            logger.info('Filling queue based on initial conditions...')
            self._fill_queue_based_on_initial_conditions()
            logger.info('Filling queue based on auxiliary functions...')
            self._fill_queue_based_on_auxiliary_functions()
            logger.info('Initialization step is done!')
            outbreak = _inner_loop(i + 1)
            last_processed_time = self._global_time
            c = self._params[TRANSMISSION_PROBABILITIES][CONSTANT]
            c_norm = c * self._params[AVERAGE_INFECTIVITY_TIME_CONSTANT_KERNEL]
            subcritical = self._active_people < self._init_for_stats / 2  # at 200 days
            bandtime = self.band_time
            #if bandtime:
            #    return 0
            fear_ = self.fear(CONSTANT)
            detection_rate = self._params[DETECTION_MILD_PROBA]
            affected = self.affected_people
            detected = self.detected_people
            deceased = self.deaths
            quarantined = self.quarantined_people
            incidents_per_last_day = self.prevalance_at(self._global_time) - self.prevalance_at(self._global_time - 1)
            hospitalized = self._icu_needed
            zero_time_offset = self._max_time_offset
            immune = self._immune_people
            output_add = f'{last_processed_time };{affected};{detected};{deceased};{quarantined};{c};{c_norm};'\
                         f'{self._init_for_stats};{bandtime};{subcritical};{runs};{fear_};{detection_rate};'\
                         f'{incidents_per_last_day};{outbreak};{hospitalized};{zero_time_offset};{immune}'
            if self._params[ENABLE_ADDITIONAL_LOGS]:
                prev30 = self.prevalance_at(30)
                prev60 = self.prevalance_at(60)
                prev90 = self.prevalance_at(90)
                prev120 = self.prevalance_at(120)
                prev150 = self.prevalance_at(150)
                prev180 = self.prevalance_at(180)
                prev360 = self.prevalance_at(360)
                mean_increase_at_10 = self.mean_day_increase_until(10)
                mean_increase_at_20 = self.mean_day_increase_until(20)
                mean_increase_at_30 = self.mean_day_increase_until(30)
                mean_increase_at_40 = self.mean_day_increase_until(40)
                mean_increase_at_50 = self.mean_day_increase_until(50)
                mean_increase_at_100 = self.mean_day_increase_until(100)
                mean_increase_at_150 = self.mean_day_increase_until(150)
                output_add += f'{prev30};{prev60};{prev90};{prev120};{prev150};{prev180};{prev360};'\
                              f'{mean_increase_at_10};{mean_increase_at_20};{mean_increase_at_30};'\
                              f'{mean_increase_at_40};{mean_increase_at_50};{mean_increase_at_100};'\
                              f'{mean_increase_at_150}'
            output_add += '\n'
            logger.info(output_add)
            output_log = f'{output_log}{output_add}'
        logger.info(output_log)
        simulation_output_dir = self._save_dir('aggregated_results')
        output_log_file = os.path.join(simulation_output_dir, 'results.txt')
        if self._params[ENABLE_VISUALIZATION]:
            self._vis.visualize_scenario(simulation_output_dir)
        with open(output_log_file, "w") as out:
            out.write(output_log)
    def setup_simulation(self):
        """Reset all per-run counters and per-person state before one run."""
        self._init_for_stats = 0  # TODO support different import methods
        if isinstance(self._params[INITIAL_CONDITIONS], dict):
            cardinalities = self._params[INITIAL_CONDITIONS][CARDINALITIES]
            # Initial infected population = seeded contractions + infectious.
            self._init_for_stats = cardinalities.get(CONTRACTION, 0) + cardinalities.get(INFECTIOUS, 0)

        # TODO  and think how to better group them, ie namedtuple state_stats?
        self._affected_people = 0
        self._active_people = 0
        self._detected_people = 0
        self._quarantined_people = 0
        self._immune_people = 0
        self._deaths = 0
        self._icu_needed = 0
        self._max_time_offset = 0
        if self._params[MOVE_ZERO_TIME_ACCORDING_TO_DETECTED]:
            # Offset stays infinite until enough cases are detected
            # (see update_max_time_offset).
            self._max_time_offset = np.inf
        self._fear_factor = {}
        self._infection_status = {}
        self._infections_dict = {}
        self._progression_times_dict = {}
        self._per_day_increases = {}
        self._global_time = self._params[START_TIME]
        self._max_time = self._params[MAX_TIME]
        # Case severities can be reused across runs to reduce variance.
        if not self._params[REUSE_EXPECTED_CASE_SEVERITIES]:
            self._expected_case_severity = self.draw_expected_case_severity()
        self._last_affected = None
        self.band_time = None
        self._quarantine_status = {}
        self._detection_status = {}
        if self._params[ENABLE_VISUALIZATION]:
            self._vis = Visualize(self._params, self.df_individuals,
                                  self._expected_case_severity, logger)
logger = logging.getLogger(__name__)


@click.command()
@click.option('--params-path', type=click.Path(exists=True))
@click.option('--df-individuals-path', type=click.Path(exists=True))
@click.option('--df-households-path', type=click.Path())
@click.argument('run-simulation')  # ignored positional placeholder
def runner(params_path, df_individuals_path, run_simulation, df_households_path=''):
    """CLI entry point: build an InfectionModel from the given paths and run it."""
    im = InfectionModel(params_path=params_path,
                        df_individuals_path=df_individuals_path,
                        df_households_path=df_households_path or '')
    im.run_simulation()
# TODO: think about separate thread/process to generate random numbers, facilitate sampling
if __name__ == '__main__':
    log_fmt = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    logging.basicConfig(level=logging.INFO, format=log_fmt)
    # Module-level process handle; process_event reads it to report RSS memory.
    pid = os.getpid()
    ps = psutil.Process(pid)
    pd.set_option('display.max_columns', None)
    #fire.Fire(InfectionModel)
    # find .env automagically by walking up directories until it's found, then
    # load up the .env entries as environment variables
    load_dotenv(find_dotenv())
    runner()
| 49.643697 | 178 | 0.629799 | 3.234375 |
a3c55351f480edd5591c58e49706631229c505b7
| 3,870 |
java
|
Java
|
src/com/github/anorber/argparse/ArgumentParser.java
|
anorber/argparse
|
7b29c645b7db8afb4ed1fb14ecd73ebdf2b7080f
|
[
"WTFPL"
] | null | null | null |
src/com/github/anorber/argparse/ArgumentParser.java
|
anorber/argparse
|
7b29c645b7db8afb4ed1fb14ecd73ebdf2b7080f
|
[
"WTFPL"
] | null | null | null |
src/com/github/anorber/argparse/ArgumentParser.java
|
anorber/argparse
|
7b29c645b7db8afb4ed1fb14ecd73ebdf2b7080f
|
[
"WTFPL"
] | 1 |
2019-04-23T03:10:32.000Z
|
2019-04-23T03:10:32.000Z
|
package com.github.anorber.argparse;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.regex.Pattern;
/**
* @author anorber
*
* @param <E>
*/
/**
 * Parses argv-style string arrays against a set of registered option
 * {@link Argument}s and records the options (and their arguments) found.
 *
 * @author anorber
 *
 * @param <E>  type used to identify options (typically an enum)
 */
public class ArgumentParser<E> implements Iterable<Option<E>> {

	private final ArgumentList<E> arguments;
	private final FoundOpts<E> foundOpts;

	/**
	 * Creates an empty parser with no registered arguments.
	 */
	public ArgumentParser() {
		foundOpts = new FoundOpts<E>();
		arguments = new ArgumentList<E>(foundOpts);
	}

	/**
	 * Copy constructor.
	 *
	 * @param other  an ArgumentParser to clone
	 */
	public ArgumentParser(final ArgumentParser<E> other) {
		foundOpts = new FoundOpts<E>(other.foundOpts);
		arguments = new ArgumentList<E>(other.arguments, foundOpts);
	}

	/**
	 * Adds an argument to this parser.
	 *
	 * @param argument  an option argument, must not be null
	 */
	public void addArgument(final Argument<? extends E> argument) {
		if (argument == null) {
			throw new IllegalArgumentException("argument should not be null");
		}
		arguments.add(argument);
	}

	/**
	 * Parses args for opts.
	 *
	 * @param args  the args to be parsed
	 * @return      the rest of the args after that the opts was parsed
	 *              or null if args was null
	 * @throws ArgumentParserException if something goes wrong
	 */
	public String[] parse(final String[] args) throws ArgumentParserException {
		if (args == null) {
			return null;
		}
		final int pos = arguments.parseOpts(args);
		return Arrays.copyOfRange(args, pos, args.length);
	}

	/**
	 * Tells if this parser has seen <code>option</code>.
	 *
	 * @param option  enum representing an option
	 * @return        true if this parser found the option
	 */
	public boolean hasOption(final E option) {
		return foundOpts.containsOption(option);
	}

	/**
	 * Returns an array with the arguments given to this option.
	 *
	 * @param option  enum representing an option
	 * @return        the arguments in the order they appeared,
	 *                or null if the option was never seen
	 */
	public String[] getArguments(final E option) {
		final List<String> opt = foundOpts.getArgs(option);
		if (opt == null) {
			return null;
		}
		return opt.toArray(new String[opt.size()]);
	}

	/**
	 * Returns all arguments for an option as an array. All strings are split
	 * at the delimiter character.
	 *
	 * @param option     the option who's args we want
	 * @param delimiter  character to use for splitting argument strings
	 * @return           string array of arguments (empty if option not seen)
	 */
	public String[] getArguments(final E option, final char delimiter) {
		final List<String> options = foundOpts.getArgs(option);
		if (options == null) {
			return new String[0];
		}
		final List<String> buf = new ArrayList<String>();
		for (String arg : options) {
			// Fix: Pattern.quote makes any delimiter safe. The previous
			// "\\" + delimiter form built invalid regexes (e.g. "\x") for
			// alphanumeric delimiters, throwing PatternSyntaxException.
			for (String substr : arg.split(Pattern.quote(String.valueOf(delimiter)))) {
				buf.add(substr);
			}
		}
		return buf.toArray(new String[buf.size()]);
	}

	/**
	 * Returns all arguments for an option as a string.
	 *
	 * @param option     the option who's args we want
	 * @param delimiter  character to insert between arguments
	 * @return           string of all arguments ("" when there are none)
	 */
	public String getArgumentsString(final E option, final char delimiter) {
		final String[] args = getArguments(option);
		// Fix: also guard the empty-array case, which previously threw
		// ArrayIndexOutOfBoundsException at args[0].
		if (args == null || args.length == 0) {
			return "";
		}
		final StringBuilder buf = new StringBuilder(args[0]);
		for (int i = 1; i < args.length; ++i) {
			buf.append(delimiter).append(args[i]);
		}
		return buf.toString();
	}

	/* @see java.lang.Iterable#iterator()
	 */
	@Override
	public Iterator<Option<E>> iterator() {
		return foundOpts.getIterator();
	}

	/* @see java.lang.Object#hashCode()
	 */
	@Override
	public int hashCode() {
		return arguments.hashCode() ^ foundOpts.hashCode();
	}

	/* @see java.lang.Object#equals(java.lang.Object)
	 */
	@Override
	public boolean equals(final Object obj) {
		return obj instanceof ArgumentParser
				&& arguments.equals(((ArgumentParser<?>) obj).arguments)
				&& foundOpts.equals(((ArgumentParser<?>) obj).foundOpts);
	}
}
| 25.8 | 76 | 0.673127 | 3.015625 |
6916eca49f48313405468ebb35a5fbd7cf5a2102
| 1,308 |
swift
|
Swift
|
Foundation/NetworkClient/Utils/Parameters/Parameters+Value.swift
|
ihor-yarovyi/ReduxStarter
|
c2096d7b90e887e216999bb14e777faa66ace673
|
[
"MIT"
] | null | null | null |
Foundation/NetworkClient/Utils/Parameters/Parameters+Value.swift
|
ihor-yarovyi/ReduxStarter
|
c2096d7b90e887e216999bb14e777faa66ace673
|
[
"MIT"
] | null | null | null |
Foundation/NetworkClient/Utils/Parameters/Parameters+Value.swift
|
ihor-yarovyi/ReduxStarter
|
c2096d7b90e887e216999bb14e777faa66ace673
|
[
"MIT"
] | null | null | null |
//
// Parameters+Value.swift
// NetworkClient
//
// Created by Ihor Yarovyi on 8/23/21.
//
import Foundation
extension NetworkClient.Utils.Parameters {
    /// Typed accessor for a single entry of the parameters dictionary,
    /// addressed by a chain of keys ("a" -> "b" -> "c").
    struct Value: ParametersSubscript {
        // MARK: - Private
        // Shared backing store whose nested dictionary holds all parameters.
        let storage: Storage
        // Path of keys from the root down to this value.
        let keys: [String]
        // Dotted key path ("a.b.c") for Storage's dictionary subscript.
        // NOTE(review): `[keyPath:]` on Dictionary is presumably a project
        // extension that resolves dotted paths -- confirm its semantics.
        var keyPath: String {
            keys.joined(separator: ".")
        }
        /// Reads the stored value cast to `T`; nil on missing key or type mismatch.
        func getValue<T>() -> T? {
            storage.dictionary[keyPath: keyPath] as? T
        }
        /// Stores `value.encodedValue` at this key path. When the encoded
        /// value is nil: stores `NSNull()` if `includeNull`, otherwise
        /// removes the entry.
        func setValue<T: ParametersEncodable>(_ value: T, includeNull: Bool = false) {
            if let encodedValue = value.encodedValue {
                storage.dictionary[keyPath: keyPath] = encodedValue
            } else if includeNull {
                storage.dictionary[keyPath: keyPath] = NSNull()
            } else {
                storage.dictionary[keyPath: keyPath] = nil
            }
        }
        init(storage: Storage, keys: [String]) {
            self.storage = storage
            self.keys = keys
        }
        /// Returns a child accessor one key level deeper.
        func parametersValue(for key: String) -> Value {
            NetworkClient.Utils.Parameters.Value(storage: storage, keys: keys + [key])
        }
        /// Raw untyped value at this key path.
        var value: Any? {
            storage.dictionary[keyPath: keyPath]
        }
    }
}
| 26.693878 | 86 | 0.526758 | 3.015625 |
2cfb9af139ad425a03cf71bd0daefa032bb9e9dd
| 1,568 |
cpp
|
C++
|
6 复试/1 机试/资料/清华计算机考研机试/2018/cong.cpp
|
ladike/912_project
|
5178c1c93ac6ca30ffc72dd689f5c6932704b4ab
|
[
"MIT"
] | 1 |
2022-03-02T16:05:49.000Z
|
2022-03-02T16:05:49.000Z
|
6 复试/1 机试/资料/清华计算机考研机试/2018/cong.cpp
|
ladike/912_project
|
5178c1c93ac6ca30ffc72dd689f5c6932704b4ab
|
[
"MIT"
] | null | null | null |
6 复试/1 机试/资料/清华计算机考研机试/2018/cong.cpp
|
ladike/912_project
|
5178c1c93ac6ca30ffc72dd689f5c6932704b4ab
|
[
"MIT"
] | null | null | null |
# include<iostream>
# include<map>
# include<vector>
using namespace std;
// A board position.
typedef struct{int x,y;} position;
// Worm id plus its fight value, used when resolving same-cell combat.
typedef struct{int id,f;} idfight;
// A worm: id, position p, direction d (0..3), fight value f, alive flag.
typedef struct{int id;position p;int d;int f;bool live;} cong;
typedef vector<cong> conglist;
conglist all_cong;  // all worms, indexed by id
// Cell key (x*1000+y) -> worms that landed there this step.
// NOTE(review): the key collides if y can reach 1000 -- confirm input limits.
map<int,vector<idfight> >war_map;
int n,m,k,times;  // n worms; m,k board parameters; times = simulation steps
void init(){
    // Read n (worm count), m and k, then each worm's position (x, y),
    // direction d and fight value f; finally the number of steps.
    // NOTE(review): m bounds the y coordinate (see action); k is read but
    // never used -- confirm against the problem statement.
    cin>>n>>m>>k;
    for(int i=0;i<n;i++){
        int x,y,d,f;
        cin >> x >>y>>d>>f;
        cong c1 ={i,{x,y},d,f,1};  // alive flag starts at 1
        all_cong.push_back(c1);
    }
    cin >> times;
}
void action(cong &c){
    // Move a living worm one step in its direction; at a wall it turns
    // around instead of moving. Record where it ends up in war_map.
    // Directions: 0 = y+, 1 = y-, 2 = x-, 3 = x+.
    if(c.live){
        switch(c.d){
            case 0: if(c.p.y==m) c.d=1;else c.p.y++;break;
            case 1: if(c.p.y==1) c.d=0;else c.p.y--;break;
            case 2: if(c.p.x==1) c.d=3;else c.p.x--;break;
            // NOTE(review): direction 3 moves x but the boundary test reads
            // y, and compares against n (the worm count) while cases 0-2
            // test the coordinate being moved -- suspected bug; confirm the
            // intended bound against the problem statement before fixing.
            case 3: if(c.p.y==n) c.d=2;else c.p.x++;break;
            default:;break;
        }
        int pi = c.p.x*1000+c.p.y;  // pack (x, y) into one map key
        idfight idf = {c.id,c.f};
        war_map[pi].push_back(idf);
    }
}
void printans(){
    // Output one line per worm in id order: y coordinate, then x.
    for(size_t idx = 0; idx < all_cong.size(); ++idx)
        cout << all_cong[idx].p.y << " " << all_cong[idx].p.x << endl;
}
void fight(){
    // Resolve combat: on every cell holding more than one worm, only the
    // worm(s) with the maximal fight value f survive.
    map<int,vector<idfight> >::iterator it;
    it = war_map.begin();
    while(it!=war_map.end()){
        if((*it).second.size()>1){
            int max = 0;
            // First pass: find the strongest fighter on this cell.
            for(vector<idfight>::iterator i = (*it).second.begin();i!=(*it).second.end();i++){
                if((*i).f>max)max = (*i).f;
            }
            // Second pass: every worm weaker than the strongest dies.
            for(vector<idfight>::iterator i = (*it).second.begin();i!=(*it).second.end();i++){
                if((*i).f<max) all_cong[(*i).id].live=0;
            }
        }
        it++;
    }
}
int main() {
    init();
    while(times--){
        // Move every living worm one step, recording landing cells.
        for(vector<cong>::iterator i = all_cong.begin();i!=all_cong.end();i++){
            action(*i);
        }
        fight();           // resolve same-cell combat
        war_map.clear();   // occupancy map is rebuilt each step
    }
    printans();
    return 0;
}
| 23.402985 | 87 | 0.586097 | 3.015625 |
2d7ae418b1cebf4e9eb40cb39c85af6ffbcff61f
| 1,169 |
go
|
Go
|
example/example.go
|
kibaamor/golog
|
ee92c16f5ccbe42b6e3870aaf337f8407d8aa045
|
[
"MIT"
] | null | null | null |
example/example.go
|
kibaamor/golog
|
ee92c16f5ccbe42b6e3870aaf337f8407d8aa045
|
[
"MIT"
] | null | null | null |
example/example.go
|
kibaamor/golog
|
ee92c16f5ccbe42b6e3870aaf337f8407d8aa045
|
[
"MIT"
] | null | null | null |
package main
import (
"context"
"os"
"github.com/kibaamor/golog"
)
// main demonstrates the golog API: basic logger creation, combining
// loggers, level filtering, handler attachment and the Helper wrapper.
// The `// got:` comments show the expected output of each call.
func main() {
	// basic logger
	logger := golog.NewTermLogger(os.Stderr, true)
	// got: `[INFO] 1:1 k1:v1 k2:[1 1]`
	_ = logger.Log(context.Background(), golog.LevelInfo, 1, 1, "k1", "v1", "k2", []int{1, 1})

	// combine multiple logger
	// Discard is logger with discard everything
	logger = golog.MultiLogger(logger, golog.Discard)
	// got: `[INFO] 1:1 k1:v1 k2:[1 1]`
	_ = logger.Log(context.Background(), golog.LevelInfo, 1, 1, "k1", "v1", "k2", []int{1, 1})

	// filter with log level: entries below Warn are dropped from here on
	logger = golog.WithFilter(logger, golog.FilterLevel(golog.LevelWarn))
	// got: ``
	_ = logger.Log(context.Background(), golog.LevelInfo, 1, 1)

	// auto add timestamp and caller information
	logger = golog.WithHandler(logger, golog.HandlerDefaultTimestamp, golog.HandlerDefaultCaller)
	// got:`[2021-12-10 12:33:26.968][example.go:24][WARN] 1:1`
	_ = logger.Log(context.Background(), golog.LevelWarn, 1, 1)

	// Helper provides useful apis, such as Info, Infow.
	helper := golog.NewHelper(logger)
	// got: `[2021-12-10 12:37:52.699][helper.go:76][ERROR] golog: hi`
	helper.Errorf("golog: %v", "hi")
}
| 28.512195 | 94 | 0.673225 | 3.03125 |
447a084bb3d50424a483e401e93fb5de6c9bdd6c
| 1,941 |
py
|
Python
|
meteor_reasoner/utils/loader.py
|
wdimmy/MeTeoR
|
7d0b48bf32eca17a1d507476112379daa3dafc31
|
[
"MIT"
] | 8 |
2021-12-01T14:17:06.000Z
|
2022-03-05T13:22:27.000Z
|
meteor_reasoner/utils/loader.py
|
wdimmy/MeTeoR
|
7d0b48bf32eca17a1d507476112379daa3dafc31
|
[
"MIT"
] | null | null | null |
meteor_reasoner/utils/loader.py
|
wdimmy/MeTeoR
|
7d0b48bf32eca17a1d507476112379daa3dafc31
|
[
"MIT"
] | null | null | null |
from meteor_reasoner.utils.parser import *
from collections import defaultdict
def load_dataset(lines):
    """
    Read string-like facts into a dictionary object.

    Args:
        lines (list of strings): a list of facts in the form of A(x,y,z)@[1,2] or A@[1,2)

    Returns:
        A defaultdict object, in which the key is the predicate and the value is a dictionary (key is
        the entity and the value is a list of Interval instances) or a list of Interval instance when
        there is no entity.
    """
    D = defaultdict(lambda: defaultdict(list))
    for line in lines:
        # Whitespace is irrelevant inside a fact; blank lines are skipped.
        line = line.strip().replace(" ","")
        if line == "":
            continue
        try:
            predicate, entity, interval = parse_str_fact(line)
        except:
            # NOTE(review): bare except silently drops unparsable lines --
            # confirm malformed facts should be ignored rather than reported.
            continue
        if predicate not in D:
            # First occurrence decides the shape: dict-of-entities when the
            # fact has an entity, plain interval list otherwise.
            if entity:
                D[predicate][entity] = [interval]
            else:
                D[predicate] = [interval]
        else:
            # A predicate must be used consistently with or without entities.
            if isinstance(D[predicate], list) and entity is not None:
                raise ValueError("One predicate can not have both entity and Null cases!")
            if not isinstance(D[predicate], list) and entity is None:
                raise ValueError("One predicate can not have both entity and Null cases!")
            if entity:
                if entity in D[predicate]:
                    D[predicate][entity].append(interval)
                else:
                    D[predicate][entity] = [interval]
            else:
                D[predicate].append(interval)
    return D
def load_program(rules):
    """
    Format each string-like rule into a rule instance.

    Args:
        rules (list of strings): each string represents a rule, e.g. A(X):- Boxminus[1,2]B(X)

    Returns:
        list of rule instances
    """
    return [parse_rule(rule) for rule in rules]
| 28.970149 | 101 | 0.574446 | 3.1875 |
ef1d2b289a50d0621dcdf6358e1b4068a88f2b00
| 2,333 |
c
|
C
|
Homework/SimpleShell/simpleshell.c
|
gajavegr/csse332
|
060d3cf18cd38d9f4524c42f1e755f5d24de2c51
|
[
"MIT"
] | null | null | null |
Homework/SimpleShell/simpleshell.c
|
gajavegr/csse332
|
060d3cf18cd38d9f4524c42f1e755f5d24de2c51
|
[
"MIT"
] | null | null | null |
Homework/SimpleShell/simpleshell.c
|
gajavegr/csse332
|
060d3cf18cd38d9f4524c42f1e755f5d24de2c51
|
[
"MIT"
] | null | null | null |
/* Copyright 2016 Rose-Hulman
But based on idea from http://cnds.eecs.jacobs-university.de/courses/caoslab-2007/
*/
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <sys/types.h>
#include <unistd.h>
#include <sys/wait.h>
#include <stdbool.h>
#include <string.h>
/* SIGCHLD handler: reap one terminated child so it cannot linger as a zombie. */
void handler(){
    (void) wait(NULL);
}
int main() {
  char command[82];
  char *parsed_command[2];
  //takes at most two input arguments
  // infinite loop but ^C quits
  while (1) {
    printf("SHELL%% ");
    fgets(command, 82, stdin);
    command[strlen(command) - 1] = '\0';//remove the \n
    /* Split at the first space: parsed_command[0] = command word,
       parsed_command[1] = the remainder (NULL when there is none). */
    int len_1;
    for(len_1 = 0;command[len_1] != '\0';len_1++){
      if(command[len_1] == ' ')
        break;
    }
    command[len_1] = '\0';
    parsed_command[0] = command;
    if(len_1 == strlen(command)){
      printf("Command is '%s' with no arguments\n", parsed_command[0]);
      parsed_command[1] = NULL;
    }
    else{
      parsed_command[1] = command + len_1 + 1;
      printf("Command is '%s' with argument '%s'\n", parsed_command[0], parsed_command[1]);
    }
    char *const command2[] = {parsed_command[0],parsed_command[1], NULL};
    /* Commands whose word starts with "BG" are run in the background. */
    char b = parsed_command[0][0];
    char g = parsed_command[0][1];
    bool background = (b=='B')&&(g == 'G');
    // printf("background: %d",background);
    int childnum = fork();
    // printf("%d\n",childnum);
    int childnum2;
    // int grandchild;
    // int child;
    if (childnum == 0 && background){
      /* Background: the child forks a grandchild that execs the command
         (with the "BG" prefix stripped from the program name); the child
         waits for it and announces completion, so the shell prompt itself
         is not blocked. */
      childnum2 = fork();
      if (childnum2 == 0){
        char* stringWithoutPrefix = &parsed_command[0][2];
        execvp(stringWithoutPrefix,command2);
        exit(0);
      }
      else{
        wait(&childnum);
        printf("Background command finished\n");
        exit(0);
      }
    }
    else if (childnum == 0 && !background){
      /* Foreground child. execvp replaces the process image, so the two
         lines below run only when execvp fails.
         NOTE(review): the signal() call here is therefore dead code, and
         <signal.h> is not among the includes -- confirm and clean up. */
      execvp(parsed_command[0],command2);
      signal(SIGCHLD, handler);
      exit(0);
    }
    else if (childnum !=0 && !background){
      /* Parent waits for the foreground child to finish. */
      signal(SIGCHLD, handler);
      wait(NULL);
    }
  }
  // wait(NULL);
}
| 30.298701 | 98 | 0.498928 | 3.078125 |
995fae73bf8a3b8fde6063a9927e84ca93e9dc7a
| 8,518 |
lua
|
Lua
|
nvim_2021/lua/core/galaxyline.lua
|
bamzi/dotconfig
|
4db4130e0a12a3c5aa1db05d353ee9c54fcff676
|
[
"MIT"
] | null | null | null |
nvim_2021/lua/core/galaxyline.lua
|
bamzi/dotconfig
|
4db4130e0a12a3c5aa1db05d353ee9c54fcff676
|
[
"MIT"
] | null | null | null |
nvim_2021/lua/core/galaxyline.lua
|
bamzi/dotconfig
|
4db4130e0a12a3c5aa1db05d353ee9c54fcff676
|
[
"MIT"
] | null | null | null |
-- if not package.loaded['galaxyline'] then
-- return
-- end
require "core.status_colors"

local Log = require "core.log"
-- Load galaxyline defensively so a missing plugin does not break startup.
local status_ok, gl = pcall(require, "galaxyline")
if not status_ok then
  Log:get_default().error "Failed to load galaxyline"
  return
end

-- NOTE: if someone defines colors but doesn't have them then this will break
local palette_status_ok, colors = pcall(require, color_theme .. ".palette")
if not palette_status_ok then
  -- Fall back to the user-configured palette when the theme has none.
  colors = galaxyline_config.colors
end

local condition = require "galaxyline.condition"
local gls = gl.section
-- Filetypes that get the short (inactive-style) statusline.
gl.short_line_list = { "NvimTree", "vista", "dbui", "packer" }
local function get_mode_name()
  -- Translate vim.fn.mode() codes into readable statusline labels.
  -- "\22" is the CTRL-V byte that mode() reports for visual-block mode.
  local labels = {
    n = "NORMAL",
    i = "INSERT",
    c = "COMMAND",
    v = "VISUAL",
    V = "VISUAL LINE",
    t = "TERMINAL",
    R = "REPLACE",
    ["\22"] = "VISUAL BLOCK",
  }
  return labels[vim.fn.mode()]
end
table.insert(gls.left, {
  ViMode = {
    provider = function()
      -- Colour per mode() code; "\22" is CTRL-V (visual block) and
      -- "\19" is CTRL-S (select block).
      local mode_color = {
        n = colors.blue,
        i = colors.green,
        v = colors.purple,
        ["\22"] = colors.purple,
        V = colors.purple,
        c = colors.magenta,
        no = colors.blue,
        s = colors.orange,
        S = colors.orange,
        ["\19"] = colors.orange,
        ic = colors.yellow,
        R = colors.red,
        Rv = colors.red,
        cv = colors.blue,
        ce = colors.blue,
        r = colors.cyan,
        rm = colors.cyan,
        ["r?"] = colors.cyan,
        ["!"] = colors.blue,
        t = colors.blue,
      }
      local current_color = mode_color[vim.fn.mode()]
      if galaxyline_config.show_mode then
        local name = get_mode_name()
        -- Fall back to the plain colour bar when the mode has no label.
        if name ~= nil then
          vim.api.nvim_command("hi GalaxyViMode guibg=" .. current_color)
          vim.api.nvim_command("hi GalaxyViMode guifg=" .. colors.alt_bg)
          return " " .. name .. " "
        end
      end
      vim.api.nvim_command("hi GalaxyViMode guibg=" .. colors.alt_bg)
      vim.api.nvim_command("hi GalaxyViMode guifg=" .. current_color)
      return "▊"
    end,
    separator_highlight = { "NONE", colors.alt_bg },
    highlight = { "NONE", colors.alt_bg },
  },
})
-- print(vim.fn.getbufvar(0, 'ts'))
-- NOTE(review): this call discards its result -- looks like leftover
-- debugging; confirm it can be removed.
vim.fn.getbufvar(0, "ts")

-- Git branch icon, shown only inside a git work tree.
table.insert(gls.left, {
  GitIcon = {
    provider = function()
      return " "
    end,
    condition = condition.check_git_workspace,
    separator = " ",
    separator_highlight = { "NONE", colors.alt_bg },
    highlight = { colors.orange, colors.alt_bg },
  },
})

table.insert(gls.left, {
  GitBranch = {
    provider = "GitBranch",
    condition = condition.check_git_workspace,
    separator = " ",
    separator_highlight = { "NONE", colors.alt_bg },
    highlight = { colors.grey, colors.alt_bg },
  },
})

-- Diff counters (added / modified / removed), hidden on narrow windows.
table.insert(gls.left, {
  DiffAdd = {
    provider = "DiffAdd",
    condition = condition.hide_in_width,
    icon = "  ",
    highlight = { colors.green, colors.alt_bg },
  },
})

table.insert(gls.left, {
  DiffModified = {
    provider = "DiffModified",
    condition = condition.hide_in_width,
    icon = " 柳",
    highlight = { colors.blue, colors.alt_bg },
  },
})

table.insert(gls.left, {
  DiffRemove = {
    provider = "DiffRemove",
    condition = condition.hide_in_width,
    icon = "  ",
    highlight = { colors.red, colors.alt_bg },
  },
})

-- Spacer between the git block and whatever follows.
table.insert(gls.left, {
  Filler = {
    provider = function()
      return " "
    end,
    highlight = { colors.grey, colors.alt_bg },
  },
})
-- get output from shell command
-- Run a shell command and return its captured stdout.
-- When `raw` is truthy the output is returned verbatim; otherwise
-- leading/trailing whitespace is stripped and newline runs collapse
-- into single spaces.
function os.capture(cmd, raw)
  local handle = assert(io.popen(cmd, "r"))
  local output = assert(handle:read "*a")
  handle:close()
  if raw then
    return output
  end
  output = output:gsub("^%s+", ""):gsub("%s+$", ""):gsub("[\n\r]+", " ")
  return output
end
-- cleanup virtual env
-- Reduce a virtualenv path such as "/home/u/.venvs/proj" to its final
-- path component ("proj"). Names without a "/" pass through unchanged.
local function env_cleanup(venv)
  if string.find(venv, "/") then
    local last_segment = venv
    for segment in venv:gmatch "([^/]+)" do
      last_segment = segment
    end
    return last_segment
  end
  return venv
end
-- Return " (envname)" for the active conda or virtualenv when the
-- current buffer is a Python file, otherwise an empty string.
-- CONDA_DEFAULT_ENV takes priority over VIRTUAL_ENV, as before.
local PythonEnv = function()
  if vim.bo.filetype ~= "python" then
    return ""
  end
  local env = os.getenv "CONDA_DEFAULT_ENV" or os.getenv "VIRTUAL_ENV"
  if env ~= nil then
    return " (" .. env_cleanup(env) .. ")"
  end
  return ""
end
-- Active Python env segment (refreshed on BufEnter).
table.insert(gls.left, {
  VirtualEnv = {
    provider = PythonEnv,
    event = "BufEnter",
    highlight = { colors.green, colors.alt_bg },
  },
})

-- LSP diagnostic counters: errors, warnings, info and hints.
table.insert(gls.right, {
  DiagnosticError = {
    provider = "DiagnosticError",
    icon = " ",
    highlight = { colors.red, colors.alt_bg },
  },
})
table.insert(gls.right, {
  DiagnosticWarn = {
    provider = "DiagnosticWarn",
    icon = " ",
    highlight = { colors.orange, colors.alt_bg },
  },
})
table.insert(gls.right, {
  DiagnosticInfo = {
    provider = "DiagnosticInfo",
    icon = " ",
    highlight = { colors.yellow, colors.alt_bg },
  },
})
table.insert(gls.right, {
  DiagnosticHint = {
    provider = "DiagnosticHint",
    icon = " ",
    highlight = { colors.blue, colors.alt_bg },
  },
})

-- Treesitter indicator: icon shown only when a treesitter highlighter
-- is active for some buffer.
table.insert(gls.right, {
  TreesitterIcon = {
    provider = function()
      if next(vim.treesitter.highlighter.active) ~= nil then
        return " "
      end
      return ""
    end,
    separator = " ",
    separator_highlight = { "NONE", colors.alt_bg },
    highlight = { colors.green, colors.alt_bg },
  },
})
-- Build a comma-separated list of LSP clients attached to the current
-- buffer, plus any null-ls providers registered for its filetype.
-- Returns `msg` (default "LSP Inactive") when no client is attached.
local function get_attached_provider_name(msg)
  msg = msg or "LSP Inactive"
  local buf_clients = vim.lsp.buf_get_clients()
  if next(buf_clients) == nil then
    return msg
  end
  -- Collect regular client names; null-ls is represented by its
  -- individual providers, appended below.
  local buf_client_names = {}
  for _, client in pairs(buf_clients) do
    if client.name ~= "null-ls" then
      table.insert(buf_client_names, client.name)
    end
  end
  local null_ls = require "lsp.null-ls"
  local null_ls_providers = null_ls.list_supported_provider_names(vim.bo.filetype)
  vim.list_extend(buf_client_names, null_ls_providers)
  return table.concat(buf_client_names, ", ")
end
-- LSP client list segment; hidden for the dashboard filetype.
table.insert(gls.right, {
  ShowLspClient = {
    provider = get_attached_provider_name,
    condition = function()
      local tbl = { ["dashboard"] = true, [" "] = true }
      if tbl[vim.bo.filetype] then
        return false
      end
      return true
    end,
    icon = " ",
    highlight = { colors.grey, colors.alt_bg },
  },
})

-- Cursor position (line:column) and scroll percentage.
table.insert(gls.right, {
  LineInfo = {
    provider = "LineColumn",
    separator = " ",
    separator_highlight = { "NONE", colors.alt_bg },
    highlight = { colors.grey, colors.alt_bg },
  },
})
table.insert(gls.right, {
  PerCent = {
    provider = "LinePercent",
    separator = " ",
    separator_highlight = { "NONE", colors.alt_bg },
    highlight = { colors.grey, colors.alt_bg },
  },
})

-- Indentation summary: "Spaces: N" when 'expandtab' is set,
-- otherwise "Tab size: N" (N = 'shiftwidth').
table.insert(gls.right, {
  Tabstop = {
    provider = function()
      local label = "Spaces: "
      if not vim.api.nvim_buf_get_option(0, "expandtab") then
        label = "Tab size: "
      end
      return label .. vim.api.nvim_buf_get_option(0, "shiftwidth") .. " "
    end,
    condition = condition.hide_in_width,
    separator = " ",
    separator_highlight = { "NONE", colors.alt_bg },
    highlight = { colors.grey, colors.alt_bg },
  },
})

-- Filetype and file encoding; hidden on narrow windows.
table.insert(gls.right, {
  BufferType = {
    provider = "FileTypeName",
    condition = condition.hide_in_width,
    separator = " ",
    separator_highlight = { "NONE", colors.alt_bg },
    highlight = { colors.grey, colors.alt_bg },
  },
})
table.insert(gls.right, {
  FileEncode = {
    provider = "FileEncode",
    condition = condition.hide_in_width,
    separator = " ",
    separator_highlight = { "NONE", colors.alt_bg },
    highlight = { colors.grey, colors.alt_bg },
  },
})

-- Trailing spacer closing the right section.
table.insert(gls.right, {
  Space = {
    provider = function()
      return " "
    end,
    separator = " ",
    separator_highlight = { "NONE", colors.alt_bg },
    highlight = { colors.grey, colors.alt_bg },
  },
})

-- Inactive-window (short line) statusline: filetype and file name only.
table.insert(gls.short_line_left, {
  BufferType = {
    provider = "FileTypeName",
    separator = " ",
    separator_highlight = { "NONE", colors.alt_bg },
    highlight = { colors.alt_bg, colors.alt_bg },
  },
})
table.insert(gls.short_line_left, {
  SFileName = {
    provider = "SFileName",
    condition = condition.buffer_not_empty,
    highlight = { colors.alt_bg, colors.alt_bg },
  },
})
--table.insert(gls.short_line_right[1] = {BufferIcon = {provider = 'BufferIcon', highlight = {colors.grey, colors.alt_bg}}})
| 24.267806 | 124 | 0.606715 | 3.25 |
854aaf6ee38815128406a1b3c213544eb71339d6
| 1,094 |
cs
|
C#
|
Tests/TestPrograms/LinqTest6/LinqTest6.cs
|
ravimad/SEAL
|
41dcf675e988645d07844ae575a8b774c5ae3164
|
[
"MS-PL"
] | 1 |
2016-03-13T23:19:45.000Z
|
2016-03-13T23:19:45.000Z
|
Tests/TestPrograms/LinqTest6/LinqTest6.cs
|
ravimad/SEAL
|
41dcf675e988645d07844ae575a8b774c5ae3164
|
[
"MS-PL"
] | null | null | null |
Tests/TestPrograms/LinqTest6/LinqTest6.cs
|
ravimad/SEAL
|
41dcf675e988645d07844ae575a8b774c5ae3164
|
[
"MS-PL"
] | null | null | null |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace LinqTest6
{
public class LinqTest
{
    public int f;

    public void foo(LinqTest lt)
    {
        var col = new List<LinqTest> { lt };
        var y = new List<LinqTest>(RemoveDefaults(col));
        var x = y[0];
        x.f = 1;
    }

    /// <summary>
    /// Removes default values from a list
    /// </summary>
    /// <typeparam name="T">Value type</typeparam>
    /// <param name="Value">List to cull items from</param>
    /// <returns>An IEnumerable with the default values removed</returns>
    public IEnumerable<T> RemoveDefaults<T>(IEnumerable<T> Value)
    {
        if (Value == null)
            yield break;
        // Use EqualityComparer<T>.Default rather than x.Equals(default(T)):
        // for reference types default(T) is null and the original call would
        // throw NullReferenceException on null elements instead of
        // filtering them out.
        foreach (T Item in Value.Where(x => !EqualityComparer<T>.Default.Equals(x, default(T))))
            yield return Item;
    }
}
}
| 33.151515 | 112 | 0.533821 | 3.078125 |
dd576c7367022340c5dbe754d4f5432be2fcbe34
| 2,288 |
java
|
Java
|
Examples/src/main/java/com/aspose/slides/examples/Slides/Table/SettingTextFormattingInsideTable.java
|
Muhammad-Adnan-Ahmad/Aspose.Slides-for-Java
|
8ef35cedb58ded3d007c3dd18a16ed15edc08f40
|
[
"MIT"
] | 1 |
2018-10-25T13:03:50.000Z
|
2018-10-25T13:03:50.000Z
|
Examples/src/main/java/com/aspose/slides/examples/Slides/Table/SettingTextFormattingInsideTable.java
|
Muhammad-Adnan-Ahmad/Aspose.Slides-for-Java
|
8ef35cedb58ded3d007c3dd18a16ed15edc08f40
|
[
"MIT"
] | null | null | null |
Examples/src/main/java/com/aspose/slides/examples/Slides/Table/SettingTextFormattingInsideTable.java
|
Muhammad-Adnan-Ahmad/Aspose.Slides-for-Java
|
8ef35cedb58ded3d007c3dd18a16ed15edc08f40
|
[
"MIT"
] | 2 |
2020-05-13T06:04:43.000Z
|
2020-10-26T07:26:15.000Z
|
package com.aspose.slides.examples.Slides.Table;
import com.aspose.slides.IAutoShape;
import com.aspose.slides.IParagraph;
import com.aspose.slides.IPortion;
import com.aspose.slides.ISlide;
import com.aspose.slides.ITable;
import com.aspose.slides.ITextFrame;
import com.aspose.slides.ParagraphFormat;
import com.aspose.slides.PortionFormat;
import com.aspose.slides.Presentation;
import com.aspose.slides.SaveFormat;
import com.aspose.slides.ShapeType;
import com.aspose.slides.TextAlignment;
import com.aspose.slides.TextFrameFormat;
import com.aspose.slides.TextVerticalType;
import com.aspose.slides.examples.Utils;
/**
 * Aspose.Slides example: applies portion, paragraph and text-frame
 * formatting to every cell of a table in one call each via
 * {@code ITable.setTextFormat}, then saves the presentation.
 */
public class SettingTextFormattingInsideTable {

    public static void main(String[] args) {
        //ExStart:SettingTextFormattingInsideTable
        // The path to the documents directory.
        String dataDir = Utils.getDataDir(SettingTextFormattingInsideTable.class);

        // Instantiate Presentation class that represents PPTX
        Presentation pres = new Presentation();

        // Access first slide
        ISlide sld = pres.getSlides().get_Item(0);

        // NOTE(review): a freshly created Presentation has no shapes on its
        // slide, so this cast assumes the slide already contains a table as
        // its first shape — confirm the sample is meant to load a file.
        ITable someTable = (ITable)pres.getSlides().get_Item(0).getShapes().get_Item(0); // let's say that the first shape on the first slide is a table

        // setting table cells' font height
        PortionFormat portionFormat = new PortionFormat();
        portionFormat.setFontHeight(25);
        someTable.setTextFormat(portionFormat);

        // setting table cells' text alignment and right margin in one call
        ParagraphFormat paragraphFormat = new ParagraphFormat();
        paragraphFormat.setAlignment(TextAlignment.Right);
        paragraphFormat.setMarginRight(20);
        someTable.setTextFormat(paragraphFormat);

        // setting table cells' text vertical type
        TextFrameFormat textFrameFormat = new TextFrameFormat();
        textFrameFormat.setTextVerticalType(TextVerticalType.Vertical);
        someTable.setTextFormat(textFrameFormat);

        // Save the PPTX to Disk
        pres.save(dataDir + "Textbox.pptx", SaveFormat.Pptx);
        //ExEnd:SettingTextFormattingInsideTable
    }
}
| 37.508197 | 161 | 0.689248 | 3 |
c671cb5facf5eb5b1969379304b31fb0a2a4c6cc
| 5,291 |
py
|
Python
|
jackal/scripts/dns_discover.py
|
mwgielen/jackal
|
7fe62732eb5194b7246215d5277fb37c398097bf
|
[
"MIT"
] | 10 |
2018-01-17T20:11:30.000Z
|
2022-02-20T21:31:37.000Z
|
jackal/scripts/dns_discover.py
|
mwgielen/jackal
|
7fe62732eb5194b7246215d5277fb37c398097bf
|
[
"MIT"
] | null | null | null |
jackal/scripts/dns_discover.py
|
mwgielen/jackal
|
7fe62732eb5194b7246215d5277fb37c398097bf
|
[
"MIT"
] | 1 |
2018-06-21T16:47:16.000Z
|
2018-06-21T16:47:16.000Z
|
#!/usr/bin/env python3
import argparse
import ipaddress
import re
import socket
import subprocess
import dns.resolver
import dns.zone
import psutil
from jackal import HostSearch, RangeSearch
from jackal.utils import print_error, print_notification, print_success
def get_configured_dns():
    """
    Returns the configured DNS servers with the use of nmcli.

    Returns:
        List of dotted-quad IP strings; empty when nmcli is not
        installed or exits with an error.
    """
    ips = []
    # Hoisted so the pattern is compiled once, not per matching line.
    pattern = re.compile(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}")
    try:
        output = subprocess.check_output(['nmcli', 'device', 'show'])
        output = output.decode('utf-8')
        for line in output.split('\n'):
            if 'DNS' in line:
                ips.extend(pattern.findall(line))
    except (FileNotFoundError, subprocess.CalledProcessError):
        # nmcli missing, or present but returned a non-zero exit status
        # (the original only caught the former and would crash on the latter).
        pass
    return ips
def get_resolv_dns():
    """
    Returns the search domains configured in /etc/resolv.conf.

    Note: despite the original docstring, this reads the ``search``
    directives (DNS suffixes used for domain resolution below), not
    the ``nameserver`` entries. All domains on a search line are
    returned; the original took only the first and raised IndexError
    on a bare ``search`` line.
    """
    result = []
    try:
        for line in open('/etc/resolv.conf', 'r'):
            if line.startswith('search'):
                # split() handles tabs / multiple spaces; [1:] keeps
                # every listed domain and is safe when none are listed.
                result.extend(line.split()[1:])
    except FileNotFoundError:
        pass
    return result
def zone_transfer(address, dns_name):
    """
    Tries to perform a zone transfer.

    Args:
        address: IP address string of the DNS server to query.
        dns_name: zone / domain name to request via AXFR.

    Returns:
        List of IP address strings from the zone's A records; empty
        when the server refuses the transfer.
    """
    ips = []
    try:
        print_notification("Attempting dns zone transfer for {} on {}".format(dns_name, address))
        z = dns.zone.from_xfr(dns.query.xfr(address, dns_name))
    except dns.exception.FormError:
        # Server answered but rejected / malformed the AXFR response.
        print_notification("Zone transfer not allowed")
        return ips
    names = z.nodes.keys()
    print_success("Zone transfer successfull for {}, found {} entries".format(address, len(names)))
    for n in names:
        node = z[n]
        # Only IN/A records are harvested; other record types are ignored.
        data = node.get_rdataset(dns.rdataclass.IN, dns.rdatatype.A)
        if data:
            # TODO add hostnames to entries.
            # hostname = n.to_text()
            for item in data.items:
                address = item.address
                ips.append(address)
    return ips
def resolve_domains(domains, disable_zone=False):
    """
    Resolves the list of domains and returns the ips.

    Args:
        domains: iterable of domain names to resolve (A records).
        disable_zone: when True, skip the zone-transfer attempt against
            each resolved address.

    Returns:
        List of IP address strings (may contain duplicates).
    """
    dnsresolver = dns.resolver.Resolver()
    ips = []
    for domain in domains:
        print_notification("Resolving {}".format(domain))
        try:
            result = dnsresolver.query(domain, 'A')
            # First answer section holds the A records for the queried name.
            for a in result.response.answer[0]:
                ips.append(str(a))
                if not disable_zone:
                    ips.extend(zone_transfer(str(a), domain))
        except dns.resolver.NXDOMAIN as e:
            # Unknown domain: report it and continue with the next one.
            print_error(e)
    return ips
def parse_ips(ips, netmask, include_public):
    """
    Parses the list of ips, turns these into ranges based on the netmask given.
    Set include_public to True to include public IP adresses.

    Side effects: every included host and each derived CIDR range is
    tagged 'dns_discover' and saved to the jackal datastore.

    Returns:
        dict with keys 'ips' (included addresses) and 'ranges' (CIDR strings).
    """
    hs = HostSearch()
    rs = RangeSearch()
    ranges = []
    # Deduplicate before processing.
    ips = list(set(ips))
    included_ips = []
    print_success("Found {} ips".format(len(ips)))
    for ip in ips:
        ip_address = ipaddress.ip_address(ip)
        if include_public or ip_address.is_private:
            # To stop the screen filling with ranges.
            if len(ips) < 15:
                print_success("Found ip: {}".format(ip))
            host = hs.id_to_object(ip)
            host.add_tag('dns_discover')
            host.save()
            # Enclosing network for this address (strict=False allows
            # host bits to be set in the input).
            r = str(ipaddress.IPv4Network("{}/{}".format(ip, netmask), strict=False))
            ranges.append(r)
            included_ips.append(ip)
        else:
            print_notification("Excluding ip {}".format(ip))
    ranges = list(set(ranges))
    print_success("Found {} ranges".format(len(ranges)))
    for rng in ranges:
        # To stop the screen filling with ranges.
        if len(ranges) < 15:
            print_success("Found range: {}".format(rng))
        r = rs.id_to_object(rng)
        r.add_tag('dns_discover')
        r.save()
    stats = {}
    stats['ips'] = included_ips
    stats['ranges'] = ranges
    return stats
def main():
    """Estimate reachable ranges from the local DNS configuration."""
    netmask = '255.255.255.0'
    # Derive a default netmask from the first non-loopback,
    # non-link-local IPv4 interface found via psutil.
    interfaces = psutil.net_if_addrs()
    for _, details in interfaces.items():
        for detail in details:
            if detail.family == socket.AF_INET:
                ip_address = ipaddress.ip_address(detail.address)
                if not (ip_address.is_link_local or ip_address.is_loopback):
                    netmask = detail.netmask
                    break
    parser = argparse.ArgumentParser(
        description="Uses the configured DNS servers to estimate ranges.")
    parser.add_argument(
        "--include-public", help="Include public IP addresses", action="store_true")
    parser.add_argument(
        "-nm", "--netmask", help="The netmask to use to create ranges, default: {}".format(netmask), type=str, default=netmask)
    parser.add_argument("--no-zone", help="Disable to attempt to get a zone transfer from the dns server.", action="store_true")
    arguments = parser.parse_args()
    ips = []
    # DNS servers themselves, then everything found by resolving the
    # configured search domains (and optionally zone transfers).
    ips.extend(get_configured_dns())
    domains = get_resolv_dns()
    ips.extend(resolve_domains(domains, arguments.no_zone))
    stats = parse_ips(ips, arguments.netmask, arguments.include_public)
    print_notification("Found {} ips and {} ranges".format(
        len(stats['ips']), len(stats['ranges'])))


if __name__ == '__main__':
    main()
| 31.873494 | 128 | 0.603289 | 3.0625 |
2c36cea73bd5b397283c4173a823bdbf5f278ab4
| 12,089 |
py
|
Python
|
CICE-interface/CICE/configuration/scripts/timeseries.py
|
minsukji/ci-debug
|
3e8bbbe6652b702b61d2896612f6aa8e4aa6c803
|
[
"Apache-2.0",
"CC0-1.0"
] | null | null | null |
CICE-interface/CICE/configuration/scripts/timeseries.py
|
minsukji/ci-debug
|
3e8bbbe6652b702b61d2896612f6aa8e4aa6c803
|
[
"Apache-2.0",
"CC0-1.0"
] | null | null | null |
CICE-interface/CICE/configuration/scripts/timeseries.py
|
minsukji/ci-debug
|
3e8bbbe6652b702b61d2896612f6aa8e4aa6c803
|
[
"Apache-2.0",
"CC0-1.0"
] | null | null | null |
#!/usr/bin/env python
'''
This script generates timeseries plots of CICE diagnostic output.
It is generated to replicate the previous timeseries.csh script.
Written by: Matthew Turner
Date: August, 2019
'''
import os
import sys
import logging
import numpy as np
def find_logfile(log_dir):
    '''
    Return the most recently created cice.runlog* file under `log_dir`.

    Looks in `log_dir`/logs first and falls back to `log_dir` itself.
    Exits the program when no log file is found.
    '''
    logger.debug('Getting a list of files in {}'.format(log_dir))
    path = '{}/logs'.format(log_dir.rstrip('/'))
    try:
        files = [os.path.join(path, f) for f in os.listdir(path)
                 if f.startswith('cice.runlog')]
    except OSError:
        # No logs/ subdirectory (or unreadable): search log_dir directly.
        # The original used a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        path = log_dir
        files = [os.path.join(path, f) for f in os.listdir(log_dir)
                 if f.startswith('cice.runlog')]
    # Check if any files were found. If not, exit
    if len(files) == 0:
        # Adjacent-literal concatenation: the original embedded the
        # continuation indentation inside the message string.
        logger.error('No cice.runlog* files found. Please make sure you are '
                     'passing the correct directory.')
        sys.exit(1)
    # Get the most recently created file
    outfile = max(files, key=os.path.getctime)
    logger.debug('List of files = {}'.format([f for f in files]))
    logger.debug('Most recent file is {}'.format(outfile))
    return outfile
def get_data(logfile, field):
    '''
    Extract the timeseries for `field` from a CICE log file.

    Returns:
        (dtg, arctic, antarctic, expon): lists of datetimes and the
        per-hemisphere values, plus a flag telling whether the values
        were written in scientific notation.
    '''
    import datetime
    import re

    logger.debug('Extracting data for {}'.format(field))

    # Build the regular expression to extract the data
    field_regex = field.replace('(', '\(').replace('^', '\^').replace(')', '\)')
    number_regex = '[-+]?\d+\.?\d+([eE][-+]?\d+)?'
    my_regex = '^{}\s+=\s+({})\s+({})'.format(field_regex, number_regex, number_regex)

    dtg = []
    arctic = []
    antarctic = []
    # Default so the return below cannot raise UnboundLocalError when the
    # field never appears in the log (the original left expon unset).
    expon = False
    with open(logfile) as f:
        for line in f.readlines():
            m1 = re.search('istep1:\s+(\d+)\s+idate:\s+(\d+)\s+sec:\s+(\d+)', line)
            if m1:
                # Extract the current date-time group from the file
                date = m1.group(2)
                seconds = int(m1.group(3))
                hours = seconds // 3600
                minutes = (seconds - hours*3600) // 60
                leftover = seconds - hours*3600 - minutes*60
                curr_date = '{}-{:02d}:{:02d}:{:02d}'.format(date, hours, minutes, leftover)
                dtg.append(datetime.datetime.strptime(curr_date, '%Y%m%d-%H:%M:%S'))
                logger.debug('Currently on timestep {}'.format(dtg[-1]))

            m = re.search(my_regex, line)
            if m:
                # Extract the data from the file; group(1)/group(3) are the
                # two numbers, group(2) is the optional exponent of the first.
                if 'E' in m.group(1) or 'e' in m.group(1):
                    expon = True
                else:
                    expon = False
                arctic.append(float(m.group(1)))
                antarctic.append(float(m.group(3)))
                logger.debug('    Arctic = {}, Antarctic = {}'.format(arctic[-1], antarctic[-1]))

    return dtg, arctic, antarctic, expon
def latexit(string):
    """Wrap the last parenthesized group of `string` in $...$ for LaTeX."""
    flipped = string[::-1]
    flipped = flipped.replace('(', '($', 1)
    flipped = flipped.replace(')', '$)', 1)
    return flipped[::-1]
def plot_timeseries(log, field, dtg, arctic, antarctic, expon, dtg_base=None, arctic_base=None, \
                    antarctic_base=None, base_dir=None, grid=False):
    '''
    Plot the timeseries data from the CICE log file

    Draws Arctic and Antarctic series (plus optional baseline series)
    for one field and writes a PNG named from the field and case name(s).
    '''
    import re
    # Case name = last path component of the log path with any "/logs" removed.
    casename = re.sub(r"/logs", "", os.path.abspath(log).rstrip('/')).split('/')[-1]
    if base_dir:
        base_casename = re.sub(r"/logs", "", os.path.abspath(base_dir).rstrip('/')).split('/')[-1]

    # Load the plotting libraries, but set the logging level for matplotlib
    # to WARNING so that matplotlib debugging info is not printed when running
    # with '-v'
    logging.getLogger('matplotlib').setLevel(logging.WARNING)
    import matplotlib.pyplot as plt
    import matplotlib.dates as mdates
    import matplotlib.ticker as ticker

    fig = plt.figure(figsize=(12,8))
    ax = fig.add_axes([0.05,0.08,0.9,0.9])

    # Add the arctic data to the plot
    ax.plot(dtg,arctic,label='Arctic')

    # Add the baseline arctic data to the plot, if available
    if arctic_base:
        ax.plot(dtg_base,arctic_base,label='Baseline Arctic')

    # Add the antarctic data to the plot
    ax.plot(dtg,antarctic,label='Antarctic')

    # Add the baseline antarctic data to the plot, if available
    if antarctic_base:
        ax.plot(dtg_base,antarctic_base,label='Baseline Antarctic')

    ax.set_xlabel('')
    ax.set_title('{} Diagnostic Output'.format(latexit(field)))
    ax.set_ylabel(latexit(field))

    # Format the x-axis labels
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%Y/%m/%d'))
    ax.xaxis.set_minor_locator(mdates.MonthLocator())

    # Add a text box that prints the test case name and the baseline case name (if given)
    # NOTE: the try/except relies on a NameError when base_casename was never
    # assigned (i.e. no baseline was passed).
    try:
        text_field = "Test/Case: {}\nBaseline: {}".format(casename,base_casename)
        from matplotlib.offsetbox import AnchoredText
        anchored_text = AnchoredText(text_field,loc=2)
        ax.add_artist(anchored_text)
    except:
        text_field = "Test/Case: {}".format(casename)
        from matplotlib.offsetbox import AnchoredText
        anchored_text = AnchoredText(text_field,loc=2)
        ax.add_artist(anchored_text)

    ax.legend(loc='upper right')

    # Add grid lines if the `--grid` argument was passed at the command line.
    if grid:
        ax.grid(ls='--')

    # Reduce the number of ticks on the y axis
    nbins = 10
    # Same NameError trick: baseline min/max only exist when a baseline
    # dataset was supplied.
    try:
        minval = min( \
                 min(min(arctic), min(antarctic)), \
                 min(min(arctic_base), min(antarctic_base)))
        maxval = max( \
                 max(max(arctic), max(antarctic)), \
                 max(max(arctic_base), max(antarctic_base)))
    except:
        minval = min(min(arctic), min(antarctic))
        maxval = max(max(arctic), max(antarctic))
    step = (maxval-minval)/nbins
    ax.yaxis.set_ticks(np.arange(minval, maxval+step, step))

    # Format the y-axis tick labels, based on whether or not the values in the log file
    # are in scientific notation or float notation.
    if expon:
        ax.yaxis.set_major_formatter(ticker.FormatStrFormatter('%0.3e'))
    else:
        ax.yaxis.set_major_formatter(ticker.FormatStrFormatter('%0.5f'))

    # Rotate and right align the x labels
    for tick in ax.get_xticklabels():
        tick.set_rotation(45)

    # Create an output file and save the figure
    field_tmp = field.split('(')[0].rstrip()
    try:
        outfile = '{}_{}_base-{}.png'.format(field_tmp.replace(' ','_'), casename,base_casename)
    except:
        outfile = '{}_{}.png'.format(field_tmp.replace(' ','_'), casename)

    logger.info('Saving file to {}'.format(outfile))
    plt.savefig(outfile,dpi=300,bbox_inches='tight')
def main():
    '''
    Parse command-line options, pull the requested fields out of the
    test (and optional baseline) log files, and plot each field.
    '''
    import argparse
    parser = argparse.ArgumentParser(description="To generate timeseries plots, this script \
                                   can be passed a directory containing a logs/ subdirectory, \
                                   or it can be run in the directory with the log files, \
                                   without being passed a directory.  It will pull the \
                                   diagnostic data from the most recently modified log file.\
                                   \
                                   If no flags are passed selecting the variables to plot, \
                                   then plots will be created for all available variables.")
    parser.add_argument('log_dir', nargs='?', default=os.getcwd(), \
                        help="Path to diagnostic output log file.  A specific log file can \
                        be passed, or a case directory.  If a directory is passed, \
                        the most recent log file will be used.  If no directory or \
                        file is passed, the script will look for a log file in the \
                        current directory.")
    parser.add_argument('--bdir',dest='base_dir', help='Path to the the log file for a baseline \
                        dataset, if desired.  A specific log file or case directory can \
                        be passed.  If a directory is passed, the most recent log file \
                        will be used.')
    parser.add_argument('-v', '--verbose', dest='verbose', help='Print debug output?', \
                        action='store_true')
    parser.add_argument('--area', dest='area', help='Create a plot for total ice area?', \
                        action='store_true')
    parser.add_argument('--extent', dest='extent', help='Create a plot for total ice extent?', \
                        action='store_true')
    parser.add_argument('--volume', dest='ice_volume', help='Create a plot for total ice volume?', \
                        action='store_true')
    parser.add_argument('--snw_vol', dest='snow_volume', help='Create a plot for total snow \
                        volume?', action='store_true')
    parser.add_argument('--speed', dest='speed', help='Create a plot for rms ice speed?', \
                        action='store_true')
    parser.add_argument('--grid',dest='grid', help='Add grid lines to the figures?', \
                        action='store_true')

    # Set the defaults for the command line options
    parser.set_defaults(verbose=False)
    parser.set_defaults(area=False)
    parser.set_defaults(extent=False)
    parser.set_defaults(ice_volume=False)
    parser.set_defaults(snow_volume=False)
    parser.set_defaults(speed=False)
    parser.set_defaults(grid=False)

    args = parser.parse_args()

    # If no fields are passed, plot all fields
    if not ( args.area or args.extent or args.ice_volume or args.snow_volume or args.speed ):
        args.area = True
        args.extent = True
        args.ice_volume = True
        args.snow_volume = True
        args.speed = True

    # Build the fieldlist based on which fields are passed.
    # These strings must match the log-file labels exactly.
    fieldlist = []
    if args.area:
        fieldlist.append('total ice area  (km^2)')
    if args.extent:
        fieldlist.append('total ice extent(km^2)')
    if args.ice_volume:
        fieldlist.append('total ice volume (m^3)')
    if args.snow_volume:
        fieldlist.append('total snw volume (m^3)')
    if args.speed:
        fieldlist.append('rms ice speed    (m/s)')

    # Setup the logger
    global logger
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    # Find the test and baseline log files, based on the input directories.
    if os.path.isdir(args.log_dir):
        logger.debug('{} is a directory'.format(args.log_dir))
        log = find_logfile(args.log_dir)
        log_dir = args.log_dir
    else:
        logger.debug('{} is a file'.format(args.log_dir))
        log = args.log_dir
        log_dir = args.log_dir.rsplit('/',1)[0]
    logger.info('Log file = {}'.format(log))
    if args.base_dir:
        if os.path.isdir(args.base_dir):
            base_log = find_logfile(args.base_dir)
            base_dir = args.base_dir
        else:
            base_log = args.base_dir
            base_dir = args.base_dir.rsplit('/',1)[0]
        logger.info('Base Log file = {}'.format(base_log))

    # Loop through each field and create the plot
    for field in fieldlist:
        logger.debug('Current field = {}'.format(field))

        # Get the data from the log files
        dtg, arctic, antarctic, expon = get_data(log, field)
        if args.base_dir:
            dtg_base, arctic_base, antarctic_base, expon_base = get_data(base_log,field)

        # Plot the data
        if args.base_dir:
            plot_timeseries(log_dir, field, dtg, arctic, antarctic, expon, dtg_base, \
                            arctic_base, antarctic_base, base_dir, grid=args.grid)
        else:
            plot_timeseries(log_dir, field, dtg, arctic, antarctic, expon, grid=args.grid)


if __name__ == "__main__":
    main()
| 40.431438 | 100 | 0.599636 | 3.25 |
e4d10e7aea947dc4d3f2ed09e0c9f35cd1df41cf
| 2,161 |
go
|
Go
|
gobang/game_context.go
|
holyshared/learn-golang
|
c522f264812b079c72b1802d1aacafbfbd8bf473
|
[
"MIT"
] | 3 |
2017-04-22T06:31:21.000Z
|
2022-03-27T15:00:25.000Z
|
gobang/game_context.go
|
holyshared/learn-golang
|
c522f264812b079c72b1802d1aacafbfbd8bf473
|
[
"MIT"
] | null | null | null |
gobang/game_context.go
|
holyshared/learn-golang
|
c522f264812b079c72b1802d1aacafbfbd8bf473
|
[
"MIT"
] | 2 |
2017-04-26T11:14:15.000Z
|
2019-04-15T10:05:02.000Z
|
package gobang
import (
"encoding/json"
)
// NewGameContext builds a ready-to-play game: a fresh board of the
// rule's size, the human player, and an AI-driven NPC player.
// The human player moves first.
func NewGameContext(rule *GameRule, playerStone, npcPlayerStone Stone) *GameContext {
	board := NewBoard(rule.BoardSize())
	player := NewGamePlayer(playerStone, board)

	// The AI context shares the live board with the human player.
	ctx := &NpcAIContext{
		rule:           rule,
		board:          board,
		playerStone:    playerStone,
		npcPlayerStone: npcPlayerStone,
	}
	ai := NewNpcAI(ctx)
	npcPlayer := NewNpcPlayer(npcPlayerStone, ai)

	return &GameContext{
		GameRule:      rule,
		board:         board,
		currentPlayer: player,
		player:        player,
		npcPlayer:     npcPlayer,
	}
}
// GameContext holds the full state of a running game: the embedded
// rule set, the board, and both players plus whose turn it is.
type GameContext struct {
	*GameRule
	board         *Board
	currentPlayer Player // always aliases either player or npcPlayer
	player        *GamePlayer
	npcPlayer     *NpcPlayer
}
// CurrentBoard returns the board being played on.
func (g *GameContext) CurrentBoard() *Board {
	return g.board
}

// SelectCell returns the cell at point, or a CellNotFoundError when
// the point lies outside the board.
func (g *GameContext) SelectCell(point Point2D) (*Cell, error) {
	if !g.board.HaveCell(point) {
		return nil, NewCellNotFoundError(point)
	}
	return g.board.SelectCell(point), nil
}

// CurrentPlayer returns the player whose turn it is.
func (g *GameContext) CurrentPlayer() Player {
	return g.currentPlayer
}

// GamePlayer returns the human player.
func (g *GameContext) GamePlayer() *GamePlayer {
	return g.player
}

// NpcPlayer returns the computer-controlled player.
func (g *GameContext) NpcPlayer() *NpcPlayer {
	return g.npcPlayer
}
// ChangeToNextPlayer toggles the active player between the human
// player and the NPC player.
func (g *GameContext) ChangeToNextPlayer() {
	if g.currentPlayer == g.npcPlayer {
		g.currentPlayer = g.player
	} else {
		g.currentPlayer = g.npcPlayer
	}
}
// CheckBoard evaluates the board for the current player's stone:
// Reached when that player has a winning run of ReachedStoneCount
// stones, Filled when the board is full with no winner, and
// Continue otherwise.
func (g *GameContext) CheckBoard() PutStoneResult {
	player := g.CurrentPlayer()
	matcher := NewCellReachedMatcher(player.Stone(), g.ReachedStoneCount())
	result := matcher.Matches(g.board)

	if result.HasResult() {
		return Reached
	}
	if g.board.IsAllFilled() {
		return Filled
	}
	return Continue
}
// MarshalJSON serializes the game state through an anonymous struct so
// the unexported fields get explicit JSON keys.
func (g *GameContext) MarshalJSON() ([]byte, error) {
	jsonObject := struct {
		Rule          *GameRule   `json:"rule"`
		Board         *Board      `json:"board"`
		CurrentPlayer Player      `json:"currentPlayer"`
		Player        *GamePlayer `json:"player"`
		NpcPlayer     *NpcPlayer  `json:"npcPlayer"`
	}{
		Rule:          g.GameRule,
		Board:         g.board,
		CurrentPlayer: g.currentPlayer,
		Player:        g.player,
		NpcPlayer:     g.npcPlayer,
	}
	return json.Marshal(jsonObject)
}
| 20.580952 | 85 | 0.669597 | 3.03125 |
455daa08d49aad80d104a4e2e1ae20034489d089
| 1,323 |
dart
|
Dart
|
lib/model/dependency_data.dart
|
sowderca/azure_devops_sdk
|
1ef1b3b5f72dca3d5075d211f97196caa99494ad
|
[
"MIT"
] | 2 |
2019-10-07T12:30:29.000Z
|
2021-03-19T11:49:53.000Z
|
lib/model/dependency_data.dart
|
sowderca/azure_devops_sdk
|
1ef1b3b5f72dca3d5075d211f97196caa99494ad
|
[
"MIT"
] | null | null | null |
lib/model/dependency_data.dart
|
sowderca/azure_devops_sdk
|
1ef1b3b5f72dca3d5075d211f97196caa99494ad
|
[
"MIT"
] | null | null | null |
part of azure_devops_sdk.api;
/// Generated-style model for Azure DevOps "dependency data".
class DependencyData {
  /* Gets or sets the category of dependency data. */
  String input = null;

  /* Gets or sets the key-value pair to specify properties and their values. */
  List<Object> map = [];

  DependencyData();

  @override
  String toString() {
    return 'DependencyData[input=$input, map=$map, ]';
  }

  /// Populates the model from a decoded JSON map; null input leaves defaults.
  DependencyData.fromJson(Map<String, dynamic> json) {
    if (json == null) return;
    if (json['input'] == null) {
      input = null;
    } else {
      input = json['input'];
    }
    if (json['map'] == null) {
      map = null;
    } else {
      // NOTE(review): Object.listFromJson is not part of dart:core —
      // presumably a helper generated elsewhere in this SDK; confirm
      // it exists, otherwise this line does not compile.
      map = Object.listFromJson(json['map']);
    }
  }

  /// Serializes back to a JSON-compatible map; null fields are omitted.
  Map<String, dynamic> toJson() {
    Map <String, dynamic> json = {};
    if (input != null)
      json['input'] = input;
    if (map != null)
      json['map'] = map;
    return json;
  }

  /// Builds a list of models from a JSON array (empty list for null input).
  static List<DependencyData> listFromJson(List<dynamic> json) {
    return json == null ? List<DependencyData>() : json.map((value) => DependencyData.fromJson(value)).toList();
  }

  /// Builds a map of models keyed by the original JSON keys.
  static Map<String, DependencyData> mapFromJson(Map<String, dynamic> json) {
    var map = Map<String, DependencyData>();
    if (json != null && json.isNotEmpty) {
      json.forEach((String key, dynamic value) => map[key] = DependencyData.fromJson(value));
    }
    return map;
  }
}
| 25.941176 | 112 | 0.606198 | 3.046875 |
b03b64bcc05fab5fdefca7483f31bbb0137b66e6
| 1,343 |
py
|
Python
|
python/quiz/main.py
|
r-angeles/kivy-lab
|
baf4bf18aff28a1c9cd525c9b8ec949cb08e8356
|
[
"MIT"
] | 2 |
2021-09-18T20:16:41.000Z
|
2022-02-13T22:56:27.000Z
|
python/quiz/main.py
|
r-angeles/kivy-lab
|
baf4bf18aff28a1c9cd525c9b8ec949cb08e8356
|
[
"MIT"
] | null | null | null |
python/quiz/main.py
|
r-angeles/kivy-lab
|
baf4bf18aff28a1c9cd525c9b8ec949cb08e8356
|
[
"MIT"
] | null | null | null |
import csv
class Quiz:
    """A quiz question with its choices and the correct answer index.

    Every created instance is also recorded in the class-level
    ``all_quizzes`` registry.
    """

    all_quizzes = []

    def __init__(self, question, choices, answer):
        self.question = question
        self.choices = choices
        self.answer = answer

        Quiz.all_quizzes.append(self)

    @classmethod
    def instantiate_from_csv(cls, path='practise/python/quiz/questions.csv'):
        """Create a Quiz per row of a CSV with question/choices/answer columns.

        ``path`` defaults to the previously hard-coded location, so
        existing callers are unaffected; passing a path makes the
        loader reusable (and testable) with any file.
        """
        with open(path, 'r') as f:
            reader = csv.DictReader(f)
            items = list(reader)

        for item in items:
            Quiz(
                question=item.get('question'),
                choices=item.get('choices'),
                answer=int(item.get('answer')),
            )

    def __repr__(self):
        # Was "Item(...)" — a copy/paste leftover; report the real class name.
        return f"Quiz('{self.question}', {self.choices}, {self.answer})"
# To do:
# Print each instance to the quiz interface
# Split choices into list using split method.
# Add for loop on QuizInterface class to loop over the list.
# Add a row on csv ('result') whether a user has correctly answered a question (default on false)
# Add method on interface to take inputs from console
# Add method looping through 'result' sum(), checking how many is True/False
class QuizInterface:
    """Console front-end for presenting quizzes."""

    def print_quiz(self, quiz):
        """Print the quiz banner (per-question rendering not yet implemented)."""
        print('A Quiz!')
        print('===================')
def main():
    """Load all quizzes from the default CSV file and dump the registry."""
    Quiz.instantiate_from_csv()
    print(Quiz.all_quizzes)

if __name__ == "__main__":
    main()
| 27.408163 | 97 | 0.613552 | 3.515625 |
46e58369023202bd01a3e73e67bd5a35f6907105
| 11,014 |
py
|
Python
|
tests/utils/test_shape_utils.py
|
897615138/tfsnippet-jill
|
2fc898a4def866c8d3c685168df1fa22083bb143
|
[
"MIT"
] | 63 |
2018-06-06T11:56:40.000Z
|
2022-03-22T08:00:59.000Z
|
tests/utils/test_shape_utils.py
|
897615138/tfsnippet-jill
|
2fc898a4def866c8d3c685168df1fa22083bb143
|
[
"MIT"
] | 39 |
2018-07-04T12:40:53.000Z
|
2022-02-09T23:48:44.000Z
|
tests/utils/test_shape_utils.py
|
897615138/tfsnippet-jill
|
2fc898a4def866c8d3c685168df1fa22083bb143
|
[
"MIT"
] | 34 |
2018-06-25T09:59:22.000Z
|
2022-02-23T12:46:33.000Z
|
import pytest
import numpy as np
import tensorflow as tf
from tfsnippet.utils import *
class IntShapeTestCase(tf.test.TestCase):
    """Tests for get_static_shape."""

    def test_int_shape(self):
        # Fully static, partially dynamic, and fully dynamic shapes.
        self.assertEqual(get_static_shape(tf.zeros([1, 2, 3])), (1, 2, 3))
        self.assertEqual(
            get_static_shape(tf.placeholder(tf.float32, [None, 2, 3])),
            (None, 2, 3)
        )
        self.assertIsNone(get_static_shape(tf.placeholder(tf.float32, None)))
class ResolveNegativeAxisTestCase(tf.test.TestCase):
    """Tests for resolve_negative_axis."""

    def test_resolve_negative_axis(self):
        # good case
        self.assertEqual(resolve_negative_axis(4, (0, 1, 2)), (0, 1, 2))
        self.assertEqual(resolve_negative_axis(4, (0, -1, -2)), (0, 3, 2))

        # bad case: out-of-range axis and axes that collide after resolution
        with pytest.raises(ValueError, match='`axis` out of range: \\(-5,\\) '
                                             'vs ndims 4.'):
            _ = resolve_negative_axis(4, (-5,))

        with pytest.raises(ValueError, match='`axis` has duplicated elements '
                                             'after resolving negative axis.'):
            _ = resolve_negative_axis(4, (0, -4))
class GetBatchSizeTestCase(tf.test.TestCase):
    """Tests for get_batch_size with static and dynamic batch axes."""

    def test_get_batch_size(self):
        def run_check(sess, x, axis, x_in=None, dynamic=True):
            # No placeholder given -> use a constant, so the result
            # must be a plain (static) int.
            if x_in is None:
                x_in = tf.constant(x)
                dynamic = False
            batch_size = get_batch_size(x_in, axis)
            if dynamic:
                # Dynamic axis: expect a tensor that evaluates to the size.
                self.assertIsInstance(batch_size, tf.Tensor)
                self.assertEqual(sess.run(batch_size, feed_dict={x_in: x}),
                                 x.shape[axis])
            else:
                self.assertEqual(batch_size, x.shape[axis])

        with self.test_session() as sess:
            x = np.zeros([2, 3, 4], dtype=np.float32)

            # check when shape is totally static
            run_check(sess, x, 0)
            run_check(sess, x, 1)
            run_check(sess, x, 2)
            run_check(sess, x, -1)

            # check when some shape is dynamic, but the batch axis is not
            run_check(sess, x, 0, tf.placeholder(tf.float32, [2, None, None]),
                      dynamic=False)
            run_check(sess, x, 1, tf.placeholder(tf.float32, [None, 3, None]),
                      dynamic=False)
            run_check(sess, x, 2, tf.placeholder(tf.float32, [None, None, 4]),
                      dynamic=False)
            run_check(sess, x, -1, tf.placeholder(tf.float32, [None, None, 4]),
                      dynamic=False)

            # check when the batch axis is dynamic
            run_check(sess, x, 0, tf.placeholder(tf.float32, [None, 3, 4]),
                      dynamic=True)
            run_check(sess, x, 1, tf.placeholder(tf.float32, [2, None, 4]),
                      dynamic=True)
            run_check(sess, x, 2, tf.placeholder(tf.float32, [2, 3, None]),
                      dynamic=True)
            run_check(sess, x, -1, tf.placeholder(tf.float32, [2, 3, None]),
                      dynamic=True)

            # check when the shape is totally dynamic
            x_in = tf.placeholder(tf.float32, None)
            run_check(sess, x, 0, x_in, dynamic=True)
            run_check(sess, x, 1, x_in, dynamic=True)
            run_check(sess, x, 2, x_in, dynamic=True)
            run_check(sess, x, -1, x_in, dynamic=True)
class GetRankTestCase(tf.test.TestCase):
def test_get_rank(self):
with self.test_session() as sess:
# test static shape
ph = tf.placeholder(tf.float32, (1, 2, 3))
self.assertEqual(get_rank(ph), 3)
# test partially dynamic shape
ph = tf.placeholder(tf.float32, (1, None, 3))
self.assertEqual(get_rank(ph), 3)
# test totally dynamic shape
ph = tf.placeholder(tf.float32, None)
self.assertEqual(
sess.run(get_rank(ph), feed_dict={
ph: np.arange(6, dtype=np.float32).reshape((1, 2, 3))
}),
3
)
class GetDimensionSizeTestCase(tf.test.TestCase):
def test_get_dimension_size(self):
with self.test_session() as sess:
# test static shape
ph = tf.placeholder(tf.float32, (1, 2, 3))
self.assertEqual(get_dimension_size(ph, 0), 1)
self.assertEqual(get_dimension_size(ph, 1), 2)
self.assertEqual(get_dimension_size(ph, 2), 3)
self.assertEqual(get_dimension_size(ph, -1), 3)
# test dynamic shape, but no dynamic axis is queried
ph = tf.placeholder(tf.float32, (1, None, 3))
self.assertEqual(get_dimension_size(ph, 0), 1)
self.assertEqual(get_dimension_size(ph, 2), 3)
self.assertEqual(get_dimension_size(ph, -1), 3)
# test dynamic shape
def _assert_equal(a, b):
self.assertIsInstance(a, tf.Tensor)
self.assertEqual(sess.run(a, feed_dict={ph: ph_in}), b)
ph = tf.placeholder(tf.float32, (1, None, 3))
ph_in = np.arange(6, dtype=np.float32).reshape((1, 2, 3))
_assert_equal(get_dimension_size(ph, 1), 2)
_assert_equal(get_dimension_size(ph, -2), 2)
axis_ph = tf.placeholder(tf.int32, None)
self.assertEqual(
sess.run(get_dimension_size(ph, axis_ph),
feed_dict={ph: ph_in, axis_ph: 1}),
2
)
# test fully dynamic shape
ph = tf.placeholder(tf.float32, None)
_assert_equal(get_dimension_size(ph, 0), 1)
_assert_equal(get_dimension_size(ph, 1), 2)
_assert_equal(get_dimension_size(ph, 2), 3)
_assert_equal(get_dimension_size(ph, -2), 2)
def test_get_dimensions_size(self):
with self.test_session() as sess:
# test empty query
ph = tf.placeholder(tf.float32, None)
self.assertTupleEqual(get_dimensions_size(ph, ()), ())
# test static shape
ph = tf.placeholder(tf.float32, (1, 2, 3))
self.assertTupleEqual(get_dimensions_size(ph), (1, 2, 3))
self.assertTupleEqual(get_dimensions_size(ph, [0]), (1,))
self.assertTupleEqual(get_dimensions_size(ph, [1]), (2,))
self.assertTupleEqual(get_dimensions_size(ph, [2]), (3,))
self.assertTupleEqual(get_dimensions_size(ph, [2, 0, 1]), (3, 1, 2))
# test dynamic shape, but no dynamic axis is queried
ph = tf.placeholder(tf.float32, (1, None, 3))
self.assertTupleEqual(get_dimensions_size(ph, [0]), (1,))
self.assertTupleEqual(get_dimensions_size(ph, [2]), (3,))
self.assertTupleEqual(get_dimensions_size(ph, [2, 0]), (3, 1))
# test dynamic shape
def _assert_equal(a, b):
ph_in = np.arange(6, dtype=np.float32).reshape((1, 2, 3))
self.assertIsInstance(a, tf.Tensor)
np.testing.assert_equal(sess.run(a, feed_dict={ph: ph_in}), b)
ph = tf.placeholder(tf.float32, (1, None, 3))
_assert_equal(get_dimensions_size(ph), (1, 2, 3))
_assert_equal(get_dimensions_size(ph, [1]), (2,))
_assert_equal(get_dimensions_size(ph, [2, 0, 1]), (3, 1, 2))
# test fully dynamic shape
ph = tf.placeholder(tf.float32, None)
_assert_equal(get_dimensions_size(ph), (1, 2, 3))
_assert_equal(get_dimensions_size(ph, [0]), (1,))
_assert_equal(get_dimensions_size(ph, [1]), (2,))
_assert_equal(get_dimensions_size(ph, [2]), (3,))
_assert_equal(get_dimensions_size(ph, [2, 0, 1]), (3, 1, 2))
def test_get_shape(self):
with self.test_session() as sess:
# test static shape
ph = tf.placeholder(tf.float32, (1, 2, 3))
self.assertTupleEqual(get_shape(ph), (1, 2, 3))
# test dynamic shape
def _assert_equal(a, b):
ph_in = np.arange(6, dtype=np.float32).reshape((1, 2, 3))
self.assertIsInstance(a, tf.Tensor)
np.testing.assert_equal(sess.run(a, feed_dict={ph: ph_in}), b)
ph = tf.placeholder(tf.float32, (1, None, 3))
_assert_equal(get_shape(ph), (1, 2, 3))
# test fully dynamic shape
ph = tf.placeholder(tf.float32, None)
_assert_equal(get_shape(ph), (1, 2, 3))
class ConcatShapesTestCase(tf.test.TestCase):
def test_concat_shapes(self):
with self.test_session() as sess:
# test empty
self.assertTupleEqual(concat_shapes(()), ())
# test static shapes
self.assertTupleEqual(
concat_shapes(iter([
(1, 2),
(3,),
(),
(4, 5)
])),
(1, 2, 3, 4, 5)
)
# test having dynamic shape
shape = concat_shapes([
(1, 2),
tf.constant([3], dtype=tf.int32),
(),
tf.constant([4, 5], dtype=tf.int32),
])
self.assertIsInstance(shape, tf.Tensor)
np.testing.assert_equal(sess.run(shape), (1, 2, 3, 4, 5))
class IsShapeEqualTestCase(tf.test.TestCase):
def test_is_shape_equal(self):
def check(x, y, x_ph=None, y_ph=None):
ans = x.shape == y.shape
feed_dict = {}
if x_ph is not None:
feed_dict[x_ph] = x
x = x_ph
if y_ph is not None:
feed_dict[y_ph] = y
y = y_ph
result = is_shape_equal(x, y)
if is_tensor_object(result):
result = sess.run(result, feed_dict=feed_dict)
self.assertEqual(result, ans)
with self.test_session() as sess:
# check static shapes
x1 = np.random.normal(size=[2, 3, 4])
x2 = np.random.normal(size=[2, 1, 4])
x3 = np.random.normal(size=[1, 2, 3, 4])
check(x1, np.copy(x1))
check(x1, x2)
check(x1, x3)
# check partial dynamic shapes
x1_ph = tf.placeholder(dtype=tf.float32, shape=[2, None, 4])
x2_ph = tf.placeholder(dtype=tf.float32, shape=[2, None, 4])
x3_ph = tf.placeholder(dtype=tf.float32, shape=[None] * 4)
check(x1, np.copy(x1), x1_ph, x2_ph)
check(x1, x2, x1_ph, x2_ph)
check(x1, x3, x1_ph, x3_ph)
# check fully dimension shapes
x1_ph = tf.placeholder(dtype=tf.float32, shape=None)
x2_ph = tf.placeholder(dtype=tf.float32, shape=None)
x3_ph = tf.placeholder(dtype=tf.float32, shape=None)
check(x1, np.copy(x1), x1_ph, x2_ph)
check(x1, x2, x1_ph, x2_ph)
check(x1, x3, x1_ph, x3_ph)
| 39.056738 | 80 | 0.539041 | 3.375 |
548d0b621ebfc016f29d6f153be64937a7dd5ae3
| 1,712 |
dart
|
Dart
|
lib/c/clipOvalSample.dart
|
Seek-knowledge/flutter_widget_sample
|
914c21084aeef9ba8e6ace20c0857742c6902092
|
[
"MIT"
] | 1 |
2018-09-11T08:10:21.000Z
|
2018-09-11T08:10:21.000Z
|
lib/c/clipOvalSample.dart
|
Seek-knowledge/flutter_widget_sample
|
914c21084aeef9ba8e6ace20c0857742c6902092
|
[
"MIT"
] | null | null | null |
lib/c/clipOvalSample.dart
|
Seek-knowledge/flutter_widget_sample
|
914c21084aeef9ba8e6ace20c0857742c6902092
|
[
"MIT"
] | null | null | null |
import 'package:flutter_ui_demo/base.dart';
import 'package:flutter/material.dart';
// ignore: must_be_immutable
class ClipOvalSample extends BaseContentApp {
static const String routeName = 'ClipOvalSample';
@override
String get title => routeName;
@override
Widget get contentWidget => _Sample();
@override
String get desc =>
'''
一个 widget,用来裁剪子widget,让其成为一个椭圆、圆形的 widget,超出这个范围会被裁剪掉不显示。
''';
@override
String get sampleCode =>
'''
ClipOval(
clipper: _ImageClipper(),
child: Image.asset('images/img.jpeg'),
),
class _ImageClipper extends CustomClipper<Rect> {
@override
Rect getClip(Size size) {
return Rect.fromLTWH(0.0, size.height / 4, size.width, size.height / 4 * 3);
}
@override
bool shouldReclip(CustomClipper<Rect> oldClipper) {
return true;
}
}
''';
}
class _Sample extends StatelessWidget {
@override
Widget build(BuildContext context) {
return Center(
child: Column(
children: <Widget>[
Text('默认是裁剪成圆形,如下所示'),
SizedBox(height: 10.0),
ClipOval(
child: Image.asset('images/img.jpeg'),
),
SizedBox(height: 30.0,),
Text('可以传入一个矩形区域,这样在裁剪的时候就可以变成一个椭圆,如下所示'),
SizedBox(height: 10.0,),
ClipOval(
clipper: _ImageClipper(),
child: Image.asset('images/img.jpeg'),
),
],
),
);
}
}
class _ImageClipper extends CustomClipper<Rect> {
@override
Rect getClip(Size size) {
return Rect.fromLTWH(0.0, size.height / 4, size.width, size.height / 4 * 3);
}
@override
bool shouldReclip(CustomClipper<Rect> oldClipper) {
return true;
}
}
| 21.948718 | 82 | 0.619159 | 3.078125 |
bd76efc26c2a65125e5c0d4c54068ed2695e8219
| 1,860 |
rb
|
Ruby
|
app/services/qa_server/performance_per_byte_data_service.rb
|
LD4P/qa_server
|
5740f1868a6b8b895428303753d0f0739da1788d
|
[
"Apache-2.0"
] | 5 |
2019-05-12T18:06:49.000Z
|
2022-03-01T16:47:39.000Z
|
app/services/qa_server/performance_per_byte_data_service.rb
|
LD4P/qa_server
|
5740f1868a6b8b895428303753d0f0739da1788d
|
[
"Apache-2.0"
] | 243 |
2018-08-17T14:06:15.000Z
|
2022-03-31T20:01:47.000Z
|
app/services/qa_server/performance_per_byte_data_service.rb
|
LD4P/qa_server
|
5740f1868a6b8b895428303753d0f0739da1788d
|
[
"Apache-2.0"
] | 5 |
2018-09-14T13:42:02.000Z
|
2022-03-01T12:43:24.000Z
|
# frozen_string_literal: true
# This class calculates performance stats based on size of data.
module QaServer
class PerformancePerByteDataService
class << self
include QaServer::PerformanceHistoryDataKeys
class_attribute :stats_calculator_class, :performance_data_class
self.stats_calculator_class = QaServer::PerformancePerByteCalculatorService
self.performance_data_class = QaServer::PerformanceHistory
# Performance data based on size of data.
# @param authority_name [String] name of an authority
# @param action [Symbol] :search, :fetch, or :all_actions
# @param n [Integer] calculate stats for last n records
# @returns [Hash] performance statistics based on size of data
# @example returns for n=2
# { data_raw_bytes_from_source: [16271, 16271],
# retrieve_bytes_per_ms: [67.24433786890475, 55.51210410757532],
# retrieve_ms_per_byte: [0.014871140555351083, 0.018014089288745542]
# graph_load_bytes_per_ms_ms: [86.74089418722461, 54.97464153778724],
# graph_load_ms_per_byte: [0.011528587632974647, 0.018190205011389522],
# normalization_bytes_per_ms: [64.70169466560836, 89.25337465693322],
# normalization_ms_per_byte: [0.01530700843338457, 0.015455545718983178]
# }
def calculate(authority_name:, action:, n: 10)
records = records_by(authority_name, action)
stats_calculator_class.new(records: records, n: n).calculate
end
private
def records_by(authority_name, action)
where_clause = {}
where_clause[:authority] = authority_name unless authority_name.nil? || authority_name == ALL_AUTH
where_clause[:action] = action unless action.nil? || action == ALL_ACTIONS
performance_data_class.where(where_clause)
end
end
end
end
| 44.285714 | 106 | 0.714516 | 3 |
aa43d666ad707500f4b7c54042968dd63529e68e
| 1,812 |
lua
|
Lua
|
test/test.lua
|
actboy168/lml
|
9e20597a6df872a93589b9385038470e3343c149
|
[
"MIT"
] | 2 |
2020-01-26T02:26:33.000Z
|
2021-11-11T00:32:13.000Z
|
test/test.lua
|
actboy168/lml
|
9e20597a6df872a93589b9385038470e3343c149
|
[
"MIT"
] | null | null | null |
test/test.lua
|
actboy168/lml
|
9e20597a6df872a93589b9385038470e3343c149
|
[
"MIT"
] | 1 |
2019-10-11T10:47:48.000Z
|
2019-10-11T10:47:48.000Z
|
package.path = [[.\test\?.lua]]
package.cpath = [[.\build\msvc\bin\?.dll]]
local lml = require 'lml'
local print_r = require 'print_r'
function LOAD(filename)
local f = assert(io.open(filename, 'rb'))
local r = lml(f:read 'a')
f:close()
return r
end
local function EQUAL(a, b)
for k, v in pairs(a) do
if type(v) == 'table' then
EQUAL(v, b[k])
else
assert(v == b[k])
end
end
end
local n = 0
local function TEST(script, t)
n = n + 1
local name = 'TEST-' .. n
local r = lml(script, name)
local ok, e = pcall(EQUAL, r, t)
if not ok then
print(script)
print('--------------------------')
print_r(r)
print('--------------------------')
print_r(t)
print('--------------------------')
error(name)
end
local ok, e = pcall(EQUAL, t, r)
if not ok then
print(script)
print('--------------------------')
print_r(r)
print('--------------------------')
print_r(t)
print('--------------------------')
error(name)
end
end
TEST(
[==[
TEST
]==]
,
{
'', false,
{ 'TEST' },
}
)
TEST(
[==[
TEST: STATE
]==]
,
{
'', false,
{ 'TEST', 'STATE' },
}
)
TEST(
[==[
TEST
A
B
]==]
,
{
'', false,
{ 'TEST', false, {'A'}, {'B'}}
}
)
TEST(
[==[
TEST: STATE
A
B
]==]
,
{
'', false,
{ 'TEST', 'STATE', {'A'}, {'B'}}
}
)
TEST(
[==[
TEST: STATE
A: STATE_A
B: STATE_B
]==]
,
{
'', false,
{ 'TEST', 'STATE', {'A', 'STATE_A'}, {'B', 'STATE_B'}}
}
)
TEST(
[==[
TEST: STATE
A: STATE_A
A1
A2
B: STATE_B
B1
B2
]==]
,
{
'', false,
{ 'TEST', 'STATE', {'A', 'STATE_A', {'A1'}, {'A2'}}, {'B', 'STATE_B', {'B1'}, {'B2'}}}
}
)
TEST(
[==[
'TE:ST': '''A000'''
]==]
,
{
'', false,
{ 'TE:ST', "'A000'"}
}
)
TEST(
[==[
TEST
A
'多行字符串
1': '多行字符串
2'
B
]==]
,
{
'', false,
{ 'TEST', false, {'A', false, {'多行字符串\n1', '多行字符串\n2', {'B'}}}}
}
)
print('test ok!')
| 11.468354 | 86 | 0.450883 | 3.1875 |
25a7d3a01c57af5a7b71ad97053f46c8a20dec59
| 4,999 |
lua
|
Lua
|
lua/oleo/helpers.lua
|
egresh/oleo
|
1be4c936c5d52e91363e4fd47192090f6b824718
|
[
"MIT"
] | 3 |
2022-02-10T04:56:20.000Z
|
2022-02-16T08:10:02.000Z
|
lua/oleo/helpers.lua
|
egresh/oleo
|
1be4c936c5d52e91363e4fd47192090f6b824718
|
[
"MIT"
] | null | null | null |
lua/oleo/helpers.lua
|
egresh/oleo
|
1be4c936c5d52e91363e4fd47192090f6b824718
|
[
"MIT"
] | null | null | null |
local M = {}
-- get the project root for a git project
function M.project_root()
local root = vim.fn.system("git rev-parse --show-toplevel")
if string.find(root, "fatal:") then
root = vim.fn.getcwd()
else
root = string.gsub(root, "\n", "")
end
return root
end
-- print over a parameter list
function M.pp(...)
local count = select("#", ...)
for idx, param in ipairs({ ... }) do
if count > 1 then
print("Param " .. idx .. ":")
end
for k, v in pairs(param) do
print(k, v)
end
end
end
-- unload modules
function M.unload(modules)
print("Unloading " .. #modules .. " modules")
for _, v in pairs(modules) do
if package.loaded[v] then
package.loaded[v] = nil
print("Module " .. v .. " has been unloaded")
else
print("Module " .. v .. " wasn't already loaded")
end
end
end
function M.put(...)
local objects = {}
for i = 1, select("#", ...) do
local v = select(i, ...)
table.insert(objects, vim.inspect(v))
end
print(table.concat(objects, "\n"))
return ...
end
function M.dump(...)
local objects = vim.tbl_map(vim.inspect, { ... })
print(unpack(objects))
return ...
end
function M.get_rtp()
local rtp = vim.o.runtimepath
local t = {}
for dir in rtp:gmatch("([%w%-%/%.]+),?") do
table.insert(t, dir)
end
table.sort(t)
return t
end
function M.rtp()
local rtp = M.get_rtp()
M.pp(rtp)
end
function M.show_path()
return vim.fn.join(vim.opt.path:get(), "\n")
end
function M.show_tagfiles()
return vim.fn.join(vim.opt.tags:get(), "\n")
end
function M.buf_count()
local buffers = vim.api.nvim_list_bufs()
M.dump(buffers)
end
function M.reload_config()
local loaded = {}
for k in pairs(package.loaded) do
table.insert(loaded, k)
package.loaded[k] = nil
end
table.sort(loaded)
for k, v in ipairs(loaded) do
print(k, v)
require(v)
end
end
function M.dumptotmpfile(tbl, filename)
local tmpname = "/Users/egresh/tmp/"
if filename == nil then
tmpname = tmpname .. "neovim_dump_file.txt"
else
tmpname = tmpname .. filename
end
vim.api.nvim_command("silent! redir! > " .. tmpname)
vim.o.more = false
M.dump(tbl)
vim.api.nvim_command("redir END")
vim.o.more = true
vim.api.nvim_command('call nvim_input("<cr>")')
end
function M.get_package_path()
local path = {}
for _, v in ipairs(vim.fn.split(package.path, ";")) do
table.insert(path, v)
end
return path
end
function M.neovim_config_files()
local scan = require("plenary.scandir")
local lua_files = scan.scan_dir(vim.fn.stdpath("config") .. "/lua")
local after_files = scan.scan_dir(vim.fn.stdpath("config") .. "/after")
local files = {}
local dirs = { lua_files, after_files }
for _, tbl in ipairs(dirs) do
for _, file in ipairs(tbl) do
table.insert(files, file)
end
end
table.insert(files, 1, vim.env.MYVIMRC)
return files
end
function M.package_grep(name)
local matched = {}
for k, _ in pairs(package.loaded) do
if k:match(name) then
table.insert(matched, k)
end
end
return matched
end
function M.winenter()
local filetype = vim.api.nvim_buf_get_option(0, "filetype")
if filetype == "toggleterm" then
vim.cmd("IndentBlanklineDisable")
return
elseif filetype ~= "NvimTree" then
vim.wo.number = true
vim.wo.relativenumber = true
end
vim.wo.signcolumn = "auto:9"
end
function M.load_plugin(plugin)
print("The plugin is: " .. plugin)
local status_ok, error = pcall(require(tostring(plugin)))
if not status_ok then
print("ERROR: unable to load plugin " .. error)
else
print("Plugin Loaded: " .. plugin)
end
end
function M.format_lualine()
local bufname = vim.api.nvim_buf_get_name(0)
if not string.find(bufname, "toggleterm") then
return { tostring(math.random(10)) }
-- return {
-- { "filetype", icon_only = true },
-- { "filename", file_status = true, path = 0 },
-- }
end
local program_name
local term_number
-- expected output...
-- "term://~/.local/share/neovimwip/nvim//45438:htop;#toggleterm#2"
_, _, program_name, term_number = string.find(bufname, "%d+:([%w]+);#toggleterm#(%d+)")
print("the program name is " .. program_name)
if program_name == nil then
return ""
end
return { string.format("Term: %s # %d", program_name, term_number) }
end
function ConfigureTerminal()
vim.wo.relativenumber = false
vim.wo.number = false
vim.cmd("highlight! link TermCursor Cursor")
vim.cmd("highlight! TermCursorNC guibg=red guifg=white ctermbg=1 ctermfg=15")
vim.cmd('exec "normal i"')
end
return M
| 22.722727 | 91 | 0.594919 | 3.34375 |
fb1efd27a1fab3a7d9597e39ab0e9e355a9a77c7
| 1,705 |
kt
|
Kotlin
|
golem-core/srcjvm/golem/platformsupport/repr.kt
|
venabled/golem
|
2f285908ff7adbb4e4b9df038255f33bb671bf18
|
[
"Apache-2.0"
] | 1 |
2018-01-20T02:16:59.000Z
|
2018-01-20T02:16:59.000Z
|
golem-core/srcjvm/golem/platformsupport/repr.kt
|
venabled/golem
|
2f285908ff7adbb4e4b9df038255f33bb671bf18
|
[
"Apache-2.0"
] | null | null | null |
golem-core/srcjvm/golem/platformsupport/repr.kt
|
venabled/golem
|
2f285908ff7adbb4e4b9df038255f33bb671bf18
|
[
"Apache-2.0"
] | 1 |
2018-10-24T20:54:10.000Z
|
2018-10-24T20:54:10.000Z
|
package golem.platformsupport
import golem.*
import golem.matrix.*
import java.io.ByteArrayOutputStream
import java.io.PrintStream
import java.text.DecimalFormat
fun <T> repr(mat: Matrix<T>): String {
val fmtString = when (matFormat) {
SHORT_NUMBER -> "0.00##"
LONG_NUMBER -> "0.00############"
VERY_LONG_NUMBER -> "0.00#############################"
SCIENTIFIC_NUMBER -> "0.00#####E0#"
SCIENTIFIC_LONG_NUMBER -> "0.00############E0#"
SCIENTIFIC_VERY_LONG_NUMBER -> "0.00############################E0#"
else -> "0.00############"
}
var formatter = DecimalFormat(fmtString)
val bstream = ByteArrayOutputStream()
val pstream = PrintStream(bstream)
mat.run {
val lens = IntArray(numCols())
eachIndexed { row, col, element ->
val formatted = formatter.format(element)
if (lens[col] < formatted.length) lens[col] = formatted.length
}
var indent = "mat["
eachIndexed { row, col, element ->
var formatted = formatter.format(element)
if (col == 0) {
if (row > 0)
pstream.append("end\n")
pstream.append(indent)
indent = " "
}
if (formatted[0] != '-')
formatted = " " + formatted
pstream.append(formatted)
if (col != lens.size - 1)
pstream.append(",")
(-1..(lens[col] - formatted.length)).forEach { pstream.append(" ") }
}
pstream.append("]")
}
return bstream.toString()
}
| 32.788462 | 80 | 0.48563 | 3.078125 |
4d48c67a76adff4b154fa661b70e7c1dfc491439
| 1,256 |
cs
|
C#
|
TRProject/Assets/Scripts/InteractionHandler.cs
|
mtaulty/Techorama2018
|
792c1774dcf9c73dd99d83f5132798e4f026328c
|
[
"MIT"
] | 1 |
2018-06-03T09:46:05.000Z
|
2018-06-03T09:46:05.000Z
|
TRProject/Assets/Scripts/InteractionHandler.cs
|
mtaulty/Techorama2018
|
792c1774dcf9c73dd99d83f5132798e4f026328c
|
[
"MIT"
] | null | null | null |
TRProject/Assets/Scripts/InteractionHandler.cs
|
mtaulty/Techorama2018
|
792c1774dcf9c73dd99d83f5132798e4f026328c
|
[
"MIT"
] | null | null | null |
using HoloToolkit.Unity.InputModule;
using HoloToolkit.Unity.InputModule.Utilities.Interactions;
using UnityEngine;
public class InteractionHandler : MonoBehaviour, IFocusable, IInputClickHandler
{
public void OnInputClicked(InputClickedEventData eventData)
{
if (!this.tapped)
{
this.tapped = true;
// switch on gravity and let it fall.
var rigidBody = this.gameObject.AddComponent<Rigidbody>();
rigidBody.freezeRotation = true;
this.waitingToLand = true;
}
}
void OnCollisionStay(Collision collision)
{
if (this.waitingToLand && (collision.relativeVelocity.magnitude < 0.01f))
{
this.waitingToLand = false;
Destroy(this.gameObject.GetComponent<Rigidbody>());
this.gameObject.GetComponent<TwoHandManipulatable>().enabled = true;
}
}
public void OnFocusEnter()
{
if (!this.tapped)
{
this.gameObject.transform.localScale *= 1.2f;
}
}
public void OnFocusExit()
{
if (!this.tapped)
{
this.gameObject.transform.localScale = Vector3.one;
}
}
bool waitingToLand;
bool tapped = false;
}
| 27.304348 | 81 | 0.609873 | 3.015625 |
af58e142e51307836e1cb2e7404429bfedc68ec9
| 3,792 |
py
|
Python
|
tests/test_api.py
|
obytes/fastql
|
3e77f92d0330e0ea4ffd5383691283529699ca79
|
[
"MIT"
] | 32 |
2021-10-05T15:39:22.000Z
|
2022-02-03T17:06:18.000Z
|
tests/test_api.py
|
obytes/FastQL
|
3e77f92d0330e0ea4ffd5383691283529699ca79
|
[
"MIT"
] | 11 |
2022-02-04T04:00:58.000Z
|
2022-03-28T15:22:46.000Z
|
tests/test_api.py
|
obytes/fastql
|
3e77f92d0330e0ea4ffd5383691283529699ca79
|
[
"MIT"
] | 4 |
2021-11-16T15:57:31.000Z
|
2021-12-19T07:36:46.000Z
|
import asyncio
import json
from threading import Timer
import httpx
import pytest
from ariadne.asgi import GQL_CONNECTION_INIT, GQL_START
from websockets import connect
my_storage = {}
@pytest.fixture
def storage():
return my_storage
@pytest.mark.asyncio
async def test_create_user(host, credentials, storage):
query = """
mutation createUser($email: String!, $password: String!) {
createUser(email: $email, password: $password) {
id,
errors
}
}
"""
async with httpx.AsyncClient() as client:
response = await client.post(
f"http://{host}/",
timeout=60,
json={"query": query, "variables": credentials},
)
json_response = json.loads(response.text)
assert ("errors" in json_response) == False
assert json_response["data"]["createUser"]["id"] is not None
storage["user_id"] = json_response["data"]["createUser"]["id"]
@pytest.mark.asyncio
async def test_auth_user(host, credentials, storage):
query = """
mutation authUser($email: String!, $password: String!) {
createToken(email: $email, password: $password) {
errors,
token
}
}
"""
async with httpx.AsyncClient() as client:
response = await client.post(
f"http://{host}/",
headers={},
timeout=60,
json={"query": query, "variables": credentials},
)
json_response = json.loads(response.text)
assert ("errors" in json_response) == False
assert json_response["data"]["createToken"]["token"] is not None
storage["token"] = json_response["data"]["createToken"]["token"]
async def create_blog(host, storage):
query = """
mutation createblog($title: String!, $description: String!) {
createblog(title: $title, description: $description) {
errors
id
}
}
"""
token = storage["token"]
async with httpx.AsyncClient() as client:
response = await client.post(
f"http://{host}/",
headers={"Authorization": f"Bearer {token}"},
timeout=60,
json={
"query": query,
"variables": {"title": "title", "description": "description"},
},
)
json_response = json.loads(response.text)
assert ("errors" in json_response) == True
assert json_response["data"]["createblog"]["id"] is not None
@pytest.mark.asyncio
async def test_create_blog(server, host, storage):
await create_blog(host, storage)
@pytest.mark.asyncio
async def test_subscription(server, host, storage):
query = """
subscription reviewblog($token: String!) {
reviewblog(token: $token) {
errors
id
}
}
"""
variables = {"token": f'Bearer {storage["token"]}'}
ws = await connect(f"ws://{host}/", subprotocols=["graphql-ws"])
await ws.send(json.dumps({"type": GQL_CONNECTION_INIT}))
await ws.send(
json.dumps(
{"type": GQL_START, "payload": {"query": query, "variables": variables},}
)
)
received = await ws.recv()
assert received == '{"type": "connection_ack"}'
def delay_create_blog(server, host):
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
loop.run_until_complete(create_blog(server, host))
timer = Timer(1.0, delay_create_blog, (server, host, storage))
timer.start()
received = await ws.recv()
await ws.close()
json_response = json.loads(received)
assert ("errors" in json_response) == False
assert json_response["payload"]["data"]["reviewblog"]["id"] is not None
| 29.169231 | 85 | 0.587553 | 3.265625 |
059df1ccc63fccb4bf18b4ef223debb5f545b72c
| 951 |
py
|
Python
|
tests/perf.py
|
wemoloh/frepr
|
a0a33efdc6df53301966c9240e5f534ac6bf1426
|
[
"BSD-3-Clause"
] | 1 |
2019-05-31T20:38:45.000Z
|
2019-05-31T20:38:45.000Z
|
tests/perf.py
|
wemoloh/frepr
|
a0a33efdc6df53301966c9240e5f534ac6bf1426
|
[
"BSD-3-Clause"
] | null | null | null |
tests/perf.py
|
wemoloh/frepr
|
a0a33efdc6df53301966c9240e5f534ac6bf1426
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import print_function
from random import getrandbits
from struct import pack, unpack
from timeit import timeit
import frepr
def random_double():
return unpack('d', pack('Q', getrandbits(64)))[0]
values = []
def repr_all():
for value in values:
repr(value)
def main(nvalues, nruns):
print('Creating {} random double values...'.format(nvalues))
global values
values = [random_double() for _ in range(nvalues)]
print('Applying native repr() to all values {} times...'.format(nruns))
t_repr = timeit(repr_all, number=nruns)
print('{} seconds'.format(t_repr))
print('Applying new repr() to all values {} times...'.format(nruns))
frepr.install()
t_frepr = timeit(repr_all, number=nruns)
frepr.uninstall()
print('{} seconds'.format(t_frepr))
print('New repr() is {} times faster.'.format(t_repr / t_frepr))
if __name__ == '__main__':
main(1000000, 10)
| 25.702703 | 75 | 0.664564 | 3.046875 |
1a3419d9a369b6178fead14fb5ba597f06af7429
| 991 |
py
|
Python
|
ExerciciosPythonMundo1/ex017.py
|
JamesonSantos/Curso-Python-Exercicios-Praticados
|
1fc1618ccf8692a552bac408e842dea8328e4e1a
|
[
"MIT"
] | null | null | null |
ExerciciosPythonMundo1/ex017.py
|
JamesonSantos/Curso-Python-Exercicios-Praticados
|
1fc1618ccf8692a552bac408e842dea8328e4e1a
|
[
"MIT"
] | null | null | null |
ExerciciosPythonMundo1/ex017.py
|
JamesonSantos/Curso-Python-Exercicios-Praticados
|
1fc1618ccf8692a552bac408e842dea8328e4e1a
|
[
"MIT"
] | null | null | null |
'''
n1 = float(input('Comprimento do cateto oposto: '))
n2 = float(input('Comprimento do cateto adjacente: '))
hi = (n1 ** 2 + n2 ** 2) ** (1/2)
print('A hipotenusa vai medir {:.2f}'.format(hi))
'''
'''
from math import hypot
n1 = float(input('Comprimento do cateto oposto: '))
n2 = float(input('Comprimento do cateto adjacente: '))
hi = hypot(n1, n2)
print('A hipotenusa vai medir {:.2f}'.format(hi))
'''
'''
import math
n1 = float(input('Comprimento do cateto oposto: '))
n2 = float(input('Comprimento do cateto adjacente: '))
hi = math.hypot(n1, n2)
print('A hipotenusa vai medir {:.2f}'.format(hi))
'''
'''import math
n1 = float(input('Comprimento do cateto oposto: '))
n2 = float(input('Comprimento do cateto adjacente: '))
print('A hipotenusa vai medir {:.2f}'.format(math.hypot(n1, n2)))'''
from math import hypot
n1 = float(input('Comprimento do cateto oposto: '))
n2 = float(input('Comprimento do cateto adjacente: '))
print('A hipotenusa vai medir {:.2f}'.format(hypot(n1,n2)))
| 30.96875 | 68 | 0.673058 | 3.09375 |
fa2b0fe8c1ef7aae8745830c2f78cadeadfc2e1b
| 4,165 |
cpp
|
C++
|
Silver/Silver/silver_shader.cpp
|
c272/silver
|
2721731c7803882e5b70118253237d565a3b6709
|
[
"Apache-2.0"
] | null | null | null |
Silver/Silver/silver_shader.cpp
|
c272/silver
|
2721731c7803882e5b70118253237d565a3b6709
|
[
"Apache-2.0"
] | 1 |
2018-09-16T14:09:18.000Z
|
2018-09-16T14:09:18.000Z
|
Silver/Silver/silver_shader.cpp
|
c272/silver
|
2721731c7803882e5b70118253237d565a3b6709
|
[
"Apache-2.0"
] | null | null | null |
//Including basics.
#include "silver_inc.h"
#include "silver_shader.h"
// SECTION 1
// FUNCTIONS
///Shader class constructor, takes the paths (relative or complete) of the vertex and fragment shader that will
///be used in the program.
///Parameters: (char* vertexPath, char* fragmentPath)
slvr::Shader::Shader(char* vertexShaderPath, char* fragmentShaderPath) {
std::ifstream vertexStream;
std::ifstream fragmentStream;
std::string vertexCode;
std::string fragmentCode;
//Enabling exceptions on streams.
vertexStream.exceptions(std::ifstream::failbit | std::ifstream::badbit);
fragmentStream.exceptions(std::ifstream::failbit | std::ifstream::badbit);
//Attempting a stream.
try {
vertexStream.open(const_cast<const char*>(vertexShaderPath));
fragmentStream.open(const_cast<const char*>(fragmentShaderPath));
//Creating a stringstream to load buffer into.
std::stringstream vertexSS, fragmentSS;
vertexSS << vertexStream.rdbuf();
fragmentSS << fragmentStream.rdbuf();
//Closing streams, buffers have now been read out.
vertexStream.close();
fragmentStream.close();
//Reading into strings.
vertexCode = vertexSS.str();
fragmentCode = fragmentSS.str();
}
catch (std::ifstream::failure e) {
std::cout << "SHADER FAILURE: Could not load vertex/fragment Shader file.\n";
}
//Converting vertex/frag to a const char*.
const char* vertexShaderSource = vertexCode.c_str();
const char* fragmentShaderSource = fragmentCode.c_str();
//Attempting to compile shaders.
GLuint vertex, fragment;
int noerrors;
char* log = new char[512];
//FIRST - Vertex Shader.
vertex = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertex, 1, &vertexShaderSource, NULL);
glCompileShader(vertex);
//Checking for errors.
glGetShaderiv(vertex, GL_COMPILE_STATUS, &noerrors);
if (!noerrors) {
//Failure, dump log.
glGetShaderInfoLog(vertex, 512, NULL, log);
std::cout << "SHADER FAILURE: Vertex shader failed to compile.\n" << log;
}
//SECOND - Fragment Shader.
fragment = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragment, 1, &fragmentShaderSource, NULL);
glCompileShader(fragment);
//Checking for errors.
glGetShaderiv(fragment, GL_COMPILE_STATUS, &noerrors);
if (!noerrors) {
//Failure, dump log.
glGetShaderInfoLog(fragment, 512, NULL, log);
std::cout << "SHADER FAILURE: Fragment shader failed to compile.\n" << log;
}
//Attempting to create and link shader program.
ID = glCreateProgram();
glAttachShader(ID, vertex);
glAttachShader(ID, fragment);
glLinkProgram(ID);
glGetProgramiv(ID, GL_LINK_STATUS, &noerrors);
if (!noerrors)
{
glGetProgramInfoLog(ID, 512, NULL, log);
std::cout << "ERROR::SHADER::PROGRAM::LINKING_FAILED\n" << log << std::endl;
}
//Deleting shaders after link, no longer required.
glDeleteShader(vertex);
glDeleteShader(fragment);
}
///Destructor for the Shader class, deletes the program referenced by private UInt ID.
///Parameters: ()
slvr::Shader::~Shader() {
glDeleteProgram(ID);
}
///Sets the shader as the current active shader in the engine/OGL.
///Parameters: ()
void slvr::Shader::use() {
glUseProgram(ID);
}
///Sets a shader uniform property to the value given.
///One example could be "colour".
///Parameters: (std::string name_of_uniform, bool value_of_parameter)
void slvr::Shader::setUniformBool(const std::string &name, bool value) const
{
glUniform1i(glGetUniformLocation(ID, name.c_str()), (int)value);
}
///Sets a shader uniform property to the value given.
///One example could be "colour".
///Parameters: (std::string name_of_uniform, int value_of_parameter)
void slvr::Shader::setUniformInt(const std::string &name, int value) const
{
glUniform1i(glGetUniformLocation(ID, name.c_str()), value);
}
///Sets a shader uniform property to the value given.
///One example could be "colour".
///Parameters: (std::string name_of_uniform, float value_of_parameter)
void slvr::Shader::setUniformFloat(const std::string &name, float value) const
{
glUniform1f(glGetUniformLocation(ID, name.c_str()), value);
}
///A getter for the raw GL ID of the compiled shader program.
///Parameters: ()
GLuint slvr::Shader::getID() {
return ID;
}
| 30.851852 | 111 | 0.735654 | 3.234375 |
38464be3edab166ec0a094b4888c1fd9bccc5e40
| 3,378 |
php
|
PHP
|
app/Http/Traits/DateTrait.php
|
KamalEssam/store-e-commer-backend
|
4f85cf8fc58e544d982efb548093391ba3459b69
|
[
"MIT"
] | null | null | null |
app/Http/Traits/DateTrait.php
|
KamalEssam/store-e-commer-backend
|
4f85cf8fc58e544d982efb548093391ba3459b69
|
[
"MIT"
] | null | null | null |
app/Http/Traits/DateTrait.php
|
KamalEssam/store-e-commer-backend
|
4f85cf8fc58e544d982efb548093391ba3459b69
|
[
"MIT"
] | null | null | null |
<?php
namespace App\Http\Traits;
use Carbon\Carbon;
use Config;
/**
 * Date/time helpers built on Carbon.
 *
 * NOTE(review): every method hard-codes the "Africa/Cairo" timezone —
 * confirm this is intended for all deployments.
 */
trait DateTrait
{
    /**
     * Replace a Y-m-d date string with the localised word for "today" or
     * "tomorrow" when it matches; otherwise return the input unchanged.
     * @param $day
     * @return string
     */
    public static function getDayName($day)
    {
        switch ($day) {
            case self::getDateByFormat(self::getToday(), 'Y-m-d') :
                $day = trans('lang.today');
                break;
            case self::getDateByFormat(self::getTomorrow(), 'Y-m-d') :
                $day = trans('lang.tomorrow');
                break;
            default :
                return $day;
        }
        return $day;
    }
    /**
     * Get a day's localised name by its index in the config 'lists.days' list.
     * NOTE(review): if no entry matches, $day_name is undefined and PHP will
     * raise a notice on return — confirm indices always come from the list.
     * @param $day_index
     * @return mixed
     */
    public static function getDayNameByIndex($day_index)
    {
        foreach (Config::get('lists.days') as $single_day) {
            if ($day_index == $single_day['day']) {
                $day_name = $single_day[app()->getLocale() . '_name'];
                break;
            }
        }
        return $day_name;
    }
    /**
     * Get the configured index of the weekday that a date falls on.
     * @param $day
     * @return mixed
     */
    public static function getDayIndex($day)
    {
        // get index day from days list in config
        $day_parsing = Carbon::parse($day, "Africa/Cairo");
        // get name of day (Sunday, Monday, ...) via the 'l' format
        $day_name = self::getDateByFormat($day_parsing, 'l');
        foreach (Config::get('lists.days') as $single_day) {
            if ($day_name == $single_day['en_name']) {
                $index = $single_day['day'];
                break;
            }
        }
        return $index;
    }
    /**
     * Parse any date expression into a Carbon instance.
     * @param $date
     * @return Carbon
     */
    public static function parseDate($date)
    {
        return Carbon::parse($date, "Africa/Cairo");
    }
    /**
     * Get today's date (midnight) as a Carbon instance.
     * @return Carbon
     */
    public static function getToday()
    {
        return Carbon::today("Africa/Cairo");
    }
    /**
     * Get tomorrow's date (midnight) as a Carbon instance.
     * @return Carbon
     */
    public static function getTomorrow()
    {
        return Carbon::tomorrow("Africa/Cairo");
    }
    /**
     * Format an arbitrary date expression with the given format string.
     * @param $date
     * @param $format
     * @return string
     */
    public static function getDateByFormat($date, $format)
    {
        return Carbon::parse($date, "Africa/Cairo")->format($format);
    }
    /**
     * Format a time expression (e.g. with am/pm) using the given format.
     * @param $time
     * @param string $format
     * @return string
     */
    public static function getTimeByFormat($time, $format)
    {
        return Carbon::parse($time, "Africa/Cairo")->format($format);
        // return date($format, strtotime($time));
    }
    /**
     * Human-readable difference from now, e.g. "3 days ago".
     * @param $date_from
     * @return string
     */
    public static function readableDate($date_from)
    {
        $result = Carbon::createFromTimeStamp(strtotime($date_from), "Africa/Cairo")->diffForHumans();
        return $result;
    }
    /**
     * Add a number of days to a Carbon date (mutates and returns it).
     * @param $add_from_day
     * @param $no_of_days
     * @return mixed
     */
    public static function addDays($add_from_day, $no_of_days)
    {
        return $add_from_day->addDays($no_of_days);
    }
    /**
     * Format the current date/time with the given format string.
     * @param $format
     * @return string
     */
    public static function getTodayFormat($format)
    {
        return Carbon::now("Africa/Cairo")->format($format);
    }
}
| 22.671141 | 102 | 0.522795 | 3.09375 |
8cd3088906b49c3e5511d5794659bb3003f5ef97
| 2,305 |
go
|
Go
|
handler/chat.go
|
JermineHu/ait
|
e0474aee2be77a27c3ddbefce42f12ccb154ab00
|
[
"MIT"
] | null | null | null |
handler/chat.go
|
JermineHu/ait
|
e0474aee2be77a27c3ddbefce42f12ccb154ab00
|
[
"MIT"
] | null | null | null |
handler/chat.go
|
JermineHu/ait
|
e0474aee2be77a27c3ddbefce42f12ccb154ab00
|
[
"MIT"
] | null | null | null |
package handler
import (
"github.com/labstack/echo"
. "github.com/JermineHu/ait/consts"
"strings"
"fmt"
"sync"
"github.com/gorilla/websocket"
"html/template"
)
// Upper bound on simultaneous chat-room connections.
const MAX_CONNECTION int = 100

// Sentinel returned by joinRoom when the room is full.
const JOIN_ROOM_FAILED int = -1

// When true, log4Demo prints debug messages to stdout.
const Debug = true

// ChatRoom tracks connected websocket clients keyed by an incrementing id.
// The embedded mutex guards joinRoom; NOTE(review): leftRoom and sendMessage
// do not take the lock — confirm this is safe under concurrent access.
type ChatRoom struct {
	sync.Mutex
	clients   map[int]*websocket.Conn
	currentId int
}

// joinRoom registers a websocket and returns its assigned id, or
// JOIN_ROOM_FAILED when the room already holds MAX_CONNECTION clients.
func (cr *ChatRoom) joinRoom(ws *websocket.Conn) int {
	cr.Lock()
	defer cr.Unlock()
	if len(cr.clients) >= MAX_CONNECTION {
		return JOIN_ROOM_FAILED
	}
	cr.currentId++
	cr.clients[cr.currentId] = ws
	return cr.currentId
}

// leftRoom removes the client with the given id from the room.
func (cr *ChatRoom) leftRoom(id int) {
	delete(cr.clients, id)
}

// sendMessage broadcasts msg to every connected client; a failed send is
// logged and skipped so one bad connection does not stop the broadcast.
func (cr *ChatRoom) sendMessage(msg string) {
	for _, ws := range cr.clients {
		if err := ws.WriteMessage(websocket.TextMessage, []byte(msg)); err != nil {
			log4Demo("发送失败,Err:" + err.Error())
			continue
		}
	}
}

// Single shared room instance, initialised at package load time.
var room ChatRoom

func init() {
	roomMap := make(map[int]*websocket.Conn, MAX_CONNECTION)
	room = ChatRoom{clients: roomMap, currentId: 0}
}

// log4Demo prints msg to stdout when the Debug flag is enabled.
func log4Demo(msg string) {
	if Debug {
		fmt.Println(msg)
	}
}
// WrapChatRoutes registers all chat-related routes on the given group.
func WrapChatRoutes(c *echo.Group) {
	ChatHandler(c)
	ChatIndexPageHandler(c)
}

// Upgrader used to promote HTTP requests to websocket connections.
var (
	upgrader = websocket.Upgrader{}
)

// ChatHandler registers the websocket chat endpoint: it upgrades the
// request, joins the shared room, then relays every received message to
// all clients, prefixed with the sender's IP address.
func ChatHandler(c *echo.Group) {
	h := func(c echo.Context) (err error) {
		ws, err := upgrader.Upgrade(c.Response(), c.Request(), nil) // upgrade the HTTP request to a websocket
		if err != nil {
			return
		}
		defer ws.Close() // close the socket when the handler exits
		var id int
		if id = room.joinRoom(ws); id == JOIN_ROOM_FAILED { // register in the room pool used for broadcasting
			err = ws.WriteMessage(websocket.TextMessage, []byte("加入聊天室失败"))
			if err != nil {
				c.Logger().Error(err)
			}
			return
		}
		defer room.leftRoom(id) // remove from the pool on exit
		ipAddress := strings.Split(ws.RemoteAddr().String(), ":")[0] + ":"
		for {
			_, msg, err := ws.ReadMessage() // read the next message
			if err != nil {
				c.Logger().Error(err)
				return err
			}
			send_msg := ipAddress + string(msg)
			room.sendMessage(send_msg) // broadcast to everyone in the room
		}
		return // unreachable: the loop above only exits via return
	}
	c.GET(ChatRoomRoute, h)
}

// ChatIndexPageHandler serves the static chat test page.
func ChatIndexPageHandler(c *echo.Group) {
	h := func(c echo.Context) (err error) {
		t, _ := template.ParseFiles("assets/test.html")
		err = t.Execute(c.Response().Writer, nil)
		if err != nil {
			log4Demo("Page Err:" + err.Error())
			return
		}
		return
	}
	c.GET(ChatRoomIndexPageRoute, h)
}
| 20.580357 | 86 | 0.67679 | 3.28125 |
a4218ac2d0ce1202c6ee4f8d5b421837d385e80b
| 2,584 |
php
|
PHP
|
system/core/Lang.php
|
nazar-007/article.kg
|
552e97ef4b32628b680b95d76b20c70e50f1be1d
|
[
"MIT"
] | null | null | null |
system/core/Lang.php
|
nazar-007/article.kg
|
552e97ef4b32628b680b95d76b20c70e50f1be1d
|
[
"MIT"
] | null | null | null |
system/core/Lang.php
|
nazar-007/article.kg
|
552e97ef4b32628b680b95d76b20c70e50f1be1d
|
[
"MIT"
] | null | null | null |
<?php
defined('BASEPATH') OR exit('No direct script access allowed');
/**
 * Language class: loads language files and fetches individual text lines.
 */
class CI_Lang {

	/**
	 * Translations keyed by line id, merged from all loaded files.
	 * @var array
	 */
	public $language = array();

	/**
	 * Map of loaded language file => idiom, used to avoid double loading.
	 * @var array
	 */
	public $is_loaded = array();

	/**
	 * Class constructor — only logs initialisation.
	 */
	public function __construct()
	{
		log_message('info', 'Language Class Initialized');
	}

	/**
	 * Load one or more language files.
	 *
	 * @param string|array $langfile   file name(s), with or without the '_lang' suffix / '.php'
	 * @param string       $idiom      language folder; falls back to config, then 'english'
	 * @param bool         $return     TRUE to return the loaded lines instead of merging them
	 * @param bool         $add_suffix whether to force the '_lang' suffix
	 * @param string       $alt_path   alternative base path to search
	 * @return void|array|true
	 */
	public function load($langfile, $idiom = '', $return = FALSE, $add_suffix = TRUE, $alt_path = '')
	{
		// Recurse over arrays of file names.
		if (is_array($langfile))
		{
			foreach ($langfile as $value)
			{
				$this->load($value, $idiom, $return, $add_suffix, $alt_path);
			}
			return;
		}
		$langfile = str_replace('.php', '', $langfile);
		if ($add_suffix === TRUE)
		{
			$langfile = preg_replace('/_lang$/', '', $langfile).'_lang';
		}
		$langfile .= '.php';
		// Fall back to the configured idiom when none (or an invalid one) is given.
		if (empty($idiom) OR ! preg_match('/^[a-z_-]+$/i', $idiom))
		{
			$config =& get_config();
			$idiom = empty($config['language']) ? 'english' : $config['language'];
		}
		// Already loaded for this idiom? Nothing to do unless lines were requested.
		if ($return === FALSE && isset($this->is_loaded[$langfile]) && $this->is_loaded[$langfile] === $idiom)
		{
			return;
		}
		// Load the base file, so any others found can override it
		$basepath = BASEPATH.'language/'.$idiom.'/'.$langfile;
		if (($found = file_exists($basepath)) === TRUE)
		{
			include($basepath);
		}
		// Do we have an alternative path to look in?
		if ($alt_path !== '')
		{
			$alt_path .= 'language/'.$idiom.'/'.$langfile;
			if (file_exists($alt_path))
			{
				include($alt_path);
				$found = TRUE;
			}
		}
		else
		{
			// Otherwise scan every registered package path.
			foreach (get_instance()->load->get_package_paths(TRUE) as $package_path)
			{
				$package_path .= 'language/'.$idiom.'/'.$langfile;
				if ($basepath !== $package_path && file_exists($package_path))
				{
					include($package_path);
					$found = TRUE;
					break;
				}
			}
		}
		if ($found !== TRUE)
		{
			show_error('Unable to load the requested language file: language/'.$idiom.'/'.$langfile);
		}
		// The included file is expected to define a $lang array.
		if ( ! isset($lang) OR ! is_array($lang))
		{
			log_message('error', 'Language file contains no data: language/'.$idiom.'/'.$langfile);
			if ($return === TRUE)
			{
				return array();
			}
			return;
		}
		if ($return === TRUE)
		{
			return $lang;
		}
		$this->is_loaded[$langfile] = $idiom;
		$this->language = array_merge($this->language, $lang);
		log_message('info', 'Language file loaded: language/'.$idiom.'/'.$langfile);
		return TRUE;
	}

	/**
	 * Fetch a single translated line of text.
	 *
	 * @param string $line       the language line id
	 * @param bool   $log_errors whether to log when the line is missing
	 * @return string|false  the translation, or FALSE if not found
	 */
	public function line($line, $log_errors = TRUE)
	{
		$value = isset($this->language[$line]) ? $this->language[$line] : FALSE;
		// Because killer robots like unicorns!
		if ($value === FALSE && $log_errors === TRUE)
		{
			log_message('error', 'Could not find the language line "'.$line.'"');
		}
		return $value;
	}
}
| 22.469565 | 104 | 0.595201 | 3.109375 |
fedb4a94731a90e87be5e86d5b9783aae73f0909
| 5,325 |
go
|
Go
|
rmid3tag.go
|
inazak/rmid3tag
|
e3038bb2622e86b7f96cc20b5c364e96bacecb83
|
[
"MIT"
] | null | null | null |
rmid3tag.go
|
inazak/rmid3tag
|
e3038bb2622e86b7f96cc20b5c364e96bacecb83
|
[
"MIT"
] | null | null | null |
rmid3tag.go
|
inazak/rmid3tag
|
e3038bb2622e86b7f96cc20b5c364e96bacecb83
|
[
"MIT"
] | 1 |
2021-02-26T11:57:36.000Z
|
2021-02-26T11:57:36.000Z
|
package rmid3tag
import (
"fmt"
"os"
"io"
"bytes"
"golang.org/x/text/transform"
"golang.org/x/text/encoding/unicode"
)
// Stat describes an MP3 file's layout on disk.
type Stat struct {
	Size            int64 // total file size in bytes
	V1TagExist      bool  // trailing 128-byte ID3v1 tag present
	V2TagExist      bool  // leading ID3v2 tag present
	OffsetMPEGFrame int64 // byte offset of the first MPEG frame
}

// SizeOfMPEGFrame returns the byte length of the MPEG frame region,
// excluding the trailing 128-byte ID3v1 tag when present.
func (s *Stat) SizeOfMPEGFrame() int64 {
	if s.V1TagExist {
		return s.Size - s.OffsetMPEGFrame - 128
	}
	return s.Size - s.OffsetMPEGFrame
}

// GetStat() provides mp3 file information.
// The Stat structure has the MPEG frame offset and size.
// +---------------+
// |    ID3v2tag   |
// |   (optional)  |
// +---------------+ <-- offset
// |               |  |
// |  MPEG Frames  |  | size of mpeg frame
// |               |  |
// +---------------+ <-+
// |    ID3v1tag   |
// |   (optional)  |
// +---------------+
//
func GetStat(filename string) (stat *Stat, err error) {
	stat = &Stat{}
	// open original file
	f, err := os.OpenFile(filename, os.O_RDONLY, 0644)
	if err != nil {
		return stat, err
	}
	defer f.Close()
	// filesize
	info, err := f.Stat()
	if err != nil {
		return stat, err
	}
	stat.Size = info.Size()
	// v2tag: presence plus the offset of the first MPEG frame
	stat.V2TagExist, stat.OffsetMPEGFrame, err = getID3v2TagSize(f)
	if err != nil {
		return stat, err
	}
	// v1tag: a fixed 128 bytes at the end of the file
	stat.V1TagExist, err = isExistID3v1Tag(f, stat.Size-128)
	if err != nil {
		return stat, err
	}
	return stat, nil
}
// getID3v2TagSize reports whether an ID3v2 tag starts the reader and, if so,
// the byte offset of the first MPEG frame after it (tag + any padding).
func getID3v2TagSize(r io.ReaderAt) (isExist bool, size int64, err error) {
	marker := make([]byte, 4)
	n, err := r.ReadAt(marker, 0)
	if n != 4 || err != nil {
		return false, 0, err
	}
	var offset int64 = 0 //header start position
	if string(marker[:3]) != "ID3" {
		// some mp3 file has irregular byte at the beginning,
		// so ignore them.
		if string(marker[1:]) == "ID3" {
			offset += 1
		} else {
			// then marker not found
			return false, 0, nil
		}
	}
	isExist = true
	// The tag size is stored at bytes 6..9 as a 4-byte synchsafe integer.
	data := make([]byte, 4)
	n, err = r.ReadAt(data, 6+offset)
	if n != 4 || err != nil {
		return isExist, 0, err
	}
	size = int64(decodeSynchsafe(data, 4))
	size += 10 + offset //add v2 header size
	// Some files have padding greater than the specified size,
	// so search until the mpeg frame is found.
	// NOTE(review): a read error (e.g. EOF) is the only way this loop can
	// terminate without finding a frame.
	for ; ; size++ {
		ok, err := isExistMP3Frame(r, size)
		if err != nil {
			return isExist, size, fmt.Errorf("mpeg frame not found")
		}
		if ok { // found
			return isExist, size, nil
		}
	}
	return isExist, size, nil // do not reach here
}

// isExistID3v1Tag reports whether a "TAG" marker (ID3v1) starts at offset.
func isExistID3v1Tag(r io.ReaderAt, offset int64) (bool, error) {
	marker := make([]byte, 3)
	n, err := r.ReadAt(marker, offset)
	if n != 3 || err != nil {
		return false, err
	}
	if string(marker) != "TAG" {
		return false, nil
	}
	return true, nil
}

// isExistMP3Frame reports whether the two bytes at offset match one of the
// recognised MPEG frame sync patterns (0xfffb or 0xfffa).
func isExistMP3Frame(r io.ReaderAt, offset int64) (bool, error) {
	data := make([]byte, 2)
	n, err := r.ReadAt(data, offset)
	if n != 2 || err != nil {
		return false, err
	}
	// beginning byte pattern of mpeg frame
	pattern := [][]byte{
		{0xff, 0xfb}, {0xff, 0xfa},
	}
	for _, p := range pattern {
		if bytes.HasPrefix(data, p) {
			return true, nil
		}
	}
	return false, nil
}
//// utilites
func decodeSynchsafe(data []byte, size int) int {
result := 0
for i:=0; i<size; i++ {
result += (int(data[i]) & 0x7f) << uint(7 * (size-1-i))
}
return result
}
func encodeSynchsafe(data int, size int) []byte {
result := make([]byte, size)
for i:=0; i<size; i++ {
result[i] = byte((data & 0x7f) >> uint(7 * (size-1-i)))
}
return result
}
//// helpers for creating ID3v2 tags

// CreateMinimumTag builds a complete ID3v2.3 tag containing just a title
// (TIT2) and an artist (TPE1) frame.
func CreateMinimumTag(title, artist string) ([]byte, error) {
	tf, err := CreateTitleFrame(title)
	if err != nil {
		return []byte{}, err
	}
	af, err := CreateArtistFrame(artist)
	if err != nil {
		return []byte{}, err
	}
	return CreateID3V2Tag(tf, af), nil
}

// CreateID3V2Tag wraps the given frames in an ID3v2.3 header whose size
// field is the synchsafe-encoded total length of all frames.
func CreateID3V2Tag(frames ...[]byte) []byte {
	size := 0
	for _, frame := range frames {
		size += len(frame)
	}
	buf := &bytes.Buffer{}
	buf.WriteString("ID3")
	buf.Write([]byte{0x3, 0x0, 0x0}) //version 2.3
	buf.Write(encodeSynchsafe(size, 4))
	for _, frame := range frames {
		buf.Write(frame)
	}
	return buf.Bytes()
}

// CreateTitleFrame builds a TIT2 (song title) text frame.
func CreateTitleFrame(text string) ([]byte, error) {
	return CreateTextFrame("TIT2", text)
}

// CreateArtistFrame builds a TPE1 (lead performer) text frame.
func CreateArtistFrame(text string) ([]byte, error) {
	return CreateTextFrame("TPE1", text)
}

// CreateTextFrame builds an ID3v2.3 text frame: 4-char id, a 32-bit
// big-endian size (plain, not synchsafe — v2.3 frame sizes are not
// synchsafe), two zero flag bytes, then the encoded text payload.
func CreateTextFrame(id, text string) ([]byte, error) {
	data, err := encodeTextFrameData(text)
	if err != nil {
		return []byte{}, err
	}
	size := len(data)
	buf := &bytes.Buffer{}
	buf.WriteString(id)
	buf.WriteByte(byte(0xff & (size >> 24)))
	buf.WriteByte(byte(0xff & (size >> 16)))
	buf.WriteByte(byte(0xff & (size >> 8)))
	buf.WriteByte(byte(0xff & size))
	buf.Write([]byte{0x0, 0x0})
	buf.Write(data)
	return buf.Bytes(), nil
}

// encodeTextFrameData encodes text as a v2.3 text-frame payload:
// encoding byte 0x1 (UTF-16 with BOM), the text, then a UTF-16 null.
func encodeTextFrameData(s string) ([]byte, error) {
	u16bytes, err := toUTF16BEWithBOM(s)
	if err != nil {
		return []byte{}, err
	}
	buf := &bytes.Buffer{}
	buf.Write([]byte{0x1}) //encoding UTF16/useBOM
	buf.Write(u16bytes)
	buf.Write([]byte{0x0, 0x0}) //null terminater
	return buf.Bytes(), nil
}

// toUTF16BEWithBOM converts a UTF-8 string to big-endian UTF-16 with a BOM.
func toUTF16BEWithBOM(s string) ([]byte, error) {
	u16str, _, err := transform.String(
		unicode.UTF16(unicode.BigEndian, unicode.UseBOM).NewEncoder(), s)
	if err != nil {
		return []byte{}, err
	}
	return []byte(u16str), nil
}
| 19.434307 | 75 | 0.587606 | 3.078125 |
43b7bfb24c73450508ddb376daea8bc79f451041
| 2,818 |
lua
|
Lua
|
archived_files/default_test_run.lua
|
MuteSpirit/yunit
|
5a850fa720086cb40627328d0533110d4cc11790
|
[
"MIT"
] | null | null | null |
archived_files/default_test_run.lua
|
MuteSpirit/yunit
|
5a850fa720086cb40627328d0533110d4cc11790
|
[
"MIT"
] | null | null | null |
archived_files/default_test_run.lua
|
MuteSpirit/yunit
|
5a850fa720086cb40627328d0533110d4cc11790
|
[
"MIT"
] | null | null | null |
local lfs = require "yunit.lfs"
local fs = require "yunit.filesystem"
local testRunner = require "yunit.test_runner"
local testResultHandlers = require "yunit.test_result_handlers"
-- LTUEs (Lua Test Unit Engines) selected via use{}; empty means "use defaults".
local usedLtueArray = {}
--[=[
use this function to control which LTUEs are loaded and used, i.e. to use
only 'yunit.luaunit' run:
lua -l yunit.work_in_scite -l yunit.default_test_run -e "use{'yunit.luaunit'}" -e "run[[test.t.lua]]"
in the command line
--]=]
function use(ltueArray)
    usedLtueArray = ltueArray
end
--[=[
Run test containers and exit the process with -1 if any test fails.
@param[in] inArg May be:
 1) path to a test container file
 2) path of a directory with test containers (recursive search)
 3) a table whose elements are any mix of items 1-3
--]=]
function run(inArg)
    if nil == inArg then
        error('not nill expected as argument, but was one')
        return
    end
    -- Build the runner and attach result/load handlers.
    local runner = testRunner.TestRunner:new()
    runner:addResultHandler(testResultHandlers.EstimatedTime:new())
    -- Optional global handler (e.g. installed by an IDE integration).
    if testResultHandler then
        runner:addResultHandler(testResultHandler)
    end
    runner:addLoadtHandler(testResultHandlers.TextLoadTestContainerHandler:new())
    -- If use{} was never called, fall back to the built-in engines.
    local listOfUsedLtueWasNotSpecifiedInCommandLine = not next(usedLtueArray)
    if listOfUsedLtueWasNotSpecifiedInCommandLine then
        runner:loadLtue('yunit.luaunit')
        runner:loadLtue('yunit.cppunit')
    else
        for _, ltueName in ipairs(usedLtueArray) do
            runner:loadLtue(ltueName)
        end
    end
    -- Tracks whether anything failed, across both load and run phases.
    local fixFailed = testResultHandlers.FixFailed:new()
    runner:addResultHandler(fixFailed)
    runner:addLoadtHandler(fixFailed)
    -- Dispatch a single path, or recurse into a table of paths.
    local function handleArg(arg)
        if 'string' == type(arg) then
            local path = arg
            if not fs.isExist(path) then
                error('receive path to unexist file/directory: "' .. path .. '"')
                return
            elseif fs.isDir(path) then
                runner:runTestContainersFromDir(path)
            elseif fs.isFile(path) then
                runner:runTestContainer(path)
            else
                error('receive path to unknown file system object type (not file and not directory): "' .. path .. '"')
                return
            end
        elseif 'table' == type(arg) then
            for _, path in pairs(arg) do
                handleArg(path)
            end
        else
            error('table or string expected, but was %s', type(arg))
            return
        end
    end
    runner:onTestsBegin()
    handleArg(inArg)
    runner:onTestsEnd()
    -- Non-zero exit code signals failure to the calling process.
    if not fixFailed:passed() then
        print(fixFailed:message())
        io.stdout:flush()
        io.stderr:flush()
        os.exit(-1)
    end
end
| 31.311111 | 120 | 0.598297 | 3.3125 |
2c9a3bf6b9e629d54a1ec7e85b97bc1415ee0361
| 6,278 |
py
|
Python
|
musx/paint.py
|
ricktaube/musx
|
5fb116b1a1ade9ef42a9a3c8311c604795e0af6a
|
[
"BSD-3-Clause"
] | 9 |
2021-06-03T21:36:53.000Z
|
2021-06-13T01:53:17.000Z
|
musx/paint.py
|
musx-admin/musx
|
5fb116b1a1ade9ef42a9a3c8311c604795e0af6a
|
[
"BSD-3-Clause"
] | 2 |
2021-06-03T18:38:57.000Z
|
2021-06-13T10:46:28.000Z
|
musx/paint.py
|
musx-admin/musx
|
5fb116b1a1ade9ef42a9a3c8311c604795e0af6a
|
[
"BSD-3-Clause"
] | 1 |
2022-02-12T23:04:27.000Z
|
2022-02-12T23:04:27.000Z
|
###############################################################################
"""
The paint.py module provides two high-level composers that can produce a wide
variety of interesting textures and music. The `brush()` composer outputs Notes
in sequential order, similar to how a paint brush makes lines on a canvas. In
contrast, the `spray()` composer generates Notes by applying random selection
to its input parameters.
For examples of using paint.py see gamelan.py, blues.py and messiaen.py in
the demos directory.
"""
from musx import Note, cycle, choose
def brush(score, *, length=None, end=None, rhythm=.5, duration=None, pitch=60, amplitude=.5, instrument=0, microdivs=1):
    """
    Outputs Notes in sequential order, automatically looping parameter
    list values until the algorithm stops.

    Parameters
    ----------
    score : Score
        The Notes that are generated will be added to this score.
    length : number
        The number of MIDI events to generate. Either length or end must be
        specified.
    end : number
        An end time after which no more events will be generated.
        Either end or length must be specified.
    rhythm : number | list
        A rhythm or list of rhythms that specify the amount of time to wait
        between notes. Negative rhythm values are interpreted as musical
        rests, i.e. events are not output but time advances. The default value
        is 0.5.
    duration : number | list
        A duration or list of durations that specify the amount of time each
        MIDI event lasts. The default value is the current rhythm.
    pitch : number | list
        A MIDI key number or list of key numbers to play. The list can contain
        sublists of key numbers; in this case each sublist is treated as a
        chord (the key numbers in the sublist are performed simultaneously.)
    amplitude : number | list
        A value or list of values between 0.0 and 1.0 for determining the
        loudness of the MIDI events.
    instrument : number | list
        A MIDI channel number 0 to 15, or a list of channel numbers. Channel
        value 9 will send events to the synthesizer's drum map for triggering
        various percussion sounds.
    microdivs : int
        A value 1 to 16 setting the divisions per semitone used for microtonal
        quantization of floating point keynums. (NOTE(review): the original
        docstring documented this parameter as "tuning", which does not exist
        in the signature.) See Note, Seq and the micro.py demo file for more
        information; not referenced inside this generator's body.
    """
    # user must specify either length or end parameter
    counter = 0
    if length:
        if end: raise TypeError("specify either length or end, not both.")
        stopitr = length
        thisitr = (lambda: counter)  # count events generated so far
    else:
        if not end: raise TypeError("specify either length or end.")
        stopitr = end
        thisitr = (lambda: score.elapsed)  # compare score time against end
    # convert all values into cycles (scalars become 1-element cycles)
    cyc = (lambda x: cycle(x if type(x) is list else [x]))
    rhy = cyc(rhythm)
    dur = cyc(duration)
    key = cyc(pitch)
    amp = cyc(amplitude)
    chan = cyc(instrument)
    while (thisitr() < stopitr):
        t = score.now
        #print("counter=", counter, "now=", t)
        r = next(rhy)
        d = next(dur)
        k = next(key)
        a = next(amp)
        c = next(chan)
        if r > 0:  # negative rhythm is a rest: advance time, emit nothing
            if not d: d = r  # duration defaults to the current rhythm
            if type(k) is list:  # a sublist of keys is played as a chord
                for j in k:
                    m = Note(time=t, duration=d, pitch=j, amplitude=a, instrument=c)
                    score.add(m)
            else:
                m = Note(time=t, duration=d, pitch=k, amplitude=a, instrument=c)
                score.add(m)
        counter += 1
        yield abs(r)
def spray(score, *, length=None, end=None, rhythm=.5, duration=None, pitch=60, band=0, amplitude=.5, instrument=0):
    """
    Generates Notes using discrete random selection. Most parameters allow
    lists of values to be specified, in which case elements are randomly
    selected from the lists every time an event is output.

    Parameters
    ----------
    Parameters are the same as brush() except for these changes or additions:
    pitch : number | list
        A MIDI key number or list of key numbers to play. If a list is
        specified a key number is randomly selected from the list for each
        midi event.
    band : number | list
        A number is treated as a half-step range on either side of the current
        key choice from which the next key number will be chosen. If a list of
        intervals is specified then randomly selected intervals are added
        to the current key number to determine the key number played.
        The list can also contain sublists of intervals, in which case each
        sublist is treated as a chord, i.e. the intervals in the sublist are
        added to the current key and performed simultaneously.

    Raises
    ------
    TypeError
        If both or neither of length/end are given.
    """
    # user must specify either length or end parameter
    counter = 0
    if length:
        # Bug fix: the original message read "specify either leng or end".
        if end: raise TypeError("specify either length or end, not both.")
        stopitr = length
        thisitr = (lambda: counter)  # count events generated so far
    else:
        if not end: raise TypeError("specify either length or end.")
        stopitr = end
        thisitr = (lambda: score.elapsed)  # compare score time against end
    # convert each param into a chooser pattern (scalars become 1-element pools)
    ran = (lambda x: choose(x if type(x) is list else [x]))
    rhy = ran(rhythm)
    dur = ran(duration)
    key = ran(pitch)
    amp = ran(amplitude)
    chan = ran(instrument)
    # An int band means +/- band half-steps around each chosen key;
    # a list band is sampled directly (sublists become chords).
    band = choose([i for i in range(-band, band + 1)] if type(band) is int else band)
    while (thisitr() < stopitr):
        t = score.now
        r = next(rhy)
        d = next(dur)
        k = next(key)
        a = next(amp)
        c = next(chan)
        b = next(band)
        if type(b) is list:
            k = [k + i for i in b]  # chord: one note per interval
        else:
            k = k + b
        if r > 0:  # negative rhythm is a rest: advance time, emit nothing
            if not d: d = r  # duration defaults to the current rhythm
            if type(k) is list:
                for j in k:
                    m = Note(time=t, duration=d, pitch=j, amplitude=a, instrument=c)
                    score.add(m)
            else:
                m = Note(time=t, duration=d, pitch=k, amplitude=a, instrument=c)
                score.add(m)
        counter += 1
        yield abs(r)
860f48c705e8958e97c8dfa3a708ab2ab42b5819
| 4,188 |
rb
|
Ruby
|
RUBY/language/language_other_file.rb
|
lemming-life/Snippets
|
796d34f3d33cb0e38d38197938bc36397ce8a27b
|
[
"Unlicense"
] | null | null | null |
RUBY/language/language_other_file.rb
|
lemming-life/Snippets
|
796d34f3d33cb0e38d38197938bc36397ce8a27b
|
[
"Unlicense"
] | null | null | null |
RUBY/language/language_other_file.rb
|
lemming-life/Snippets
|
796d34f3d33cb0e38d38197938bc36397ce8a27b
|
[
"Unlicense"
] | null | null | null |
puts "Executing language_other_file.rb"
# A class
# A minimal 2D shape with x/y coordinates. Demonstrates explicit
# set_/get_ methods alongside idiomatic setter (x=) / getter (x) pairs.
class Shape
  def initialize
    @x = 0 # member variable
    @y = 0 # member variable
  end
  def set_x(new_x)
    @x = new_x
  end
  def get_x()
    return @x # Explicit return
  end
  def set_y(new_y)
    @y = new_y
  end
  def get_y # No need for ()
    @y # Implicit return
  end
  # Setter (verifies the data is numeric; silently ignores anything else)
  def x=(new_x)
    @x = new_x if new_x.is_a?(Numeric)
  end
  # Getter
  def x
    @x # implicit return
  end
  def y=(new_y)
    @y = new_y if new_y.is_a?(Numeric)
  end
  def y
    @y
  end
  # Base implementation; subclasses override with a real area formula.
  def get_area
    return 0
  end
end
# Create an object and exercise both accessor styles.
shape = Shape.new
shape.set_x(1) # Use a method, pass a value
shape.get_x # Use a method to get
shape.x = 5 # Use the setter
shape.x # Use the getter
# Class using inheritance
# Class using inheritance
class Circle < Shape
  #attr_reader :radius, :area # Getters only
  #attr_writer :radius, :area # Setters only
  attr_accessor :radius, :color # Both getter and setter
  def initialize
    # NOTE(review): this assigns a LOCAL variable, not @radius/self.radius,
    # so the radius accessor returns nil until a caller sets it.
    radius = 1
  end
  # Override get_area of Shape.
  # NOTE(review): computes pi * r, not the usual pi * r**2 — confirm intent.
  def get_area
    return 3.14 * radius.to_f
  end
end
class Rectangle < Shape
  attr_accessor :height, :width
  def initialize
    # NOTE(review): local variables again — @height/@width stay nil here.
    height = 1
    width = 1
  end
  def get_area
    return width.to_f * height.to_f
  end
end
circle = Circle.new
circle.x = 1 # Uses the x setter from Shape
circle.y = -1
circle.radius = 2
circle.get_area # 6.28
# Allows us to use the StopWatch module
require_relative "stopwatch"
# A mixin of phone behaviours; classes pull these in with include/prepend.
module Phone
  def play_ringtone
    puts "ring, ring..."
  end
  def vibrate
    puts "brrr"
  end
end
class FancyPhone
  include Phone
  # Include StopWatch methods in the Phone
  include StopWatch
end
fancyPhone = FancyPhone.new
fancyPhone.play_ringtone
fancyPhone.start_stop_watch
fancyPhone.output_stop_watch
fancyPhone.stop_stop_watch
class NotFancyPhone
  prepend Phone
  # vibrate is defined both here and in Phone; `prepend` puts Phone AHEAD
  # of this class in the method lookup chain, so Phone#vibrate wins.
  def vibrate
    puts "no vibrate"
  end
end
noFancyPhone = NotFancyPhone.new
noFancyPhone.vibrate # brrr
# Symbols (no value given)
# Symbols are lightweight immutable identifiers (no value attached).
:a_symbol
# Enumeration: including Enumerable and defining #each unlocks the whole
# Enumerable API (first, find, select, min, max, sort, ...).
class Colors
  include Enumerable
  def each
    yield "red"
    yield "green"
    yield "blue"
  end
end
colors = Colors.new
colors.each do |color|
  aColor = color
end
colors.first # red
# NOTE(review): 'color = "red"' is an ASSIGNMENT (always truthy), so find
# returns the first element; '==' was probably intended.
colors.find{ |color| color = "red" }.to_s # "red"
colors.select { |color| color.size <=4 } # ["red", "blue"]
colors.reject { |color| color.size >=4 } # ["red"]
colors.min # blue
colors.max # red
colors.sort #["blue", "green", "red"]
colors.reverse_each { |color| aColor = color} # blue, green, red
# Open classes allow us to
# have more class instance member additions
# even though class Colors was already defined.
# You can do this with any class, and you could
# modify some very essential classes; this is called "monkey patching".
class Colors
  def some_other_method
  end
end
# Duck Typing:
# if it behaves like a duck then it is a duck.
# All of Ruby works this way, which is quite different
# from statically typed languages.
class Animal
  def do_quack(maybe_a_duck)
    # as long as whatever is passed has a quack method
    # it is ok to have here
    maybe_a_duck.quack()
  end
end
class Duck
  def quack
    puts "quack from Duck"
  end
end
class Person
  # Notice how Person also has a quack method
  def quack
    puts "quack from Person"
  end
end
Animal.new.do_quack(Duck.new) # ok
Animal.new.do_quack(Person.new) # also ok
# Here's an example of using methodName=
class Something
  def setter=(x)
    @x = x
  end
end
# We can now do this:
something = Something.new
something.setter=(5)
something.setter = 5 # Same as above
# Classes are "open" so we can add more
# methods and variables to a class.
# The additions can be in any file.
class Something
  def more_stuff
  end
end
| 18.780269 | 91 | 0.644222 | 3.4375 |
b368bf296dbdcc636fab9eb58ef641d1b9f373aa
| 5,735 |
py
|
Python
|
stepperModule.py
|
nathrun/Stepper-motor-controller
|
d77aa21691fe441685db429e178c42c1d3195003
|
[
"MIT"
] | null | null | null |
stepperModule.py
|
nathrun/Stepper-motor-controller
|
d77aa21691fe441685db429e178c42c1d3195003
|
[
"MIT"
] | null | null | null |
stepperModule.py
|
nathrun/Stepper-motor-controller
|
d77aa21691fe441685db429e178c42c1d3195003
|
[
"MIT"
] | null | null | null |
#---Documentation---------------------------------------------------------------
# This module was designed to run with a L298N h-bridge module. I created this
# in my spare time and did not test extensively so there might be bugs
# def __init__:
# params - motorPinsArray is an array that contains the GPIO pins that go to
# these inputs -> [IN1,IN2,IN3,IN4]
# - stepsPerRevolution is the amount of steps your stepper motor
# requires to do a single revolution
# - defaultRPM -> pretty obvious... can be either an integer or float
#
# def setDefaultRPM:
# params -defaultRPM -> new default rpm for object, can be either an
# integer or float
# will return True if operation happened successfully
#
# def spinMotor:
# params - numRevolution is a float that indicates exactly how many
# revolution you would like the stepper motor to turn. If negative,
# the motor will turn in the opposite direction.
# - stepPhase (optional, default='dual'), refers to either 'single'
# phase stepping or 'dual' phase stepping
# (dual phase, both coils will always be engaged)
# - stepType (optional, default='full'), can only be used of stepPhase
# is equal to 'dual'.
# - rpm (optional), if you want to set a temporary rpm,
# either an integer or float
#-------------------------------------------------------------------------------
import time
import RPi.GPIO as GPIO
class stepperController(object):
    """
    Drive a bipolar stepper motor through an L298N H-bridge via RPi.GPIO.

    Construct with the four GPIO pin numbers wired to [IN1, IN2, IN3, IN4],
    the number of steps the motor needs for one full revolution, and a
    default RPM.  spinMotor() performs the actual movement.
    """

    # Rearranges the [IN1, IN2, IN3, IN4] pin list into the coil firing
    # order expected by an L298N H-bridge.
    pinShuffle = [0, 3, 1, 2]

    # Single-phase stepping: one coil energised at a time.
    # Half stepping is not possible in single-phase mode.
    singlePhaseStepping = [
        [1, 0, 0, 0],
        [0, 1, 0, 0],
        [0, 0, 1, 0],
        [0, 0, 0, 1]
    ]

    # dualPhaseStepping[0]: full steps (both coils always engaged).
    # dualPhaseStepping[1]: half steps.
    dualPhaseStepping = [
        [
            [1, 1, 0, 0],
            [0, 1, 1, 0],
            [0, 0, 1, 1],
            [1, 0, 0, 1]
        ],
        [
            [1, 0, 0, 0],
            [1, 1, 0, 0],
            [0, 1, 0, 0],
            [0, 1, 1, 0],
            [0, 0, 1, 0],
            [0, 0, 1, 1],
            [0, 0, 0, 1],
            [1, 0, 0, 1]
        ]
    ]

    def __init__(self, motorPinsArray, stepsPerRevolution, defaultRPM):
        """
        motorPinsArray     -- list of GPIO pins wired to [IN1, IN2, IN3, IN4]
        stepsPerRevolution -- integer steps per full motor revolution
        defaultRPM         -- default speed; int or float
        """
        self.pins = motorPinsArray
        self.stepsInRevolution = stepsPerRevolution
        self.d_RPM = defaultRPM
        # Validation only warns (prints), preserving the original behaviour.
        # TODO(review): consider raising ValueError/TypeError instead.
        if type(self.pins) != list:
            print('please enter list')
        if type(self.stepsInRevolution) != int:
            print('stepsPerRevolution must be an integer value')
        if type(self.d_RPM) != int and type(self.d_RPM) != float:
            print('defaultRPM must be an integer value')

    def spinMotor(self, numRevolution, stepPhase="dual", stepType='full', rpm=0):
        """
        Spin the motor exactly `numRevolution` turns (float allowed; a
        negative value reverses direction).

        stepPhase -- 'single' or 'dual' phase stepping (default 'dual')
        stepType  -- 'full' or 'half'; 'half' only valid with dual phase
        rpm       -- temporary speed override; 0 means use the default RPM

        Returns True on success, or an error-message string for bad
        arguments (interface unchanged from the original).
        """
        if stepPhase != 'dual' and stepPhase != 'single':
            return 'stepPhase must equal "single" or "dual"'
        if stepType != 'half' and stepType != 'full':
            return 'stepType must equal "half" or "full"'
        steps = self.stepsInRevolution
        if stepPhase == 'single':
            if stepType == 'half':
                print('can not do half steps on single phase stepping. defualted to full steps')
            curSeq = self.singlePhaseStepping
        elif stepType == 'half':
            curSeq = self.dualPhaseStepping[1]
            steps = steps * 2  # half steps double the steps per revolution
        else:
            curSeq = self.dualPhaseStepping[0]
        # Seconds to pause between steps to achieve the requested RPM.
        if rpm == 0:
            stepBreak = 60.0 / (steps * self.d_RPM)
        else:
            stepBreak = 60.0 / (steps * rpm)
        # Negative revolutions: walk the sequence backwards.
        # Bug fix: reverse a COPY — the original called curSeq.reverse(),
        # mutating the shared class-level tables and corrupting every
        # subsequent call's direction.
        if numRevolution < 0:
            curSeq = curSeq[::-1]
            numRevolution *= -1
        if numRevolution == 0:
            return True  # nothing to do; skip GPIO setup and cleanup
        # Bug fix: GPIO.getmode is a function; the original compared the
        # function object itself (`GPIO.getmode != GPIO.BCM`, always True).
        # (The original also contained Python 2 `print 'DEBUG ...'`
        # statements here — SyntaxErrors under Python 3 — now removed.)
        if GPIO.getmode() != GPIO.BCM:
            GPIO.setmode(GPIO.BCM)
        for pin in self.pins:
            GPIO.setup(pin, GPIO.OUT, initial=GPIO.LOW)
        # Cycle through the coil sequence for the required number of steps.
        phase = 0
        for x in range(int(round(steps * numRevolution))):
            for pin in range(4):
                GPIO.output(self.pins[self.pinShuffle[pin]], curSeq[phase][pin])
            time.sleep(stepBreak)
            phase += 1
            if phase >= len(curSeq):
                phase = 0
        # De-energise the coils and release the pins.
        for pin in self.pins:
            GPIO.output(pin, 0)
        GPIO.cleanup()
        return True

    def setDefaultRPM(self, defaultRPM):
        """Set a new default RPM. Returns True on success, or an error
        string when the value is not an int/float (interface unchanged)."""
        result = True if (type(defaultRPM) == int or type(defaultRPM) == float) else 'defaultRPM must be an integer or a float'
        if result == True:
            self.d_RPM = defaultRPM
        return result
| 39.280822 | 124 | 0.53932 | 3.265625 |
cdbfb2cd74b2c13adb668c8c5ca77bba6afbcc95
| 1,217 |
cs
|
C#
|
Assets/Scripts/Player.cs
|
goldenxp/blocks
|
880b0ea45a8bf813269802f0ae97aa1859ea9534
|
[
"MIT"
] | null | null | null |
Assets/Scripts/Player.cs
|
goldenxp/blocks
|
880b0ea45a8bf813269802f0ae97aa1859ea9534
|
[
"MIT"
] | null | null | null |
Assets/Scripts/Player.cs
|
goldenxp/blocks
|
880b0ea45a8bf813269802f0ae97aa1859ea9534
|
[
"MIT"
] | null | null | null |
using UnityEngine;
using UnityEngine.InputSystem;
// Handles player input: reads a 2D move action from the Input System and
// forwards valid moves to the Mover base class and the Game singleton.
public class Player : Mover
{
    // Singleton-style access; instance is assigned in Awake.
    public static Player instance;
    public static Player Get() { return instance; }

    // Last chosen movement direction; reused to resolve diagonal input.
    Vector3 direction = Vector3.zero;

    public InputActionReference actionMove;

    void Awake()
    {
        instance = this;
    }

    void OnEnable()
    {
        actionMove.action.Enable();
    }

    void OnDisable()
    {
        actionMove.action.Disable();
    }

    void Update ()
    {
        if (CanInput())
            CheckInput();
    }

    // Input is blocked while a move is animating or while undo is held.
    public bool CanInput()
    {
        return !Game.isMoving && !Game.Get().holdingUndo;
    }

    public void CheckInput()
    {
        Vector2 axes = actionMove.action.ReadValue<Vector2>();
        float hor = axes.x;
        float ver = axes.y;
        if (hor == 0 && ver == 0)
            return;
        // Diagonal input: keep only the axis perpendicular to the previous
        // move, so alternating directions resolves the ambiguity.
        if (hor != 0 && ver != 0)
        {
            if (direction == Game.Get().MoveLeft || direction == Game.Get().MoveRight)
                hor = 0;
            else
                ver = 0;
        }
        // NOTE(review): exact comparison against 1/-1 assumes a digital
        // (non-analog) action value — confirm the binding's processors.
        if (hor == 1)
            direction = Game.Get().MoveRight;
        else if (hor == -1)
            direction = Game.Get().MoveLeft;
        else if (ver == -1)
            direction = Game.Get().MoveDown;
        else if (ver == 1)
            direction = Game.Get().MoveUp;
        if (CanMove(direction))
        {
            MoveIt(direction);
            Game.Get().MoveStart(direction);
        } else
            Game.moversToMove.Clear(); // blocked move: drop any queued movers
    }
}
| 16.671233 | 77 | 0.6212 | 3 |
23c683accc8989017b53e094dc35f7dbd6bad4da
| 2,543 |
js
|
JavaScript
|
packages/aws-client/tests/test-SQS.js
|
lindsleycj/cumulus
|
77bfd0f51ce55237febe7da506e137e7f981845a
|
[
"Apache-2.0"
] | 185 |
2018-07-23T20:31:12.000Z
|
2022-03-21T06:29:12.000Z
|
packages/aws-client/tests/test-SQS.js
|
lindsleycj/cumulus
|
77bfd0f51ce55237febe7da506e137e7f981845a
|
[
"Apache-2.0"
] | 564 |
2018-07-19T15:46:59.000Z
|
2022-03-23T14:53:33.000Z
|
packages/aws-client/tests/test-SQS.js
|
lindsleycj/cumulus
|
77bfd0f51ce55237febe7da506e137e7f981845a
|
[
"Apache-2.0"
] | 114 |
2018-08-02T13:33:56.000Z
|
2022-03-14T18:58:57.000Z
|
const test = require('ava');
const cryptoRandomString = require('crypto-random-string');
const { Console } = require('console');
const { Writable } = require('stream');
const Logger = require('@cumulus/logger');
const { sqs } = require('../services');
const {
createQueue,
getQueueNameFromUrl,
parseSQSMessageBody,
sqsQueueExists,
sendSQSMessage,
} = require('../SQS');
const randomString = () => cryptoRandomString({ length: 10 });
class TestStream extends Writable {
constructor(options) {
super(options);
this.output = '';
}
_write(chunk, _encoding, callback) {
this.output += chunk;
callback();
}
}
class TestConsole extends Console {
constructor() {
const stdoutStream = new TestStream();
const stderrStream = new TestStream();
super(stdoutStream, stderrStream);
this.stdoutStream = stdoutStream;
this.stderrStream = stderrStream;
}
get stdoutLogEntries() {
return this.stdoutStream.output
.trim()
.split('\n')
.filter((line) => line.length > 0)
.map(JSON.parse);
}
get stderrLogEntries() {
return this.stderrStream.output
.trim()
.split('\n')
.filter((line) => line.length > 0)
.map(JSON.parse);
}
}
test('parseSQSMessageBody parses messages correctly', (t) => {
const messageBody = { test: 'value' };
const bodyString = JSON.stringify(messageBody);
t.deepEqual(parseSQSMessageBody({ Body: bodyString }), messageBody);
t.deepEqual(parseSQSMessageBody({ body: bodyString }), messageBody);
t.deepEqual(parseSQSMessageBody({}), {});
});
test('sqsQueueExists detects if the queue does not exist or is not accessible', async (t) => {
const queueUrl = await createQueue(randomString());
t.true(await sqsQueueExists(queueUrl));
t.false(await sqsQueueExists(randomString()));
await sqs().deleteQueue({ QueueUrl: queueUrl }).promise();
});
test('getQueueNameFromUrl extracts queue name from a queue URL', (t) => {
const queueName = 'MyQueue';
const queueUrl = `https://sqs.us-east-2.amazonaws.com/123456789012/${queueName}`;
const extractedName = getQueueNameFromUrl(queueUrl);
t.is(extractedName, queueName);
});
test('sendSQSMessage logs errors', async (t) => {
const testConsole = new TestConsole();
const log = new Logger({ console: testConsole });
await t.throwsAsync(
sendSQSMessage('fakequeue', 'Queue message', log),
{ instanceOf: Error }
);
t.is(testConsole.stderrLogEntries.length, 1);
t.regex(testConsole.stderrLogEntries[0].message, /fakequeue/);
});
| 27.641304 | 94 | 0.679119 | 3.25 |
963b6ab3f4a0a07caea1f5ff76925de3c3894ba7
| 7,092 |
rb
|
Ruby
|
ordering/queue.rb
|
kmindspark/bud-sandbox
|
76d77685ed52abf358deb910fa52fa4e444c432c
|
[
"BSD-3-Clause"
] | 24 |
2015-02-11T11:31:53.000Z
|
2021-10-31T20:25:22.000Z
|
ordering/queue.rb
|
bloom-lang/bud-sandbox
|
4f654bfa1f20e2e52a574c034c4ec12c3a71875d
|
[
"BSD-3-Clause"
] | null | null | null |
ordering/queue.rb
|
bloom-lang/bud-sandbox
|
4f654bfa1f20e2e52a574c034c4ec12c3a71875d
|
[
"BSD-3-Clause"
] | 4 |
2016-10-31T19:54:07.000Z
|
2020-12-10T02:45:17.000Z
|
# @abstract PriorityQueueProtocol is the abstract interface for priority queues
# Any implementation of a queue should subclass PriorityQueueProtocol
module PriorityQueueProtocol
state do
# Push items into the queue.
# Useful Mnemonic: push "item" with priority "priority" into queue "queue."
# Note: queue is essentially optional - a single queue can be used without specifying queue because it will automatically be included as nil
# @param [Object] item is the item that will be pushed into the queue
# @param [Number] priority specifies the priority of the item in the queue
# @param [Number] queue specifies which queue to push the item in
interface input, :push, [:item, :priority, :queue]
# Removes items out of the queue, regardless of priority.
# Useful Mnemonic: remove "item" from queue "queue"
# @param [Object] item specifies which item to remove
# @param [Number] queue specifies which queue to remove the item from
# @return [remove_response] upon successful removal.
interface input, :remove, [:item, :queue]
# Pop items out of the queue.
# Removes the top priority item in queue queue: outputs the item into pop_response.
# Useful Mnemonic: pop from queue "queue"
# @param [Number] queue specifies which queue to pop from
# @return [pop_response] when the pop request is successfully processed.
interface input, :pop, [:queue]
# Peek the top item in the queue.
# Like pop, but does not remove the item from the queue.
# Useful Mnemonic: peek from queue "queue"
# @param [Number] queue specifies which queue to peek at
# @return [peek_response] when the peek request is successfully processed.
interface input, :peek, [:queue]
# If there is a remove request, remove and return the item regardless of priority
# @param [Object] item is the item that will be pushed into the queue
# @param [Number] priority specifies the priority of the item in the queue
# @param [Number] queue specifies which queue to push the item in
interface output, :remove_response, push.schema
# If there is a pop request, remove and return the top priority item from the queue
# @param [Object] item is the item that will be pushed into the queue
# @param [Number] priority specifies the priority of the item in the queue
# @param [Number] queue specifies which queue to push the item in
interface output, :pop_response, push.schema
# If there is a peek request, return (but don't remove) the top priority item from the queue
# @param [Object] item is the item that will be pushed into the queue
# @param [Number] priority specifies the priority of the item in the queue
# @param [Number] queue specifies which queue to push the item in
interface output, :peek_response, push.schema
end
end
# @abstract FIFOQueueProtocol is the abstract interface for fifo queues
module FIFOQueueProtocol
state do
# Push items into the queue.
# Note: queue is essentially optional - a single queue can be used without specifying queue because it will automatically be included as nil
# @param [Object] item is the item that will be pushed into the queue
# @param [Number] queue specifies which queue to push the item in
interface input, :push, [:item, :queue]
# Pop items out of the queue.
# Removes the top priority item in queue queue: outputs the item into pop_response.
# @param [Number] queue specifies which queue to pop from
# @return [pop_response] when the pop request is successfully processed.
interface input, :pop, [:queue]
# Peek the top item in the queue.
# Like pop, but does not remove the item from the queue.
# @param [Number] queue specifies which queue to peek at
# @return [peek_response] when the peek request is successfully processed.
interface input, :peek, [:queue]
# If there is a pop request, remove and return the first item that was inserted into the queue
# @param [Object] item is the item that will be pushed into the queue
# @param [Number] queue specifies which queue to push the item in
interface output, :pop_response, [:item, :queue]
# If there is a peek request, return (but don't remove) the first item that was inserted into the queue
# @param [Object] item is the item that will be pushed into the queue
# @param [Number] queue specifies which queue to push the item in
interface output, :peek_response, [:item, :queue]
end
end
# PriorityQueue is the basic implementation of a priority queue.
# The front of the queue is always the lowest priority item.
# @see PriorityQueue implements PriorityQueueProtocol
module PriorityQueue
include PriorityQueueProtocol
state do
# The items that are currently in the queue
table :items, [:item, :priority, :queue]
# The lowest priority item for each queue.
# Does not necessarily contain one item per queue (contains all items with the current lowest priority)
scratch :lowest, [:item, :priority, :queue]
# Temporary collection to contain the pop response.
# Does not necessarily contain one item per queue (contains all items with the current lowest priority)
# An interposition for breaking ties
scratch :lowest_popr, [:item, :priority, :queue]
# Temporary collection to contain the peek response.
# Does not necessarily contain one item per queue (contains all items with the current lowest priority)
# An interposition for breaking ties
scratch :lowest_peekr, [:item, :priority, :queue]
end
bloom :remember do
items <= push
end
bloom :calc_lowest do
# Users can override method of choosing best priority
# By default it is based on the ruby min
lowest <= items.argmin([:queue], :priority)
lowest_popr <= (pop * lowest).rights(:queue => :queue)
lowest_peekr <= (peek * lowest).rights(:queue => :queue)
end
bloom :break_tie do
# Users can override method of breaking ties
# By default it is chosen arbitrarily
pop_response <= lowest_popr.argagg(:choose, [:queue, :priority], :item)
peek_response <= lowest_peekr.argagg(:choose, [:queue, :priority], :item)
end
bloom :remove_item do
remove_response <= (remove * items).rights(:queue => :queue, :item => :item)
end
bloom :drop do
items <- remove_response
items <- pop_response
end
bloom :debug do
# stdio <~ lowest.inspected
# stdio <~ pop_response.inspected
end
end
# FIFOQueue is the basic implementation of a fifo queue.
# The front of the queue is always the earliest item that was inserted out of the items in the queue.
# Uses budtime to order the items.
# @see FIFOQueue implements FIFOQueueProtocol
# @see FIFOQueue imports PriorityQueue
module FIFOQueue
include FIFOQueueProtocol
import PriorityQueue => :pq
bloom do
pq.push <= push {|p| [p.item, budtime, p.queue]}
pq.pop <= pop
pq.peek <= peek
pop_response <= pq.pop_response {|p| [p.item, p.queue]}
peek_response <= pq.peek_response {|p| [p.item, p.queue]}
end
end
| 43.243902 | 144 | 0.715877 | 3.703125 |
da730687955932d4556ccc8bfca5c70f3c9fbbeb
| 1,586 |
tsx
|
TypeScript
|
src/utils/context.tsx
|
yannbf/braga.dev
|
c4ba0f4a04ca62ad1147cd3ca7291df419327a6f
|
[
"MIT"
] | 1 |
2022-02-17T20:16:09.000Z
|
2022-02-17T20:16:09.000Z
|
src/utils/context.tsx
|
yannbf/braga.dev
|
c4ba0f4a04ca62ad1147cd3ca7291df419327a6f
|
[
"MIT"
] | 10 |
2020-07-20T07:15:43.000Z
|
2022-03-26T12:29:34.000Z
|
src/utils/context.tsx
|
yannbf/braga.dev
|
c4ba0f4a04ca62ad1147cd3ca7291df419327a6f
|
[
"MIT"
] | null | null | null |
import React, { useState, useEffect } from 'react';
import Cookies from 'js-cookie';
export enum ThemeEnum {
light = 'light',
dark = 'dark',
}
interface ContextValue {
theme: ThemeEnum;
toggleTheme: () => void;
}
export const ThemeContext = React.createContext<ContextValue | undefined>({
theme: ThemeEnum.dark,
toggleTheme: () => null,
});
export const useTheme = (): ContextValue => {
const context = React.useContext<ContextValue | undefined>(ThemeContext);
if (!context) {
throw new Error('useTheme must be within a ThemeProvider');
}
return context;
};
export interface Theme {
color: string;
}
export const ContextThemeProvider: React.FC = ({ children }) => {
// Set the default theme state to the value stored in the user's cookie and fallback
// to 'dark' if no cookie is found
const [theme, setTheme] = useState(ThemeEnum.dark);
/**
* Toggle between light and dark themes and set the current theme
* value as a cookie. Also need to re-initialize the animate on scroll
* module to ensure elements don't disappear.
* @returns {void}
*/
const toggleTheme = () => {
const newThemeValue = theme === ThemeEnum.light ? ThemeEnum.dark : ThemeEnum.light;
Cookies.set('theme', newThemeValue);
setTheme(newThemeValue);
};
useEffect(() => {
if (Cookies.get('theme') !== theme) {
setTheme(Cookies.get('theme') as ThemeEnum);
}
}, [theme]);
return (
<ThemeContext.Provider
value={{
theme,
toggleTheme,
}}
>
{children}
</ThemeContext.Provider>
);
};
| 24.4 | 87 | 0.652585 | 3.1875 |
af674f4a0aa5533bd74643e9a5bef25460b19717
| 2,179 |
py
|
Python
|
scripts/amr2txt/preproces.py
|
pywirrarika/GPT-too-AMR2text
|
82d700cb81bc332bb07221d380cdf2318324109c
|
[
"Apache-2.0"
] | 37 |
2020-05-20T02:51:43.000Z
|
2022-02-23T13:43:47.000Z
|
scripts/amr2txt/preproces.py
|
pywirrarika/GPT-too-AMR2text
|
82d700cb81bc332bb07221d380cdf2318324109c
|
[
"Apache-2.0"
] | 4 |
2020-07-18T12:36:20.000Z
|
2021-06-05T14:09:21.000Z
|
scripts/amr2txt/preproces.py
|
pywirrarika/GPT-too-AMR2text
|
82d700cb81bc332bb07221d380cdf2318324109c
|
[
"Apache-2.0"
] | 8 |
2020-05-22T23:43:37.000Z
|
2022-03-09T23:16:25.000Z
|
import os
import json
import re
import argparse
def argument_parser():
parser = argparse.ArgumentParser(description='Preprocess AMR data')
# Multiple input parameters
parser.add_argument(
"--in-amr",
help="input AMR file",
type=str
)
parser.add_argument(
"--out-amr",
help="output (post-processed) AMR file",
type=str
)
parser.add_argument(
"--out-tokens",
help="tokens from AMR",
type=str
)
parser.add_argument(
"--stog-fix",
action='store_true',
help="Reformat AMR token to be parseable by publict stog"
)
args = parser.parse_args()
return args
def fix_tokens_file(file_path):
"""
Replace each
# ::tok sentence
by json parsable version
# ::token json-parseable-sentence
so that
sentence == json.loads(json-parseable-sentence)
"""
token_line = re.compile('^# ::tok (.*)')
# read and modifiy token lines
new_amr = []
tokens = []
with open(file_path) as fid:
for line in fid:
fetch = token_line.match(line.rstrip())
if fetch:
sentence = fetch.groups()[0]
tokens.append(sentence)
json_str = json.dumps(sentence)
new_amr.append(f'# ::tokens {json_str}\n')
else:
new_amr.append(line)
return new_amr, tokens
if __name__ == '__main__':
# Argument handlig
args = argument_parser()
assert os.path.isfile(args.in_amr), \
f'{args.in_amr} is missing or is not a file'
# create pre-processed AMR and extract tokens
new_amr, tokens = fix_tokens_file(args.in_amr)
assert tokens, "did not find tokens, AMR already formatted?"
# write pre-processed AMR
if args.stog_fix:
print(args.out_amr)
with open(args.out_amr, 'w') as fid:
for line in new_amr:
fid.write(line)
# write tokens
if args.out_tokens:
print(args.out_tokens)
with open(args.out_tokens, 'w') as fid:
for tok_sent in tokens:
fid.write(f'{tok_sent}\n')
| 22.936842 | 71 | 0.580083 | 3.28125 |
446e002c8c15a745cda335facc4ab78fcda296e5
| 1,802 |
py
|
Python
|
tests/test_create.py
|
tachyondecay/quickpaste
|
880de852b45e0b3b2bbfdff93888bf54b19a416e
|
[
"MIT"
] | null | null | null |
tests/test_create.py
|
tachyondecay/quickpaste
|
880de852b45e0b3b2bbfdff93888bf54b19a416e
|
[
"MIT"
] | null | null | null |
tests/test_create.py
|
tachyondecay/quickpaste
|
880de852b45e0b3b2bbfdff93888bf54b19a416e
|
[
"MIT"
] | null | null | null |
from app.create_app import limiter, shortlink
from app.repositories import db
def test_should_return_200(client):
rv = client.get('/')
assert rv.status_code == 200
assert rv.headers['Content-type'] == 'text/html; charset=utf-8'
def test_should_return_redirect_to_home(client):
rv = client.post('/')
assert rv.status_code == 302
assert rv.headers['Location'] == 'http://localhost/'
def test_should_return_413(client):
text = 'aaaaaaaaaaaaaaaaaaaaa'
rv = client.post('/', data={'text': text})
assert rv.status_code == 413
assert rv.headers['Content-type'] == 'text/html; charset=utf-8'
def test_should_return_429(client):
limiter.enabled = True
client.get('/')
client.get('/')
rv = client.get('/')
assert rv.status_code == 429
def test_should_return_400(client):
rv = client.post('/', headers={'X-Respondwith': 'link'})
assert rv.status_code == 400
assert rv.headers['Content-type'] == 'text/plain; charset=utf-8'
def test_should_return_500(app, client):
with app.app_context():
db.engine.execute('DROP TABLE pastes')
# Need to do this to reset migrations history
db.engine.execute('DROP TABLE alembic_version')
rv = client.post('/', data={'text': 'foo'})
assert rv.status_code == 500
def test_should_return_redirect_to_paste(client):
rv = client.post('/', data={'text': 'hello_world'})
assert rv.status_code == 302
assert rv.headers['Location'] == 'http://localhost/{}'.format(
shortlink.encode(1))
def test_should_return_link_to_paste(client):
rv = client.post('/', data={'text': 'hello_world'},
headers={'X-Respondwith': 'link'})
assert rv.status_code == 200
assert rv.headers['Content-type'] == 'text/plain; charset=utf-8'
| 30.542373 | 68 | 0.660932 | 3.171875 |
0743b9384ab1481076a655e9d9047ea3d8f4f0e2
| 1,395 |
sql
|
SQL
|
Chapter04/CH05_23_merge_using_checksums.sql
|
PhilippeBinggeli/Hands-On-Data-Science-with-SQL-Server-2017
|
f0af444e190ce7fcaf5e65fc2d5bae4f6f66a73b
|
[
"MIT"
] | 8 |
2018-07-09T16:08:23.000Z
|
2021-11-08T13:10:52.000Z
|
Chapter04/CH05_23_merge_using_checksums.sql
|
PhilippeBinggeli/Hands-On-Data-Science-with-SQL-Server-2017
|
f0af444e190ce7fcaf5e65fc2d5bae4f6f66a73b
|
[
"MIT"
] | null | null | null |
Chapter04/CH05_23_merge_using_checksums.sql
|
PhilippeBinggeli/Hands-On-Data-Science-with-SQL-Server-2017
|
f0af444e190ce7fcaf5e65fc2d5bae4f6f66a73b
|
[
"MIT"
] | 9 |
2018-08-07T09:54:39.000Z
|
2021-05-21T17:44:23.000Z
|
DROP TABLE IF EXISTS #res -- temporary table used to catch what was done
CREATE TABLE #res (Id int, Discontinued bit, WhatHappens nvarchar(10))
-- common table expression is added to resolve a state of every record
;WITH cte AS
(
SELECT lp.*
, IIF(sp.ProductKey is null, 'UPDATE', 'NONE') as DesiredAction
FROM Landing.Products AS lp
LEFT JOIN Staging.Products as sp ON lp.ProductKey = sp.ProductKey
AND CHECKSUM(lp.ProductKey, lp.ProductName, lp.ListPrice) =
CHECKSUM(sp.ProductKey, sp.ProductName, sp.ListPrice)
)
MERGE Staging.Products AS sp
USING cte AS lp -- Landing.Products is used no more, instead the CTE is used
ON sp.ProductKey = lp.ProductKey
WHEN MATCHED and DesiredAction = 'UPDATE' THEN -- new condition added
UPDATE SET
ProductName = lp.ProductName
, ListPrice = lp.ListPrice
, Discontinued = 0
WHEN NOT MATCHED BY TARGET THEN -- this node remains without changes
INSERT (ProductKey, ProductName, ListPrice)
VALUES (lp.ProductKey, lp.ProductName, lp.ListPrice)
WHEN NOT MATCHED BY SOURCE and sp.Discontinued = 0 THEN -- new condition added
UPDATE SET Discontinued = 1 -- this is a logical delete
-- when we want actual delete,
-- we'll just write DELETE
OUTPUT inserted.Id, Inserted.Discontinued, $action AS WhatHappens into #res
;
SELECT * FROM #res -- inspecting results
| 43.59375 | 78 | 0.712545 | 3.421875 |
79cfdc247e5f41b287c94cb795da5ea26e774a58
| 8,998 |
php
|
PHP
|
resources/lang/es/customers.php
|
aBillander/aBillander
|
137d38e3fe3a84176ca33c228d0225078528a380
|
[
"MIT"
] | 18 |
2018-09-20T13:28:45.000Z
|
2022-03-27T19:51:30.000Z
|
resources/lang/es/customers.php
|
aBillander/aBillander
|
137d38e3fe3a84176ca33c228d0225078528a380
|
[
"MIT"
] | 14 |
2017-09-16T01:55:44.000Z
|
2022-03-07T10:51:23.000Z
|
resources/lang/es/customers.php
|
aBillander/aBillander
|
137d38e3fe3a84176ca33c228d0225078528a380
|
[
"MIT"
] | 14 |
2015-05-08T06:11:30.000Z
|
2021-05-26T13:42:00.000Z
|
<?php
return [
/*
|--------------------------------------------------------------------------
| Customers Language Lines :: index
|--------------------------------------------------------------------------
|
| .
|
*/
'Customers' => 'Clientes',
'Name' => 'Nombre',
'Email' => 'Correo Electrónico',
'Phone' => 'Teléfono',
'External Reference' => 'Referencia Externa',
'' => '',
'' => '',
'' => '',
'Invite Customer' => 'Invitar a un Cliente',
'Invite' => 'Invitar',
'Send an Invitation Email' => 'Enviar una Invitación por Email',
' :_> :company invites you to his Customer Center' => ' :_> :company le invita a su Centro de Clientes',
/*
|--------------------------------------------------------------------------
| Customers Language Lines :: create
|--------------------------------------------------------------------------
|
| .
|
*/
'Customers - Create' => 'Clientes - Crear',
'New Customer' => 'Nuevo Cliente',
'Back to Customers' => 'Volver a Clientes',
'Fiscal Name' => 'Nombre Fiscal',
'Commercial Name' => 'Nombre Comercial',
'Identification' => 'NIF / CIF',
'New Customers will take values from the Customer Group, but you can change these values later on.' =>
'El nuevo Cliente tomará valores del Grupo de Clientes, pero podrán cambiarse más adelante.',
'' => '',
'' => '',
'' => '',
'' => '',
'' => '',
'' => '',
/*
|--------------------------------------------------------------------------
| Customers Language Lines :: edit
|--------------------------------------------------------------------------
|
| .
|
*/
'Customers - Edit' => 'Clientes - Modificar',
'Edit Customer' => 'Modificar Cliente',
'This Customer is BLOCKED' => 'Este Cliente está BLOQUEADO',
'Group Shipping Slips' => 'Agrupar Albaranes',
'Group Orders' => 'Agrupar Pedidos',
'Invoice' => 'Factura',
'Main Data' => 'Datos Generales',
'Commercial' => 'Comercial',
'Banks Accounts' => 'Bancos',
'Address Book' => 'Direcciones',
'Special Prices' => 'Precios Especiales',
'Statistics' => 'Estadísticas',
'ABCC Access' => 'Acceso ABCC',
'Web Shop' => 'Tienda Online',
'Website' => 'Web',
'Misc' => 'Otros',
'Sequence for Invoices' => 'Serie de Facturas',
'Template for Invoices' => 'Plantilla para Facturas',
'Template for Orders' => 'Plantilla para Pedidos',
'Template for Shipping Slips' => 'Plantilla para Albaranes',
'Payment Method' => 'Forma de Pago',
'Payment Currency' => 'Divisa de Pago',
'Is Invoiceable?' => '¿Es Facturable?',
'Accept e-Invoice?' => '¿Admite Factura Electrónica?',
'Automatic Invoice?' => '¿Factura Automática?',
'Include this Customer in Automatic Invoice Generation Process.' => 'Incluir este Cliente en el Proceso de Facturación Automática.',
'Outstanding Amount Allowed' => 'Riesgo Máximo Permitido',
'Outstanding Amount' => 'Riesgo Alcanzado',
'Unresolved Amount' => 'Impagado',
'Sales Equalization' => 'Recargo de Equivalencia',
'Customer Group' => 'Grupo de Clientes',
'Price List' => 'Tarifa',
'Sales Representative' => 'Agente Comercial',
'Shipping Address' => 'Dirección de Envío',
'Carrier' => 'Transportista',
'Shipping Method' => 'Método de Envío',
'Webshop ID' => 'Código en la Tienda Web',
'Longitude' => 'Longitud',
'Latitude' => 'Latitud',
'Payment Day(s)' => 'Día(s) de Pago',
'Comma separated list of days, as in: 3,17' => 'Lista separada por comas, como: 3,17',
'No Payment Month' => 'Mes de No Pago',
'Document Discount (%)' => 'Descuento en Documento (%)',
'Prompt Payment Discount (%)' => 'Descuento Pronto Pago (%)',
'Special Addresses' => 'Direcciones especiales',
'Fiscal (main) Address' => 'Dirección Fiscal (principal)',
'This Address will appear on Invoices' => 'Es la Dirección que aparecerá en las Facturas',
'Shipping Address' => 'Dirección de Envío',
'Default Shipping Address' => 'Es la Dirección de Envío por defecto',
'Alias' => 'Alias',
'Address' => 'Dirección',
'Contact' => 'Contacto',
'Fiscal' => 'Fiscal',
'Shipping' => 'Envío',
'You need one Address at list, for Customer (:id) :name' => 'Debe crear al menos una Dirección Postal para el Cliente (:id) :name',
'Shipping Address has been updated for Customer (:id) :name' => 'Se ha actualizado la Dirección de Envío del Cliente (:id) :name',
'Main Address has been updated for Customer (:id) :name' => 'Se ha actualizado la Dirección Principal del Cliente (:id) :name',
'Default Shipping Address has been updated for Customer (:id) :name' => 'Se ha actualizado la Dirección de Envío por defecto para el Cliente (:id) :name',
'You should set the Main Address for Customer (:id) :name' => 'Debe indicar la Dirección Principal para el Cliente (:id) :name',
'No Payment Month' => 'Mes de No Pago',
'Orders' => 'Pedidos',
'View Order' => 'Ir al Pedido',
'Order #' => 'Número',
'Date' => 'Fecha',
'Created via' => 'Creado por',
'Delivery Date' => 'Fecha Entrega',
'Total' => 'Total',
'Products' => 'Productos',
'Product' => 'Producto',
'Document' => 'Documento',
'Price Rules' => 'Reglas de Precio',
'Category' => 'Categoría',
'Discount Percent' => 'Porcentaje de Descuento',
'Discount Amount' => 'Cantidad de Descuento',
'tax inc.' => 'IVA inc.',
'tax exc.' => 'IVA exc.',
'Price' => 'Precio',
'Currency' => 'Divisa',
'From Quantity' => 'Desde Cantidad',
'Date from' => 'Fecha desde',
'Date to' => 'Fecha hasta',
'Create Price Rule' => 'Crear Regla de Precio',
'Product Reference' => 'Referencia de Producto',
'Search by Product Reference or Name' => 'Busque por Nombre o Referencia',
'Customer Center Access' => 'Acceso al Centro de Clientes',
'Allow Customer Center access?' => '¿Permitir acceso al Centro de Clientes?',
'Notify Customer? (by email)' => '¿Informar al Cliente? (por email)',
'Shopping Cart' => 'Contenido del Carrito',
'Cart Items' => 'Productos en el Carrito',
'Reference' => 'Referencia',
'Product Name' => 'Producto',
'Customer Price' => 'Precio',
'Prices are exclusive of Tax' => 'El Precio no incluye Impuestos',
'Quantity' => 'Cantidad',
'Total' => 'Total',
'View Image' => 'Ver Imagen',
'Product Images' => 'Imágenes de Productos',
'EAN Code' => 'Código EAN',
'Manufacturer' => 'Marca',
'Stock' => 'Stock',
'Can not create a User for this Customer:' => 'No es posible crear un Usuario para este Cliente:',
'This Customer has not a valid email address.' => 'Este Cliente no tiene una dirección de correo electrónico válida.',
'' => '',
'' => '',
'Product consumption' => 'Consumo del Producto',
'Customer Final Price' => 'Precio Final',
'Bank Accounts' => 'Cuentas Bancarias',
'Bank Name' => 'Nombre del Banco',
'Bank Account Code' => 'Código Cuenta Cliente',
'Bank code' => 'Entidad',
'Bank Branch code' => 'Oficina',
'Control' => 'Control',
'Account' => 'Cuenta',
'Calculate Iban' => 'Calcular Iban',
'Iban' => 'Iban',
'To make it more readable, you can enter spaces.' => 'Para que sea mas legible, puede introducir espacios.',
'Swift' => 'Swift',
'Mandate (for automatic payment remittances)' => 'Mandato (para Remesas de pago automático)',
'Mandate Reference' => 'Referencia única',
'You can use Customer Identification (only letters and digits) plus space plus Mandate Date.' => 'Puede usar el NIF/CIF del Cliente, más un espacio, más la Fecha del Mandato.',
'Mandate Date' => 'Fecha de Firma',
'View Cart' => 'Ver Carrito',
'Add New User' => 'Nuevo Usuario para este Cliente',
'Create User' => 'Crear Usuario',
'Update User' => 'Modificar Usuario',
'Invoice Shipping Slips' => 'Facturar Albaranes',
'Accounting ID' => 'Código Contabilidad',
'Search by Name or Reference.' => 'Busque por Nombre o Referencia.',
'Reset search: empty field plus press [return].' => 'Reiniciar búsqueda: vacíe el campo y pulse [intro].',
'CustomerOrder' => 'Pedidos',
'CustomerShippingSlip' => 'Albaranes',
'CustomerInvoice' => 'Facturas',
'Description' => 'Descripción',
/*
|--------------------------------------------------------------------------
| Customers Language Lines :: VAT Regime List
|--------------------------------------------------------------------------
|
| .
|
*/
'VAT Regime' => 'Régimen de IVA',
'General' => 'General',
'Intra-Community' => 'Intracomunitario',
'Export' => 'Exportación',
'Exempt' => 'Exento',
'Invoice by Shipping Address?' => '¿Facturar por Dirección de Envío?',
'One Invoice per Shipping Address' => 'Una Factura por Dirección de Envío',
'One Invoice per Shipping Slip and Shipping Address' => 'Una Factura por Albarán y Dirección de Envío',
];
| 35.286275 | 177 | 0.574683 | 3 |
b03bf655f5cc98ef3426a900e94c4c6077ff7207
| 905 |
py
|
Python
|
main.py
|
cavalcantigor/tweets-ceuma
|
4985e3c2241632823e710c039afe7b7f247b51f3
|
[
"MIT"
] | null | null | null |
main.py
|
cavalcantigor/tweets-ceuma
|
4985e3c2241632823e710c039afe7b7f247b51f3
|
[
"MIT"
] | null | null | null |
main.py
|
cavalcantigor/tweets-ceuma
|
4985e3c2241632823e710c039afe7b7f247b51f3
|
[
"MIT"
] | null | null | null |
from tweepy import (
API, Stream, OAuthHandler
)
from tt_keys import consumer_secret, consumer_key, access_token_secret, access_token
from ceuma_stream import StreamListenerCeuma
def timeline():
# get tweets from timeline
timeline_result = api.home_timeline()
return timeline_result
def search_tweets(query):
# get tweets that matches the query search
search_result = api.search(query, text_mode='extended')
return search_result
def print_tweets(tweets):
# print tweets
for tweet in tweets:
print(tweet.text)
# set consumer key and consumer secret key
auth = OAuthHandler(consumer_key, consumer_secret)
# set access token and access token secret
auth.set_access_token(access_token, access_token_secret)
# create api object
api = API(auth)
listener = StreamListenerCeuma()
stream = Stream(auth, listener)
stream.filter(track=['ceuma', 'dino', 'gol'])
| 23.815789 | 84 | 0.755801 | 3.125 |
06b6c59a68adfd7a23ce91086c462482161d71c5
| 1,376 |
py
|
Python
|
singleview.py
|
puhachov/Discovering-spammers-from-multiple-views
|
0484552af19e68148bd7c29d3a726b4323c00834
|
[
"MIT"
] | 1 |
2022-01-23T11:28:53.000Z
|
2022-01-23T11:28:53.000Z
|
singleview.py
|
puhachov/Discovering-spammers-from-multiple-views
|
0484552af19e68148bd7c29d3a726b4323c00834
|
[
"MIT"
] | null | null | null |
singleview.py
|
puhachov/Discovering-spammers-from-multiple-views
|
0484552af19e68148bd7c29d3a726b4323c00834
|
[
"MIT"
] | null | null | null |
import numpy as np
from sklearn.svm import SVC
from sklearn.model_selection import train_test_split
from sklearn.metrics import confusion_matrix
import pandas as pd
class singleview():
def __init__(self, data, class_):
self.X = np.copy(np.transpose(data))
self.ground_truth = np.sum(class_, axis = 1)
def evaluate(self, model, training_size):
X_train, X_test, y_train, y_test = train_test_split(self.X, self.ground_truth)
clf = model.fit(X_train, y_train)
y_pred = clf.predict(X_test)
confusion_matrix_ = confusion_matrix(y_test, y_pred)
precision = confusion_matrix_[0,0]/(confusion_matrix_[0,0] + confusion_matrix_[0,1])
recall = confusion_matrix_[0,0]/(confusion_matrix_[0,0] + confusion_matrix_[1,0])
F1_score = 2*precision*recall/(precision + recall)
confusion_matrix_df = pd.DataFrame(data = confusion_matrix_,
columns = ['Actual_Spammer', 'Actual_Legitimate'],
index = ['Predicted_Spammer ','Predicted_Legitimate'])
print("Precision: {}\n".format(precision))
print("Recall: {}\n".format(recall))
print("F1-score: {}\n".format(F1_score))
print("Confusion Matrix:\n {}\n".format(confusion_matrix_))
return precision, recall, F1_score, confusion_matrix_
| 38.222222 | 96 | 0.653343 | 3.234375 |
06e65ba65ef82394c79328fc0c89f38e06d4e776
| 1,055 |
py
|
Python
|
2017/day01.py
|
bovarysme/advent
|
9a7a3310984d4b7548ad23e2dfa017c6fe9e2c9c
|
[
"MIT"
] | 4 |
2017-12-05T00:53:21.000Z
|
2018-12-03T14:00:56.000Z
|
2017/day01.py
|
bovarysme/advent
|
9a7a3310984d4b7548ad23e2dfa017c6fe9e2c9c
|
[
"MIT"
] | null | null | null |
2017/day01.py
|
bovarysme/advent
|
9a7a3310984d4b7548ad23e2dfa017c6fe9e2c9c
|
[
"MIT"
] | null | null | null |
def pairs(digits):
for i in range(len(digits) - 1):
yield digits[i], digits[i+1]
yield digits[-1], digits[0]
def halfway(digits):
half = len(digits) // 2
for i in range(half):
yield digits[i], digits[half+i]
for i in range(half, len(digits)):
yield digits[i], digits[i-half]
def solve(iterator, digits):
return sum(int(x) for x, y in iterator(digits) if x == y)
def part_one(digits):
return solve(pairs, digits)
def part_two(digits):
    # Part two: sum digits that match the digit halfway around the list.
    return solve(halfway, digits)
if __name__ == '__main__':
    # Sanity checks taken from the puzzle examples.
    for sample, expected in (('1122', 3), ('1111', 4), ('1234', 0), ('91212129', 9)):
        assert part_one(sample) == expected
    for sample, expected in (('1212', 6), ('1221', 0), ('123425', 4),
                             ('123123', 12), ('12131415', 4)):
        assert part_two(sample) == expected

    with open('inputs/day1.txt', 'r') as f:
        digits = f.read().rstrip()

    print('Answer for part one:', part_one(digits))
    print('Answer for part two:', part_two(digits))
| 22.934783 | 61 | 0.609479 | 3.28125 |
af57ebbdb444061df146a19cd7a6babc819e1eeb
| 9,411 |
py
|
Python
|
StudyStuff/DataAnalysis/read csv.py
|
MatKier/LatinIME-ebm-study-Android-Studio
|
1a09013befd4327bae140ed8376d3bde93315451
|
[
"Apache-2.0"
] | 1 |
2018-09-19T09:49:55.000Z
|
2018-09-19T09:49:55.000Z
|
StudyStuff/DataAnalysis/read csv.py
|
MatKier/LatinIME-ebm-study-Android-Studio
|
1a09013befd4327bae140ed8376d3bde93315451
|
[
"Apache-2.0"
] | null | null | null |
StudyStuff/DataAnalysis/read csv.py
|
MatKier/LatinIME-ebm-study-Android-Studio
|
1a09013befd4327bae140ed8376d3bde93315451
|
[
"Apache-2.0"
] | null | null | null |
import pandas as pd
import numpy as np
import math as maths
import matplotlib.pyplot as plt
import os
# Full column set of the keystroke CSV logs (kept for reference, unused).
usecols_ = ["x", "y", "offsetX", "offsetY", "keyCenterX", "keyCenterY", "holdTime", "flightTime", "pressure"]
# Columns actually loaded for the analysis.
usecols = ["offsetX", "offsetY", "keyCenterX", "keyCenterY", "holdTime", "flightTime", "pressure"]
# Per-task mean errors, one list per keystroke characteristic.
# Each appended entry is [mean_error, task_id, participant_id].
defaultHoldTimeError = []
longHoldTimeError = []
defaultFlightTimeError = []
longFlightTimeError = []
defaultAreaError = []
bigAreaError = []
centerOffsetError = []
leftOffsetError = []
rightOffsetError = []
topOffsetError = []
bottomOffsetError = []
# Local log locations (path_ is an alternate machine's copy, unused below).
path_ = "C:/Users/mathi/OneDrive/Bachelor Stuff/Hauptstudie/Pilot/KeyStrokeLog/"
path = "E:/OneDrive/Bachelor Stuff/Hauptstudie/Pilot/KeyStrokeLog/"
targetGroupPath = path + "ID_targetValues/"
# Every directory under `path` except the target values and this script
# is treated as one participant's data.
pidList = os.listdir(path)
pidList.remove("ID_targetValues")
pidList.remove("read csv.py")
#One iteration = one Participant: accumulate per-task mean errors for every
# keystroke characteristic into the module-level *Error lists.
for pid in pidList:
    taskGroupPath = path + pid + "/"
    taskGroupList = os.listdir(taskGroupPath)
    taskPathDict = {}    # task-group name -> relative task paths
    targetPathDict = {}  # same, minus the free-password task (it has no target values)
    for taskGroup in taskGroupList:
        taskDirs = [task for task in sorted(os.listdir(taskGroupPath + taskGroup))]
        taskPathDict[taskGroup] = [(taskGroup + "/" + taskDir) for taskDir in taskDirs]
        targetPathDict[taskGroup] = [(taskGroup + "/" + taskDir) for taskDir in taskDirs if not taskDir == "17_user-created password"]
    # One iteration = one taskgroup
    for key in sorted(taskPathDict.keys()):
        #One iteration = one task (3 csv files per task)
        for task in sorted(taskPathDict[key]):
            # last 3 csv files in <task> read as a list of dataframes
            csvFileList = [pd.read_csv(taskGroupPath + task + "/" + entry, sep=';', header=0, usecols=usecols) for entry in [taskEntry for taskEntry in sorted(os.listdir(taskGroupPath + task), reverse=True) if taskEntry.startswith('valid')][:3]]
            # combines the 3 dataframes into one group (grouped by row index,
            # i.e. by touch event position within the task)
            groupedCsvList = pd.concat(csvFileList).groupby(level=0)
            mean = groupedCsvList.mean()
            # Task 17 (user-created password) has no target values to compare against.
            if task[:2] != "17":
                targetCsv = pd.read_csv(targetGroupPath + task + "/" + os.listdir(targetGroupPath + task)[0], sep=';', header=0, usecols=usecols)
                # Element-wise difference target - mean per event and column.
                meanDistanceToTarget = targetCsv.subtract(mean, fill_value=0)
                # Temp lists for calculating the mean error of all events for the current task and feature
                tempDefaultHoldTimeError = []
                tempLongHoldTimeError = []
                tempDefaultFlightTimeError = []
                tempLongFlightTimeError = []
                tempDefaultAreaError = []
                tempBigAreaError = []
                tempCenterOffsetError = []
                tempLeftOffsetError = []
                tempRightOffsetError = []
                tempTopOffsetError = []
                tempBottomOffsetError = []
                taskId = task[-2:]
                # Iterate over the mean error of all touch events in this task and add the mean error for every event to its corresponding tempList
                # One iteration = one touch event (up or down)
                for index, meanDistanceToTargetRrow in meanDistanceToTarget.iterrows():
                    # HoldTime Error — bucketed by the target hold time (80 ms default, 300 ms long)
                    if targetCsv.at[index, 'holdTime'] == 80:
                        tempDefaultHoldTimeError.append(meanDistanceToTargetRrow['holdTime'])
                    elif targetCsv.at[index, 'holdTime'] == 300:
                        tempLongHoldTimeError.append(meanDistanceToTargetRrow['holdTime'])
                    # FlightTime Error — 260 ms default, 1000 ms long
                    if targetCsv.at[index, 'flightTime'] == 260:
                        tempDefaultFlightTimeError.append(meanDistanceToTargetRrow['flightTime'])
                    elif targetCsv.at[index, 'flightTime'] == 1000:
                        tempLongFlightTimeError.append(meanDistanceToTargetRrow['flightTime'])
                    # Area Error — pressure 0.20 default, 0.45 big
                    if targetCsv.at[index, 'pressure'] == 0.20:
                        tempDefaultAreaError.append(meanDistanceToTargetRrow['pressure'])
                    elif targetCsv.at[index, 'pressure'] == 0.45:
                        tempBigAreaError.append(meanDistanceToTargetRrow['pressure'])
                    # Offset
                    # groupedCsvList contains the grouped values for the current task,
                    #'[" offsetX", " offsetY"].get_group(index).reset_index(drop=True)' returns a dataframe containing the (grouped)offsets for the event at index
                    offsets = groupedCsvList["offsetX", "offsetY"].get_group(index).reset_index(drop=True)
                    # Calculates the average distance between the target-offset and the users' offset (3 points for each event(index))
                    distSum = 0
                    for j in range(len(offsets)):
                        distSum += maths.hypot(targetCsv.at[index, 'offsetX'] - offsets.at[j, 'offsetX'], targetCsv.at[index, 'offsetY'] - offsets.at[j, 'offsetY'])
                    avgDist = distSum / len(offsets)
                    # Bucket by target offset direction (center / left / right / top / bottom).
                    if targetCsv.at[index, 'offsetX'] == 0 and targetCsv.at[index, 'offsetY'] == 0:
                        tempCenterOffsetError.append(avgDist)
                    elif targetCsv.at[index, 'offsetX'] == -45 and targetCsv.at[index, 'offsetY'] == 0:
                        tempLeftOffsetError.append(avgDist)
                    elif targetCsv.at[index, 'offsetX'] == 45 and targetCsv.at[index, 'offsetY'] == 0:
                        tempRightOffsetError.append(avgDist)
                    elif targetCsv.at[index, 'offsetX'] == 0 and targetCsv.at[index, 'offsetY'] == -80:
                        tempTopOffsetError.append(avgDist)
                    elif targetCsv.at[index, 'offsetX'] == 0 and targetCsv.at[index, 'offsetY'] == 80:
                        tempBottomOffsetError.append(avgDist)
                # Calculate the mean of all the event means of the current task
                # and add it to its corresponding list
                # HoldTime
                defaultHoldTimeError.append([np.mean(tempDefaultHoldTimeError), taskId, pid])
                longHoldTimeError.append([np.mean(tempLongHoldTimeError), taskId, pid])
                # FlightTime
                defaultFlightTimeError.append([np.mean(tempDefaultFlightTimeError), taskId, pid])
                longFlightTimeError.append([np.mean(tempLongFlightTimeError), taskId, pid])
                # Area
                defaultAreaError.append([np.mean(tempDefaultAreaError), taskId, pid])
                bigAreaError.append([np.mean(tempBigAreaError), taskId, pid])
                # Offset
                centerOffsetError.append([np.mean(tempCenterOffsetError), taskId, pid])
                leftOffsetError.append([np.mean(tempLeftOffsetError), taskId, pid])
                rightOffsetError.append([np.mean(tempRightOffsetError), taskId, pid])
                topOffsetError.append([np.mean(tempTopOffsetError), taskId, pid])
                bottomOffsetError.append([np.mean(tempBottomOffsetError), taskId, pid])
    # NOTE: Python 2 print statement (whole script is Python 2);
    # 'finishied' typo kept as-is — runtime string, not changed here.
    print 'finishied ' + pid
# --- Offset error plot ---
# Mean / std of the average Euclidean offset error per target direction,
# ignoring NaNs (tasks where a direction did not occur).
offset_means = (np.nanmean([error[0] for error in centerOffsetError]),
                np.nanmean([error[0] for error in leftOffsetError]),
                np.nanmean([error[0] for error in rightOffsetError]),
                np.nanmean([error[0] for error in topOffsetError]),
                np.nanmean([error[0] for error in bottomOffsetError]))
offset_std = (np.nanstd([error[0] for error in centerOffsetError]),
              np.nanstd([error[0] for error in leftOffsetError]),
              np.nanstd([error[0] for error in rightOffsetError]),
              np.nanstd([error[0] for error in topOffsetError]),
              np.nanstd([error[0] for error in bottomOffsetError]))
fig, ax = plt.subplots()
index = np.arange(len(offset_means))
bar_width = 0.35
opacity = 0.4
error_config = {'ecolor': '0.3'}
rects = ax.bar(index, offset_means, bar_width,
               alpha=opacity, color='g',
               yerr=offset_std, error_kw=error_config)
ax.set_xlabel('Offset direction')
ax.set_ylabel('Error')
ax.set_title('Offset error by offset direction')
ax.set_xticks(index)
# BUG FIX: label typo 'Righ' corrected to 'Right'.
ax.set_xticklabels(('Center', 'Left', 'Right', 'Top', 'Bottom'))
ax.legend()
fig.tight_layout()
plt.show()
# --- Area error plot: default (0.20) vs. big (0.45) target pressure ---
defaultAreaVals = [error[0] for error in defaultAreaError]
bigAreaVals = [error[0] for error in bigAreaError]
area_means = (np.nanmean(defaultAreaVals), np.nanmean(bigAreaVals))
area_std = (np.nanstd(defaultAreaVals), np.nanstd(bigAreaVals))
fig, ax = plt.subplots()
index = np.arange(len(area_means))
bar_width = 0.75
opacity = 0.4
error_config = {'ecolor': '0.3'}
rects = ax.bar(index, area_means, bar_width,
               alpha=opacity, color='b',
               yerr=area_std, error_kw=error_config)
ax.set_xlabel('Area characteristic')
ax.set_ylabel('Error')
ax.set_title('Area error by area characteristic')
ax.set_xticks(index)
ax.set_xticklabels(('Default', 'Big'))
ax.legend()
fig.tight_layout()
plt.show()
# --- Flight time error plot: default (260 ms) vs. long (1000 ms) ---
flight_time_means = (np.nanmean([error[0] for error in defaultFlightTimeError]),
                     np.nanmean([error[0] for error in longFlightTimeError]))
flight_time_std = (np.nanstd([error[0] for error in defaultFlightTimeError]),
                   np.nanstd([error[0] for error in longFlightTimeError]))
fig, ax = plt.subplots()
# BUG FIX: index was computed from len(area_means) (copy-paste); use the
# flight-time data itself. Same result today (both have 2 entries), but no
# longer silently coupled to the area plot.
index = np.arange(len(flight_time_means))
bar_width = 0.75
opacity = 0.4
error_config = {'ecolor': '0.3'}
rects = ax.bar(index, flight_time_means, bar_width,
               alpha=opacity, color='r',
               yerr=flight_time_std, error_kw=error_config)
ax.set_xlabel('Flight time characteristic')
ax.set_ylabel('Error')
ax.set_title('Flight time error by flight time characteristic')
ax.set_xticks(index)
ax.set_xticklabels(('Default', 'Long'))
ax.legend()
fig.tight_layout()
plt.show()
# --- Hold time error plot: default (80 ms) vs. long (300 ms) ---
hold_time_means = (np.nanmean([error[0] for error in defaultHoldTimeError]),
                   np.nanmean([error[0] for error in longHoldTimeError]))
hold_time_std = (np.nanstd([error[0] for error in defaultHoldTimeError]),
                 np.nanstd([error[0] for error in longHoldTimeError]))
fig, ax = plt.subplots()
# BUG FIX: index was computed from len(area_means) (copy-paste); use the
# hold-time data itself.
index = np.arange(len(hold_time_means))
bar_width = 0.75
opacity = 0.4
error_config = {'ecolor': '0.3'}
rects = ax.bar(index, hold_time_means, bar_width,
               alpha=opacity, color='y',
               yerr=hold_time_std, error_kw=error_config)
ax.set_xlabel('Hold time characteristic')
ax.set_ylabel('Error')
ax.set_title('Hold time error by hold time characteristic')
ax.set_xticks(index)
ax.set_xticklabels(('Default', 'Long'))
ax.legend()
fig.tight_layout()
plt.show()
| 39.049793 | 236 | 0.71714 | 3.109375 |
3f5777ddaff0f1d03886c297c46b680629832a12
| 1,693 |
rb
|
Ruby
|
lib/weese/bus/stop.rb
|
emma-k-alexandra/weese
|
3a86f1a1308ab4575366e4a8c945caff0e629848
|
[
"MIT"
] | null | null | null |
lib/weese/bus/stop.rb
|
emma-k-alexandra/weese
|
3a86f1a1308ab4575366e4a8c945caff0e629848
|
[
"MIT"
] | null | null | null |
lib/weese/bus/stop.rb
|
emma-k-alexandra/weese
|
3a86f1a1308ab4575366e4a8c945caff0e629848
|
[
"MIT"
] | null | null | null |
# frozen_string_literal: true
require 'weese/bus/urls'
module Weese
  module Bus
    # Represents a single MetroBus stop, identified by its WMATA Stop ID.
    class Stop
      # @return [Integer] WMATA Stop ID of this stop
      attr_accessor :id

      #
      # Wrap a WMATA Stop ID in a Stop.
      #
      # @param [Integer] id WMATA Stop ID
      #
      def initialize(id)
        @id = id
      end
    end

    # API requests that operate on a specific Stop.
    module RequiresStop
      include Requests::Requester

      #
      # Next bus arrivals at a given stop.
      # {https://developer.wmata.com/docs/services/5476365e031f590f38092508/operations/5476365e031f5909e4fe331d WMATA Documentation}
      #
      # @param [Stop] stop A Stop
      #
      # @raise [WeeseError] If request or JSON parse fails
      #
      # @return [Hash] JSON Response
      #
      def next_buses(stop)
        request = Requests::Request.new(@api_key, Bus::Urls::NEXT_BUSES, StopID: stop.id)
        fetch(request)
      end

      #
      # Buses scheduled at a stop for an optional given date.
      # {https://developer.wmata.com/docs/services/54763629281d83086473f231/operations/5476362a281d830c946a3d6c WMATA Documentation}
      #
      # @param [Stop] stop A Stop
      # @param [Date] date An optional Date
      #
      # @raise [WeeseError] If request or JSON parse fails
      #
      # @return [Hash] JSON Response
      #
      def stop_schedule(stop, date = nil)
        query = { StopID: stop.id }
        query['Date'] = date.to_s unless date.nil?
        fetch(Requests::Request.new(@api_key, Bus::Urls::STOP_SCHEDULE, query))
      end
    end
  end
end
| 23.191781 | 132 | 0.564678 | 3.234375 |
8bfec23cb84d4ca7cd88c802baf664132c0e43ae
| 1,733 |
dart
|
Dart
|
example/lib/main.dart
|
Flutter-Italia-Developers/blur_matrix
|
a3e9ab786df31cb1b556d9fc6b0348b30869f3f5
|
[
"BSD-2-Clause"
] | null | null | null |
example/lib/main.dart
|
Flutter-Italia-Developers/blur_matrix
|
a3e9ab786df31cb1b556d9fc6b0348b30869f3f5
|
[
"BSD-2-Clause"
] | null | null | null |
example/lib/main.dart
|
Flutter-Italia-Developers/blur_matrix
|
a3e9ab786df31cb1b556d9fc6b0348b30869f3f5
|
[
"BSD-2-Clause"
] | 1 |
2021-08-24T12:16:53.000Z
|
2021-08-24T12:16:53.000Z
|
import 'package:blur_matrix/blur_matrix.dart';
import 'package:flutter/material.dart';
/// App entry point: boots the blur-matrix demo.
void main() => runApp(MyApp());
/// Root widget: configures the MaterialApp and shows the demo home page.
class MyApp extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    final theme = ThemeData(primarySwatch: Colors.blue);
    return MaterialApp(
      title: 'Flutter Demo',
      theme: theme,
      home: MyHomePage(title: 'Blur Matrix Demo'),
    );
  }
}
/// Stateful host for the blur-matrix demo page.
class MyHomePage extends StatefulWidget {
  MyHomePage({Key? key, required this.title}) : super(key: key);

  /// Title shown in the app bar.
  final String title;

  @override
  _MyHomePageState createState() => _MyHomePageState();
}
/// Page state: holds the 2x2 semi-transparent color matrix fed to
/// [BlurMatrixAnimate] and composes it over a background image.
class _MyHomePageState extends State<MyHomePage> {
  late List<List<Color>> colors;

  @override
  void initState() {
    colors = [
      [Colors.green.withOpacity(0.6), Colors.white.withOpacity(0.8)],
      [Colors.black.withOpacity(0.8), Colors.blue.withOpacity(0.6)],
    ];
    super.initState();
  }

  @override
  Widget build(BuildContext context) {
    final background = Image.asset('assets/lago-blu-cervino.webp', fit: BoxFit.fill);
    final blurOverlay = ConstrainedBox(
      constraints: BoxConstraints.expand(),
      child: BlurMatrixAnimate(
        colors: colors,
      ),
    );
    return Scaffold(
      appBar: AppBar(
        title: Text(widget.title),
      ),
      body: Stack(
        fit: StackFit.expand,
        children: [background, blurOverlay],
      ),
    );
  }
}
| 25.115942 | 79 | 0.59146 | 3.125 |
cd451447fc3d8f8c4a199faf28913daa4b825a3a
| 32,839 |
cs
|
C#
|
MedicationMngAPI/App_Code/Service.cs
|
johphil/MedicationMngApp
|
6e2fc996d60d1a2fcfb17aaf260b90636b25ec3f
|
[
"MIT"
] | null | null | null |
MedicationMngAPI/App_Code/Service.cs
|
johphil/MedicationMngApp
|
6e2fc996d60d1a2fcfb17aaf260b90636b25ec3f
|
[
"MIT"
] | 1 |
2021-06-12T01:45:45.000Z
|
2021-06-12T01:49:06.000Z
|
MedicationMngAPI/App_Code/Service.cs
|
johphil/MedicationMngApp
|
6e2fc996d60d1a2fcfb17aaf260b90636b25ec3f
|
[
"MIT"
] | null | null | null |
using System;
using System.Collections.Generic;
using System.Configuration;
using System.Data;
using System.Data.SqlClient;
using System.Globalization;
using System.Linq;
using System.Runtime.Serialization;
using System.ServiceModel;
using System.ServiceModel.Activation;
using System.ServiceModel.Web;
using System.Text;
// NOTE: You can use the "Rename" command on the "Refactor" menu to change the class name "Service" in code, svc and config file together.
[AspNetCompatibilityRequirements(RequirementsMode = AspNetCompatibilityRequirementsMode.Allowed)]
public class Service : IService
{
//Database Conenction String
protected string conStr = ConfigurationManager.ConnectionStrings["MEDMNG_DBF"].ConnectionString;
/// <summary>
/// Used to get the account information of the user
/// </summary>
/// <param name="account_id">ID assigned to the user's account</param>
/// <returns>Account object which contains the information</returns>
public Account GetAccountDetails(string account_id)
{
try
{
using (SqlConnection connection = new SqlConnection(conStr))
{
using (SqlCommand command = new SqlCommand("spGetAccountDetails", connection))
{
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("id", SqlDbType.Int).Value = DBConvert.From(int.Parse(account_id));
connection.Open();
using (SqlDataReader reader = command.ExecuteReader())
{
if (reader.Read())
{
return new Account
{
Account_ID = DBConvert.To<int>(reader[0]),
FirstName = DBConvert.To<string>(reader[1]),
LastName = DBConvert.To<string>(reader[2]),
Birthday = DBConvert.To<string>(reader[3]),
Email = DBConvert.To<string>(reader[4]),
Username = DBConvert.To<string>(reader[5]),
Date_Registered = DBConvert.To<string>(reader[6])
};
}
}
}
}
return null;
}
catch
{
return null;
}
}
/// <summary>
/// Used to add a new user account after successfull registration
/// </summary>
/// <param name="account">Account object which contains the supplied information by the user</param>
/// <returns>Returns integer value -69 if username exists, -70 if email exists, -1 if failed, and 1 if success</returns>
public int AddAccount(Account account)
{
try
{
using (SqlConnection connection = new SqlConnection(conStr))
{
using (SqlCommand command = new SqlCommand("spAddAccount", connection))
{
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("firstname", SqlDbType.VarChar, 99).Value = DBConvert.From(account.FirstName);
command.Parameters.Add("lastname", SqlDbType.VarChar, 99).Value = DBConvert.From(account.LastName);
command.Parameters.Add("birthday", SqlDbType.Date).Value = DBConvert.From(account.Birthday);
command.Parameters.Add("email", SqlDbType.VarChar, 99).Value = DBConvert.From(account.Email);
command.Parameters.Add("username", SqlDbType.VarChar, 99).Value = DBConvert.From(account.Username);
command.Parameters.Add("password", SqlDbType.VarChar, 99).Value = DBConvert.From(PassHash.MD5Hash(account.Password));
connection.Open();
return (int)command.ExecuteScalar();
}
}
}
catch
{
return -1;
}
}
/// <summary>
/// Used to updated the user's account information to the database
/// </summary>
/// <param name="account">Account object which contains the supplied information by the user</param>
/// <returns>returns positive integer if success otherwise failed</returns>
public int UpdateAccountDetails(Account account)
{
try
{
using (SqlConnection connection = new SqlConnection(conStr))
{
using (SqlCommand command = new SqlCommand("spUpdateAccountDetails", connection))
{
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("account_id", SqlDbType.Int).Value = DBConvert.From(account.Account_ID);
command.Parameters.Add("firstname", SqlDbType.VarChar, 99).Value = DBConvert.From(account.FirstName);
command.Parameters.Add("lastname", SqlDbType.VarChar, 99).Value = DBConvert.From(account.LastName);
connection.Open();
return command.ExecuteNonQuery();
}
}
}
catch
{
return -1;
}
}
/// <summary>
/// Used to activate or deactivate a medication
/// </summary>
/// <param name="med_take_id">ID assigned to the selected medication</param>
/// <param name="enabled">1 if true, 0 if false</param>
/// <returns>returns positive integer if success otherwise failed</returns>
public int UpdateMedTakeEnable(string med_take_id, string enabled)
{
try
{
using (SqlConnection connection = new SqlConnection(conStr))
{
using (SqlCommand command = new SqlCommand("spUpdateMedTakeStatus", connection))
{
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("med_take_id", SqlDbType.Int).Value = DBConvert.From(int.Parse(med_take_id));
command.Parameters.Add("isactive", SqlDbType.Bit).Value = DBConvert.From(int.Parse(enabled));
connection.Open();
return command.ExecuteNonQuery();
}
}
}
catch
{
return -1;
}
}
/// <summary>
/// Used to update the user's account password
/// </summary>
/// <param name="account_id">ID assigned to the user's account</param>
/// <param name="old_password">Current password of the user</param>
/// <param name="new_password">New password supplied by the user</param>
/// <returns>returns positive integer if success otherwise failed</returns>
public int UpdateAccountPassword(int account_id, string old_password, string new_password)
{
try
{
using (SqlConnection connection = new SqlConnection(conStr))
{
using (SqlCommand command = new SqlCommand("spUpdateAccountPassword", connection))
{
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("account_id", SqlDbType.Int).Value = DBConvert.From(account_id);
command.Parameters.Add("oldPw", SqlDbType.VarChar, 99).Value = DBConvert.From(PassHash.MD5Hash(old_password));
command.Parameters.Add("newPw", SqlDbType.VarChar, 99).Value = DBConvert.From(PassHash.MD5Hash(new_password));
connection.Open();
return (int)command.ExecuteScalar();
}
}
}
catch
{
return -1;
}
}
/// <summary>
/// Used to authenticate the user credentials before accessing the main interface of the application
/// </summary>
/// <param name="username">Username of the user</param>
/// <param name="password">Password of the user</param>
/// <returns>returns the account id of the authenticated user</returns>
public int LoginAccount(string username, string password)
{
try
{
using (SqlConnection connection = new SqlConnection(conStr))
{
using (SqlCommand command = new SqlCommand("spLoginAccount", connection))
{
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("username", SqlDbType.VarChar, 99).Value = DBConvert.From(username);
command.Parameters.Add("password", SqlDbType.VarChar, 99).Value = DBConvert.From(PassHash.MD5Hash(password));
connection.Open();
return (int)command.ExecuteScalar(); //returns id of user
}
}
}
catch
{
return -1;
}
}
/// <summary>
/// Used to add the user's ratings and recommendation
/// </summary>
/// <param name="ratings">Ratings_Recommendation object which contains the ratings and feedback by the user</param>
/// <returns>returns positive integer if success otherwise failed</returns>
public int AddRatingsRecommendation(Ratings_Recommendation ratings)
{
try
{
using (SqlConnection connection = new SqlConnection(conStr))
{
using (SqlCommand command = new SqlCommand("spAddRatings", connection))
{
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("account_id", SqlDbType.Int).Value = DBConvert.From(ratings.Account_ID);
command.Parameters.Add("ratings", SqlDbType.Int).Value = DBConvert.From(ratings.Ratings);
command.Parameters.Add("recommendation", SqlDbType.VarChar).Value = DBConvert.From(ratings.Recommendation);
connection.Open();
return command.ExecuteNonQuery();
}
}
}
catch
{
return -1;
}
}
    /// <summary>
    /// Creates a medication and its dosage schedules in a single transaction:
    /// the medication row is inserted first, then one schedule row per entry,
    /// and everything is rolled back if any step fails.
    /// </summary>
    /// <param name="medtake">MedTake object (Med_Take_ID is assigned by the insert).</param>
    /// <param name="medtakeschedules">Collection of MedTakeSchedule objects to attach.</param>
    /// <returns>1 on success, -1 on any failure (transaction rolled back).</returns>
    public int AddMedTake(MedTake medtake, List<MedTakeSchedule> medtakeschedules)
    {
        try
        {
            using (SqlConnection connection = new SqlConnection(conStr))
            {
                connection.Open();
                using (SqlTransaction transaction = connection.BeginTransaction())
                {
                    try
                    {
                        using (SqlCommand command = new SqlCommand("spAddMedTake", connection, transaction))
                        {
                            command.CommandType = CommandType.StoredProcedure;
                            command.Parameters.Add("account_id", SqlDbType.Int).Value = DBConvert.From(medtake.Account_ID);
                            command.Parameters.Add("med_name", SqlDbType.VarChar, 20).Value = DBConvert.From(medtake.Med_Name);
                            command.Parameters.Add("med_count", SqlDbType.Int).Value = DBConvert.From(medtake.Med_Count);
                            command.Parameters.Add("med_count_critical", SqlDbType.Int).Value = DBConvert.From(medtake.Med_Count_Critical);
                            command.Parameters.Add("med_type_id", SqlDbType.Int).Value = DBConvert.From(medtake.Med_Type_ID);
                            // The stored procedure returns the new Med_Take_ID.
                            medtake.Med_Take_ID = (int)command.ExecuteScalar();
                            if (medtake.Med_Take_ID > 0)
                            {
                                // Reuse the same command (and transaction) for the schedule inserts.
                                command.Parameters.Clear();
                                command.CommandText = "spAddMedTakeSchedule";
                                foreach (var schedule in medtakeschedules)
                                {
                                    command.Parameters.Add("med_take_id", SqlDbType.Int).Value = DBConvert.From(medtake.Med_Take_ID);
                                    command.Parameters.Add("day_of_week", SqlDbType.Int).Value = DBConvert.From(schedule.Day_Of_Week);
                                    command.Parameters.Add("dosage_count", SqlDbType.Int).Value = DBConvert.From(schedule.Dosage_Count);
                                    command.Parameters.Add("time", SqlDbType.Time, 7).Value = DBConvert.From(schedule.Time);
                                    command.ExecuteNonQuery();
                                    command.Parameters.Clear();
                                }
                            }
                        }
                        transaction.Commit();
                    }
                    catch
                    {
                        // Any failure undoes both the medication and its schedules.
                        transaction.Rollback();
                        return -1;
                    }
                }
            }
            return 1;
        }
        catch
        {
            return -1;
        }
    }
/// <summary>
/// Used to get all medications added by the user
/// </summary>
/// <param name="account_id">ID assigned to the user</param>
/// <returns>returns a collection of medications</returns>
public List<MedTake> GetMedTakes(string account_id)
{
try
{
List<MedTake> collection = new List<MedTake>();
using (SqlConnection connection = new SqlConnection(conStr))
{
using (SqlCommand command = new SqlCommand("spGetMedTakes", connection))
{
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("account_id", SqlDbType.Int).Value = DBConvert.From(int.Parse(account_id));
connection.Open();
using (SqlDataReader reader = command.ExecuteReader())
{
while (reader.Read())
{
collection.Add(new MedTake
{
Med_Take_ID = DBConvert.To<int>(reader[0]),
Account_ID = DBConvert.To<int>(reader[1]),
Med_Name = DBConvert.To<string>(reader[2]),
Med_Count = DBConvert.To<int>(reader[3]),
Med_Count_Critical = DBConvert.To<int>(reader[4]),
Med_Type_ID = DBConvert.To<int>(reader[5]),
Med_Type_Name = DBConvert.To<string>(reader[6]),
IsCount = DBConvert.To<bool>(reader[7]),
Image = DBConvert.To<string>(reader[8]),
IsActive = DBConvert.To<bool>(reader[9])
});
}
}
}
}
return collection;
}
catch
{
return null;
}
}
    /// <summary>
    /// Updates a medication and reconciles its schedules in one transaction:
    /// the medication row is updated first, then schedule rows are created,
    /// updated and deleted per the three lists. Any failure rolls back all steps.
    /// </summary>
    /// <param name="medtake">MedTake object with the edited values.</param>
    /// <param name="deletemedtakeschedules">Schedules to delete (may be null/empty).</param>
    /// <param name="updatemedtakeschedules">Schedules to update (may be null/empty).</param>
    /// <param name="createmedtakeschedules">Schedules to create (may be null/empty).</param>
    /// <returns>1 on success, -1 on any failure (transaction rolled back).</returns>
    public int UpdateMedTake(MedTake medtake, List<MedTakeSchedule> deletemedtakeschedules, List<MedTakeSchedule> updatemedtakeschedules, List<MedTakeSchedule> createmedtakeschedules)
    {
        try
        {
            using (SqlConnection connection = new SqlConnection(conStr))
            {
                connection.Open();
                using (SqlTransaction transaction = connection.BeginTransaction())
                {
                    try
                    {
                        using (SqlCommand command = new SqlCommand("spUpdateMedTake", connection, transaction))
                        {
                            command.CommandType = CommandType.StoredProcedure;
                            command.Parameters.Add("med_take_id", SqlDbType.Int).Value = DBConvert.From(medtake.Med_Take_ID);
                            command.Parameters.Add("med_name", SqlDbType.VarChar, 20).Value = DBConvert.From(medtake.Med_Name);
                            command.Parameters.Add("med_count", SqlDbType.Int).Value = DBConvert.From(medtake.Med_Count);
                            command.Parameters.Add("med_count_critical", SqlDbType.Int).Value = DBConvert.From(medtake.Med_Count_Critical);
                            command.Parameters.Add("med_type_id", SqlDbType.Int).Value = DBConvert.From(medtake.Med_Type_ID);
                            int result = command.ExecuteNonQuery();
                            // Schedules are only touched when the medication update affected a row.
                            if (result > 0)
                            {
                                //Create MedTake Schedule
                                if (createmedtakeschedules != null && createmedtakeschedules.Count > 0)
                                {
                                    command.Parameters.Clear();
                                    command.CommandText = "spAddMedTakeSchedule";
                                    foreach (var schedule in createmedtakeschedules)
                                    {
                                        command.Parameters.Add("med_take_id", SqlDbType.Int).Value = DBConvert.From(medtake.Med_Take_ID);
                                        command.Parameters.Add("day_of_week", SqlDbType.Int).Value = DBConvert.From(schedule.Day_Of_Week);
                                        command.Parameters.Add("dosage_count", SqlDbType.Int).Value = DBConvert.From(schedule.Dosage_Count);
                                        command.Parameters.Add("time", SqlDbType.Time, 7).Value = DBConvert.From(schedule.Time);
                                        command.ExecuteNonQuery();
                                        command.Parameters.Clear();
                                    }
                                }
                                //Update MedTake Schedule
                                if (updatemedtakeschedules != null && updatemedtakeschedules.Count > 0)
                                {
                                    command.Parameters.Clear();
                                    command.CommandText = "spUpdateMedTakeSchedule";
                                    foreach (var schedule in updatemedtakeschedules)
                                    {
                                        command.Parameters.Add("med_take_schedule_id", SqlDbType.Int).Value = DBConvert.From(schedule.Med_Take_Schedule_ID);
                                        command.Parameters.Add("day_of_week", SqlDbType.Int).Value = DBConvert.From(schedule.Day_Of_Week);
                                        command.Parameters.Add("dosage_count", SqlDbType.Int).Value = DBConvert.From(schedule.Dosage_Count);
                                        command.Parameters.Add("time", SqlDbType.Time, 7).Value = DBConvert.From(schedule.Time);
                                        command.ExecuteNonQuery();
                                        command.Parameters.Clear();
                                    }
                                }
                                //Delete MedTake Schedule
                                if (deletemedtakeschedules != null && deletemedtakeschedules.Count > 0)
                                {
                                    command.Parameters.Clear();
                                    command.CommandText = "spDeleteMedTakeSchedule";
                                    foreach (var schedule in deletemedtakeschedules)
                                    {
                                        command.Parameters.Add("med_take_schedule_id", SqlDbType.Int).Value = DBConvert.From(schedule.Med_Take_Schedule_ID);
                                        command.ExecuteNonQuery();
                                        command.Parameters.Clear();
                                    }
                                }
                            }
                        }
                        transaction.Commit();
                    }
                    catch
                    {
                        // Undo the medication update and any schedule changes together.
                        transaction.Rollback();
                        return -1;
                    }
                }
            }
            return 1;
        }
        catch
        {
            return -1;
        }
    }
/// <summary>
/// Used to delete a medication
/// </summary>
/// <param name="med_take_id">ID assigned to the selected medication</param>
/// <returns>returns positive integer if success otherwise failed</returns>
public int DeleteMedTake(string med_take_id)
{
try
{
using (SqlConnection connection = new SqlConnection(conStr))
{
using (SqlCommand command = new SqlCommand("spDeleteMedTake", connection))
{
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("med_take_id", SqlDbType.Int).Value = DBConvert.From(int.Parse(med_take_id));
connection.Open();
return command.ExecuteNonQuery();
}
}
}
catch
{
return -1;
}
}
/// <summary>
/// Used to get all available types of medication from the database
/// </summary>
/// <returns>returns the list of medtypes</returns>
public List<MedType> GetMedTypes()
{
try
{
List<MedType> collection = new List<MedType>();
using (SqlConnection connection = new SqlConnection(conStr))
{
using (SqlCommand command = new SqlCommand("spGetMedTypes", connection))
{
command.CommandType = CommandType.StoredProcedure;
connection.Open();
using (SqlDataReader reader = command.ExecuteReader())
{
while (reader.Read())
{
collection.Add(new MedType
{
Med_Type_ID = DBConvert.To<int>(reader[0]),
Med_Type_Name = DBConvert.To<string>(reader[1]),
IsCount = DBConvert.To<bool>(reader[2]),
Image = DBConvert.To<string>(reader[3])
});
}
}
}
}
return collection;
}
catch
{
return null;
}
}
/// <summary>
/// Used to get the schedules of a medication
/// </summary>
/// <param name="med_take_id">ID of the selected medication</param>
/// <returns>returns a collection of MedTakeSchedule</returns>
public List<MedTakeSchedule> GetMedTakeSchedules(string med_take_id)
{
try
{
List<MedTakeSchedule> collection = new List<MedTakeSchedule>();
using (SqlConnection connection = new SqlConnection(conStr))
{
using (SqlCommand command = new SqlCommand("spGetMedTakeSchedules", connection))
{
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("med_take_id", SqlDbType.Int).Value = DBConvert.From(int.Parse(med_take_id));
connection.Open();
using (SqlDataReader reader = command.ExecuteReader())
{
while (reader.Read())
{
collection.Add(new MedTakeSchedule
{
Med_Take_Schedule_ID = DBConvert.To<int>(reader[0]),
Med_Take_ID = DBConvert.To<int>(reader[1]),
Day_Of_Week = DBConvert.To<int>(reader[2]),
Dosage_Count = DBConvert.To<int>(reader[3]),
Time = DBConvert.To<TimeSpan>(reader[4]).ToString(),
});
}
}
}
}
return collection;
}
catch
{
return null;
}
}
/// <summary>
/// Retrieves the medications scheduled for a user on a given weekday.
/// </summary>
/// <param name="account_id">ID assigned to the user (numeric string).</param>
/// <param name="day_of_week">Day of week as an integer string.</param>
/// <returns>The day's MedTakeToday rows, or null if the query fails.</returns>
public List<MedTakeToday> GetMedTakeToday(string account_id, string day_of_week)
{
    try
    {
        var results = new List<MedTakeToday>();
        using (SqlConnection conn = new SqlConnection(conStr))
        using (SqlCommand cmd = new SqlCommand("spGetMedTakeToday", conn))
        {
            cmd.CommandType = CommandType.StoredProcedure;
            cmd.Parameters.Add("account_id", SqlDbType.Int).Value = DBConvert.From(int.Parse(account_id));
            cmd.Parameters.Add("day_of_week", SqlDbType.Int).Value = DBConvert.From(int.Parse(day_of_week));
            conn.Open();
            using (SqlDataReader row = cmd.ExecuteReader())
            {
                // Column order must match spGetMedTakeToday's SELECT list.
                while (row.Read())
                {
                    results.Add(new MedTakeToday
                    {
                        Med_Take_ID = DBConvert.To<int>(row[0]),
                        Med_Take_Schedule_ID = DBConvert.To<int>(row[1]),
                        Time = DBConvert.To<TimeSpan>(row[2]).ToString(),
                        Day_Of_Week = DBConvert.To<int>(row[3]),
                        Med_Name = DBConvert.To<string>(row[4]),
                        Dosage_Count = DBConvert.To<int>(row[5]),
                        Image = DBConvert.To<string>(row[6]),
                        // Nullable: never taken yet leaves Last_Take as null.
                        Last_Take = DBConvert.To<DateTime?>(row[7])
                    });
                }
            }
        }
        return results;
    }
    catch
    {
        // Data-layer convention: failures surface as a null collection.
        return null;
    }
}
/// <summary>
/// Looks up the stored password for an account by email.
/// </summary>
/// <param name="email">Email of the user.</param>
/// <returns>The decrypted password, or null if no row matches or the query fails.</returns>
public string GetAccountPassword(string email)
{
    try
    {
        using (SqlConnection conn = new SqlConnection(conStr))
        using (SqlCommand cmd = new SqlCommand("spGetAccountPassword", conn))
        {
            cmd.CommandType = CommandType.StoredProcedure;
            cmd.Parameters.Add("email", SqlDbType.VarChar, 99).Value = DBConvert.From(email);
            conn.Open();
            using (SqlDataReader row = cmd.ExecuteReader())
            {
                // At most one row is expected; decrypt its first column.
                if (row.Read())
                {
                    return PassHash.MD5HashDecrypt(DBConvert.To<string>(row[0]));
                }
            }
        }
        // No matching account.
        return null;
    }
    catch
    {
        return null;
    }
}
/// <summary>
/// Retrieves the activity log entries recorded for an account.
/// </summary>
/// <param name="account_id">ID assigned to the user (numeric string).</param>
/// <returns>The account's AccountLog rows, or null if the query fails.</returns>
public List<AccountLog> GetAccountLogs(string account_id)
{
    try
    {
        var results = new List<AccountLog>();
        using (SqlConnection conn = new SqlConnection(conStr))
        using (SqlCommand cmd = new SqlCommand("spGetAccountLogs", conn))
        {
            cmd.CommandType = CommandType.StoredProcedure;
            cmd.Parameters.Add("account_id", SqlDbType.Int).Value = DBConvert.From(int.Parse(account_id));
            conn.Open();
            using (SqlDataReader row = cmd.ExecuteReader())
            {
                // Column order must match spGetAccountLogs' SELECT list.
                while (row.Read())
                {
                    results.Add(new AccountLog
                    {
                        Account_Log_ID = DBConvert.To<int>(row[0]),
                        Account_ID = DBConvert.To<int>(row[1]),
                        Date = DBConvert.To<DateTime>(row[2]),
                        Tag = DBConvert.To<string>(row[3]),
                        Description = DBConvert.To<string>(row[4])
                    });
                }
            }
        }
        return results;
    }
    catch
    {
        // Data-layer convention: failures surface as a null collection.
        return null;
    }
}
/// <summary>
/// Marks the selected medication schedule as taken.
/// </summary>
/// <param name="med_take_schedule_id">ID assigned to the medication schedule.</param>
/// <param name="med_take_id">ID assigned to the medication.</param>
/// <returns>A positive integer on success; -1 on any failure.</returns>
public int TakeMedicine(string med_take_schedule_id, string med_take_id)
{
    try
    {
        using (SqlConnection conn = new SqlConnection(conStr))
        using (SqlCommand cmd = new SqlCommand("spTakeMedicine", conn))
        {
            cmd.CommandType = CommandType.StoredProcedure;
            cmd.Parameters.Add("med_take_id", SqlDbType.Int).Value = DBConvert.From(int.Parse(med_take_id));
            cmd.Parameters.Add("med_take_schedule_id", SqlDbType.Int).Value = DBConvert.From(int.Parse(med_take_schedule_id));
            conn.Open();
            // Stored procedure returns its status as a scalar.
            return (int)cmd.ExecuteScalar();
        }
    }
    catch
    {
        return -1;
    }
}
/// <summary>
/// Retrieves the medication intake history of a user.
/// </summary>
/// <param name="account_id">ID assigned to the user (numeric string).</param>
/// <returns>The user's IntakeLog rows, or null if the query fails.</returns>
public List<IntakeLog> GetIntakeLogs(string account_id)
{
    try
    {
        var results = new List<IntakeLog>();
        using (SqlConnection conn = new SqlConnection(conStr))
        using (SqlCommand cmd = new SqlCommand("spGetIntakeLogs", conn))
        {
            cmd.CommandType = CommandType.StoredProcedure;
            cmd.Parameters.Add("account_id", SqlDbType.Int).Value = DBConvert.From(int.Parse(account_id));
            conn.Open();
            using (SqlDataReader row = cmd.ExecuteReader())
            {
                // Column order must match spGetIntakeLogs' SELECT list.
                while (row.Read())
                {
                    results.Add(new IntakeLog
                    {
                        Intake_Log_ID = DBConvert.To<int>(row[0]),
                        Account_ID = DBConvert.To<int>(row[1]),
                        Med_Name = DBConvert.To<string>(row[2]),
                        Dosage_Count = DBConvert.To<int>(row[3]),
                        Med_Type_Name = DBConvert.To<string>(row[4]),
                        Image = DBConvert.To<string>(row[5]),
                        // ToDateWithTime() is a project extension that formats
                        // the timestamp for display.
                        Taken = DBConvert.To<string>(row.GetDateTime(6).ToDateWithTime())
                    });
                }
            }
        }
        return results;
    }
    catch
    {
        // Data-layer convention: failures surface as a null collection.
        return null;
    }
}
}
| 43.152431 | 183 | 0.506806 | 3.046875 |
69f514fed791511d6050b8ba56b4033ad51c72da
| 6,649 |
sh
|
Shell
|
dockerfiles/zoneminder/files/zmlinkcontent.sh
|
hyperbolic2346/coreos
|
00509cb185c957ff93a944a92b2ff9201652234d
|
[
"MIT"
] | null | null | null |
dockerfiles/zoneminder/files/zmlinkcontent.sh
|
hyperbolic2346/coreos
|
00509cb185c957ff93a944a92b2ff9201652234d
|
[
"MIT"
] | null | null | null |
dockerfiles/zoneminder/files/zmlinkcontent.sh
|
hyperbolic2346/coreos
|
00509cb185c957ff93a944a92b2ff9201652234d
|
[
"MIT"
] | null | null | null |
#!/bin/bash
# The purpose of this file is to create the symlinks in the web folder to the content folder. It can use an existing content folder or create a new one.
# Set the content dir default to be the one supplied to cmake
ZM_PATH_CONTENT="/var/lib/zoneminder"
# Print a short banner so the operator knows what this script is about to do.
echo "*** This bash script creates the nessecary symlinks for the zoneminder content"
echo "*** It can use an existing content folder or create a new one"
echo "*** For usage: use -h"
echo "*** The default content directory is: $ZM_PATH_CONTENT"
echo ""
# Print the help text (shown for -h or bad invocations) on stdout.
usage()
{
cat <<EOF
Usage: $0 [-q] [-z zm.conf] [-w WEB DIRECTORY] [CONTENT DIRECTORY]
OPTIONS:
-h Show this message and quit
-z ZoneMinder configuration file
-w Override the web directory from zm.conf
-q Quick mode. Do not change ownership recursively.
If the -w option is not used to specify the path to the web directory,
the script will use the path from zoneminder's configuration file.
If the -z option is used, the argument will be used instead of zm.conf
Otherwise, it will attempt to read zm.conf from the local directory.
If that fails, it will try from /etc/zm.conf
EOF
}
# Parse command-line flags; see usage() for what each flag means.
while getopts "hz:w:q" OPTION
do
	case $OPTION in
		h)
			# Help requested: show usage and stop.
			usage
			exit 50
			;;
		z)
			# Explicit path to a zoneminder config file.
			ZM_CONFIG=$OPTARG
			;;
		w)
			# Override the web directory taken from zm.conf.
			ZM_PATH_WEB_FORCE=$OPTARG
			;;
		q)
			# Quick mode: skip the recursive chown passes.
			QUICK=1
			;;
	esac
done
# Drop the parsed flags so $1 is the optional CONTENT DIRECTORY argument.
shift $(( OPTIND - 1 ))
# Let's check that we are root; chown/chmod below need root privileges.
if [ "$(id -u)" != "0" ]; then
	echo "Error: This script needs to run as root."
	exit 1
fi
# Check if zm.conf was supplied as an argument and that it exists
if [[ -n "$ZM_CONFIG" && ! -f "$ZM_CONFIG" ]]; then
	echo "The zoneminder configuration file $ZM_CONFIG does not exist!"
	exit 40
fi
# Load zm.conf: explicit -z argument first, then ./zm.conf, then /etc/zm.conf.
if [ -n "$ZM_CONFIG" ]; then
	echo "Using custom zm.conf $ZM_CONFIG"
	source "$ZM_CONFIG"
elif [ -f "zm.conf" ]; then
	echo "Using local zm.conf"
	source "zm.conf"
elif [ -f "/etc/zm.conf" ]; then
	# BUGFIX: this test was written as [ -f "/etc/zm.conf"] (no space before
	# the closing bracket), which is a syntax error for the test builtin, so
	# the branch always failed and the script exited 45 even when
	# /etc/zm.conf existed.
	echo "Using system zm.conf"
	source "/etc/zm.conf"
else
	# BUGFIX: -e is required for \n to be rendered as a newline by bash's echo.
	echo -e "Failed locating zoneminder configuration file (zm.conf)\nUse the -z option to specify the full path to the zoneminder configuration file"
	exit 45
fi
# Override the web directory path from zm.conf
if [ -n "$ZM_PATH_WEB_FORCE" ]; then
	# Quote the argument so paths containing spaces survive readlink.
	ZM_PATH_WEB="$(readlink -f "$ZM_PATH_WEB_FORCE")"
fi
# Override the default content path with the first positional argument.
# (Was `-n "$@"` / `readlink -f $@`, which mishandles multiple or
# space-containing arguments; usage defines a single CONTENT DIRECTORY.)
if [ -n "$1" ]; then
	ZM_PATH_CONTENT="$(readlink -f "$1")"
fi
# Print some information
echo "Web folder : $ZM_PATH_WEB"
echo "Content folder : $ZM_PATH_CONTENT"
echo ""
# Verify the web folder is a real directory
echo -n "Verifying the web folder is a directory... "
if [ -d "$ZM_PATH_WEB" ]; then
	echo "OK"
else
	echo "Failed"
	exit 3
fi
# Check if the content folder exists, and if not, create it
echo -n "Checking if the content folder exists... "
if [ -d "$ZM_PATH_CONTENT" ]; then
	echo "Yes"
else
	echo "No"
	echo -n "Creating the content folder... "
	mkdir "$ZM_PATH_CONTENT"
	# $? is the exit status of the mkdir just above.
	if [ "$?" = "0" ]; then
		echo "OK"
	else
		echo "Failed"
		exit 4
	fi
fi
# Check if the content/images folder exists, and if not, create it
echo -n "Checking if the images folder exists inside the content folder... "
if [ -d "$ZM_PATH_CONTENT/images" ]; then
	echo "Yes"
else
	echo "No"
	echo -n "Creating the images folder inside the content folder... "
	mkdir "$ZM_PATH_CONTENT/images"
	if [ "$?" = "0" ]; then
		echo "OK"
	else
		echo "Failed"
		exit 6
	fi
fi
# Check if the content/events folder exists, and if not, create it
echo -n "Checking if the events folder exists inside the content folder... "
if [ -d "$ZM_PATH_CONTENT/events" ]; then
	echo "Yes"
else
	echo "No"
	echo -n "Creating the events folder inside the content folder... "
	mkdir "$ZM_PATH_CONTENT/events"
	if [ "$?" = "0" ]; then
		echo "OK"
	else
		echo "Failed"
		exit 7
	fi
fi
# If web/images already exists it must be a symlink we can replace;
# a real directory there would mean user data we must not clobber.
if [ -d "$ZM_PATH_WEB/images" ]; then
	if [ -L "$ZM_PATH_WEB/images" ]; then
		echo -n "Unlinking current symlink for the images folder... "
		unlink "$ZM_PATH_WEB/images"
		if [ "$?" = "0" ]; then
			echo "OK"
		else
			echo "Failed"
			exit 35
		fi
	else
		echo "Existing $ZM_PATH_WEB/images is not a symlink. Aborting to prevent data loss"
		exit 10
	fi
fi
# Same guard for web/events.
if [ -d "$ZM_PATH_WEB/events" ]; then
	if [ -L "$ZM_PATH_WEB/events" ]; then
		echo -n "Unlinking current symlink for the events folder... "
		unlink "$ZM_PATH_WEB/events"
		if [ "$?" = "0" ]; then
			echo "OK"
		else
			echo "Failed"
			exit 36
		fi
	else
		echo "Existing $ZM_PATH_WEB/events is not a symlink. Aborting to prevent data loss"
		exit 11
	fi
fi
# Create the symlink for the images folder
echo -n "Creating the symlink for the images folder... "
ln -s -f "$ZM_PATH_CONTENT/images" "$ZM_PATH_WEB/images"
if [ "$?" = "0" ]; then
	echo "OK"
else
	echo "Failed"
	exit 15
fi
# Create the symlink for the events folder
echo -n "Creating the symlink for the events folder... "
ln -s -f "$ZM_PATH_CONTENT/events" "$ZM_PATH_WEB/events"
if [ "$?" = "0" ]; then
	echo "OK"
else
	echo "Failed"
	exit 16
fi
# change ownership for the images folder. do it recursively unless -q is used
# (ZM_WEB_USER / ZM_WEB_GROUP come from the sourced zm.conf)
if [ -n "$QUICK" ]; then
	echo -n "Changing ownership of the images folder to ${ZM_WEB_USER} ${ZM_WEB_GROUP}... "
	chown ${ZM_WEB_USER}:${ZM_WEB_GROUP} "$ZM_PATH_CONTENT/images"
	if [ "$?" = "0" ]; then
		echo "OK"
	else
		echo "Failed"
		exit 20
	fi
else
	echo -n "Changing ownership of the images folder recursively to ${ZM_WEB_USER} ${ZM_WEB_GROUP}... "
	chown -R ${ZM_WEB_USER}:${ZM_WEB_GROUP} "$ZM_PATH_CONTENT/images"
	if [ "$?" = "0" ]; then
		echo "OK"
	else
		echo "Failed"
		exit 21
	fi
fi
# change ownership for the events folder. do it recursively unless -q is used
if [ -n "$QUICK" ]; then
	echo -n "Changing ownership of the events folder to ${ZM_WEB_USER} ${ZM_WEB_GROUP}... "
	chown ${ZM_WEB_USER}:${ZM_WEB_GROUP} "$ZM_PATH_CONTENT/events"
	if [ "$?" = "0" ]; then
		echo "OK"
	else
		echo "Failed"
		exit 25
	fi
else
	echo -n "Changing ownership of the events folder recursively to ${ZM_WEB_USER} ${ZM_WEB_GROUP}... "
	chown -R ${ZM_WEB_USER}:${ZM_WEB_GROUP} "$ZM_PATH_CONTENT/events"
	if [ "$?" = "0" ]; then
		echo "OK"
	else
		echo "Failed"
		exit 26
	fi
fi
# Change directory permissions for the images folder
echo -n "Changing permissions of the images folder to 775... "
chmod 775 "$ZM_PATH_CONTENT/images"
if [ "$?" = "0" ]; then
	echo "OK"
else
	echo "Failed"
	exit 30
fi
# Change directory permissions for the events folder
echo -n "Changing permissions of the events folder to 775... "
chmod 775 "$ZM_PATH_CONTENT/events"
if [ "$?" = "0" ]; then
	echo "OK"
else
	echo "Failed"
	exit 31
fi
echo ""
echo "All done"
| 24.902622 | 152 | 0.665514 | 3.34375 |
de581a2806966955cb8231b870616831ea68ba16
| 2,183 |
swift
|
Swift
|
0025. Reverse Nodes in k-Group.swift
|
sergeyleschev/leetcode-swift
|
b73b8fa61a14849e48fb38e27e51ea6c12817d64
|
[
"MIT"
] | 10 |
2021-05-16T07:19:41.000Z
|
2021-08-02T19:02:00.000Z
|
0025. Reverse Nodes in k-Group.swift
|
sergeyleschev/leetcode-swift
|
b73b8fa61a14849e48fb38e27e51ea6c12817d64
|
[
"MIT"
] | null | null | null |
0025. Reverse Nodes in k-Group.swift
|
sergeyleschev/leetcode-swift
|
b73b8fa61a14849e48fb38e27e51ea6c12817d64
|
[
"MIT"
] | 1 |
2021-08-18T05:33:00.000Z
|
2021-08-18T05:33:00.000Z
|
/**
* Definition for singly-linked list.
* public class ListNode {
* public var val: Int
* public var next: ListNode?
* public init() { self.val = 0; self.next = nil; }
* public init(_ val: Int) { self.val = val; self.next = nil; }
* public init(_ val: Int, _ next: ListNode?) { self.val = val; self.next = next; }
* }
*/
class Solution {
    // Solution @ Sergey Leschev, Belarusian State University
    // 25. Reverse Nodes in k-Group
    //
    // Reverses the list k nodes at a time; a trailing group shorter than k
    // is left in its original order. Node values are never copied — only
    // `next` pointers are rewired.
    //
    // Approach: first probe whether a full group of k nodes exists. If not,
    // return the head unchanged. Otherwise recursively process the rest of
    // the list, then reverse the current k nodes in front of that result.
    func reverseKGroup(_ head: ListNode?, _ k: Int) -> ListNode? {
        // Walk forward k nodes; if the list ends early there is no full
        // group to reverse and the remainder stays as-is.
        var probe = head
        var seen = 0
        while seen < k, let node = probe {
            probe = node.next
            seen += 1
        }
        guard seen == k else { return head }

        // `probe` is now the (k+1)-th node: the head of the next group.
        let reversedTail = reverseKGroup(probe, k)

        // Reverse the first k nodes, linking the group's old head onto the
        // already-processed remainder.
        var previous: ListNode? = reversedTail
        var current = head
        for _ in 0..<k {
            let following = current?.next
            current?.next = previous
            previous = current
            current = following
        }
        // `previous` is the old k-th node, i.e. the new head of this group.
        return previous
    }
}
| 31.185714 | 193 | 0.505268 | 3.125 |
8ac98c88c31c94464e09ce7de200657dae3acc35
| 1,948 |
ps1
|
PowerShell
|
code/Tools/Ops/Scripts/Remove-OBAEnvironment.ps1
|
microsoft/EmbeddedSocial-SyncService-for-OBA
|
7f2a33959742106a6acc63b5f41329e2ae73c4c1
|
[
"MIT"
] | 2 |
2020-05-09T09:32:21.000Z
|
2020-09-06T18:05:07.000Z
|
code/Tools/Ops/Scripts/Remove-OBAEnvironment.ps1
|
microsoft/EmbeddedSocial-SyncService-for-OBA
|
7f2a33959742106a6acc63b5f41329e2ae73c4c1
|
[
"MIT"
] | null | null | null |
code/Tools/Ops/Scripts/Remove-OBAEnvironment.ps1
|
microsoft/EmbeddedSocial-SyncService-for-OBA
|
7f2a33959742106a6acc63b5f41329e2ae73c4c1
|
[
"MIT"
] | 3 |
2020-06-30T15:45:51.000Z
|
2020-08-05T14:08:39.000Z
|
function Remove-OBAEnvironment {
<#
.NOTES
    Name: Remove-OBAEnvironment.ps1
    Requires: Azure Powershell version 2.1 or higher.
.SYNOPSIS
    Removes an OBA Server Azure instance.
.DESCRIPTION
    Implements all logic needed to delete an instance of the OBA Server on Azure.
    Assumes that the user is already logged to Azure RM.
.PARAMETER Name
    Name of the OBA environment to remove.
.EXAMPLE
    [PS] C:\>Remove-OBAEnvironment -Name oba-dev-alec -Verbose
#>
    param (
        [Parameter(Mandatory=$true,HelpMessage='Environment name')]
        [Alias("Name")]
        [string] $EnvironmentName
    )
    begin {
    }
    process {
        Write-Host "Deleting all resources for account $EnvironmentName..."
        # The resource group shares its name with the environment.
        $rg = $EnvironmentName
        $count = (Get-AzureRmResourceGroup -Name $rg | Measure-Object).Count
        Write-Verbose "Get resource group returned count = $count."
        # check that the resource group exists before we try to delete it
        if ($count -eq 1) {
            # Ask the user to press 'y' to continue
            # (default choice is 'No' — index 1 — so Enter aborts).
            $message = 'This script deletes all Azure resources of a OBA Server environment.'
            $question = 'Are you sure you want to proceed?'
            $choices = New-Object Collections.ObjectModel.Collection[Management.Automation.Host.ChoiceDescription]
            $choices.Add((New-Object Management.Automation.Host.ChoiceDescription -ArgumentList '&Yes'))
            $choices.Add((New-Object Management.Automation.Host.ChoiceDescription -ArgumentList '&No'))
            $decision = $Host.UI.PromptForChoice($message, $question, $choices, 1)
            if ($decision -ne 0) {
                throw 'cancelled'
            }
            Write-Verbose "Removing resource group $rg"
            # -Force suppresses Azure's own confirmation prompt; the user
            # already confirmed above.
            Remove-AzureRmResourceGroup -Name $rg -Force
        }
    }
    end {
        Write-Verbose "Finished."
    }
}
| 31.934426 | 114 | 0.623203 | 3.765625 |
20e287e88403f91746ddb97cfd1363acafca9a0d
| 7,725 |
py
|
Python
|
SpaceHabitRPG/Tests/JSTests/LoginJSTest.py
|
joelliusp/SpaceHabit
|
5656ef4d9c57f3e58d0ed756a3aa754c8a7dd6a5
|
[
"MIT"
] | null | null | null |
SpaceHabitRPG/Tests/JSTests/LoginJSTest.py
|
joelliusp/SpaceHabit
|
5656ef4d9c57f3e58d0ed756a3aa754c8a7dd6a5
|
[
"MIT"
] | 13 |
2016-07-19T04:13:20.000Z
|
2016-08-17T06:06:47.000Z
|
SpaceHabitRPG/Tests/JSTests/LoginJSTest.py
|
joelliusp/SpaceHabit
|
5656ef4d9c57f3e58d0ed756a3aa754c8a7dd6a5
|
[
"MIT"
] | null | null | null |
from SpaceUnitTest import SpaceUnitTest
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import SpaceHabitServer
import threading
import cherrypy
import time
import requests
import AuthenticationLayer
import DatabaseLayer
import DatabaseTestSetupCleanup as dbHelp
class Test_LoginJSTest(SpaceUnitTest):
    """Browser tests for the login page's JavaScript.

    A CherryPy server instance is started once for the whole class and a
    Firefox WebDriver session is created per test. Each test drives the
    page's JS directly via ``driver.execute_script`` and asserts on the
    visibility/contents of the login, new-account and forgot-password
    widgets.
    """

    @classmethod
    def setUpClass(cls):
        # Run against the unit-test database and start the app server.
        DatabaseLayer.isUnitTestMode = True
        cls.server = SpaceHabitServer.HabitServer()
        cls.server.start()
        # Poll (up to ~10s) until CherryPy reports it has started.
        ticks = 0
        while cherrypy.engine.state != cherrypy.engine.states.STARTED:
            time.sleep(1)
            ticks += 1
            if ticks >= 10:
                raise TimeoutError("ran out of time")
        return super().setUpClass()

    @classmethod
    def tearDownClass(cls):
        # Drop test data and stop the server, waiting for full shutdown.
        dbHelp.clean_up()
        cls.server.stop()
        ticks = 0
        while cherrypy.engine.state != cherrypy.engine.states.STOPPED:
            time.sleep(1)
            ticks += 1
            if ticks >= 10:
                raise TimeoutError("ran out of time")
        return super().tearDownClass()

    def setUp(self):
        # Fresh browser session pointed at the locally running server,
        # with handles to the form fields and modal dialogs cached.
        self.driver = webdriver.Firefox()
        self.driver.implicitly_wait(5)
        self.driver.get("http://127.0.0.1:8080")
        self.input1 = self.driver.find_element_by_xpath("//input[@name='email_input_1']")
        self.input2 = self.driver.find_element_by_xpath("//input[@name='email_input_2']")
        self.pw1 = self.driver.find_element_by_xpath("//input[@name='pw_input_1']")
        self.pw2 = self.driver.find_element_by_xpath("//input[@name='pw_input_2']")
        self.ship = self.driver.find_element_by_xpath("//input[@name='ship_input']")
        self.newUserModal = self.driver.find_element_by_id("new_user_box")
        self.pwModal = self.driver.find_element_by_id("forgotten_pw_box")
        return super().setUp()

    def tearDown(self):
        self.driver.quit()
        return super().tearDown()

    def open_new_user_box(self):
        # Helper: click the link that opens the account-creation modal.
        clickElem = self.driver.find_element_by_id("create_account")
        clickElem.click()

    def test_clearNewAccountWindow(self):
        # Filling every field then calling clearNewAccountWindow() should
        # blank all inputs and hide every validation message.
        self.open_new_user_box()
        self.input1.send_keys("aaaaa")
        self.input2.send_keys("bbbbb")
        self.pw1.send_keys("cccc")
        self.pw2.send_keys("dddd")
        self.ship.send_keys("eeee")
        self.driver.execute_script("clearNewAccountWindow();")
        self.assertEqual(self.input1.get_attribute('value'),"")
        self.assertEqual(self.input2.get_attribute('value'),"")
        self.assertEqual(self.pw1.get_attribute('value'),"")
        self.assertEqual(self.pw2.get_attribute('value'),"")
        self.assertEqual(self.ship.get_attribute('value'),"")
        elem = self.driver.find_element_by_id("bad_email")
        self.assertFalse(elem.is_displayed())
        elem = self.driver.find_element_by_id("taken_email")
        self.assertFalse(elem.is_displayed())
        elem = self.driver.find_element_by_id("mismatched_email")
        self.assertFalse(elem.is_displayed())
        elem = self.driver.find_element_by_id("good_email")
        self.assertFalse(elem.is_displayed())
        elem = self.driver.find_element_by_id("short_pw")
        self.assertFalse(elem.is_displayed())
        elem = self.driver.find_element_by_id("mismatched_pw")
        self.assertFalse(elem.is_displayed())

    def test_createAccountClick(self):
        # createAccountClick() should reveal the hidden new-user modal.
        elem = self.driver.find_element_by_id("new_user_box")
        self.assertFalse(elem.is_displayed())
        self.driver.execute_script("createAccountClick();")
        self.assertTrue(elem.is_displayed())

    def test_forgotPWClick(self):
        # forgotPWClick() should reveal the forgot-password modal.
        self.assertFalse(self.pwModal.is_displayed())
        self.driver.execute_script("forgotPWClick();")
        self.assertTrue(self.pwModal.is_displayed())

    def test_cancelAddClick(self):
        # cancelAddClick() should hide the new-user modal again.
        self.open_new_user_box()
        self.assertTrue(self.newUserModal.is_displayed())
        self.driver.execute_script("cancelAddClick();")
        self.assertFalse(self.newUserModal.is_displayed())

    def test_cancelForgotPassword(self):
        # cancelForgotPassword() should hide the forgot-password modal.
        self.driver.find_element_by_id("forgot_pw").click()
        self.assertTrue(self.pwModal.is_displayed())
        self.driver.execute_script("cancelForgotPassword();")
        self.assertFalse(self.pwModal.is_displayed())

    def test_validateEmailAjaxSuccess(self):
        # The AJAX success handler shows exactly the message elements named
        # in the payload and hides the others.
        self.open_new_user_box()
        self.driver.execute_script(
            "validateNewEmailAjaxSuccess("
            "{'messages':['#bad_email'],'success':false});")
        elem = self.driver.find_element_by_id("bad_email")
        self.assertTrue(elem.is_displayed())
        self.driver.execute_script(
            "validateNewEmailAjaxSuccess("
            "{'messages':['#bad_email','#taken_email'],'success':false});")
        elem = self.driver.find_element_by_id("bad_email")
        self.assertTrue(elem.is_displayed())
        elem = self.driver.find_element_by_id("taken_email")
        self.assertTrue(elem.is_displayed())
        self.driver.execute_script(
            "validateNewEmailAjaxSuccess("
            "{'messages':['#good_email'],'success':true});")
        elem = self.driver.find_element_by_id("bad_email")
        self.assertFalse(elem.is_displayed())
        elem = self.driver.find_element_by_id("taken_email")
        self.assertFalse(elem.is_displayed())
        elem = self.driver.find_element_by_id("good_email")
        self.assertTrue(elem.is_displayed())

    def test_loginAjaxSuccessSession(self):
        # Failed logins stay on the login page with the indicated error
        # messages; a successful login navigates to the main page.
        AuthenticationLayer.disableAuthenticationRedirects = True
        self.driver.execute_script("loginAjaxSuccess({'messages':[\"#bad_login\",\"#bad_login_pw\"],'success':false});")
        self.assertEqual(self.driver.title,"Login to Space Habit Frontier")
        elem = self.driver.find_element_by_id("bad_login")
        self.assertTrue(elem.is_displayed())
        elem = self.driver.find_element_by_id("bad_login_pw")
        self.assertTrue(elem.is_displayed())
        self.driver.execute_script("loginAjaxSuccess({'messages':[\"#bad_login_pw\"],'success':false});")
        self.assertEqual(self.driver.title,"Login to Space Habit Frontier")
        elem = self.driver.find_element_by_id("bad_login")
        self.assertFalse(elem.is_displayed())
        elem = self.driver.find_element_by_id("bad_login_pw")
        self.assertTrue(elem.is_displayed())
        self.driver.execute_script("loginAjaxSuccess({'messages':[],'success':true});")
        #WebDriverWait(self.driver,10).until(EC.title_is("Space Habit Frontier!"))
        self.assertEqual(self.driver.title,"Space Habit Frontier!")

    def test_onEmail2InputBlur(self):
        # Blur on the second email field flags a mismatch, and clears the
        # flag once the two fields agree.
        self.open_new_user_box()
        self.input1.send_keys("[email protected]")
        self.input2.send_keys("[email protected]")
        self.driver.execute_script("onEmail2InputBlur();")
        elem = self.driver.find_element_by_id("mismatched_email")
        self.assertTrue(elem.is_displayed())
        self.input2.clear()
        self.input2.send_keys("[email protected]")
        self.assertEqual(self.input1.get_attribute('value'),self.input2.get_attribute('value'))
        self.driver.execute_script("onEmail2InputBlur();")
        self.assertFalse(elem.is_displayed())

    def test_onPw1InputBlur(self):
        # Blur on the first password field flags passwords that are too
        # short and clears the flag for an acceptable length.
        self.open_new_user_box()
        self.pw1.send_keys("123")
        self.driver.execute_script("onPw1InputBlur();")
        elem = self.driver.find_element_by_id("short_pw")
        self.assertTrue(elem.is_displayed())
        self.pw1.clear()
        self.pw1.send_keys("123456")
        self.driver.execute_script("onPw1InputBlur();")
        self.assertFalse(elem.is_displayed())

    def test_onPw2InputBlur(self):
        # Blur on the confirmation password flags a mismatch with the first
        # password and clears the flag once they match.
        self.open_new_user_box()
        self.pw1.send_keys("abcdef")
        self.pw2.send_keys("Abcdef")
        self.driver.execute_script("onPw2InputBlur();")
        elem = self.driver.find_element_by_id("mismatched_pw")
        self.assertTrue(elem.is_displayed())
        self.pw2.clear()
        self.pw2.send_keys("abcdef")
        self.assertEqual(self.pw1.get_attribute('value'),self.pw2.get_attribute('value'))
        self.driver.execute_script("onPw2InputBlur();")
        self.assertFalse(elem.is_displayed())
if __name__ == '__main__':
    # BUGFIX: ``unittest`` was used here without ever being imported, so
    # running this file directly raised NameError. Import it locally since
    # it is only needed for direct invocation.
    import unittest
    unittest.main()
| 35.763889 | 116 | 0.725178 | 3.390625 |
641bda3052bba4bf9941da7364723d92a8d023d3
| 1,949 |
py
|
Python
|
main.py
|
mrinalpande/Music-Encrypt
|
95e0f128232be307c844066cdf85a39097855d47
|
[
"MIT"
] | 6 |
2017-08-19T21:41:57.000Z
|
2022-01-15T22:25:09.000Z
|
main.py
|
mrinalpande/Music-Encrypt
|
95e0f128232be307c844066cdf85a39097855d47
|
[
"MIT"
] | null | null | null |
main.py
|
mrinalpande/Music-Encrypt
|
95e0f128232be307c844066cdf85a39097855d47
|
[
"MIT"
] | 2 |
2017-11-28T00:18:02.000Z
|
2022-01-16T06:57:42.000Z
|
### Author: Mrinal Pande
### Date: Aug,15 2017
import sys
import os.path
import encrypt
import decrypt
def main():
    """Command-line entry point for Music Encrypt.

    Reads ``sys.argv`` directly. Supported invocations (see ``-h``):

    * ``main.py -h`` — print help
    * ``main.py -f <file> -e -k <key> -o <out>`` — encrypt ``<file>``
    * ``main.py -f <file> -d -k <key>`` — decrypt ``<file>``

    Anything else prints an "Invalid input" hint. Returns None.
    """
    print("---------------------------------------------------")
    print("Music Encrypt - Music Based Encryption")
    print("Author: Mrinal Pande")
    print("---------------------------------------------------")
    if len(sys.argv) < 2:
        print("Invalid input \nFor Help:", sys.argv[0], "-h")
    elif sys.argv[1] == "-h":
        print("Help For Music Encrypt")
        print("---------------------------------------------------")
        print("Options Description")
        print("-f File to encrypt/Decrypt")
        print("-e Encrypt the file")
        print("-d Decrypt the file")
        print("-k Key to Encrypt")
        print("-o name of output file")
        print("\nFor encrypting a file:")
        print("python main.py -f <filename> -e -k <key> -o <output file name>")
        print("\nFor decrypting a file:")
        print("python main.py -f <filename> -d -k <key>")
        print("---------------------------------------------------")
    # BUGFIX: check len(sys.argv) BEFORE indexing sys.argv[3]; the original
    # order raised IndexError for e.g. "main.py -f file" (2-3 arguments).
    elif len(sys.argv) == 8 and sys.argv[3] == "-e":
        fpath = sys.argv[2]
        key = sys.argv[5]
        outfile = sys.argv[7]
        print("Starting Encryption...")
        if os.path.isfile(fpath):
            encrypt.encrypt(fpath, key, outfile)
        else:
            print("File not found \n")
            print("Invalid input\nFor Help:", sys.argv[0], "-h")
    elif len(sys.argv) == 6 and sys.argv[3] == "-d":
        print("Decryption")
        fpath = sys.argv[2]
        key = sys.argv[5]
        print("Starting decryption...")
        if os.path.isfile(fpath):
            decrypt.decrypt(fpath, key)
        else:
            print("File not found \n")
            print("Invalid input\nFor Help:", sys.argv[0], "-h")
    else:
        print("Invalid input\nFor Help:", sys.argv[0], "-h")


# Guard the call so importing this module no longer runs the CLI as a
# side effect (the original called main() unconditionally).
if __name__ == "__main__":
    main()
| 38.215686 | 79 | 0.473063 | 3.0625 |
6654587d82f9297e39fe40639e90ccff599a025c
| 1,873 |
py
|
Python
|
dungeon_game/decorators.py
|
erem2k/dungeon_game
|
02659aa03237c48867c126fedfa123133ff6edbf
|
[
"MIT"
] | null | null | null |
dungeon_game/decorators.py
|
erem2k/dungeon_game
|
02659aa03237c48867c126fedfa123133ff6edbf
|
[
"MIT"
] | null | null | null |
dungeon_game/decorators.py
|
erem2k/dungeon_game
|
02659aa03237c48867c126fedfa123133ff6edbf
|
[
"MIT"
] | null | null | null |
"""
This module lists decorators for task 6.1 from Coding Campus 2018 Python course
(Dungeon Game)
"""
import logging
import inspect
import functools
import dungeon_game.log as log
import dungeon_game.config as config
# Module-wide logger shared by the decorators below; LOGGER_NAME comes from
# dungeon_game.log so all game modules log through the same named logger.
logger = logging.getLogger(log.LOGGER_NAME)
def log_decorator(func):
    """Wrap ``func`` so each call is logged on entry and on success.

    Logs at ``config.LEVEL_INFO`` when ``config.IS_DEBUG`` is set, otherwise
    at ``config.LEVEL_DEBUG``. The wrapped function's return value is passed
    through unchanged.

    :param func: Function to decorate with the call logger.
    :return: The wrapping function.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        level = config.LEVEL_INFO if config.IS_DEBUG else config.LEVEL_DEBUG
        logger.log(level, f"Calling {func.__name__}")
        result = func(*args, **kwargs)
        logger.log(level, f"Function {func.__name__} executed successfully")
        return result
    return wrapper
def debug_log_decorator(func):
    """Wrap ``func`` so each call logs its bound arguments and return value.

    Uses :func:`inspect.signature` to bind the actual call arguments to the
    function's parameter names before logging them. Level selection matches
    :func:`log_decorator`: ``config.LEVEL_INFO`` when ``config.IS_DEBUG`` is
    set, otherwise ``config.LEVEL_DEBUG``.

    :param func: Function to decorate with the debug call logger.
    :return: The wrapping function.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        level = config.LEVEL_INFO if config.IS_DEBUG else config.LEVEL_DEBUG
        bound = inspect.signature(func).bind(*args, **kwargs)
        parts = ["Arguments passed:"]
        for name, value in bound.arguments.items():
            parts.append(f"{name} : {value}")
        logger.log(level, ' '.join(parts))
        result = func(*args, **kwargs)
        logger.log(level, f"Function {func.__name__} returned {result}")
        return result
    return wrapper
| 24.973333 | 83 | 0.625734 | 3.0625 |
f5bf170c8fc612cd5fb3cf94fe96ff97bbfad61d
| 1,478 |
go
|
Go
|
rest/model/conversions.go
|
xdg-forks/evergreen
|
012cd09d902cf600cf04929949b50be2732f449a
|
[
"Apache-2.0"
] | null | null | null |
rest/model/conversions.go
|
xdg-forks/evergreen
|
012cd09d902cf600cf04929949b50be2732f449a
|
[
"Apache-2.0"
] | null | null | null |
rest/model/conversions.go
|
xdg-forks/evergreen
|
012cd09d902cf600cf04929949b50be2732f449a
|
[
"Apache-2.0"
] | null | null | null |
package model
import (
"go/types"
"github.com/pkg/errors"
)
// convertType is a string alias; it is not referenced anywhere in this
// chunk — presumably used by generated code elsewhere (TODO confirm).
type convertType string
// stringToString is the identity conversion for string values.
func stringToString(value string) string {
	return value
}
// stringToStringPtr returns a pointer to a copy of the given string.
func stringToStringPtr(value string) *string {
	return &value
}
// stringPtrToString dereferences p, treating nil as the empty string.
func stringPtrToString(p *string) string {
	if p != nil {
		return *p
	}
	return ""
}
// stringPtrToStringPtr is the identity conversion for *string values.
func stringPtrToStringPtr(p *string) *string {
	return p
}
// intToInt is the identity conversion for int values.
func intToInt(value int) int {
	return value
}
// intToIntPtr returns a pointer to a copy of the given int.
func intToIntPtr(value int) *int {
	return &value
}
// intPtrToInt dereferences p, treating nil as zero.
func intPtrToInt(p *int) int {
	if p != nil {
		return *p
	}
	return 0
}
// intPtrToIntPtr is the identity conversion for *int values.
func intPtrToIntPtr(p *int) *int {
	return p
}
// conversionFn maps an input type (string/int, optionally behind a pointer)
// plus the desired output pointer-ness to the name of the matching helper
// function above. Unsupported types yield an error.
func conversionFn(in types.Type, outIsPtr bool) (string, error) {
	switch t := in.(type) {
	case *types.Basic:
		switch t.Kind() {
		case types.String:
			if outIsPtr {
				return "stringToStringPtr", nil
			}
			return "stringToString", nil
		case types.Int:
			if outIsPtr {
				return "intToIntPtr", nil
			}
			return "intToInt", nil
		}
	case *types.Pointer:
		elem, isPrimitive := t.Elem().(*types.Basic)
		if !isPrimitive {
			return "", errors.New("pointers to complex objects not implemented yet")
		}
		switch elem.Kind() {
		case types.String:
			if outIsPtr {
				return "stringPtrToStringPtr", nil
			}
			return "stringPtrToString", nil
		case types.Int:
			if outIsPtr {
				return "intPtrToIntPtr", nil
			}
			return "intPtrToInt", nil
		}
	}
	// Unhandled basic kinds and non-basic/non-pointer types fall through here.
	return "", errors.Errorf("converting type %s is not supported", in.String())
}
| 17.388235 | 77 | 0.668471 | 3.03125 |
af7f95cd4be4e44e03d89a35b207c4d918a8d81d
| 1,076 |
py
|
Python
|
27_36_DAYS_OF_TYPE_2020.py
|
eduairet/eat36daysOfType2020
|
89b35c5be102ea3afb4e19daccffe39a8c24e816
|
[
"CC0-1.0"
] | 1 |
2020-07-29T22:33:39.000Z
|
2020-07-29T22:33:39.000Z
|
27_36_DAYS_OF_TYPE_2020.py
|
eduairet/eat36daysOfType2020
|
89b35c5be102ea3afb4e19daccffe39a8c24e816
|
[
"CC0-1.0"
] | null | null | null |
27_36_DAYS_OF_TYPE_2020.py
|
eduairet/eat36daysOfType2020
|
89b35c5be102ea3afb4e19daccffe39a8c24e816
|
[
"CC0-1.0"
] | null | null | null |
# Canvas is a 1080x1080 square (Instagram-friendly for 36 Days of Type).
side = 1080
# Diameter of each translucent oval, 40% of the canvas width.
thickness = side*0.4
# Number of animation frames rendered into the exported movie.
frames = 84
def skPts():
    """Return 360 (cos, sin) unit-circle points, one per degree 0..359."""
    return [(cos(radians(deg)), sin(radians(deg))) for deg in range(360)]
def shape(step, var):
    """Draw one faint rotated oval; `step` is the frame index, `var` the
    angular position (degrees). Uses DrawBot's implicit drawing state
    (fill/stroke/scale/translate/rotate are DrawBot builtins)."""
    # Rotation offset shrinks as |step| grows; callers guarantee step != 0.
    speed = var/step
    # Nearly transparent white so overlapping ovals accumulate.
    fill(1, 1, 1, 0.05)
    stroke(None)
    # NOTE: local name shadows this function inside its own body (harmless).
    shape = BezierPath()
    shape.oval(0 - (thickness/2), 0 - (thickness/2), thickness, thickness)
    with savedState():
        # Squash horizontally around the canvas centre, then offset and spin.
        scale(0.8, 1, (side*0.5, side*0.5))
        translate(side*0.2 + (thickness/4), side*0.2 + (thickness/4))
        rotate(2 * pi + speed, center=(side*0.2, side*0.2))
        drawPath(shape)
points = skPts()
# Debug leftover: dumps all 360 points to the console.
print(points)
# One page per frame, skipping frame 0 (shape() divides by the frame index).
for i in range(int(-frames/2), int(frames/2)):
    if i != 0:
        newPage(side, side)
        # Black background.
        fill(0)
        rect(0, 0, side, side)
        # Every 12 degrees, nudge by the (sub-pixel) unit-circle point and draw.
        for j in range(0, 360, 12):
            with savedState():
                translate(
                    points[j][0],
                    points[j][1]
                )
                # j == 0 is skipped (shape() divides var by step, and the
                # original appears to omit the zero-angle oval).
                if j != 0:
                    shape(i, j)
saveImage('~/Desktop/27_36_DAYS_OF_TYPE_2020.mp4')
| 26.243902 | 74 | 0.501859 | 3.609375 |
0dec4f35205b6426d14734a86633dc9946e79fae
| 2,780 |
kt
|
Kotlin
|
kotlin/src/test/kotlin/adventofcode/codeforces/KotlinHeroesPractice3.kt
|
3ygun/adventofcode
|
69f95bca3d22032fba6ee7d9d6ec307d4d2163cf
|
[
"MIT"
] | null | null | null |
kotlin/src/test/kotlin/adventofcode/codeforces/KotlinHeroesPractice3.kt
|
3ygun/adventofcode
|
69f95bca3d22032fba6ee7d9d6ec307d4d2163cf
|
[
"MIT"
] | null | null | null |
kotlin/src/test/kotlin/adventofcode/codeforces/KotlinHeroesPractice3.kt
|
3ygun/adventofcode
|
69f95bca3d22032fba6ee7d9d6ec307d4d2163cf
|
[
"MIT"
] | null | null | null |
package adventofcode.codeforces
import io.kotlintest.should
import io.kotlintest.shouldBe
import io.kotlintest.specs.FreeSpec
import io.kotlintest.tables.row
/**
 * Table-driven tests for the Codeforces "Kotlin Heroes Practice 3" solutions.
 * Every problem gets a list of (input, expected) rows, each turned into an
 * individually named FreeSpec test.
 */
class KotlinHeroesPractice3Tests : FreeSpec({
    "Problem A - Restoring Three Numbers" - {
        listOf(
            row(row(3L, 6L, 5L, 4L), Triple(1L, 2L, 3L)),
            row(row(4L, 7L, 6L, 4L), Triple(1L, 3L, 3L)),
            row(row(40L, 40L, 40L, 60L), Triple(20L, 20L, 20L)),
            row(row(120L, 120L, 120L, 180L), Triple(60L, 60L, 60L)),
            row(row(201L, 101L, 101L, 200L), Triple(1L, 100L, 100L)),
            row(row(5L, 100L, 101L, 103L), Triple(2L, 3L, 98L))
        ).forEach { (sums, expectedTriple) ->
            "$sums to $expectedTriple" {
                val (x1, x2, x3, x4) = sums
                KotlinHeroesPractice3.problemA(x1, x2, x3, x4) should { it == expectedTriple.toList() }
            }
        }
    }
    "Problem B - Remove Duplicates" - {
        listOf(
            row("6", "1 5 5 1 6 1", "3", "5 6 1"),
            row("5", "2 4 2 4 4", "2", "2 4"),
            row("5", "6 6 6 6 6", "1", "6")
        ).forEach { (countIn, valuesIn, countOut, valuesOut) ->
            "From: '$valuesIn' To: '$valuesOut'" {
                val (numResult, result) = KotlinHeroesPractice3.problemB(countIn, valuesIn)
                numResult shouldBe countOut
                result shouldBe valuesOut
            }
        }
    }
    "Problem C - File Name" - {
        listOf(
            row("xxxiii", 1),
            row("xxoxx", 0),
            row("xxxxxxxxxx", 8)
        ).forEach { (fileName, removals) ->
            "Expected removal $removals from: $fileName" {
                KotlinHeroesPractice3.problemC(fileName) shouldBe removals
            }
        }
    }
    "Problem D - Bus Video System" - {
        listOf(
            row("3 5", "2 1 -3", 3L),
            row("2 4", "-1 1", 4L),
            row("4 10", "2 4 1 2", 2L),
            row("3 10", "-2 -2 -5", 2L),
            row("1 10", "10", 1L),
            row("2 10", "9 -10", 1L),
            row("3 10", "9 -5 -5", 1L),
            row("4 10", "-2 -2 -5 9", 2L),
            row("4 10", "9 -5 6 -10", 1L),
            row("4 12", "9 -5 3 -7", 4L),
            row("1 99", "-99", 1L),
            row("5 99", "0 0 0 0 0", 100L),
            row("2 99", "-55 -43", 2L),
            row("2 100", "-50 1", 51L),
            row("1 10", "-100", 0L),
            row("1 10", "100", 0L),
            row("4 10", "-1 -9 -7 10", 0L),
            row("5 10", "5 -1 -9 -7 10", 0L),
            row("3 10", "1 2", 0L)
        ).forEach { (busLine, changeLine, validCounts) ->
            "Bus Inputs: '$busLine', Changes: '$changeLine', Expecting: $validCounts" {
                KotlinHeroesPractice3.problemD(busLine, changeLine) shouldBe validCounts
            }
        }
    }
})
| 35.189873 | 95 | 0.449281 | 3.203125 |
06caca715317e5cfe5ead1bb8633ddcb9dff7dc5
| 21,448 |
py
|
Python
|
unet_methods/unet_2d/utilities/data.py
|
DiamondLightSource/gas-hydrate-segmentation-unets
|
e635c30788c58f5c56929e437cc4704f5cbf6b79
|
[
"Apache-2.0"
] | null | null | null |
unet_methods/unet_2d/utilities/data.py
|
DiamondLightSource/gas-hydrate-segmentation-unets
|
e635c30788c58f5c56929e437cc4704f5cbf6b79
|
[
"Apache-2.0"
] | null | null | null |
unet_methods/unet_2d/utilities/data.py
|
DiamondLightSource/gas-hydrate-segmentation-unets
|
e635c30788c58f5c56929e437cc4704f5cbf6b79
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Data utilities for U-net training and prediction.
"""
import glob
import logging
import os
import re
import sys
import warnings
from datetime import date
from itertools import chain, product
from pathlib import Path
import dask.array as da
import h5py as h5
import numpy as np
import yaml
from fastai.vision import Image, crop_pad, pil2tensor
from skimage import exposure, img_as_float, img_as_ubyte, io
from tqdm import tqdm
from . import config as cfg
# Suppress every UserWarning process-wide; the filter is installed as a side
# effect of importing this module.
warnings.filterwarnings("ignore", category=UserWarning)
class SettingsData:
    """Class to sanity check and then store settings from the commandline and
    a YAML settings file. Assumes given commandline args are filepaths.
    Args:
        settings_path (pathlib.Path): Path to the YAML file containing user settings.
        parser_args (argparse.Namespace): Parsed commandline arguments from argparse.
    """
    def __init__(self, settings_path, parser_args):
        logging.info(f"Loading settings from {settings_path}")
        if settings_path.exists():
            self.settings_path = settings_path
            # safe_load parses the YAML into plain Python objects without
            # arbitrary object construction.
            with open(settings_path, 'r') as stream:
                self.settings_dict = yaml.safe_load(stream)
        else:
            # A missing settings file is fatal: exit with non-zero status.
            logging.error("Couldn't find settings file... Exiting!")
            sys.exit(1)
        logging.debug(f"Commandline args given: {vars(parser_args)}")
        # Set the data as attributes, check paths are valid files
        # Every top-level YAML key becomes an attribute on this object.
        for k, v in self.settings_dict.items():
            setattr(self, k, v)
        # Commandline args are treated as file paths (see class docstring);
        # each must exist on disk, otherwise the program exits.
        for k, v in vars(parser_args).items():
            # Check that files exist
            v = Path(v)
            if v.is_file():
                setattr(self, k, v)
            else:
                logging.error(f"The file {v} does not appear to exist. Exiting!")
                sys.exit(1)
class DataSlicerBase:
    """Shared behaviour for converting a 3d data volume into 2d image slices.

    Slicing is carried out in all of the xy (z), xz (y) and yz (x) planes.

    Args:
        settings (SettingsData): An initialised SettingsData object.
    """

    def __init__(self, settings):
        # Subclasses populate self.data_vol before delegating here; clipping
        # (when enabled in the settings) operates on that attribute.
        self.st_dev_factor = settings.st_dev_factor
        if settings.clip:
            self.data_vol = self.clip_to_uint8(self.data_vol)

    def numpy_from_hdf5(self, path, hdf5_path='/data', nexus=False):
        """Load an HDF5 dataset into a numpy array.

        Args:
            path (pathlib.Path): The path to the HDF5 file.
            hdf5_path (str): The internal HDF5 path to the data.
            nexus (bool): Unused; kept for interface compatibility.

        Returns:
            numpy.array: The dataset stored in the HDF5 file.
        """
        with h5.File(path, 'r') as hdf_file:
            return hdf_file[hdf5_path][()]

    def clip_to_uint8(self, data):
        """Clip data to mean +/- st_dev_factor standard deviations, then
        rescale and reduce the bit depth to uint8.

        Args:
            data (np.array): The data to be processed.

        Returns:
            np.array: A uint8 data array.
        """
        logging.info("Clipping data and converting to uint8.")
        sigma = np.std(data)
        mu = np.mean(data)
        voxel_count = np.prod(data.shape)
        lower_bound = mu - (sigma * self.st_dev_factor)
        upper_bound = mu + (sigma * self.st_dev_factor)
        num_high = (data > upper_bound).sum()
        num_low = (data < lower_bound).sum()
        logging.info(f"Lower bound: {lower_bound}, upper bound: {upper_bound}")
        logging.info(
            f"Number of voxels above upper bound to be clipped {num_high} - percentage {num_high/voxel_count * 100:.3f}%")
        logging.info(
            f"Number of voxels below lower bound to be clipped {num_low} - percentage {num_low/voxel_count * 100:.3f}%")
        clipped = np.clip(data, lower_bound, upper_bound)
        rescaled = exposure.rescale_intensity(clipped, out_range='float')
        return img_as_ubyte(rescaled)

    def get_axis_index_pairs(self, vol_shape):
        """All (axis, slice index) combinations found in a 3d volume.

        Args:
            vol_shape (tuple): 3d volume shape (z, y, x)

        Returns:
            itertools.chain: Every (axis, index) pair — all z slices first,
            then y, then x.
        """
        return chain.from_iterable(
            product(axis, range(extent))
            for axis, extent in zip('zyx', vol_shape)
        )

    def axis_index_to_slice(self, vol, axis, index):
        """Extract a 2d slice from a 3d volume.

        Args:
            vol (3d array): The data volume to be sliced.
            axis (str): One of 'z', 'y' and 'x'.
            index (int): An image slice index found in that axis.

        Returns:
            2d array: The slice at ``index`` along ``axis`` (None for an
            unrecognised axis, matching the original behaviour).
        """
        selectors = {
            'z': (index, slice(None), slice(None)),
            'y': (slice(None), index, slice(None)),
            'x': (slice(None), slice(None), index),
        }
        selector = selectors.get(axis)
        return vol[selector] if selector is not None else None

    def get_num_of_ims(self, vol_shape):
        """Total number of images produced when slicing a volume in the
        z, y and x planes.

        Args:
            vol_shape (tuple): 3d volume shape (z, y, x).

        Returns:
            int: The sum of the three dimensions.
        """
        return sum(vol_shape)
class TrainingDataSlicer(DataSlicerBase):
    """Class that converts 3d data volumes into 2d image slices on disk for
    model training.
    Slicing is carried in all of the xy (z), xz (y) and yz (x) planes.
    Args:
        settings (SettingsData): An initialised SettingsData object.
    """
    def __init__(self, settings):
        data_vol_path = getattr(settings, cfg.TRAIN_DATA_ARG)
        # data_vol must be assigned before calling the base initialiser,
        # which clips self.data_vol when settings.clip is set.
        self.data_vol = self.numpy_from_hdf5(data_vol_path,
                                             hdf5_path=settings.train_data_hdf5_path)
        super().__init__(settings)
        self.multilabel = False
        self.data_im_out_dir = None
        self.seg_im_out_dir = None
        seg_vol_path = getattr(settings, cfg.LABEL_DATA_ARG)
        self.seg_vol = self.numpy_from_hdf5(seg_vol_path,
                                            hdf5_path=settings.seg_hdf5_path)
        # More than two distinct label values means multi-class segmentation.
        seg_classes = np.unique(self.seg_vol)
        self.num_seg_classes = len(seg_classes)
        if self.num_seg_classes > 2:
            self.multilabel = True
        logging.info("Number of classes in segmentation dataset:"
                     f" {self.num_seg_classes}")
        logging.info(f"These classes are: {seg_classes}")
        # Relabel so that class values start from zero.
        if seg_classes[0] != 0:
            logging.info("Fixing label classes.")
            self.fix_label_classes(seg_classes)
        self.codes = [f"label_val_{i}" for i in seg_classes]
    def fix_label_classes(self, seg_classes):
        """Changes the data values of classes in a segmented volume so that
        they start from zero.
        Args:
            seg_classes(list): An ascending list of the labels in the volume.
        """
        # NOTE(review): this branch is a no-op (assigns seg_vol to itself).
        # It looks like a leftover dask code path — possibly a missing
        # `.compute()` call; confirm the original intent.
        if isinstance(self.seg_vol, da.core.Array):
            self.seg_vol = self.seg_vol
        # Map the ascending label values onto 0..n-1 in place.
        for idx, current in enumerate(seg_classes):
            self.seg_vol[self.seg_vol == current] = idx
    def output_data_slices(self, data_dir):
        """Wrapper method to intitiate slicing data volume to disk.
        Args:
            data_dir (pathlib.Path): The path to the directory where images will be saved.
        """
        self.data_im_out_dir = data_dir
        logging.info(
            'Slicing data volume and saving slices to disk')
        os.makedirs(data_dir, exist_ok=True)
        self.output_slices_to_disk(self.data_vol, data_dir, 'data')
    def output_label_slices(self, data_dir):
        """Wrapper method to intitiate slicing label volume to disk.
        Args:
            data_dir (pathlib.Path): The path to the directory where images will be saved.
        """
        self.seg_im_out_dir = data_dir
        logging.info(
            'Slicing label volume and saving slices to disk')
        os.makedirs(data_dir, exist_ok=True)
        self.output_slices_to_disk(
            self.seg_vol, data_dir, 'seg', label=True)
    def output_slices_to_disk(self, data_arr, output_path, name_prefix, label=False):
        """Coordinates the slicing of an image volume in the three orthogonal
        planes to images on disk.
        Args:
            data_arr (array): The data volume to be sliced.
            output_path (pathlib.Path): A Path object to the output directory.
            name_prefix (str): Filename prefix for each slice ('data'/'seg').
            label (bool): Whether this is a label volume.
        """
        shape_tup = data_arr.shape
        ax_idx_pairs = self.get_axis_index_pairs(shape_tup)
        num_ims = self.get_num_of_ims(shape_tup)
        # One image per (axis, slice index) pair; tqdm shows progress.
        for axis, index in tqdm(ax_idx_pairs, total=num_ims):
            out_path = output_path/f"{name_prefix}_{axis}_stack_{index}"
            self.output_im(self.axis_index_to_slice(data_arr, axis, index),
                           out_path, label)
    def output_im(self, data, path, label=False):
        """Converts a slice of data into an image on disk.
        Args:
            data (numpy.array): The data slice to be converted.
            path (str): The path of the image file including the filename prefix.
            label (bool): Whether to convert values >1 to 1 for binary segmentation.
        """
        # NOTE(review): no-op self-assignment — looks like a leftover dask
        # `.compute()` path; confirm.
        if isinstance(data, da.core.Array):
            data = data
        # For binary (non-multilabel) segmentation, collapse all foreground
        # classes to 1.
        if label and not self.multilabel:
            data[data > 1] = 1
        io.imsave(f'{path}.png', data)
    def delete_data_im_slices(self):
        """Deletes image slices in the data image output directory. Leaves the
        directory in place since it contains model training history.
        """
        if self.data_im_out_dir:
            data_ims = glob.glob(f"{str(self.data_im_out_dir) + '/*.png'}")
            logging.info(f"Deleting {len(data_ims)} image slices")
            for fn in data_ims:
                os.remove(fn)
    def delete_label_im_slices(self):
        """Deletes label image slices in the segmented image output directory.
        Also deletes the directory itself.
        """
        if self.seg_im_out_dir:
            seg_ims = glob.glob(f"{str(self.seg_im_out_dir) + '/*.png'}")
            logging.info(f"Deleting {len(seg_ims)} segmentation slices")
            for fn in seg_ims:
                os.remove(fn)
            logging.info(f"Deleting the empty segmentation image directory")
            os.rmdir(self.seg_im_out_dir)
    def clean_up_slices(self):
        """Wrapper function that cleans up data and label image slices.
        """
        self.delete_data_im_slices()
        self.delete_label_im_slices()
class PredictionDataSlicer(DataSlicerBase):
    """Class that converts 3d data volumes into 2d image slices for
    segmentation prediction and that combines the slices back into volumes after
    prediction.
    1. Slicing is carried in the xy (z), xz (y) and yz (x) planes. 2. The data
    volume is rotated by 90 degrees. Steps 1 and 2 are then repeated untill
    4 rotations have been sliced.
    The class also has methods to combine the image slices in to 3d volumes and
    also to combine these volumes and perform consensus thresholding.
    Args:
        settings (SettingsData): An initialised SettingsData object.
        predictor (Unet2dPredictor): A Unet2dPredictor object with a trained
        2d U-net as an attribute.
    """
    def __init__(self, settings, predictor):
        data_vol_path = getattr(settings, cfg.PREDICT_DATA_ARG)
        # data_vol must exist before the base initialiser runs, since the
        # base class may clip self.data_vol.
        self.data_vol = self.numpy_from_hdf5(data_vol_path,
                                             hdf5_path=settings.predict_data_hdf5_path)
        super().__init__(settings)
        # NOTE(review): map() produces a one-shot iterator, so these values
        # can only be iterated once — consensus_threshold() would see an
        # empty sequence on any second call. list(...) would be safer; confirm.
        self.consensus_vals = map(int, settings.consensus_vals)
        self.predictor = predictor
        self.delete_vols = settings.del_vols # Whether to clean up predicted vols
    def setup_folder_stucture(self, root_path):
        """Sets up a folder structure to store the predicted images.
        Args:
            root_path (Path): The top level directory for data output.
        """
        # (Method name typo "stucture" kept — it is part of the public API.)
        vol_dir= root_path/f'{date.today()}_predicted_volumes'
        non_rotated = vol_dir/f'{date.today()}_non_rotated_volumes'
        rot_90_seg = vol_dir/f'{date.today()}_rot_90_volumes'
        rot_180_seg = vol_dir/f'{date.today()}_rot_180_volumes'
        rot_270_seg = vol_dir/f'{date.today()}_rot_270_volumes'
        # One (key, directory) pair per 90-degree rotation of the data volume.
        self.dir_list = [
            ('non_rotated', non_rotated),
            ('rot_90_seg', rot_90_seg),
            ('rot_180_seg', rot_180_seg),
            ('rot_270_seg', rot_270_seg)
        ]
        for _, dir_path in self.dir_list:
            os.makedirs(dir_path, exist_ok=True)
    def combine_slices_to_vol(self, folder_path):
        """Combines the orthogonally sliced png images in a folder to HDF5
        volumes. One volume for each direction. These are then saved with a
        common orientation. The images slices are then deleted.
        Args:
            folder_path (pathlib.Path): Path to a folder containing images that
            were sliced in the three orthogonal planes.
        Returns:
            list of pathlib.Path: Paths to the created volumes.
        """
        output_path_list = []
        # NOTE(review): Path.ls() is not stdlib pathlib — presumably fastai's
        # monkey-patched helper; confirm the fastai import provides it.
        file_list = folder_path.ls()
        axis_list = ['z', 'y', 'x']
        # Extracts the slice index from filenames like "..._stack_123.png".
        number_regex = re.compile(r'\_(\d+)\.png')
        for axis in axis_list:
            # Generate list of files for that axis
            axis_files = [x for x in file_list if re.search(
                f'\_({axis})\_', str(x))]
            logging.info(f'Axis {axis}: {len(axis_files)} files found, creating' \
                ' volume')
            # Load in the first image to get dimensions
            first_im = io.imread(axis_files[0])
            shape_tuple = first_im.shape
            z_dim = len(axis_files)
            y_dim, x_dim = shape_tuple
            data_vol = np.empty([z_dim, y_dim, x_dim], dtype=np.uint8)
            # Each image is placed at the slice index encoded in its filename.
            for filename in axis_files:
                m = number_regex.search(str(filename))
                index = int(m.group(1))
                im_data = io.imread(filename)
                data_vol[index, :, :] = im_data
            # Re-orient the y and x stacks so that all three volumes share
            # the same (z, y, x) axis order before saving.
            if axis == 'y':
                data_vol = np.swapaxes(data_vol, 0, 1)
            if axis == 'x':
                data_vol = np.swapaxes(data_vol, 0, 2)
                data_vol = np.swapaxes(data_vol, 0, 1)
            output_path = folder_path/f'{axis}_axis_seg_combined.h5'
            output_path_list.append(output_path)
            logging.info(f'Outputting {axis} axis volume to {output_path}')
            with h5.File(output_path, 'w') as f:
                f['/data'] = data_vol
            # Delete the images
            logging.info(f"Deleting {len(axis_files)} image files for axis {axis}")
            for filename in axis_files:
                os.remove(filename)
        return output_path_list
    def combine_vols(self, output_path_list, k, prefix, final=False):
        """Sums volumes to give a combination of binary segmentations and saves to disk.
        Args:
            output_path_list (list of pathlib.Path): Paths to the volumes to be combined.
            k (int): Number of 90 degree rotations that these image volumes
            have been transformed by before slicing.
            prefix (str): A filename prefix to give the final volume.
            final (bool, optional): Set to True if this is the final combination
            of the volumes that were created from each of the 90 degree rotations.
            Defaults to False.
        Returns:
            pathlib.Path: A file path to the combined HDF5 volume that was saved.
        """
        num_vols = len(output_path_list)
        # Element-wise sum of the constituent volumes.
        # NOTE(review): arithmetic is in uint8 and could wrap if slice values
        # are 0/255 rather than 0/1 — confirm the upstream value range.
        combined = self.numpy_from_hdf5(output_path_list[0])
        for subsequent in output_path_list[1:]:
            combined += self.numpy_from_hdf5(subsequent)
        combined_out_path = output_path_list[0].parent.parent / \
            f'{date.today()}_{prefix}_{num_vols}_volumes_combined.h5'
        if final:
            # 12 = 4 rotations x 3 orthogonal slicing planes.
            combined_out_path = output_path_list[0].parent / \
                f'{date.today()}_{prefix}_12_volumes_combined.h5'
        logging.info(f'Saving the {num_vols} combined volumes to {combined_out_path}')
        # NOTE(review): no-op self-assignment — looks like a leftover dask
        # `.compute()` path; confirm.
        combined = combined
        # Undo the k 90-degree rotations applied before slicing so every
        # combined volume shares the original orientation.
        combined = np.rot90(combined, 0 - k)
        with h5.File(combined_out_path, 'w') as f:
            f['/data'] = combined
        if self.delete_vols:
            logging.info("Deleting the source volumes for the combined volume")
            for vol_filepath in output_path_list:
                os.remove(vol_filepath)
        return combined_out_path
    def predict_single_slice(self, axis, index, data, output_path):
        """Takes in a 2d data array and saves the predicted U-net segmentation to disk.
        Args:
            axis (str): The name of the axis to incorporate in the output filename.
            index (int): The slice number to incorporate in the output filename.
            data (numpy.array): The 2d data array to be fed into the U-net.
            output_path (pathlib.Path): The path to directory for file output.
        """
        data = img_as_float(data)
        img = Image(pil2tensor(data, dtype=np.float32))
        # Pad odd image dimensions to even before inference (see fix_odd_sides).
        self.fix_odd_sides(img)
        prediction = self.predictor.model.predict(img)
        pred_slice = img_as_ubyte(prediction[1][0])
        io.imsave(
            output_path/f"unet_prediction_{axis}_stack_{index}.png", pred_slice)
    def fix_odd_sides(self, example_image):
        """Replaces an an odd image dimension with an even dimension by padding.
        Taken from https://forums.fast.ai/t/segmentation-mask-prediction-on-different-input-image-sizes/44389/7.
        Args:
            example_image (fastai.vision.Image): The image to be fixed.
        """
        # NOTE(review): crop_pad's result is only bound to the local name, so
        # this helper relies on fastai mutating the Image in place — confirm,
        # otherwise the padding would be silently discarded.
        if (list(example_image.size)[0] % 2) != 0:
            example_image = crop_pad(example_image,
                                     size=(list(example_image.size)[
                                         0]+1, list(example_image.size)[1]),
                                     padding_mode='reflection')
        if (list(example_image.size)[1] % 2) != 0:
            example_image = crop_pad(example_image,
                                     size=(list(example_image.size)[0], list(
                                         example_image.size)[1] + 1),
                                     padding_mode='reflection')
    def predict_orthog_slices_to_disk(self, data_arr, output_path):
        """Outputs slices from data or ground truth seg volumes sliced in
        all three of the orthogonal planes
        Args:
            data_array (numpy.array): The 3d data volume to be sliced and predicted.
            output_path (pathlib.Path): A Path to the output directory.
        """
        shape_tup = data_arr.shape
        ax_idx_pairs = self.get_axis_index_pairs(shape_tup)
        num_ims = self.get_num_of_ims(shape_tup)
        # One U-net prediction per (axis, slice index) pair.
        for axis, index in tqdm(ax_idx_pairs, total=num_ims):
            self.predict_single_slice(
                axis, index, self.axis_index_to_slice(data_arr, axis, index), output_path)
    def consensus_threshold(self, input_path):
        """Saves a consensus thresholded volume from combination of binary volumes.
        Args:
            input_path (pathlib.Path): Path to the combined HDF5 volume that is
            to be thresholded.
        """
        # NOTE(review): self.consensus_vals is a one-shot map() iterator
        # created in __init__, so this loop consumes it — a second call to
        # this method would iterate nothing.
        for val in self.consensus_vals:
            combined = self.numpy_from_hdf5(input_path)
            combined_out = input_path.parent / \
                f'{date.today()}_combined_consensus_thresh_cutoff_{val}.h5'
            # Binarise: values below the cutoff -> 0, the rest -> 255.
            combined[combined < val] = 0
            combined[combined >= val] = 255
            logging.info(f'Writing to {combined_out}')
            with h5.File(combined_out, 'w') as f:
                f['/data'] = combined
    def predict_12_ways(self, root_path):
        """Runs the loop that coordinates the prediction of a 3d data volume
        by a 2d U-net in 12 orientations and then combination of the segmented
        binary outputs.
        Args:
            root_path (pathlib.Path): Path to the top level directory for data
            output.
        """
        self.setup_folder_stucture(root_path)
        combined_vol_paths = []
        # 4 rotations x 3 slicing planes = 12 predicted orientations.
        for k in tqdm(range(4), ncols=100, desc='Total progress', postfix="\n"):
            key, output_path = self.dir_list[k]
            logging.info(f'Rotating volume {k * 90} degrees')
            rotated = np.rot90(self.data_vol, k)
            logging.info("Predicting slices to disk.")
            self.predict_orthog_slices_to_disk(rotated, output_path)
            output_path_list = self.combine_slices_to_vol(output_path)
            # combine_vols un-rotates by k, so all four results align.
            fp = self.combine_vols(output_path_list, k, key)
            combined_vol_paths.append(fp)
        # Combine all the volumes
        final_combined = self.combine_vols(combined_vol_paths, 0, 'final', True)
        self.consensus_threshold(final_combined)
        if self.delete_vols:
            # os.rmdir removes only empty directories; the volumes inside were
            # already deleted by combine_vols when delete_vols is set.
            for _, vol_dir in self.dir_list:
                os.rmdir(vol_dir)
| 41.166987 | 112 | 0.61423 | 3 |
6b326e689ba4b994e89e491ad24560cd2ed37252
| 4,494 |
js
|
JavaScript
|
src/operator/binary-opr/radical.js
|
Inzaghi2012/formula
|
7c5930df0223f149ce6a374bd6dd1522900f37b9
|
[
"MIT"
] | 1 |
2020-09-22T17:09:03.000Z
|
2020-09-22T17:09:03.000Z
|
src/operator/binary-opr/radical.js
|
Inzaghi2012/formula
|
7c5930df0223f149ce6a374bd6dd1522900f37b9
|
[
"MIT"
] | null | null | null |
src/operator/binary-opr/radical.js
|
Inzaghi2012/formula
|
7c5930df0223f149ce6a374bd6dd1522900f37b9
|
[
"MIT"
] | null | null | null |
/**
 * Radical (root) operator — draws the square/nth-root sign around a radicand.
 */
define( function ( require, exports, modules ) {
    var kity = require( "kity" ),
        // Symbol shape attributes
        // Stroke thickness of the radical sign
        SHAPE_DATA_WIDTH = 2,
        // Precomputed trigonometric constants used by the path maths
        radians = 2 * Math.PI / 360,
        sin10 = Math.sin( 10 * radians ),
        cos10 = Math.cos( 10 * radians ),
        sin20 = Math.sin( 20 * radians ),
        cos20 = Math.cos( 20 * radians ),
        tan20 = Math.tan( 20 * radians );
    return kity.createClass( 'RadicalOperator', {
        base: require( "operator/binary" ),
        constructor: function () {
            this.callBase( "Radical" );
            this.clearTransform();
        },
        applyOperand: function ( radicand, exponent ) {
            generateOperator.call( this, radicand, exponent );
        }
    } );
    // Generate the operator's path data from the given operands.
    // radicand: the expression under the root sign
    // exponent: the root index (e.g. 3 for a cube root); may be absent
    function generateOperator ( radicand, exponent ) {
        var decoration = generateDecoration(),
            vLine = generateVLine( radicand ),
            hLine = generateHLine( radicand );
        this.addOperatorShape( decoration );
        this.addOperatorShape( vLine );
        this.addOperatorShape( hLine );
        adjustmentPosition( mergeShape( decoration, vLine, hLine ), this.operatorShape, radicand, exponent );
        adjustmentBox.call( this );
    }
    // Generate the decorative hook on the left side of the radical sign.
    function generateDecoration () {
        var shape = new kity.Path(),
            // Named 'a' to keep the path expressions short
            a = SHAPE_DATA_WIDTH,
            drawer = shape.getDrawer();
        // Start at the top-right corner of the radical's tail
        drawer.moveTo( cos10 * 7 * a, 0 );
        drawer.lineTo( 0, sin10 * 7 * a );
        drawer.lineBy( sin20 * a * 2, cos20 * a * 2 );
        drawer.lineBy( cos10 * a * 3, -sin10 * a * 3 );
        drawer.lineBy( sin20 * a * 14, cos20 * a * 14 );
        drawer.lineBy( a * 2, 0 );
        drawer.lineBy( 0, -a *2 / sin20 );
        drawer.close();
        return shape.fill( "black" );
    }
    // Generate the long slanted stroke of the radical, sized to the operand.
    function generateVLine ( operand ) {
        var shape = new kity.Path(),
            // Named 'a' to keep the path expressions short
            a = SHAPE_DATA_WIDTH,
            // Height of the radicand expression
            h = operand.getHeight(),
            drawer = shape.getDrawer();
        drawer.moveTo( tan20 * h, 0 );
        drawer.lineTo( 0, h );
        drawer.lineBy( sin20 * a * 3, cos20 * a * 3 );
        drawer.lineBy( tan20 * h + sin20 * a * 3, -( h + 3 * a * cos20 ) );
        drawer.close();
        return shape.fill( "black" );
    }
    // Generate the horizontal bar (vinculum), sized to the operand's width.
    function generateHLine ( operand ) {
        // Width of the radicand expression plus padding
        var w = operand.getWidth() + 2 * SHAPE_DATA_WIDTH;
        return new kity.Rect( 0, 0, w, 2 * SHAPE_DATA_WIDTH ).fill( "black" );
    }
    // Merge the radical's parts into position and return key anchor-point data.
    function mergeShape ( decoration, vLine, hLine ) {
        var decoBox = decoration.getRenderBox(),
            vLineBox = vLine.getRenderBox();
        vLine.translate( decoBox.width - SHAPE_DATA_WIDTH , 0 );
        decoration.translate( 0, vLineBox.height - decoBox.height );
        vLineBox = vLine.getRenderBox();
        hLine.translate( vLineBox.x + vLineBox.width - SHAPE_DATA_WIDTH / cos20, 0 );
        // Return the key-point data: where the vinculum meets the stroke
        return {
            x: vLineBox.x + vLineBox.width - SHAPE_DATA_WIDTH / cos20,
            y: 0
        };
    }
    // Adjust all parts of the radical expression: position, operator sign,
    // radicand and (optional) exponent.
    function adjustmentPosition ( position, operator, radicand, exponent ) {
        var radicandBox = radicand.getRenderBox(),
            diff = 0,
            width = 0,
            exponentBox = null;
        // Position the radicand relative to the radical sign
        radicand.translate( position.x + SHAPE_DATA_WIDTH - radicandBox.x + 5, position.y + 2 * SHAPE_DATA_WIDTH + 5 );
        operator.translate( 5, 5 );
        if ( !exponent ) {
            return;
        }
        // The exponent is rendered at half scale above the radical's hook
        exponent.setAnchor( 0, 0 );
        exponent.scale( 0.5 );
        exponentBox = exponent.getRenderBox();
        // width is the minimum width needed to place the exponent
        width = exponentBox.width + exponentBox.height * tan20;
        // If the exponent is wider than the radical's left part, shift the
        // radical sign and the radicand right to make room
        if ( width > position.x ) {
            diff = width - position.x;
            operator.translate( diff + 5, 0 );
            radicand.translate( diff + 5, 0 );
        // Otherwise shift the exponent right, up against the radical
        } else {
            exponent.translate( position.x - width + 5, 0 );
        }
    }
    // Adjust the overall bounding-box size to fit the finished operator.
    function adjustmentBox () {
        this.setBoxSize( this.operatorShape.getWidth(), this.operatorShape.getHeight() + 10 );
    }
} );
8d6a0cff665a07c0d36644ed562c3e0cd10e47d0
| 1,433 |
c
|
C
|
03_Data Structure/Algorithms/List/mergeList.c
|
Robert-Stackflow/HUST-Courses
|
300752552e7af035b0e5c7663953850c81871242
|
[
"MIT"
] | 4 |
2021-11-01T09:27:32.000Z
|
2022-03-07T14:24:10.000Z
|
03_Data Structure/Algorithms/List/mergeList.c
|
Robert-Stackflow/HUST-Courses
|
300752552e7af035b0e5c7663953850c81871242
|
[
"MIT"
] | null | null | null |
03_Data Structure/Algorithms/List/mergeList.c
|
Robert-Stackflow/HUST-Courses
|
300752552e7af035b0e5c7663953850c81871242
|
[
"MIT"
] | null | null | null |
#include <stdio.h>
#include "list.h"
/*
 * Merge two sorted singly linked lists into one sorted list, iteratively.
 * Nodes are relinked in place (no allocation). On equal keys the node from
 * the second list is taken first, matching the strict '<' comparison.
 * Returns the head of the merged list (NULL if both inputs are empty).
 */
llink merge_list(llink list1,llink list2){
    llink head = NULL;
    llink *tail = &head; /* address of the next link to fill */
    while (list1 && list2) {
        if (list1->vertex < list2->vertex) {
            *tail = list1;
            list1 = list1->next;
        } else {
            *tail = list2;
            list2 = list2->next;
        }
        tail = &(*tail)->next;
    }
    /* Splice in whichever list still has nodes (possibly neither). */
    *tail = list1 ? list1 : list2;
    return head;
}
/*
 * Merge two sorted lists recursively: peel off the smaller head node,
 * merge the remainder, and hang it behind that node. Equal keys favour
 * the second list (strict '<'), same as the iterative version.
 */
llink merge_list_recursive(llink list1,llink list2){
    if (!list1)
        return list2;
    if (!list2)
        return list1;
    if (list1->vertex < list2->vertex) {
        list1->next = merge_list_recursive(list1->next, list2);
        return list1;
    }
    list2->next = merge_list_recursive(list1, list2->next);
    return list2;
}
/* Build two sorted lists, print them, then print their recursive merge. */
int main(int argc, char const *argv[])
{
    int values_a[] = {1, 3, 3, 8, 9, 10};
    int values_b[] = {2, 3, 6, 7, 8, 9, 12, 13};
    int count_a = sizeof(values_a) / sizeof(values_a[0]);
    int count_b = sizeof(values_b) / sizeof(values_b[0]);
    llink list_a = NULL, list_b = NULL;
    int idx;
    for (idx = 0; idx < count_a; idx++)
        insert_list(&list_a, values_a[idx]);
    display_list(list_a);
    for (idx = 0; idx < count_b; idx++)
        insert_list(&list_b, values_b[idx]);
    display_list(list_b);
    llink merged = merge_list_recursive(list_a, list_b);
    display_list(merged);
    return 0;
}
| 19.630137 | 55 | 0.682484 | 3.109375 |
394fe3e7349a3458b35832967c10c32e34a864c4
| 1,750 |
py
|
Python
|
entry/refresh_person.py
|
chulchultrain/FriendLeague
|
bf754f2c5e5a1e559a38f8b4617c87d6fb5f26ac
|
[
"MIT"
] | 1 |
2020-01-02T05:46:42.000Z
|
2020-01-02T05:46:42.000Z
|
entry/refresh_person.py
|
chulchultrain/FriendLeague
|
bf754f2c5e5a1e559a38f8b4617c87d6fb5f26ac
|
[
"MIT"
] | 5 |
2017-06-11T22:09:26.000Z
|
2020-01-04T02:59:49.000Z
|
entry/refresh_person.py
|
chulchultrain/FriendLeague
|
bf754f2c5e5a1e559a38f8b4617c87d6fb5f26ac
|
[
"MIT"
] | null | null | null |
# Module to serve as a runnable script to refresh a persons matchlist and data in the db.
# python3 -m entry.refresh_person.py
# Command line arguments:
# --name
# --acc_id encrypted account id using the api key
# Will retrieve the updated match list from the api,
# and for matches that are not in the db, it will populate them in the db
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'FriendLeague.settings'
import sys
import argparse
import django
# Bootstrap Django (settings module configured above) before the ORM-backed
# imports below; only runs when this module is executed as a script.
if __name__ == '__main__':
    django.setup()
import leagreq.name_to_acc as name_to_acc
import leagreq.acc_to_matches as acc_to_matches
import leagreq.match as match
from django.conf import settings
from django.db import models
from analytics.models import Account
import filterdata.match_summary as match_summary
def refresh_all():
    """Refresh the match list and match data for every Account in the db."""
    for account in Account.objects.all():
        refresh_person(account.account_id)
def refresh_person(acc_id):
    """Refresh one account's match list from the API and summarise each
    match into the db (matches already stored are handled downstream —
    see the module header).

    Args:
        acc_id: Encrypted account id for the summoner.
    """
    refreshed = acc_to_matches.refresh_matches(acc_id)
    for game_id in (entry['gameId'] for entry in refreshed):
        match_summary.match_summary_from_id(game_id)
def parse_args(args):
    """Parse commandline options for this entry point.

    Bug fix: the original ignored ``args`` entirely and always parsed
    ``sys.argv``, making the parameter dead and the function untestable.
    The supplied vector is now used; behaviour for the existing caller
    (which passes ``sys.argv``) is unchanged.

    Args:
        args: Raw argument vector (e.g. ``sys.argv``); the first element is
            treated as the program name and skipped. A falsy value falls
            back to ``sys.argv`` (argparse's default), preserving the old
            behaviour.

    Returns:
        tuple: ``(name, acc_id)``; each is ``None`` when not supplied.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-n', "--name", help="the name of the summoner")
    parser.add_argument('-a', "--acc_id",
                        help="the account id of the summoner properly encrypted using the api key")
    parsed = parser.parse_args(args[1:] if args else None)
    return parsed.name, parsed.acc_id
if __name__ == "__main__":
    # Resolve the account id: prefer an explicit id, fall back to looking one
    # up from the summoner name, and abort when neither was supplied.
    name, acc_id = parse_args(sys.argv)
    if acc_id is None:
        if name is None:
            sys.exit("need either name or account id")
        acc_id = name_to_acc.account_id_from_name(name)
    refresh_person(acc_id)
| 33.018868 | 117 | 0.727429 | 3.21875 |
a3b5d7aa5ea18f89e32184147020da8635c7e0b3
| 1,392 |
java
|
Java
|
src/Offer_54/is.java
|
dancheren/ToOffer
|
9cc4fcfe30844ae9df695a896422a6a4665e0a33
|
[
"Apache-2.0"
] | 1 |
2017-07-21T07:38:42.000Z
|
2017-07-21T07:38:42.000Z
|
src/Offer_54/is.java
|
dancheren/ToOffer
|
9cc4fcfe30844ae9df695a896422a6a4665e0a33
|
[
"Apache-2.0"
] | null | null | null |
src/Offer_54/is.java
|
dancheren/ToOffer
|
9cc4fcfe30844ae9df695a896422a6a4665e0a33
|
[
"Apache-2.0"
] | null | null | null |
package Offer_54;
public class is {
static int i = 0;
public static void main(String[] args) {
String str1 = "+500";
char[] str = str1.toCharArray();
System.out.println(isNumeric(str));
}
public static boolean isNumeric(char[] str) {
if(str == null || str.length ==0)
return false;
boolean numeric = true;
if(str[i] =='+' || str[i] == '-'){
i++;
}
scanNum(str);
if(i<str.length){
if(str[i] == '.'){
i ++;
if(i>=str.length) return numeric;
scanNum(str);
if(str[i] == 'e' || str[i] == 'E'){
numeric = is(str);
}
}else if(str[i] == 'e' || str[i] == 'E'){
numeric = is(str);
}else{
numeric = false;
}
}
return numeric;
}
public static void scanNum(char[] str){
while(i<str.length && (str[i] -'0') >=0 && (str[i] - '0') <= 9){
i++;
}
}
public static boolean is(char[] str){
if(str[i] != 'e' && str[i] != 'E'){
return false;
}
i ++;
if(str[i] == '+' || str[i] == '-'){
i++;
}
if(i>str.length)
return false;
scanNum(str);
return (i>=str.length)?true:false;
}
}
| 25.309091 | 72 | 0.395115 | 3.15625 |
b367a02900dd463c69a7fb0a5e637daeba1ce60e
| 5,626 |
py
|
Python
|
bucket/models.py
|
passuf/blackhole
|
00166b2a294c120890202e7d0fb1f4f36d2e4fd2
|
[
"MIT"
] | null | null | null |
bucket/models.py
|
passuf/blackhole
|
00166b2a294c120890202e7d0fb1f4f36d2e4fd2
|
[
"MIT"
] | 6 |
2021-04-08T19:58:55.000Z
|
2022-02-10T08:31:37.000Z
|
bucket/models.py
|
passuf/blackhole
|
00166b2a294c120890202e7d0fb1f4f36d2e4fd2
|
[
"MIT"
] | null | null | null |
import json
import uuid
import traceback
from picklefield.fields import PickledObjectField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
class Bucket(models.Model):
    """A named endpoint that collects incoming HTTP requests."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4)
    title = models.CharField(_('Title'), max_length=255)
    description = models.CharField(_('Description'), max_length=255, blank=True, null=True)
    active = models.BooleanField(_('Active'), default=True)
    created_at = models.DateTimeField(_('Created at'), auto_now_add=True)
    modified_at = models.DateTimeField(_('Modified at'), auto_now=True)

    @property
    def url(self):
        """Public capture URL for this bucket, e.g. ``<BASE_URL>/<uuid>/``."""
        return f'{settings.BASE_URL}/{self.id}/'

    def __str__(self):
        return self.title
class Request(models.Model):
    """One captured HTTP request belonging to a :class:`Bucket`.

    Most fields are nullable because parsing of the incoming request is
    best-effort (see the factory below in this module).
    """
    id = models.UUIDField(primary_key=True, default=uuid.uuid4)
    bucket = models.ForeignKey(Bucket, related_name='requests', on_delete=models.CASCADE)
    comments = models.TextField(_('Comments'), blank=True, null=True)
    method = models.CharField(_('Method'), max_length=255, blank=True, null=True)
    headers = models.TextField(_('Headers'), blank=True, null=True)
    remote_address = models.CharField(_('Remote Address'), max_length=255, blank=True, null=True)
    host = models.CharField(_('Host'), max_length=255, blank=True, null=True)
    http_referer = models.CharField(_('HTTP Referer'), max_length=255, blank=True, null=True)
    path = models.CharField(_('Path'), max_length=255, blank=True, null=True)
    query_strings = models.TextField(_('Query Strings'), blank=True, null=True)
    body = models.TextField(_('Body'), blank=True, null=True)
    response_code = models.IntegerField(_('Response Code'), blank=True, null=True)
    # NOTE(review): label not wrapped in _() unlike the other fields —
    # possible i18n omission (left unchanged here).
    form_data = models.TextField('Form Data', blank=True, null=True)
    cookies = models.TextField(_('Cookies'), blank=True, null=True)
    error = models.TextField(_('Error'), blank=True, null=True)
    custom_values = PickledObjectField(_('Custom Values'), blank=True, null=True)
    created_at = models.DateTimeField(_('Created at'), auto_now_add=True)
    modified_at = models.DateTimeField(_('Modified at'), auto_now=True)

    def __str__(self):
        if self.method and self.path:
            return f'{self.method} {self.path}'
        return str(self.created_at)
class RequestFactory:
    """Builds and persists a Request row from an incoming Django request.

    Every section of the parse is wrapped in its own try/except so one
    malformed piece cannot prevent the rest from being captured; failures
    are accumulated into ``errors`` and stored on the row at the end.
    """

    @staticmethod
    def from_request(request, bucket):
        # Create the row first so a primary key exists even if parsing fails.
        req = Request.objects.create(
            bucket=bucket,
            path=request.path,
            method=request.method,
            host=request.get_host(),
            custom_values={},
        )
        errors = ''
        header = dict(request.META)

        # Parse the HEADER
        try:
            # Find non-serializable keys
            keys_to_remove = []
            for key in header.keys():
                # Remove WSGI related keys
                if key.startswith('wsgi.'):
                    keys_to_remove.append(key)
                    continue
                # Try if object is serializable
                try:
                    json.dumps(header[key])
                except Exception:
                    keys_to_remove.append(key)
                    continue
            # Remove invalid keys
            for key in keys_to_remove:
                header.pop(key, None)
            # Finally try to parse the header
            req.headers = json.dumps(header)
        except Exception as e:
            print(e, traceback.format_exc())
            # NOTE(review): on failure the raw dict is assigned to a
            # TextField; it will be stringified on save — confirm intended.
            req.headers = header
            errors += '\n' + str(traceback.format_exc())

        # Parse the remote address
        try:
            # Honour proxies: X-Forwarded-For is "client, proxy1, ..." so the
            # first entry is the original client.
            x_forwarded_for = header.get('HTTP_X_FORWARDED_FOR')
            if x_forwarded_for:
                req.remote_address = x_forwarded_for.split(',')[0]
            else:
                req.remote_address = header.get('REMOTE_ADDR')
        except Exception as e:
            print(e, traceback.format_exc())
            req.remote_address = 'error'
            errors += '\n' + str(traceback.format_exc())

        # Parse the HTTP Referer
        try:
            req.http_referer = header.get('HTTP_REFERER')
        except Exception as e:
            print(e, traceback.format_exc())
            req.http_referer = 'error'
            errors += '\n' + str(traceback.format_exc())

        # Parse the body
        try:
            req.body = json.dumps(request.body.decode('utf-8'))
        except Exception as e:
            print(e, traceback.format_exc())
            req.body = 'error'
            errors += '\n' + str(traceback.format_exc())

        # Parse GET params
        try:
            req.query_strings = json.dumps(request.GET)
        except Exception as e:
            print(e, traceback.format_exc())
            req.query_strings = 'error'
            errors += '\n' + str(traceback.format_exc())

        # Parse POST params
        try:
            req.form_data = json.dumps(request.POST)
        except Exception as e:
            print(e, traceback.format_exc())
            req.form_data = 'error'
            errors += '\n' + str(traceback.format_exc())

        # Parse Cookies
        try:
            req.cookies = json.dumps(request.COOKIES)
        except Exception as e:
            print(e, traceback.format_exc())
            req.cookies = 'error'
            errors += '\n' + str(traceback.format_exc())

        # Any accumulated failure marks the whole capture as a 400.
        if errors:
            req.error = errors
            req.response_code = 400

        # Save the request
        req.save()
        return req
| 35.607595 | 97 | 0.60096 | 3.078125 |
4c21ea50532704f8d1a6a12bc29af60f7e584c06
| 3,730 |
lua
|
Lua
|
InGameAvatarEditor/src/ServerScriptService/AvatarEditorInGameSetup/AvatarEditorInGame/Modules/Common/Text.lua
|
MirayXS/avatar
|
7c78513fbe9587915700a0a5fd3c15d5f23596d2
|
[
"RSA-MD"
] | 41 |
2021-04-30T18:27:45.000Z
|
2022-03-23T21:12:57.000Z
|
InGameAvatarEditor/src/ServerScriptService/AvatarEditorInGameSetup/AvatarEditorInGame/Modules/Common/Text.lua
|
MirayXS/avatar
|
7c78513fbe9587915700a0a5fd3c15d5f23596d2
|
[
"RSA-MD"
] | 3 |
2021-08-24T20:07:47.000Z
|
2022-02-15T19:40:13.000Z
|
InGameAvatarEditor/src/ServerScriptService/AvatarEditorInGameSetup/AvatarEditorInGame/Modules/Common/Text.lua
|
MirayXS/avatar
|
7c78513fbe9587915700a0a5fd3c15d5f23596d2
|
[
"RSA-MD"
] | 25 |
2021-05-02T14:33:04.000Z
|
2022-03-17T20:28:07.000Z
|
local TextMeasureTemporaryPatch = true

local TextService = game:GetService("TextService")

local Text = {}

-- FYI: Any number greater than 2^30 will make TextService:GetTextSize give invalid results
local MAX_BOUND = 10000

-- TODO(CLIPLAYEREX-1633): We can remove this padding patch after fixing TextService:GetTextSize sizing bug
-- Extra padding added to every measurement while the patch flag above is on.
Text._TEMP_PATCHED_PADDING = Vector2.new(0, 0)
if TextMeasureTemporaryPatch then
	Text._TEMP_PATCHED_PADDING = Vector2.new(2, 2)
end

-- Wrapper function for GetTextSize
-- Returns the rendered size of `text` as a Vector2, padded per the patch.
function Text.GetTextBounds(text, font, fontSize, bounds)
	return TextService:GetTextSize(text, fontSize, font, bounds) + Text._TEMP_PATCHED_PADDING
end

-- Width in pixels of `text` measured without any wrapping constraint.
function Text.GetTextWidth(text, font, fontSize)
	return Text.GetTextBounds(text, font, fontSize, Vector2.new(MAX_BOUND, MAX_BOUND)).X
end

-- Height in pixels of `text` when wrapped at `widthCap` pixels.
function Text.GetTextHeight(text, font, fontSize, widthCap)
	return Text.GetTextBounds(text, font, fontSize, Vector2.new(widthCap, MAX_BOUND)).Y
end
-- TODO(CLIPLAYEREX-391): Kill these truncate functions once we have official support for text truncation
-- Truncates `text` so that, with `overflowMarker` appended, it fits within
-- `widthInPixels`. Returns the text unchanged when it already fits, and the
-- empty string when not even one grapheme plus the marker fits.
function Text.Truncate(text, font, fontSize, widthInPixels, overflowMarker)
	overflowMarker = overflowMarker or ""
	if Text.GetTextWidth(text, font, fontSize) > widthInPixels then
		-- A binary search may be more efficient
		local lastText = ""
		-- Walk grapheme boundaries so multi-byte characters are never split.
		for _, stopIndex in utf8.graphemes(text) do
			local newText = string.sub(text, 1, stopIndex) .. overflowMarker
			if Text.GetTextWidth(newText, font, fontSize) > widthInPixels then
				return lastText
			end
			lastText = newText
		end
	else -- No truncation needed
		return text
	end
	return ""
end

-- Truncates the given TextLabel's Text in place to fit its absolute width.
function Text.TruncateTextLabel(textLabel, overflowMarker)
	textLabel.Text = Text.Truncate(textLabel.Text, textLabel.Font,
		textLabel.TextSize, textLabel.AbsoluteSize.X, overflowMarker)
end
-- Remove whitespace from the beginning and end of the string
-- Raises at error level 2 (blaming the caller) when `str` is not a string.
function Text.Trim(str)
	if type(str) ~= "string" then
		error(string.format("Text.Trim called on non-string type %s.", type(str)), 2)
	end
	-- Outer parentheses drop gsub's second return value (the match count).
	return (str:gsub("^%s*(.-)%s*$", "%1"))
end

-- Remove whitespace from the end of the string
function Text.RightTrim(str)
	if type(str) ~= "string" then
		error(string.format("Text.RightTrim called on non-string type %s.", type(str)), 2)
	end
	return (str:gsub("%s+$", ""))
end

-- Remove whitespace from the beginning of the string
function Text.LeftTrim(str)
	if type(str) ~= "string" then
		error(string.format("Text.LeftTrim called on non-string type %s.", type(str)), 2)
	end
	return (str:gsub("^%s+", ""))
end

-- Replace multiple whitespace with one; remove leading and trailing whitespace
function Text.SpaceNormalize(str)
	if type(str) ~= "string" then
		error(string.format("Text.SpaceNormalize called on non-string type %s.", type(str)), 2)
	end
	return (str:gsub("%s+", " "):gsub("^%s+" , ""):gsub("%s+$" , ""))
end
-- Splits a string by the provided pattern into a table. The pattern is interpreted as plain text.
-- Raises at error level 2 for non-string arguments or an empty pattern.
function Text.Split(str, pattern)
	if type(str) ~= "string" then
		error(string.format("Text.Split called on non-string type %s.", type(str)), 2)
	elseif type(pattern) ~= "string" then
		error(string.format("Text.Split called with a pattern that is non-string type %s.", type(pattern)), 2)
	elseif pattern == "" then
		error("Text.Split called with an empty pattern.", 2)
	end

	local pieces = {}
	local searchFrom = 1
	-- Plain-text find (fourth argument true disables Lua patterns).
	local matchStart, matchEnd = string.find(str, pattern, searchFrom, true)
	while matchStart do
		pieces[#pieces + 1] = string.sub(str, searchFrom, matchStart - 1)
		searchFrom = matchEnd + 1
		matchStart, matchEnd = string.find(str, pattern, searchFrom, true)
	end
	-- Trailing segment after the final separator (the whole string when
	-- the separator never occurs).
	pieces[#pieces + 1] = string.sub(str, searchFrom)

	return pieces
end

return Text
| 33.00885 | 107 | 0.741019 | 3 |
0ab1673123e8a2c4d027e40e14469faa294e5640
| 1,419 |
cs
|
C#
|
Module 1/[01] CSharp/C# Fundamentals/[03] Operators-And-Expressions[lecture-06]/05.ThirdDigitSeven/ThirdDigitSeven.cs
|
VProfirov/Telerik-Academy-BetaRun
|
dfa72a2e5d339f63c6479b28de9fb025dca42b20
|
[
"MIT"
] | null | null | null |
Module 1/[01] CSharp/C# Fundamentals/[03] Operators-And-Expressions[lecture-06]/05.ThirdDigitSeven/ThirdDigitSeven.cs
|
VProfirov/Telerik-Academy-BetaRun
|
dfa72a2e5d339f63c6479b28de9fb025dca42b20
|
[
"MIT"
] | 1 |
2016-10-19T20:37:51.000Z
|
2016-10-19T20:37:51.000Z
|
Module 1/[01] CSharp/C# Fundamentals/[03] Operators-And-Expressions[lecture-06]/05.ThirdDigitSeven/ThirdDigitSeven.cs
|
VProfirov/Telerik-Academy-BetaRun
|
dfa72a2e5d339f63c6479b28de9fb025dca42b20
|
[
"MIT"
] | null | null | null |
/*Problem 5. Third Digit is 7?
Write an expression that checks for given integer if its third digit from right-to-left is 7.
* Examples:
n Third digit 7?
5 false
701 true
9703 true
877 false
777877 false
9999799 true
*/
namespace ThirdDigitSeven
{
    using System;

    class ThirdDigitSeven
    {
        static void Main()
        {
            int number = int.Parse(Console.ReadLine());

            // Shift the third-from-right digit into the ones place with an
            // integer division by 100, then isolate it with modulo 10.
            // (String-based indexing would be a safer general approach.)
            bool thirdDigitIsSeven = (number / 100) % 10 == 7;

            // Preserve the original program's asymmetric output: "True"
            // (capitalised, from WriteLine(true)) on a match, but a
            // lower-cased "false" otherwise.
            Console.WriteLine(thirdDigitIsSeven ? "True" : "false");
        }
    }
}
| 29.5625 | 123 | 0.55673 | 3.359375 |
2970640a8fc38f6a64b24501a5dd8f1ca8ed0926
| 2,942 |
dart
|
Dart
|
example/book_store/lib/models.dart
|
theBenForce/routemaster
|
368431cd51a38c820e84273fe26a5a9780aa4ad2
|
[
"MIT"
] | 246 |
2021-03-13T21:27:55.000Z
|
2022-03-31T11:44:19.000Z
|
example/book_store/lib/models.dart
|
theBenForce/routemaster
|
368431cd51a38c820e84273fe26a5a9780aa4ad2
|
[
"MIT"
] | 196 |
2021-03-06T16:02:28.000Z
|
2022-03-31T06:05:20.000Z
|
example/book_store/lib/models.dart
|
theBenForce/routemaster
|
368431cd51a38c820e84273fe26a5a9780aa4ad2
|
[
"MIT"
] | 35 |
2021-03-16T12:12:29.000Z
|
2022-03-31T21:49:53.000Z
|
import 'package:flutter/foundation.dart';
class AppState extends ChangeNotifier {
  AppState({String? username}) : _username = username;

  /// Whether a user is currently signed in.
  bool get isLoggedIn => _username != null;

  // Nullable fields default to null; the explicit `= null` initializer was
  // redundant (flagged by the `avoid_init_to_null` lint) and is dropped.
  String? _username;

  String? get username => _username;

  set username(String? value) {
    _username = value;
    notifyListeners();
  }

  // Never reassigned, only mutated in place, so it can be `final`.
  final List<Wishlist> _wishlists = [
    Wishlist(
      bookIds: ['1', '2'],
      username: 'dash',
      id: '123',
      title: "Dash's birthday wishlist",
    )
  ];

  /// Read-only snapshot of the user's wishlists.
  Iterable<Wishlist> get wishlists => List.unmodifiable(_wishlists);

  /// Adds [wishlist] and notifies listeners so the UI rebuilds.
  void addWishlist(Wishlist wishlist) {
    _wishlists.add(wishlist);
    notifyListeners();
  }
}
/// Immutable description of a single book in the catalogue.
class Book {
  final String id;
  final String title;
  final String description;
  final DateTime releaseDate;
  final List<BookCategory> categories;
  // Whether the book is highlighted in the "staff picks" section.
  final bool isStaffPick;

  Book({
    required this.id,
    required this.title,
    required this.description,
    required this.releaseDate,
    required this.categories,
    required this.isStaffPick,
  });
}
/// Top-level catalogue sections.
enum BookCategory {
  fiction,
  nonFiction,
}

/// Presentation helpers for [BookCategory].
extension BookCategoryExtension on BookCategory {
  /// Label shown to the user.
  String get displayName {
    switch (this) {
      case BookCategory.fiction:
        return 'Fiction';
      case BookCategory.nonFiction:
        return 'Non-fiction';
    }
  }

  /// Value used in URL query strings for this category.
  String get queryParam {
    switch (this) {
      case BookCategory.fiction:
        return 'fiction';
      case BookCategory.nonFiction:
        return 'nonfiction';
    }
  }
}
/// In-memory, hard-coded stand-in for a real book database.
class BooksDatabase {
  // Unmodifiable so callers cannot mutate the sample catalogue.
  final Iterable<Book> books = List.unmodifiable([
    Book(
      id: '1',
      title: 'Hummingbirds for Dummies',
      description: "Find out all about Hummingbirds, and how awesome they are.",
      releaseDate: DateTime(1985, 3, 23),
      categories: [BookCategory.nonFiction],
      isStaffPick: true,
    ),
    Book(
      id: '2',
      title: "Of Hummingbirds And Men",
      description: "blah blah blha",
      releaseDate: DateTime(1923, 1, 1),
      categories: [BookCategory.fiction],
      isStaffPick: false,
    ),
    Book(
      id: '3',
      title: "Gone With The Hummingbirds",
      description:
          "Set in the American South, this book tells the story of Dash O'Bird, the strong-willed daughter...",
      releaseDate: DateTime(1936, 6, 30),
      categories: [BookCategory.fiction],
      isStaffPick: false,
    ),
    Book(
      id: '4',
      title: "Harry Potter and the Chamber of Hummingbirds",
      description: "Wizard and Hummingbirds! What more could you want?",
      releaseDate: DateTime(1998, 7, 2),
      categories: [BookCategory.fiction],
      isStaffPick: true,
    ),
  ]);
}
/// A user's named collection of book ids.
class Wishlist {
  final String title;
  final String id;
  // Null when the wishlist has no owner (e.g. a shared/anonymous list).
  final String? username;
  final List<String> bookIds;

  /// App-relative URL used to share this wishlist.
  String get shareUrl => '/wishlist/shared/$id';

  Wishlist({
    required this.id,
    required this.title,
    required this.username,
    required this.bookIds,
  });
}
| 23.165354 | 111 | 0.64344 | 3.03125 |
20c366c0230ce658e743c1b10159d4652b571311
| 970 |
py
|
Python
|
LTN111/cycle.py
|
benjaveri/rpi
|
ab0b7f378917f14776d8dee334b748cad65de601
|
[
"BSD-3-Clause"
] | null | null | null |
LTN111/cycle.py
|
benjaveri/rpi
|
ab0b7f378917f14776d8dee334b748cad65de601
|
[
"BSD-3-Clause"
] | null | null | null |
LTN111/cycle.py
|
benjaveri/rpi
|
ab0b7f378917f14776d8dee334b748cad65de601
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
import RPi.GPIO as GPIO
import time
# pinoutBCM scheme - http://pinout.xyz/
D = [4,17,18,27,22,23,24,25]
E = 11
RW = 8
RS = 7
def wr(rs,rw,data):
    # Write one byte to the LCD over the 8-bit parallel bus.
    # rs: register select (0 = command, 1 = character data)
    # rw: read/write line (0 = write)
    # data: the byte; each bit is driven onto one of the D pins.
    # NOTE(review): pin roles (E/RS/RW/D0-D7) suggest an HD44780-compatible
    # display — confirm against the wiring.
    GPIO.output(RS,True if rs else False)
    GPIO.output(RW,True if rw else False)
    time.sleep(0.001)
    # Data is latched while E (enable) is high.
    GPIO.output(E,True)
    for i in range(8):
        GPIO.output(D[i],True if (data & (1<<i)) else False)
    time.sleep(0.001)
    GPIO.output(E,False)
    time.sleep(0.001)
def writea(a):
    # Send an iterable of raw character codes to the display.
    for code in a:
        wr(1, 0, code)

def write(s):
    # Send a text string to the display, one character at a time.
    for character in s:
        wr(1, 0, ord(character))
# setup
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(E,GPIO.OUT)
GPIO.output(E,False)
GPIO.setup(RW,GPIO.OUT)
GPIO.setup(RS,GPIO.OUT)
for i in D: GPIO.setup(i,GPIO.OUT)

# go
wr(0,0,0x38) # 8-bit input mode
wr(0,0,1) # cls
wr(0,0,12) # display on

# cycle characters
# Fill both 8-character lines with character code `i`, then advance the
# code once per tick (wrapping at 255) — cycles the whole character set.
i = 0
while 1:
    # presumably "set cursor to start of line 1/line 2" commands — confirm
    # against the display's command set
    wr(0,0,0x80)
    writea([ i for j in range(8) ])
    wr(0,0,0xc0)
    writea([ i for j in range(8) ])
    i = (i+1) & 255
    time.sleep(.125)
| 17.017544 | 56 | 0.628866 | 3.28125 |
4ce1139bf3eaf47b2f4ffe85a1a792c88e97ccd4
| 2,765 |
py
|
Python
|
core.py
|
NeelShah18/sentencelabel
|
57150f410899dc8c9c5262bf32215b61f7685c40
|
[
"MIT"
] | 1 |
2018-09-01T21:04:46.000Z
|
2018-09-01T21:04:46.000Z
|
core.py
|
NeelShah18/sentencelabel
|
57150f410899dc8c9c5262bf32215b61f7685c40
|
[
"MIT"
] | null | null | null |
core.py
|
NeelShah18/sentencelabel
|
57150f410899dc8c9c5262bf32215b61f7685c40
|
[
"MIT"
] | null | null | null |
from getsenti import base_emoji, base_text
from clean import clean_it
from nltk import word_tokenize
from textblob import TextBlob
def get_sentiment(text="Sample text data! And I am positive :-)", on_base = "t", flag_prob=False):
    '''get_sentiment returns the sentiment of the text, or of the emoji and
    emoticons in the text.

    Args:
        text (str): Sentence or paragraph to calculate sentiment for.
        on_base (str): 't' for text sentiment, 'e' for emoji/emoticon sentiment.
        flag_prob (bool): True  --> 5 criteria 0,1,2,3,4 where 2 (neutral),
                                    4 (very positive), 1 (very negative)
                          False --> probability with 2-decimal accuracy
                                    between -1 (negative) and 1 (positive)

    Returns:
        dict: the sentiment result, or {"error": ...} for a bad `on_base`.

    Defaults:
        text = "Sample text data! And I am positive :-)"
        on_base = "t"
        flag_prob = False
    '''
    # (The original re-assigned each parameter to itself; those no-ops are
    # removed. `result` is assigned in every branch, so no pre-init needed.)
    if on_base == "e":
        result = base_emoji(text, flag_prob)
    elif on_base == "t":
        result = base_text(text, flag_prob)
    else:
        # Bug fix: `{error: ...}` used the undefined name `error` as a dict
        # key, raising NameError instead of returning the error dictionary.
        result = {"error": "Choose right on_base, it must be e or t, wehere e = emoji and t = text."}
    return result
def ngram(text, gram=2):
    '''Convert text to a list of n-grams.

    Args:
        text (str): Text to convert into n-grams.
        gram (int): Size of each n-gram.

    Returns:
        list: list of n-grams as produced by TextBlob.ngrams.

    Defaults:
        gram = 2
    '''
    __analyzer = TextBlob(str(text))
    return __analyzer.ngrams(n=gram)

def tokenizer(text):
    '''Return the word tokens of the text (via nltk.word_tokenize).

    Args:
        text (str): Text to convert into tokens.

    Returns:
        list: tokens of the given text.
    '''
    return word_tokenize(str(text))

def do_clean(text, settings):
    '''Apply the selected cleaning operations to the text (delegates to
    clean.clean_it).

    Args:
        text (str): Text to be cleaned.
        settings (dict): Flags selecting which operations to apply; True
            means "apply this operation to the given text".

    Defaults (all operations off):
        settings = {
            'to_upeer' : False,
            'to_lower' : False,
            'remove_special' : False,
            'remove_number' : False,
            'spelling_correction' : False,
            'remove_stopwords' : False,
            'remove_punchuation' : False,
        }
    '''
    return clean_it(text, settings)

def test():
    '''Smoke test for the sentencelabel library: exercises sentiment,
    n-grams and tokenization and prints the results.
    '''
    #test_data = "I love python 👨 :-)"
    test_data = "I love python :-) why not"
    result = get_sentiment(text=test_data, on_base='e', flag_prob=False)
    print(result)
    x = ngram("ever use may thought sat high school english class grown find writing far ever would expected",3)
    print(x)
    print(tokenizer("ever use may thought sat high school english class grown find writing far ever would expected"))
    return None

if __name__ == '__main__':
    test()
| 27.376238 | 114 | 0.709222 | 3.359375 |
d035a85216f35451abf857aec3f1e70f4c974c4e
| 2,663 |
cpp
|
C++
|
CodechefCodes/MSTICK.cpp
|
debashish05/competitive_programming
|
e2c0a7a741ac988e4393eda3b5006d6b8c88a5a9
|
[
"MIT"
] | null | null | null |
CodechefCodes/MSTICK.cpp
|
debashish05/competitive_programming
|
e2c0a7a741ac988e4393eda3b5006d6b8c88a5a9
|
[
"MIT"
] | null | null | null |
CodechefCodes/MSTICK.cpp
|
debashish05/competitive_programming
|
e2c0a7a741ac988e4393eda3b5006d6b8c88a5a9
|
[
"MIT"
] | null | null | null |
#include<bits/stdc++.h>
#define ll long long int
#define loop(k) for(i=0;i<k;++i)
#define loop2(k,l) for(j=k;j<l;++j)
#define mod 100000000
using namespace std;
int main()
{
    // NOTE(review): appears to solve Codechef MSTICK (burning matchsticks):
    // answer = max(maxout + min, min + (max - min) / 2) per query — confirm
    // against the problem statement.
    std::ios_base::sync_with_stdio(false);//cin.tie(NULL);
    ll t=1,i=0,j=0;
    //cin>>t;
    while(t--){
        int n;cin>>n;
        ll a[n];
        loop(n)cin>>a[i];
        // Sqrt decomposition: len ~ sqrt(n) is the block size; bmax/bmin
        // hold per-block max/min so range queries run in O(sqrt(n)).
        int len=(int)sqrt(n)+1;
        ll bmax[len]={0},bmin[len];
        loop(len)bmin[i]=100000000;
        loop(n){
            bmax[i/len]=max(bmax[i/len],a[i]);
        }
        loop(n){
            bmin[i/len]=min(bmin[i/len],a[i]);
        }
        int q;cin>>q;
        while(q--){
            int l,r;
            cin>>l>>r;
            int r1=r;
            // Careful: these locals shadow std::max/std::min below.
            double max=a[l],min=a[l];
            ll lt=l/len,rt=r/len;
            if(lt==rt){ //find max and min bw l r
                // Both endpoints in the same block: plain scan.
                for(i=l;i<=r;++i){
                    if(a[i]>max)max=a[i];
                    if(a[i]<min)min=a[i];
                }
            }
            else{
                // Partial left block, whole middle blocks, partial right block.
                for(i=l;i<(lt+1)*len;++i){
                    if(a[i]>max)max=a[i];
                    if(a[i]<min)min=a[i];
                }
                for(i=lt+1;i<rt;++i){
                    if(bmax[i]>max)max=bmax[i];
                    if(bmin[i]<min)min=bmin[i];
                }
                for(i=rt*len;i<=r;++i){
                    if(a[i]>max)max=a[i];
                    if(a[i]<min)min=a[i];
                }
            }
            double maxout=0; //find out max in array a excluding l to r
            // Scan prefix [0, l-1] with the same block technique.
            r=l-1;l=0;
            if(r>=0){
                rt=r/len;lt=l/len; //0-l-1
                if(lt==rt){
                    for(i=l;i<=r;++i){
                        if(a[i]>maxout)maxout=a[i];
                    }
                }
                else{
                    for(i=l;i<(lt+1)*len;++i){
                        if(a[i]>maxout)maxout=a[i];
                    }
                    for(i=lt+1;i<rt;++i){
                        if(bmax[i]>maxout)maxout=bmax[i];
                    }
                    for(i=rt*len;i<=r;++i){
                        if(a[i]>maxout)maxout=a[i];
                    }
                }
            }
            // Then the suffix [r1+1, n-1].
            l=r1+1;r=n-1;
            if(l<=n-1){
                rt=r/len;lt=l/len;
                if(lt==rt){
                    for(i=l;i<=r;++i){
                        if(a[i]>maxout)maxout=a[i];
                    }
                }
                else{
                    for(i=l;i<(lt+1)*len;++i){
                        if(a[i]>maxout)maxout=a[i];
                    }
                    for(i=lt+1;i<rt;++i){
                        if(bmax[i]>maxout)maxout=bmax[i];
                    }
                    for(i=rt*len;i<=r;++i){
                        if(a[i]>maxout)maxout=a[i];
                    }
                }
            }
            //cout<<maxout<<" "<<min<<" "<<max<<"\n";
            // Answer is the larger of (outside max + inside min) and
            // (inside min + half the inside spread).
            double temp=maxout+min;
            max-=min;
            double temp2=min+max/2;
            //cout<<temp<<" "<<temp2<<"\n";
            if(temp<temp2)temp=temp2;
            cout<<fixed<<setprecision(1)<<temp<<"\n";
        }
    }
    return 0;
}
| 25.605769 | 68 | 0.38528 | 3.234375 |
afe21ae7697b32b8ae8cdea00548132b4f71a069
| 5,938 |
py
|
Python
|
reddit_place/models.py
|
dkasper/reddit-plugin-place-opensource
|
453892c6f0a419f49c86759c60c2bdf64e13354a
|
[
"BSD-3-Clause"
] | 780 |
2017-04-13T17:52:13.000Z
|
2018-06-26T23:16:11.000Z
|
reddit_place/models.py
|
dkasper/reddit-plugin-place-opensource
|
453892c6f0a419f49c86759c60c2bdf64e13354a
|
[
"BSD-3-Clause"
] | 3 |
2017-04-18T20:23:53.000Z
|
2018-05-11T09:57:40.000Z
|
reddit_place/models.py
|
dkasper/reddit-plugin-place-opensource
|
453892c6f0a419f49c86759c60c2bdf64e13354a
|
[
"BSD-3-Clause"
] | 74 |
2017-04-13T18:18:17.000Z
|
2018-06-27T08:33:38.000Z
|
from datetime import datetime
import json
import struct
import time
from pycassa.system_manager import TIME_UUID_TYPE, INT_TYPE
from pycassa.types import CompositeType, IntegerType
from pycassa.util import convert_uuid_to_time
from pylons import app_globals as g
from pylons import tmpl_context as c
from r2.lib.db import tdb_cassandra
CANVAS_ID = "real_1"
CANVAS_WIDTH = 1000
CANVAS_HEIGHT = 1000
class RedisCanvas(object):
    """Redis-backed bitmap of the live canvas — the hot read/write path
    (Cassandra keeps the durable per-pixel history; see Pixel/Canvas)."""

    @classmethod
    def get_board(cls):
        # We plan on heavily caching this board bitmap. We include the
        # timestamp as a 32 bit uint at the beginning so the client can make a
        # determination as to whether the cached state is too old. If it's too
        # old, the client will hit the non-fastly-cached endpoint directly.
        timestamp = time.time()

        # If no pixels have been placed yet, we'll get back None. This will
        # cause concatenation to fail below, so we turn it into a string
        # instead.
        bitmap = c.place_redis.get(CANVAS_ID) or ''

        return struct.pack('I', int(timestamp)) + bitmap

    @classmethod
    def set_pixel(cls, color, x, y):
        # The canvas is stored in one long redis bitfield, offset by the
        # coordinates of the pixel. For instance, for a canvas of width 1000,
        # the offset for position (1, 1) would be 1001. redis conveniently
        # lets us ignore our integer size when specifying our offset, doing the
        # calculation for us. For instance, rather than (3, 0) being sent as
        # offset 72 for a 24-bit integer, we can just use the offset 3.
        #
        # https://redis.io/commands/bitfield
        #
        UINT_SIZE = 'u4'  # Max value: 15
        offset = y * CANVAS_WIDTH + x
        c.place_redis.execute_command(
            'bitfield', CANVAS_ID, 'SET',
            UINT_SIZE, '#%d' % offset, color)
class Pixel(tdb_cassandra.UuidThing):
    """Durable (Cassandra) record of a single pixel placement."""
    _use_db = True
    _connection_pool = 'main'
    _read_consistency_level = tdb_cassandra.CL.QUORUM
    _write_consistency_level = tdb_cassandra.CL.QUORUM

    # Properties stored/retrieved as integers.
    _int_props = (
        'x',
        'y',
    )

    @classmethod
    def create(cls, user, color, x, y):
        """Record a placement everywhere: Cassandra row, per-user index,
        canvas lookup table, and the live redis bitmap."""
        # We dual-write to cassandra to allow the frontend to get information
        # on a particular pixel, as well as to have a backup, persistent state
        # of the board in case something goes wrong with redis.
        pixel = cls(
            canvas_id=CANVAS_ID,
            user_name=user.name if user else '',
            user_fullname=user._fullname if user else '',
            color=color,
            x=x,
            y=y,
        )
        pixel._commit()

        Canvas.insert_pixel(pixel)

        # Anonymous placements are allowed; only track a per-user index
        # when we actually have a user.
        if user:
            PixelsByParticipant.add(user, pixel)

        RedisCanvas.set_pixel(color, x, y)

        g.stats.simple_event('place.pixel.create')

        return pixel

    @classmethod
    def get_last_placement_datetime(cls, user):
        # Used for placement rate limiting / cooldowns by callers.
        return PixelsByParticipant.get_last_pixel_datetime(user)

    @classmethod
    def get_pixel_at(cls, x, y):
        """Return a dict describing the pixel at (x, y), or None if empty."""
        pixel_dict = Canvas.get(x, y)

        if not pixel_dict:
            return None

        return dict(
            user_name=pixel_dict["user_name"],
            color=pixel_dict["color"],
            x=x,
            y=y,
            timestamp=pixel_dict["timestamp"],
        )
class PixelsByParticipant(tdb_cassandra.View):
    """Per-user time-ordered index of placed pixels (TIMEUUID columns)."""
    _use_db = True
    _connection_pool = 'main'
    _compare_with = TIME_UUID_TYPE
    _read_consistency_level = tdb_cassandra.CL.QUORUM
    _write_consistency_level = tdb_cassandra.CL.QUORUM

    @classmethod
    def _rowkey(cls, user):
        # NOTE(review): the separator is '_ ' (underscore + space), which
        # looks like a typo for '_'; changing it now would orphan existing
        # rows, so it is documented rather than fixed.
        return CANVAS_ID + "_ " + user._fullname

    @classmethod
    def add(cls, user, pixel):
        """Append a pixel to the user's row, keyed by the pixel's TIMEUUID."""
        rowkey = cls._rowkey(user)
        pixel_dict = {
            "user_fullname": pixel.user_fullname,
            "color": pixel.color,
            "x": pixel.x,
            "y": pixel.y,
        }
        columns = {pixel._id: json.dumps(pixel_dict)}
        cls._cf.insert(rowkey, columns)

    @classmethod
    def get_last_pixel_datetime(cls, user):
        """UTC datetime of the user's most recent placement, or None."""
        rowkey = cls._rowkey(user)
        try:
            # Reversed read with count 1 fetches only the newest column.
            columns = cls._cf.get(rowkey, column_count=1, column_reversed=True)
        except tdb_cassandra.NotFoundException:
            return None
        u = columns.keys()[0]
        # TIMEUUIDs embed their creation time; recover it as a timestamp.
        ts = convert_uuid_to_time(u)
        return datetime.utcfromtimestamp(ts).replace(tzinfo=g.tz)
class Canvas(tdb_cassandra.View):
    """(x, y) -> pixel-JSON lookup table for the whole canvas."""
    _use_db = True
    _connection_pool = 'main'
    # Columns are keyed by the (x, y) coordinate pair.
    _compare_with = CompositeType(IntegerType(), IntegerType())

    """
    Super naive storage for the canvas, everything's in a single row.

    In the future we may want to break it up so that each C* row contains only
    a subset of all rows.  That would spread the data out in the ring and
    would make it easy to grab regions of the canvas.
    """

    @classmethod
    def _rowkey(cls):
        return CANVAS_ID

    @classmethod
    def insert_pixel(cls, pixel):
        """Upsert the pixel at its (x, y); later writes overwrite earlier."""
        columns = {
            (pixel.x, pixel.y): json.dumps({
                "color": pixel.color,
                "timestamp": convert_uuid_to_time(pixel._id),
                "user_name": pixel.user_name,
                "user_fullname": pixel.user_fullname,
            })
        }
        cls._cf.insert(cls._rowkey(), columns)

    @classmethod
    def get(cls, x, y):
        """Return the decoded pixel dict at (x, y), or {} when unset."""
        column = (x, y)
        try:
            row = cls._cf.get(cls._rowkey(), columns=[column])
        except tdb_cassandra.NotFoundException:
            return {}

        d = row.get(column, '{}')
        pixel_dict = json.loads(d)
        return pixel_dict

    @classmethod
    def get_all(cls):
        """Return dict of (x,y) -> color"""
        try:
            # xget streams all columns of the (single) canvas row.
            gen = cls._cf.xget(cls._rowkey())
        except tdb_cassandra.NotFoundException:
            return {}

        return {
            (x, y): json.loads(d) for (x, y), d in gen
        }
| 29.542289 | 79 | 0.613675 | 3.296875 |
da170612789e99b5cd76c4b5399ca41c6e2971af
| 1,148 |
c
|
C
|
I godina/Zimski semestar/OP/I KOLOKVIJUM/2019-2020/I ZADATAK/Zad1-II-Regularni.c
|
TodorovicSrdjan/PMF
|
d3deee21697dad300177fca6090c56d3b8d0cb76
|
[
"MIT"
] | null | null | null |
I godina/Zimski semestar/OP/I KOLOKVIJUM/2019-2020/I ZADATAK/Zad1-II-Regularni.c
|
TodorovicSrdjan/PMF
|
d3deee21697dad300177fca6090c56d3b8d0cb76
|
[
"MIT"
] | null | null | null |
I godina/Zimski semestar/OP/I KOLOKVIJUM/2019-2020/I ZADATAK/Zad1-II-Regularni.c
|
TodorovicSrdjan/PMF
|
d3deee21697dad300177fca6090c56d3b8d0cb76
|
[
"MIT"
] | null | null | null |
#include <stdio.h>
int main(){
    int n,i,j;
    float suma = 0;
    float clan;
    int sumaBrojilac = 0;
    float faktBrojilac = 1;
    float faktI = 1;
    float faktRazlika = 1;

    printf("Unesite n: ");
    scanf("%d", &n);

    if(n>0){
        /* faktBrojilac = (2n)! -- shared numerator of every term. */
        for(i=1; i<=2*n; i++)
            faktBrojilac*=i;
        printf("Brojilac: %.2f\n", faktBrojilac);

        for(i=1; i<=n; i++){
            /* faktI = (2i)! */
            faktI = 1;
            for(j=1;j<=2*i;j++)
                faktI*=j;
            /* faktRazlika = (2n-i)! */
            faktRazlika = 1;
            for(j=1;j<=(2*n-i);j++)
                faktRazlika*=j;
            /* sumaBrojilac = 2 + 4 + ... + 2i, added to each term. */
            sumaBrojilac+=2*i;
            /* Debug trace of every intermediate value. */
            printf("Faktorijel brojilac: %f\n", faktBrojilac);
            printf("Suma brojilac: %d\n", sumaBrojilac);
            printf("Faktorijel I: %f\n", faktI);
            printf("Faktorijel razlika: %f\n", faktRazlika);
            /* term = (2n)! / ((2i)! * (2n-i)!) + (2 + 4 + ... + 2i) */
            clan = faktBrojilac/(faktI*faktRazlika)+sumaBrojilac;
            suma+=clan;
            printf("Clan : %f\n", clan);
            printf("Suma: %f\n", suma);
            printf("===================================\n");
        }
        printf("Suma: %10.5f\n", suma);
    }
    else {
        /* Non-positive input: build a new number from the even digits of
           |n|.  Digits are taken least-significant first and appended with
           *10, so they end up in reverse order of their appearance in n. */
        int noviBroj = 0;
        int dek = 1;  /* NOTE(review): unused leftover variable. */
        int cifra;
        n*=-1;
        while(n>0){
            cifra = n%10;
            if(cifra%2==0){
                noviBroj*=10;
                noviBroj+=cifra;
            }
            n/=10;
        }
        printf("Novi broj je : %d", noviBroj);
    }
}
| 20.5 | 57 | 0.529617 | 3.109375 |
4597f01490d3e396914f5593954bb0b8e628c50c
| 1,574 |
py
|
Python
|
devproject/core/management/commands/sync_developers.py
|
francisbrito/take-home-assignment
|
0eafede75666aa74f9e4bcf31565569d227278cd
|
[
"MIT"
] | null | null | null |
devproject/core/management/commands/sync_developers.py
|
francisbrito/take-home-assignment
|
0eafede75666aa74f9e4bcf31565569d227278cd
|
[
"MIT"
] | null | null | null |
devproject/core/management/commands/sync_developers.py
|
francisbrito/take-home-assignment
|
0eafede75666aa74f9e4bcf31565569d227278cd
|
[
"MIT"
] | null | null | null |
import sys
import traceback
from django.core.management.base import BaseCommand, CommandParser
from devproject.core.selectors import get_registered_developers
from devproject.core.services import sync_developer
ERROR_CODE_MISSING_INPUT = 1
class Command(BaseCommand):
    """Management command that syncs local developer rows from GitHub."""
    help = "creates or updates local developer information from Github"
    requires_migrations_checks = True

    def add_arguments(self, parser: CommandParser) -> None:
        # Positional logins are optional; --registered can supply them instead.
        parser.add_argument("logins", nargs="*", type=str)
        parser.add_argument(
            "-r",
            "--registered",
            action="store_true",
            help="use registered developers as input",
        )

    def handle(self, *args, **options) -> None:
        """Sync each requested login, reporting per-login success/failure.

        Exits with ERROR_CODE_MISSING_INPUT when neither logins nor
        --registered was provided.  A failure for one login is logged
        (with traceback) and does not stop the remaining logins.
        """
        if options["registered"]:
            # Replace any positional logins with the registered set.
            options["logins"] = get_registered_developers().values_list(
                "login", flat=True
            )
        if len(options["logins"]) == 0:
            self.stdout.write(
                self.style.ERROR(
                    "either provide an space-separated list of logins or pass --registered flag"
                )
            )
            sys.exit(ERROR_CODE_MISSING_INPUT)
        for login in options["logins"]:
            self.stdout.write(f"syncing developer '{login}' ... ", ending="")
            try:
                sync_developer(login=login)
                self.stdout.write(self.style.SUCCESS("SUCCESS"))
            except Exception as e:  # noqa
                self.stdout.write(self.style.ERROR("FAILED"))
                self.stderr.write(traceback.format_exc())
| 31.48 | 96 | 0.602287 | 3.109375 |
c973421d9a33d35b665d31326a1c19e0c3ff7e94
| 1,760 |
ts
|
TypeScript
|
packages/actor-query-operation-ask/lib/ActorQueryOperationAsk.ts
|
rubensworks/comunica
|
8cddee42ff8a3a8debf9f64659add25054ca167a
|
[
"MIT"
] | null | null | null |
packages/actor-query-operation-ask/lib/ActorQueryOperationAsk.ts
|
rubensworks/comunica
|
8cddee42ff8a3a8debf9f64659add25054ca167a
|
[
"MIT"
] | null | null | null |
packages/actor-query-operation-ask/lib/ActorQueryOperationAsk.ts
|
rubensworks/comunica
|
8cddee42ff8a3a8debf9f64659add25054ca167a
|
[
"MIT"
] | null | null | null |
import type { IActorQueryOperationTypedMediatedArgs } from '@comunica/bus-query-operation';
import { ActorQueryOperation, ActorQueryOperationTypedMediated } from '@comunica/bus-query-operation';
import type { IActorTest } from '@comunica/core';
import type {
IActionContext,
IQueryOperationResult,
IQueryOperationResultBindings,
} from '@comunica/types';
import type { Algebra } from 'sparqlalgebrajs';
/**
* A comunica Ask Query Operation Actor.
*/
export class ActorQueryOperationAsk extends ActorQueryOperationTypedMediated<Algebra.Ask> {
  public constructor(args: IActorQueryOperationTypedMediatedArgs) {
    // Register for the 'ask' operation type on the query-operation bus.
    super(args, 'ask');
  }

  /**
   * ASK operations are always accepted by this actor, so the test passes
   * unconditionally.
   */
  public async testOperation(operation: Algebra.Ask, context: IActionContext): Promise<IActorTest> {
    return true;
  }

  /**
   * Evaluates ASK by running the inner operation and resolving to true as
   * soon as a single binding arrives; an exhausted stream resolves to false.
   * The returned `execute` is lazy: nothing is consumed until it is called.
   */
  public async runOperation(operation: Algebra.Ask, context: IActionContext): Promise<IQueryOperationResult> {
    // Call other query operations like this:
    const output: IQueryOperationResult = await this.mediatorQueryOperation.mediate(
      { operation: operation.input, context },
    );
    const bindings: IQueryOperationResultBindings = ActorQueryOperation.getSafeBindings(output);
    const execute: () => Promise<boolean> = () => new Promise<boolean>((resolve, reject) => {
      // Resolve to true if we find one element, and close immediately
      bindings.bindingsStream.once('data', () => {
        resolve(true);
        // Stop the upstream work — one result is enough for ASK.
        bindings.bindingsStream.close();
      });
      // If we reach the end of the stream without finding anything, resolve to false
      bindings.bindingsStream.on('end', () => resolve(false));
      // Reject if an error occurs in the stream
      bindings.bindingsStream.on('error', reject);
    });
    return { type: 'boolean', execute };
  }
}
| 39.111111 | 110 | 0.721591 | 3.03125 |
9c1d37423e616355aaa823e78c857a7a60a42c48
| 6,724 |
rs
|
Rust
|
src/game.rs
|
Ryan1729/aspect-aspic
|
2acbc621a288b61e5568c3c1442a46fe891e3dc5
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
src/game.rs
|
Ryan1729/aspect-aspic
|
2acbc621a288b61e5568c3c1442a46fe891e3dc5
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
src/game.rs
|
Ryan1729/aspect-aspic
|
2acbc621a288b61e5568c3c1442a46fe891e3dc5
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
use common::*;
impl GameState {
    // NOTE(review): camelCase method names trigger Rust's non_snake_case
    // warning; renaming would touch the callers in update_and_render below.

    /// True when entity `id` is the selection cursor ("selectrix"): it must
    /// carry both the Player and IntraCellPosition components and have the
    /// Selectrix player type.
    fn isSelectrix(&self, id: usize) -> bool {
        self.entities[id].contains(Component::Player | Component::IntraCellPosition)
            && self.player_types[id] == PlayerType::Selectrix
    }

    /// True when entity `id` is the player's avatar (Player component plus
    /// the Avatar player type).
    fn isAvatar(&self, id: usize) -> bool {
        self.entities[id].contains(Component::Player) && self.player_types[id] == PlayerType::Avatar
    }
}
//TODO picking up and throwing orbs (no interactions just placement first)
/// Advances the game by one frame and draws it.
///
/// Per-frame order of work: (1) movement for whichever entity the current
/// mode controls, (2) B-button mode toggling, (3) Select-button inventory
/// cycling, (4) board rendering, (5) HUD rendering.
#[inline]
pub fn update_and_render(state: &mut GameState, framebuffer: &mut Framebuffer, input: Input) {
    for i in 0..GameState::ENTITY_COUNT {
        if state.mode == Mode::MoveAvatar && state.isAvatar(i) {
            let appearance = &mut state.appearances[i];
            // While a slide animation is in flight, just advance it and
            // ignore input for this entity this frame.
            if appearance.is_offset() {
                appearance.reduce_offset(8);
                continue;
            }
            let (mut x, mut y) = state.positions[i];
            // The avatar moves one whole cell per press; the offset makes
            // the sprite start at the old cell and slide into the new one.
            if input.pressed_this_frame(Button::Left) && x > 0 {
                x = x.saturating_sub(1);
                appearance.offset.0 = CELL_WIDTH as isize;
            }
            if input.pressed_this_frame(Button::Right) && x < BOARD_WIDTH - 1 {
                x = x.saturating_add(1);
                appearance.offset.0 = -(CELL_WIDTH as isize)
            }
            // NOTE(review): vertical offsets use CELL_WIDTH here while the
            // selectrix branch below uses CELL_HEIGHT — harmless if cells
            // are square, otherwise a likely copy-paste slip; confirm.
            if input.pressed_this_frame(Button::Up) && y > 0 {
                y = y.saturating_sub(1);
                appearance.offset.1 = CELL_WIDTH as isize;
            }
            if input.pressed_this_frame(Button::Down) && y < BOARD_HEIGHT - 1 {
                y = y.saturating_add(1);
                appearance.offset.1 = -(CELL_WIDTH as isize);
            }
            state.positions[i] = (x, y);
        } else if state.mode == Mode::MoveSelectrix && state.isSelectrix(i) {
            let appearance = &mut state.appearances[i];
            if appearance.is_offset() {
                appearance.reduce_offset(8);
                continue;
            }
            let (mut x, mut y) = state.positions[i];
            let mut inter_pos = state.intra_cell_positions[i];
            // The selectrix moves in half-cell steps: within the current
            // cell when not at an edge, otherwise across the edge into the
            // neighbouring cell (clamped at the board border).
            if input.pressed_this_frame(Button::Left) {
                if x > 0 && inter_pos.on_left_edge() {
                    x = x.saturating_sub(1);
                    inter_pos = inter_pos.left();
                    appearance.offset.0 = (CELL_WIDTH / 2) as isize;
                } else if !inter_pos.on_left_edge() {
                    inter_pos = inter_pos.left();
                    appearance.offset.0 = (CELL_WIDTH / 2) as isize;
                }
            }
            if input.pressed_this_frame(Button::Right) {
                if x < BOARD_WIDTH - 1 && inter_pos.on_right_edge() {
                    x = x.saturating_add(1);
                    inter_pos = inter_pos.right();
                    appearance.offset.0 = -((CELL_WIDTH / 2) as isize);
                } else if !inter_pos.on_right_edge() {
                    inter_pos = inter_pos.right();
                    appearance.offset.0 = -((CELL_WIDTH / 2) as isize);
                }
            }
            if input.pressed_this_frame(Button::Up) {
                if y > 0 && inter_pos.on_top_edge() {
                    y = y.saturating_sub(1);
                    inter_pos = inter_pos.up();
                    appearance.offset.1 = (CELL_HEIGHT / 2) as isize;
                } else if !inter_pos.on_top_edge() {
                    inter_pos = inter_pos.up();
                    appearance.offset.1 = (CELL_HEIGHT / 2) as isize;
                }
            }
            if input.pressed_this_frame(Button::Down) {
                if y < BOARD_HEIGHT - 1 && inter_pos.on_bottom_edge() {
                    y = y.saturating_add(1);
                    inter_pos = inter_pos.down();
                    appearance.offset.1 = -((CELL_HEIGHT / 2) as isize);
                } else if !inter_pos.on_bottom_edge() {
                    inter_pos = inter_pos.down();
                    appearance.offset.1 = -((CELL_HEIGHT / 2) as isize);
                }
            }
            state.positions[i] = (x, y);
            state.intra_cell_positions[i] = inter_pos;
        }
    }
    // B toggles between steering the avatar and steering the selectrix.
    // On entry the selectrix is teleported onto the avatar and given an
    // Appearance; on exit the Appearance is removed (hiding it).
    state.mode = match state.mode {
        Mode::MoveAvatar if input.pressed_this_frame(Button::B) => {
            state.positions[state.selectrixId] = state.positions[state.avatarId];
            state.entities[state.selectrixId].insert(Component::Appearance);
            Mode::MoveSelectrix
        }
        Mode::MoveSelectrix if input.pressed_this_frame(Button::B) => {
            state.entities[state.selectrixId].remove(Component::Appearance);
            Mode::MoveAvatar
        }
        _ => state.mode,
    };
    // Select cycles the highlighted inventory slot (wraps around).
    if input.pressed_this_frame(Button::Select) {
        state.inventory_index = (state.inventory_index + 1) % state.inventory.len() as u8;
    }
    framebuffer.clear();
    // Draw pass: the selectrix only renders while it is being moved; other
    // entities render according to which components they carry.
    for i in 0..GameState::ENTITY_COUNT {
        let entity = state.entities[i];
        if state.isSelectrix(i) {
            if state.mode == Mode::MoveSelectrix {
                let pos = state.positions[i];
                let inter_pos = state.intra_cell_positions[i];
                let appearance = &mut state.appearances[i];
                appearance.render_intra_positioned(framebuffer, pos, inter_pos);
            }
        } else if entity
            .contains(Component::Position | Component::Appearance | Component::IntraCellPosition)
        {
            let pos = state.positions[i];
            let inter_pos = state.intra_cell_positions[i];
            let appearance = &mut state.appearances[i];
            appearance.render_intra_positioned(framebuffer, pos, inter_pos);
        } else if entity.contains(Component::Position | Component::Appearance) {
            let pos = state.positions[i];
            let appearance = &mut state.appearances[i];
            appearance.render_positioned(framebuffer, pos);
        }
    }
    // HUD: grey side panel with one purple slot per inventory item; the
    // currently selected slot gets a yellow outline.
    framebuffer.draw_filled_rect(HUD_LEFT_EDGE, 0, HUD_WIDTH, SCREEN_HEIGHT, GREY);
    for i in 0..state.inventory.len() {
        let item = state.inventory[i];
        let x = INVENTORY_LEFT_EDGE;
        let y = (INVENTORY_HEIGHT + 4) * (i + 1);
        framebuffer.draw_filled_rect(x, y, INVENTORY_WIDTH, INVENTORY_HEIGHT, PURPLE);
        match item {
            OrbType::DeadOrb => {
                framebuffer.draw_circle(
                    x + INVENTORY_WIDTH / 2,
                    y + INVENTORY_HEIGHT / 2,
                    ORB_RADIUS,
                    RED,
                );
            }
            _ => {}
        }
        if i as u8 == state.inventory_index {
            framebuffer.draw_rect(x, y, INVENTORY_WIDTH, INVENTORY_HEIGHT, YELLOW);
        }
    }
}
| 36.743169 | 100 | 0.53257 | 3.21875 |
664b866a09852857fed83506538c063694d71de2
| 1,503 |
py
|
Python
|
Medium/1791.FindCenterofStarGraph.py
|
YuriSpiridonov/LeetCode
|
2dfcc9c71466ffa2ebc1c89e461ddfca92e2e781
|
[
"MIT"
] | 39 |
2020-07-04T11:15:13.000Z
|
2022-02-04T22:33:42.000Z
|
Medium/1791.FindCenterofStarGraph.py
|
YuriSpiridonov/LeetCode
|
2dfcc9c71466ffa2ebc1c89e461ddfca92e2e781
|
[
"MIT"
] | 1 |
2020-07-15T11:53:37.000Z
|
2020-07-15T11:53:37.000Z
|
Medium/1791.FindCenterofStarGraph.py
|
YuriSpiridonov/LeetCode
|
2dfcc9c71466ffa2ebc1c89e461ddfca92e2e781
|
[
"MIT"
] | 20 |
2020-07-14T19:12:53.000Z
|
2022-03-02T06:28:17.000Z
|
'''
There is an undirected star graph consisting of n nodes
labeled from 1 to n. A star graph is a graph where there
is one center node and exactly n - 1 edges that connect
the center node with every other node.
You are given a 2D integer array edges where each
edges[i] = [ui, vi] indicates that there is an edge
between the nodes ui and vi. Return the center of the
given star graph.
Example:
Input: edges = [[1,2],[2,3],[4,2]]
Output: 2
Explanation: As shown in the figure above, node 2 is
connected to every other node, so 2 is the
center.
Example:
Input: edges = [[1,2],[5,1],[1,3],[1,4]]
Output: 1
Constraints:
- 3 <= n <= 10^5
- edges.length == n - 1
- edges[i].length == 2
- 1 <= ui, vi <= n
- ui != vi
- The given edges represent a valid star graph.
'''
#Difficulty: Medium
#60 / 60 test cases passed.
#Runtime: 836 ms
#Memory Usage: 50.7 MB
#Runtime: 836 ms, faster than 56.70% of Python3 online submissions for Find Center of Star Graph.
#Memory Usage: 50.7 MB, less than 16.71% of Python3 online submissions for Find Center of Star Graph.
class Solution:
    def findCenter(self, edges: List[List[int]]) -> int:
        """Return the node incident to every edge of the star graph.

        Tallies how many edges touch each node; the center touches all
        n - 1 edges, so it has the strictly largest tally.
        """
        tally = {}
        for u, v in edges:
            tally[u] = tally.get(u, 0) + 1
            tally[v] = tally.get(v, 0) + 1
        return max(tally, key=tally.get)
| 31.978723 | 101 | 0.586161 | 3.28125 |
ddd66de2ae937c66f7fbe082dcaf70f733148734
| 2,099 |
java
|
Java
|
programmers/42579/Solution2.java
|
pparkddo/ps
|
7164c694403c2087a7b4b16a64f521ae327e328f
|
[
"MIT"
] | 1 |
2021-04-02T09:37:11.000Z
|
2021-04-02T09:37:11.000Z
|
programmers/42579/Solution2.java
|
pparkddo/ps
|
7164c694403c2087a7b4b16a64f521ae327e328f
|
[
"MIT"
] | null | null | null |
programmers/42579/Solution2.java
|
pparkddo/ps
|
7164c694403c2087a7b4b16a64f521ae327e328f
|
[
"MIT"
] | null | null | null |
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.stream.Collectors;
class Music implements Comparable<Music> {
int id;
int play;
String genre;
Music(int id, int play, String genre) {
this.id = id;
this.play = play;
this.genre = genre;
}
@Override
public int compareTo(Music music) {
if (this.play == music.play) {
return -Integer.compare(this.id, music.id);
}
return Integer.compare(this.play, music.play);
}
@Override
public String toString() {
return this.id + " " + this.play + " " + this.genre;
}
}
/**
 * Programmers #42579 "Best Album": pick at most two most-played tracks
 * from each genre, genres ordered by total play count descending.
 */
class Solution2 {
    /**
     * @param genres genre name per track index
     * @param plays  play count per track index
     * @return track indices in playlist order
     */
    public int[] solution(String[] genres, int[] plays) {
        // Total plays per genre, and a max-heap of tracks per genre
        // (Music's natural order is ascending, hence reverseOrder()).
        Map<String, Integer> counts = new HashMap<>();
        Map<String, PriorityQueue<Music>> musics = new HashMap<>();
        for (int i = 0; i < plays.length; i++) {
            String genre = genres[i];
            int play = plays[i];
            counts.put(genre, counts.getOrDefault(genre, 0)+play);
            PriorityQueue<Music> pq = musics.getOrDefault(genre, new PriorityQueue<>(Collections.reverseOrder()));
            pq.add(new Music(i, play, genre));
            musics.put(genre, pq);
        }
        // Genres sorted by total plays, descending.
        List<String> sorted = counts
            .entrySet()
            .stream()
            .sorted((a, b) -> -Integer.compare(a.getValue(), b.getValue()))
            .map(each -> each.getKey())
            .collect(Collectors.toList());
        // Take up to two top tracks per genre, in genre order.
        List<Integer> answer = new ArrayList<>();
        for (String genre : sorted) {
            for (int i = 0; i < 2; i++) {
                PriorityQueue<Music> pq = musics.get(genre);
                if (pq.isEmpty()) {
                    break;
                }
                answer.add(pq.poll().id);
            }
        }
        return answer.stream().mapToInt(i -> i).toArray();
    }
}
| 29.56338 | 114 | 0.515484 | 3.078125 |
05e5cba33788aeffc62a51c8a7effb5dc8ee4270
| 4,679 |
py
|
Python
|
datakit_github/commands/integrate.py
|
associatedpress/datakit-github
|
8a76f10987b372d9492f878e869ecff39983dc73
|
[
"ISC"
] | 9 |
2019-09-16T19:20:30.000Z
|
2022-03-03T07:12:03.000Z
|
datakit_github/commands/integrate.py
|
associatedpress/datakit-github
|
8a76f10987b372d9492f878e869ecff39983dc73
|
[
"ISC"
] | 24 |
2019-07-17T18:16:11.000Z
|
2022-01-20T17:49:00.000Z
|
datakit_github/commands/integrate.py
|
associatedpress/datakit-github
|
8a76f10987b372d9492f878e869ecff39983dc73
|
[
"ISC"
] | 3 |
2019-07-26T15:59:47.000Z
|
2019-09-17T17:26:38.000Z
|
# -*- coding: utf-8 -*-
from cliff.command import Command
from github.GithubException import GithubException
from datakit_github.github_api import GithubApi
from datakit_github.repository import Repository
from datakit_github.project_mixin import ProjectMixin
def ask(question):
    """Prompt the user with `question` and return the raw response.

    Kept as a tiny module-level seam so interactive input can be stubbed
    out in tests.
    """
    response = input(question)
    return response
class Integrate(ProjectMixin, Command):
    "Integrate local project code with Github"

    def take_action(self, parsed_args):
        """Create a Github repo for the current project and wire it up.

        Flow: validate config and local state, let the user pick a Github
        account and privacy setting, create the remote repo, then run the
        local git init/commit/push sequence.

        Returns a summary dict on the create path, otherwise None.
        """
        # Check for Github API key from configs
        # TODO: Provide more helpful error message on how to create API key
        # and configure plugin locally
        api_key = self.configs.get('github_api_key')
        if not api_key:
            err_msg = "You must configure a Github API key to use this command!!\n"
            self.log.error(err_msg)
        elif Repository.initialized():
            # Refuse to clobber an existing local .git/ directory.
            self.log.error("\nERROR: Repo has already been initialized locally!!")
            self.log.error(
                "You must either remove the .git/ directory " +
                "before re-running this command, or manually " +
                "configure Github integration.\n"
            )
        else:
            account = self.choose_account(api_key)
            confirm = self.confirm_account_choice(account)
            # Empty input means "accept the default" (yes).
            if confirm in ["y", ""]:
                # TODO: Handle overrides for project settings from
                # configs and/or command-line flags (e.g. privacy)
                privacy_choice = ask("Should this repo be private? y/n [y]: ").strip().lower()
                privacy = True if privacy_choice in ['', 'y'] else False
                try:
                    repo = account.create_repo(self.project_slug, private=privacy)
                    self.log.info("Repo created at {}".format(repo.html_url))
                    self.run_local_git_commands(repo)
                except GithubException as err:
                    # Github error payloads come in two shapes; prefer the
                    # detailed per-error message when present.
                    try:
                        error_msg = err.data['errors'][0]['message']
                    except KeyError:
                        error_msg = err.data['message']
                    msg = "\nERROR: Failed to create {} for {}: {}!!\n".format(
                        self.project_slug,
                        account.login,
                        error_msg
                    )
                    self.log.error(msg)
                finally:
                    # NOTE(review): returning from `finally` swallows any
                    # exception still propagating out of the try block, so
                    # the summary is returned even on failure — confirm
                    # this is intentional before relying on it.
                    return {
                        'account': account.login,
                        'repo_name': self.project_slug,
                        'private_repo': privacy
                    }

    def choose_account(self, api_key):
        """Return the Github account (user/org) to create the repo under.

        With exactly one account it is chosen automatically; otherwise the
        user picks from a numbered menu (blank selects the first entry).
        """
        accounts = GithubApi.accounts(api_key)
        if len(accounts) == 1:
            target_account = accounts[0]
        else:
            msg = "Choose an account where the new project should be created:\n"
            account_lkup = {}
            self.log.info(msg)
            # Menu numbering is 1-based for the user's benefit.
            for idx, account in enumerate(accounts):
                num = idx + 1
                account_lkup[num] = account
                self.log.info("({}) {}".format(num, account.login))
            # TODO: Check plugin for default account configuration,
            # otherwise default to personal account
            default = account_lkup[1]
            choice_msg = "\nType a number or leave blank for default [{}]: ".format(default.login)
            choice = ask(choice_msg)
            if choice.strip() == '':
                target_account = default
            else:
                # NOTE(review): a non-numeric or out-of-range entry raises
                # ValueError/KeyError here — no re-prompting.
                target_account = account_lkup[int(choice)]
        return target_account

    def confirm_account_choice(self, target_account):
        """Echo the chosen account and return the user's y/n answer."""
        self.log.info("Repo will be created on account: {}".format(target_account.login))
        choice = ask("Is this correct? y/n [y]: ").strip().lower()
        return choice

    def run_local_git_commands(self, repo):
        """Init the local repo, make the first commit, and push to `repo`."""
        self.log.info("Running local Git initialization...")
        # TODO: Create a project-level config/datakit-github.json?
        # containing name of selected account and possibly account type (org or user)?
        # This can be used downstream to configure org or user-specific API calls
        # if any (hold off on the "type" config until we
        # determine if there are different call ypes)
        Repository.init()
        Repository.add()
        Repository.commit("Initial commit")
        Repository.add_remote(repo.ssh_url)
        alert_msg = 'Local repo linked to remote origin: \n\t{}'.format(repo.html_url)
        self.log.info(alert_msg)
        Repository.push()
        self.log.info("First commit made locally and pushed to remote")
        self.log.info("View the project on Github at {}".format(repo.html_url))
| 44.561905 | 98 | 0.576405 | 3.015625 |
ff5ff5b92e277867f0dafddc28fdde3023cdf3a1
| 3,486 |
py
|
Python
|
python/src/shipping_allocation/envs/inventory_generators.py
|
jotaporras/ts_mcfrl
|
c8c77a8fbd58e80e926e6705320ca8bc1979efdd
|
[
"MIT"
] | null | null | null |
python/src/shipping_allocation/envs/inventory_generators.py
|
jotaporras/ts_mcfrl
|
c8c77a8fbd58e80e926e6705320ca8bc1979efdd
|
[
"MIT"
] | 5 |
2020-09-26T01:26:21.000Z
|
2022-02-10T02:45:51.000Z
|
python/src/shipping_allocation/envs/inventory_generators.py
|
jotaporras/ts_mcfrl
|
c8c77a8fbd58e80e926e6705320ca8bc1979efdd
|
[
"MIT"
] | null | null | null |
from abc import ABC
from typing import List
import numpy as np
from network import physical_network
from experiment_utils.Order import Order
class InventoryGenerator(ABC):
    """Strategy interface for creating newly arriving inventory."""

    # Generates new inventory and distributes it somehow to keep the network balanced for the selected locations.
    # Returns a numpy array of shape (num_dcs,num_commodities) representing how much extra inventory is going to appear.
    def generate_new_inventory(
        self, network: physical_network, open_orders: List[Order]
    ):  # todo add type when it works.
        """Hook for subclasses: return the per-DC new-supply array."""
        pass
class NaiveInventoryGenerator(InventoryGenerator):
    """Splits the total demanded inventory evenly across all DCs.

    Any remainder left over from the integer division is pushed onto the
    first DC so that per-commodity column sums exactly equal total demand.
    """

    def generate_new_inventory(
        self, network: physical_network, open_orders: List[Order]
    ):
        # Total demand over all open orders (elementwise per commodity).
        total_inventory = sum(order.demand for order in open_orders)
        per_dc = total_inventory // network.num_dcs
        # One evenly-sized row per DC, shaped (num_dcs, num_commodities).
        dc_inv = np.array([per_dc] * network.num_dcs).reshape(
            network.num_dcs, -1
        )
        # Whatever the even split dropped goes to DC 0 so columns balance.
        leftover = total_inventory - np.sum(dc_inv, axis=0)
        dc_inv[0, :] = dc_inv[0, :] + leftover
        if (np.sum(dc_inv, axis=0) != total_inventory).any():
            raise Exception("np.sum(dc_inv) != total_inventory")
        return dc_inv
class DirichletInventoryGenerator(InventoryGenerator):
    """Distributes new inventory across DCs with fixed Dirichlet weights.

    A Dirichlet draw per commodity, sampled once at construction, decides
    each DC's share of the total demanded inventory.
    """

    def __init__(self, network: physical_network):
        num_dcs = network.num_dcs
        num_commodities = network.num_commodities
        # Shuffled harmonic-like concentration parameters so some DCs
        # systematically receive larger shares than others.
        self.alpha = np.random.permutation(
            num_dcs / np.arange(1, num_dcs + 1)
        )  # trying to make it skewed.
        # NOTE(review): np.random.dirichlet(alpha, n) returns shape
        # (n, len(alpha)) = (num_commodities, num_dcs); the trailing
        # comment below says the reverse — verify downstream assumptions.
        self.inventory_generation_distribution = np.random.dirichlet(
            self.alpha, num_commodities
        )  # (num_dc,num_k) of dc distribution of inventory.

    def generate_new_inventory(
        self, network: physical_network, open_orders: List[Order]
    ):
        """Allocate total open demand to DCs by the stored Dirichlet shares."""
        # logging.info("==> inventory generator")
        total_inventory = sum(
            map(lambda o: o.demand, open_orders)
        )  # TODO rename and do for many commodities.
        # even = total_inventory // network.num_dcs
        inventory_distribution = self.inventory_generation_distribution

        # Floor each DC's share, then push the rounding remainder onto the
        # first DC column so each commodity's row sums to its exact demand.
        supply_per_dc = np.floor(
            total_inventory.reshape(-1, 1) * inventory_distribution
        )
        imbalance = total_inventory - np.sum(supply_per_dc, axis=1)
        supply_per_dc[:, 0] = supply_per_dc[:, 0] + imbalance

        # logging.info("Demand", total_inventory)
        # logging.info("Pre level dc_inv")
        # logging.info(dc_inv)
        # logging.info("Total new inv",np.sum(dc_inv))
        # if total_inventory // network.num_dcs != total_inventory / network.num_dcs:
        # logging.info("Rebalanced dc inv",dc_inv)
        # logging.info("Rebalanced sum",np.sum(dc_inv))
        # Sanity check: the allocation must conserve total demand.
        if not np.isclose(np.sum(np.sum(supply_per_dc, axis=1) - total_inventory), 0.0):
            raise RuntimeError("Demand was not correctly balanced")

        # Transpose to the (num_dcs, num_commodities) layout callers expect.
        return supply_per_dc.transpose()
| 42.512195 | 120 | 0.662651 | 3.453125 |
23c7f5592c835ca38b9489e97a9c11b2866142a7
| 1,407 |
js
|
JavaScript
|
src/components/Card.js
|
Adherentman/simple-forum
|
07587fdc9943d84075396b16cadf216396452232
|
[
"MIT"
] | 1 |
2018-03-03T03:52:31.000Z
|
2018-03-03T03:52:31.000Z
|
src/components/Card.js
|
Adherentman/simple-forum
|
07587fdc9943d84075396b16cadf216396452232
|
[
"MIT"
] | null | null | null |
src/components/Card.js
|
Adherentman/simple-forum
|
07587fdc9943d84075396b16cadf216396452232
|
[
"MIT"
] | null | null | null |
import React from 'react';
import { Card } from 'antd';
import { graphql } from 'react-apollo';
import { gql } from 'apollo-boost';
//import * as Immutable from 'immutable';
//import { bindActionCreators } from 'redux';
//import { connect } from 'react-redux';
// import { bookActions, BookActions } from '../actions/bookAction';
import Apolloex from './Apolloex';
const { Meta } = Card;
const cardStyle = {
width: '400px',
margin: '0 auto',
};
const gridStyle = {
width: '300px',
};
/**
 * Presentational card wrapping the Apolloex demo component.
 *
 * Renders a static antd <Card> with a cover image and a usage hint; it
 * takes no props and holds no state (the redux wiring below is commented
 * out and currently unused).
 */
class Cards extends React.Component {
  componentDidMount() {
    //var actions = this.props.actions;
    //actions.AddTodo();
  }
  render() {
    //var text: Immutable.List<Immutable.Map<string, any>> = this.props.text;
    return (
      <div style={cardStyle}>
        <Card
          hoverable
          cover={<img alt="example" src={require('../assets/images/pi.jpg')} />}
          style={gridStyle}
        >
          <Meta
            title="Thank you for using!!"
            description="if you don't like ant desgin, you can use:"
          />
          <code>yarn remove antd</code>
          <Apolloex/>
        </Card>
      </div>
    );
  }
}
// export default connect(
// (state: stateType) => {
// return {
// text: state.book.get('text'),
// };
// },
// dispatch => ({
// actions: bindActionCreators(bookActions, dispatch),
// })
// )(Cards);
export default Cards;
| 24.258621 | 80 | 0.574982 | 3.015625 |
041249e8360e805b3879e9ac3e612954b1c12ed3
| 1,043 |
cpp
|
C++
|
fontcharacter.cpp
|
zheka20012/OMSIFontCreator
|
237515473720ca663f87aadbf1eed7950164d0c1
|
[
"Unlicense"
] | 2 |
2021-12-19T18:57:45.000Z
|
2021-12-21T12:50:50.000Z
|
fontcharacter.cpp
|
zheka20012/OMSIFontCreator
|
237515473720ca663f87aadbf1eed7950164d0c1
|
[
"Unlicense"
] | 1 |
2022-03-07T11:24:39.000Z
|
2022-03-07T11:24:39.000Z
|
fontcharacter.cpp
|
zheka20012/OMSIFontCreator
|
237515473720ca663f87aadbf1eed7950164d0c1
|
[
"Unlicense"
] | null | null | null |
#include "fontcharacter.h"
// No dynamically owned resources, so the destructor has nothing to free.
FontCharacter::~FontCharacter(){
}
// Builds a character entry for `letter` with all pixel bounds zeroed.
FontCharacter::FontCharacter(const QChar letter)
{
    Letter = letter;
    LeftPixel = RightPixel = 0;
    TopPixel = 0;
}
// Deserializes one character record: four consecutive lines holding the
// glyph, then the left, right and top pixel bounds.
// NOTE(review): assumes well-formed input — an empty first line makes
// readLine().at(0) assert; confirm inputs are trusted. Also note the
// "[char]" header written by Save() is not consumed here; presumably the
// caller skips it — verify.
FontCharacter::FontCharacter(QTextStream &inStream)
{
    Letter = inStream.readLine().at(0);
    LeftPixel = inStream.readLine().toUShort();
    RightPixel = inStream.readLine().toUShort();
    TopPixel = inStream.readLine().toUShort();
}
// Replaces the stored glyph; pixel bounds are left untouched.
void FontCharacter::SetCharacter(const QChar letter)
{
    Letter = letter;
}
// Sets the glyph's bounds inside the font texture (left/right columns and
// top row, in pixels).
void FontCharacter::SetPixels(unsigned int leftPixel, unsigned int rightPixel, unsigned int topPixel)
{
    LeftPixel = leftPixel;
    RightPixel = rightPixel;
    TopPixel = topPixel;
}
// Serializes this record as a "[char]" section: header, glyph, the three
// pixel bounds (one per line), then a blank separator line.
void FontCharacter::Save(QTextStream &fileStream)
{
    fileStream << "[char]" << Qt::endl;
    fileStream << Letter << Qt::endl;
    fileStream << LeftPixel << Qt::endl;
    fileStream << RightPixel << Qt::endl;
    fileStream << TopPixel << Qt::endl;
    fileStream << Qt::endl;
}
// Accessor for the stored glyph.
QChar FontCharacter::GetCharacter()
{
    return Letter;
}
| 21.729167 | 103 | 0.683605 | 3.0625 |
9fe71933a149f99aec3489200c0cb1189fbfb29a
| 5,172 |
py
|
Python
|
sinkhorn_knopp.py
|
kkahatapitiya/Numpy-OT
|
f4698580b4e84e5bd4c2fdd828d086ac7f088295
|
[
"MIT"
] | 3 |
2021-04-27T17:08:11.000Z
|
2021-04-29T15:28:20.000Z
|
sinkhorn_knopp.py
|
kumarak93/numpy_ot
|
f4698580b4e84e5bd4c2fdd828d086ac7f088295
|
[
"MIT"
] | null | null | null |
sinkhorn_knopp.py
|
kumarak93/numpy_ot
|
f4698580b4e84e5bd4c2fdd828d086ac7f088295
|
[
"MIT"
] | 1 |
2021-06-03T17:12:04.000Z
|
2021-06-03T17:12:04.000Z
|
import numpy as np
### from https://github.com/rflamary/POT/blob/master/ot/bregman.py ###
def sinkhorn_knopp(a, b, M, reg, numItermax=1000,
                   stopThr=1e-9, verbose=False, log=False, **kwargs):
    r"""Entropic-regularized optimal transport via Sinkhorn-Knopp scaling.

    Solves min_G <G, M> + reg * sum(G * log(G)) subject to G 1 = a and
    G^T 1 = b, by alternately rescaling the rows and columns of the Gibbs
    kernel K = exp(-M / reg) (Cuturi, NIPS 2013). Adapted from
    ot.bregman.sinkhorn_knopp in the POT library, with two local changes:
    tiny 1e-299 offsets guard every division against zeros, and the
    iteration count is returned alongside the result.

    Parameters
    ----------
    a : array-like, shape (dim_a,)
        Source weights; empty input defaults to uniform.
    b : array-like, shape (dim_b,) or (dim_b, n_hists)
        Target weights; a matrix runs one problem per column and returns
        only the losses. Empty input defaults to uniform.
    M : array-like, shape (dim_a, dim_b)
        Cost matrix.
    reg : float
        Regularization strength (> 0).
    numItermax : int, optional
        Iteration cap.
    stopThr : float, optional
        Marginal-violation threshold for convergence.
    verbose : bool, optional
        Print the error trace while iterating.
    log : bool, optional
        Also return a dict with the error history and scalings u, v.

    Returns
    -------
    gamma_or_loss : ndarray
        OT matrix (dim_a, dim_b), or per-column losses when b is 2-D.
    cpt : int
        Number of Sinkhorn iterations performed.
    log : dict
        Only when ``log=True``: keys 'err', 'u', 'v'.
    """
    a = np.asarray(a, dtype=np.float64)
    b = np.asarray(b, dtype=np.float64)
    M = np.asarray(M, dtype=np.float64)

    # Default to uniform marginals when none were supplied.
    if len(a) == 0:
        a = np.full((M.shape[0],), 1.0 / M.shape[0])
    if len(b) == 0:
        b = np.full((M.shape[1],), 1.0 / M.shape[1])

    dim_a = len(a)
    dim_b = len(b)
    n_hists = b.shape[1] if b.ndim > 1 else 0

    if log:
        log = {'err': []}

    # Uniform starting scalings (no null distances assumed off-diagonal).
    if n_hists:
        u = np.full((dim_a, n_hists), 1.0 / dim_a)
        v = np.full((dim_b, n_hists), 1.0 / dim_b)
    else:
        u = np.full(dim_a, 1.0 / dim_a)
        v = np.full(dim_b, 1.0 / dim_b)

    # Gibbs kernel K = exp(-M / reg).
    K = np.exp(M / -reg)

    tmp2 = np.empty(b.shape, dtype=M.dtype)
    # Row-normalized kernel; the 1e-299 offset guards against zeros in a.
    Kp = (1 / (a + 1e-299)).reshape(-1, 1) * K

    cpt = 0
    err = 1
    while err > stopThr and cpt < numItermax:
        uprev, vprev = u, v

        KtransposeU = K.T.dot(u)
        v = b / (KtransposeU + 1e-299)
        u = 1.0 / (Kp.dot(v) + 1e-299)

        if (np.any(KtransposeU == 0)
                or np.any(np.isnan(u)) or np.any(np.isnan(v))
                or np.any(np.isinf(u)) or np.any(np.isinf(v))):
            # Machine precision reached: revert to the last stable
            # scalings and stop iterating.
            print('Warning: numerical errors at iteration', cpt)
            u, v = uprev, vprev
            break

        if cpt % 10 == 0:
            # Checking the marginal violation is costly, so only do it
            # every tenth iteration.
            if n_hists:
                np.einsum('ik,ij,jk->jk', u, K, v, out=tmp2)
            else:
                # Right marginal tmp2 = (diag(u) K diag(v))^T 1.
                np.einsum('i,ij,j->j', u, K, v, out=tmp2)
            err = np.linalg.norm(tmp2 - b)
            if log:
                log['err'].append(err)
            if verbose:
                if cpt % 200 == 0:
                    print('{:5s}|{:12s}'.format('It.', 'Err') + '\n' + '-' * 19)
                print('{:5d}|{:8e}|'.format(cpt, err))

        cpt = cpt + 1

    if log:
        log['u'] = u
        log['v'] = v

    if n_hists:
        # Batched mode: return only the transport losses per column.
        res = np.einsum('ik,ij,jk,ij->k', u, K, v, M)
        if log:
            return res, cpt, log
        return res, cpt

    # Single-problem mode: return the full OT matrix diag(u) K diag(v).
    gamma = u.reshape((-1, 1)) * K * v.reshape((1, -1))
    if log:
        return gamma, cpt, log
    return gamma, cpt
20dcba74bf2e8503fd50436b2fd50dcb78e910ef
| 2,632 |
py
|
Python
|
app/utils/analyze.py
|
codingjerk/ztd.blunders-web
|
38d4c1049dc3d0bd0b4294ffa419d25cbfbf2b83
|
[
"MIT"
] | null | null | null |
app/utils/analyze.py
|
codingjerk/ztd.blunders-web
|
38d4c1049dc3d0bd0b4294ffa419d25cbfbf2b83
|
[
"MIT"
] | null | null | null |
app/utils/analyze.py
|
codingjerk/ztd.blunders-web
|
38d4c1049dc3d0bd0b4294ffa419d25cbfbf2b83
|
[
"MIT"
] | null | null | null |
import chess
import chess.uci
class Engine:
    """Wrapper around a UCI chess engine process driven via python-chess.

    Holds the engine subprocess, an InfoHandler collecting analysis
    output, and the current board position. Intended for use as a context
    manager so the engine process is always shut down.
    """

    def __init__(self, path):
        """Launch the UCI engine binary at `path` and verify it is alive.

        Raises:
            Exception: if the engine process failed to start.
        """
        self.engine = chess.uci.popen_engine(path)
        self.engine.uci()
        self.engine.isready()

        self.handler = chess.uci.InfoHandler()
        self.engine.info_handlers.append(self.handler)

        self.board = chess.Board()

        if not self.engine.is_alive():
            raise Exception("Engine failed to start")

    def __enter__(self):
        # Fix: the original class defined __enter__ twice (once here and
        # again just before __exit__); the first definition was dead code,
        # so only a single definition is kept.
        return self

    def __exit__(self, exception, _1, _2):
        # Always terminate the engine subprocess on context exit.
        self.engine.quit()

    def new(self):
        """Tell the engine to start a fresh game."""
        self.engine.ucinewgame()
        self.engine.isready()

    def set(self, fen):
        """Set the internal board and the engine position from a FEN string."""
        self.board = chess.Board(fen)
        self.engine.position(self.board)
        self.engine.isready()

    def is_game_over(self):
        """True when the current position has no continuation (per python-chess)."""
        return self.board.is_game_over()

    def __cloneScore(self, score):
        # Copy only the centipawn/mate fields out of the engine score object.
        return {"cp": score.cp, "mate": score.mate}

    def __pvToStr(self, fen, line):
        # Convert a principal variation (list of Move) into SAN strings by
        # replaying it on a scratch board starting from `fen`.
        board = chess.Board(fen)
        result = []
        for move in line:
            movestr = board.san(move)
            result.append(movestr)
            board.push(move)
        return result

    def __filterMove(self, index):
        # Extract analysis slot `index` (1-based multipv) from the info
        # handler, or None when the engine produced nothing usable.
        pv = self.handler.info['pv']
        score = self.handler.info['score']

        if pv == {} or score == {}:
            return None
        if index not in pv or index not in score:
            return None

        lineStr = self.__pvToStr(self.board.fen(), pv[index])
        scoreStr = self.__cloneScore(score[index])

        return {
            'line': lineStr,
            'score': scoreStr
        }

    def think(self, timeToThink, move=None):
        """Analyze the current position for `timeToThink` milliseconds.

        When `move` (SAN) is given, restrict the search to that move only.

        Returns:
            dict: {'line': [SAN moves], 'score': {'cp': ..., 'mate': ...}}

        Raises:
            Exception: if the engine produced no usable analysis.
        """
        searchmoves = None
        self.engine.isready()
        if move is not None:
            searchmoves = [self.board.parse_san(move)]

        promise = self.engine.go(movetime=timeToThink, searchmoves=searchmoves, async_callback=True)
        promise.result()  # block until the search completes

        result = self.__filterMove(1)
        if result is None:
            raise Exception('Engine failed to analyze blunder')
        return result

    def moveLine(self, line):
        """Play a sequence of SAN moves, validating each for legality.

        Raises:
            Exception: if any move is illegal in its position.
        """
        for move in line:
            move_int = self.board.parse_san(move)
            if move_int not in self.board.legal_moves:
                raise Exception("Illegal move in this position")
            self.board.push(move_int)

        self.engine.position(self.board)
        self.engine.isready()

    def moveOne(self, move):
        """Play a single SAN move."""
        self.moveLine([move])

    def unmove(self):
        """Undo the last played move and resync the engine position."""
        self.board.pop()
        self.engine.position(self.board)
        self.engine.isready()
| 26.059406 | 100 | 0.583207 | 3.21875 |
d68666c2533ec945af0d4b7765e27e775268cb70
| 3,494 |
dart
|
Dart
|
packages/conduit/tool/generated_test_runner.dart
|
anycode/conduit
|
f3baca4b1d4f2f24e542d7e12859d6ab6d6ffcb3
|
[
"BSD-2-Clause"
] | null | null | null |
packages/conduit/tool/generated_test_runner.dart
|
anycode/conduit
|
f3baca4b1d4f2f24e542d7e12859d6ab6d6ffcb3
|
[
"BSD-2-Clause"
] | null | null | null |
packages/conduit/tool/generated_test_runner.dart
|
anycode/conduit
|
f3baca4b1d4f2f24e542d7e12859d6ab6d6ffcb3
|
[
"BSD-2-Clause"
] | null | null | null |
import 'dart:async';
import 'dart:io';
import 'package:conduit_runtime/runtime.dart';
/// Drives the generated-test pipeline: builds each test file into an
/// isolated Conduit build context, runs it as a subprocess, and prints a
/// pass/fail summary. Accepts a single test-file path, a test directory,
/// or nothing (defaults to `test/`). Sets a non-zero exit code if any
/// test file fails.
Future main(List<String> args) async {
  final conduitDir = Directory.current.uri;
  // Predicates naming test files this harness must skip.
  // Fix: the original listed `entity_mirrors_test.dart` twice; the
  // duplicate predicate is removed (matching behavior is unchanged).
  final blacklist = [
    (String s) => s.contains('test/command/'),
    (String s) => s.contains('/compilation_errors/'),
    (String s) => s.contains('test/openapi/'),
    (String s) => s.contains('postgresql/migration/'),
    (String s) => s.contains('db/migration/'),
    (String s) => s.endsWith('entity_mirrors_test.dart'),
    (String s) => s.endsWith('moc_openapi_test.dart'),
    (String s) => s.endsWith('auth_documentation_test.dart'),
    (String s) => s.endsWith('cli/command_test.dart'),
  ];

  // A single argument names one test file; otherwise scan a directory.
  List<File> testFiles;
  if (args.length == 1) {
    testFiles = [File(args.first)];
  } else {
    final testDir = args.isNotEmpty
        ? conduitDir.resolveUri(Uri.parse(args[0]))
        : conduitDir.resolve('test/');
    testFiles = Directory.fromUri(testDir)
        .listSync(recursive: true)
        .whereType<File>()
        .where((f) => f.path.endsWith('_test.dart'))
        .where((f) => blacklist.every(
            (blacklistFunction) => blacklistFunction(f.uri.path) == false))
        .toList();
  }

  var remainingCounter = testFiles.length;
  final passingFiles = <File>[];
  final failingFiles = <File>[];
  for (var f in testFiles) {
    final currentTime = DateTime.now();
    final makePrompt = () =>
        '(Pass: ${passingFiles.length} Fail: ${failingFiles.length} Remain: $remainingCounter)';
    print('Running tests derived from ${f.path}...');

    // Build the test file into a throwaway Conduit build directory.
    final ctx = BuildContext(
        conduitDir.resolve('lib/').resolve('conduit.dart'),
        Directory.current.uri.resolve('../').resolve('_build/'),
        Directory.current.uri.resolve('../').resolve('run'),
        File(conduitDir.resolve(f.path).path).readAsStringSync(),
        forTests: true);
    final bm = BuildManager(ctx);
    await bm.build();

    // Run the built test, streaming its output through ours.
    final result = await Process.start('dart', ['test/main_test.dart'],
        workingDirectory:
            ctx.buildDirectoryUri.toFilePath(windows: Platform.isWindows),
        environment: {
          'CONDUIT_CI_DIR_LOCATION': Directory.current.uri
              .resolve('../../')
              .resolve('ci/')
              .toFilePath(windows: Platform.isWindows)
        });
    // ignore: unawaited_futures
    stdout.addStream(result.stdout);
    // ignore: unawaited_futures
    stderr.addStream(result.stderr);

    if (await result.exitCode != 0) {
      exitCode = -1;
      failingFiles.add(f);
      print('Tests FAILED in ${f.path}.');
    } else {
      passingFiles.add(f);
    }
    final elapsed = DateTime.now().difference(currentTime);
    remainingCounter--;
    print(
        '${makePrompt()} (${elapsed.inSeconds}s) Completed tests derived from ${f.path}.');
    await bm.clean();
  }

  print('==============');
  print('Result Summary');
  print('==============');
  final testRoot =
      Directory.current.uri.resolve('../').resolve('conduit/').resolve('test/');
  // Render a path relative to the test root for compact summary lines.
  String stripParentDir(Uri uri) {
    final testPathList = uri.pathSegments;
    final parentDirPathList = testRoot.pathSegments;
    final components = testPathList.skip(parentDirPathList.length - 1);
    return components.join('/');
  }

  passingFiles.forEach((f) {
    print('  ${stripParentDir(f.uri)}: success');
  });
  failingFiles.forEach((f) {
    print('  ${stripParentDir(f.uri)}: FAILURE');
  });
}
| 32.962264 | 96 | 0.618489 | 3.15625 |
87b0a134ab1365055680d5f39264c539df98d2d3
| 2,450 |
swift
|
Swift
|
TempiBeatDetection/TempiUtilities.swift
|
jasonmarkperez/TempiBeatDetection
|
c5e747155f67626135919c51701184e75d5caa52
|
[
"MIT"
] | 36 |
2017-06-09T09:42:59.000Z
|
2022-02-16T21:35:51.000Z
|
TempiBeatDetection/TempiUtilities.swift
|
jasonmarkperez/TempiBeatDetection
|
c5e747155f67626135919c51701184e75d5caa52
|
[
"MIT"
] | null | null | null |
TempiBeatDetection/TempiUtilities.swift
|
jasonmarkperez/TempiBeatDetection
|
c5e747155f67626135919c51701184e75d5caa52
|
[
"MIT"
] | 13 |
2017-05-16T15:18:51.000Z
|
2021-12-23T14:43:56.000Z
|
//
// TempiUtilities.swift
// TempiBeatDetection
//
// Created by John Scalo on 1/8/16.
// Copyright © 2016 John Scalo. See accompanying License.txt for terms.
import Foundation
import Accelerate
// Schedules `closure` on the main queue after `delay` seconds
// (GCD, Swift 2-era API).
func tempi_dispatch_delay(delay:Double, closure:()->()) {
    dispatch_after(
        dispatch_time(
            DISPATCH_TIME_NOW,
            Int64(delay * Double(NSEC_PER_SEC))
        ),
        dispatch_get_main_queue(), closure)
}
/// True iff `n` is a positive integral power of two.
///
/// Fix: the previous float-based check (logbf/remainderf) also accepted
/// negative values such as -4 (logbf uses |n|), and routed an integer
/// question through float rounding. The classic bit test below rejects
/// non-positive values and is exact.
func tempi_is_power_of_2 (n: Int) -> Bool {
    return n > 0 && (n & (n - 1)) == 0
}
// Maximum element of `a` via Accelerate's vDSP_maxv.
// NOTE(review): `max` starts at 0.0 and vDSP_maxv is invoked with a zero
// count for an empty array — confirm the intended result in that case
// before relying on it.
func tempi_max(a: [Float]) -> Float {
    var max: Float = 0.0
    vDSP_maxv(a, 1, &max, UInt(a.count))
    return max
}
// Moving-average smoothing: element i becomes the mean of a[i..<i+w],
// with the window shrinking near the end so output length == input length.
func tempi_smooth(a: [Float], w: Int) -> [Float] {
    var newA: [Float] = [Float]()
    for i in 0..<a.count {
        // Clamp the window so it never reads past the end of the array.
        let realW = min(w, a.count - i)
        var avg: Float = 0.0
        let subArray: [Float] = Array(a[i..<i+realW])
        vDSP_meanv(subArray, 1, &avg, UInt(realW))
        newA.append(avg)
    }
    return newA
}
/// Median of `a`: middle element for odd counts, mean of the two middle
/// elements for even counts. Operates on a sorted copy; the input array
/// is left untouched. (Swift 2's non-mutating `.sort` returns the copy.)
func tempi_median(a: [Float]) -> Float {
    let ordered: [Float] = a.sort { $0 < $1 }
    let n = ordered.count
    if n == 1 {
        return ordered[0]
    }
    if n % 2 == 0 {
        // Even count: average the two middle samples.
        return (ordered[n / 2 - 1] + ordered[n / 2]) / 2.0
    }
    return ordered[n / 2]
}
/// Arithmetic mean of `a` (NaN for an empty array, via 0/0).
func tempi_mean(a: [Float]) -> Float {
    // Left-to-right fold matches the original loop's accumulation order,
    // so float rounding is bit-identical.
    let total = a.reduce(Float(0), combine: +)
    return total / Float(a.count)
}
//extension Array where Element : IntegerArithmeticType {
// func median() -> Float {
// let sortedArray : [Float] = a.sort( { $0 < $1 } )
// var median : Float
//
// if sortedArray.count == 1 {
// return sortedArray[0]
// }
//
// if sortedArray.count % 2 == 0 {
// let f1 : Float = sortedArray[sortedArray.count / 2 - 1]
// let f2 : Float = sortedArray[sortedArray.count / 2]
// median = (f1 + f2) / 2.0
// } else {
// median = sortedArray[sortedArray.count / 2]
// }
//
// return median
// }
//}
| 25.789474 | 72 | 0.550204 | 3.25 |
1a7faeacd83f1f044393b1b8c323eea332a2a1ec
| 2,328 |
py
|
Python
|
modern-infrastructure-wednesday/2020-05-06/gcp-py-functions/funcs.py
|
nimbinatus/pulumitv
|
19477611afde5d00d90838956d3a7b1cc7c1f705
|
[
"Apache-2.0"
] | 13 |
2020-05-09T17:37:06.000Z
|
2022-01-09T06:26:19.000Z
|
modern-infrastructure-wednesday/2020-05-06/gcp-py-functions/funcs.py
|
nimbinatus/pulumitv
|
19477611afde5d00d90838956d3a7b1cc7c1f705
|
[
"Apache-2.0"
] | 28 |
2020-04-24T05:51:39.000Z
|
2022-03-31T13:31:38.000Z
|
modern-infrastructure-wednesday/2020-05-06/gcp-py-functions/funcs.py
|
nimbinatus/pulumitv
|
19477611afde5d00d90838956d3a7b1cc7c1f705
|
[
"Apache-2.0"
] | 10 |
2020-03-24T19:29:52.000Z
|
2021-06-18T11:01:21.000Z
|
import time
import os
import pulumi
from pulumi_gcp import storage
from pulumi_gcp import cloudfunctions
# Disable rule for that module-level exports be ALL_CAPS, for legibility.
# pylint: disable=C0103
# File path to where the Cloud Function's source code is located.
PATH_TO_SOURCE_CODE = "./functions"
# Get values from Pulumi config to use as environment variables in our Cloud Function.
# NOTE: config.get() returns None for any unset key; those land in the
# function's environment as missing values.
config = pulumi.Config(name=None)
config_values = {
    # Target destination and travel time offset.
    "DESTINATION": config.get("destination"),
    "TRAVEL_OFFSET": config.get("travelOffset"),
    # Google Maps API key.
    "GOOGLE_MAPS_API_KEY": config.get("googleMapsApiKey"),
    # Twilio account for sending SMS messages.
    # (Key names keep the original "TWILLIO" spelling — the deployed function
    # reads these exact names, so do not "fix" them here.)
    "TWILLIO_ACCESS_TOKEN": config.get("twillioAccessToken"),
    "TWILLIO_ACCOUNT_SID": config.get("twillioAccountSid"),
    "TO_PHONE_NUMBER": config.get("toPhoneNumber"),
    "FROM_PHONE_NUMBER": config.get("fromPhoneNumber"),
}
# We will store the source code to the Cloud Function in a Google Cloud Storage bucket.
bucket = storage.Bucket("eta_demo_bucket")
# The Cloud Function source code itself needs to be zipped up into an
# archive, which we create using the pulumi.AssetArchive primitive.
# Each file under PATH_TO_SOURCE_CODE becomes one entry in the archive.
assets = {}
for file in os.listdir(PATH_TO_SOURCE_CODE):
    location = os.path.join(PATH_TO_SOURCE_CODE, file)
    asset = pulumi.FileAsset(path=location)
    assets[file] = asset
archive = pulumi.AssetArchive(assets=assets)
# Create the single Cloud Storage object, which contains all of the function's
# source code. ("main.py" and "requirements.txt".)
source_archive_object = storage.BucketObject(
    "eta_demo_object",
    # Timestamp in the object name forces a fresh upload on every deploy.
    name="main.py-%f" % time.time(),
    bucket=bucket.name,
    source=archive)
# Create the Cloud Function, deploying the source we just uploaded to Google
# Cloud Storage.
fxn = cloudfunctions.Function(
    "eta_demo_function",
    entry_point="get_demo",
    environment_variables=config_values,
    region="us-central1",
    runtime="python37",
    source_archive_bucket=bucket.name,
    source_archive_object=source_archive_object.name,
    trigger_http=True)
# Grant invoke permission to everyone, i.e. make the HTTP endpoint public.
invoker = cloudfunctions.FunctionIamMember(
    "invoker",
    project=fxn.project,
    region=fxn.region,
    cloud_function=fxn.name,
    role="roles/cloudfunctions.invoker",
    member="allUsers",
)
| 31.890411 | 87 | 0.746134 | 3.078125 |
f447f73ce8edbcb34ca56ace49f4d7c1a901e5ac
| 2,272 |
ts
|
TypeScript
|
src/routes/spaces.ts
|
ianleon/production-saas
|
f9f14ebf7af4dc35b5116c9c37519a761d319473
|
[
"MIT"
] | null | null | null |
src/routes/spaces.ts
|
ianleon/production-saas
|
f9f14ebf7af4dc35b5116c9c37519a761d319473
|
[
"MIT"
] | null | null | null |
src/routes/spaces.ts
|
ianleon/production-saas
|
f9f14ebf7af4dc35b5116c9c37519a761d319473
|
[
"MIT"
] | null | null | null |
import { compose } from 'worktop';
import * as Space from 'lib/models/space';
import * as User from 'lib/models/user';
import * as utils from 'lib/utils';
/**
 * GET /spaces
 * List all spaces owned by the authenticated user.
 * @requires Authentication
 */
export const list = compose(
	User.authenticate,
	async function (req, context) {
		const IDs = await Space.list(context.user!);
		const rows = await Promise.all(IDs.map(Space.find));
		// Drop lookups that came back empty, then shape for output.
		const output = [];
		for (const row of rows) {
			if (row) output.push(Space.output(row as Space.Space));
		}
		return utils.send(200, output);
	}
);
/**
 * POST /spaces
 * Create a new space owned by the authenticated user.
 * @requires Authentication
 */
export const create = compose(
	User.authenticate,
	async function (req, context) {
		const input = await utils.body<{ name?: string }>(req);
		// Normalize: missing body / missing name / whitespace-only all reject.
		const name = (input && input.name || '').trim();
		if (!name) {
			return utils.send(400, 'TODO: port over validation lib');
		}
		const doc = await Space.insert({ name }, context.user!);
		if (!doc) return utils.send(500, 'Error creating document');
		return utils.send(201, Space.output(doc));
	}
);
/**
 * GET /spaces/:spaceid
 * Show a single space.
 * @requires Authentication,Ownership
 */
export const show = compose(
	User.authenticate,
	Space.load, Space.isAuthorized,
	function (req, context) {
		// `Space.load` ran earlier in the chain, so context.space is populated.
		return utils.send(200, Space.output(context.space!));
	}
);
/**
 * PUT /spaces/:spaceid
 * Rename an existing space.
 * @requires Authentication,Ownership
 */
export const update = compose(
	User.authenticate,
	Space.load, Space.isAuthorized,
	async function (req, context) {
		const input = await utils.body<{ name?: string }>(req);
		// Normalize: missing body / missing name / whitespace-only all reject.
		const name = (input && input.name || '').trim();
		if (!name) return utils.send(400, 'TODO: port over validation lib');
		const doc = await Space.update(context.space!, { name });
		// A falsy result means the write failed.
		return doc
			? utils.send(200, Space.output(doc))
			: utils.send(500, 'Error updating document');
	}
);
/**
 * DELETE /spaces/:spaceid
 * Permanently remove a space.
 * @requires Authentication,Ownership
 */
export const destroy = compose(
	User.authenticate,
	Space.load, Space.isAuthorized,
	async function (req, context) {
		const removed = await Space.destroy(context.space!, context.user!);
		return removed
			? utils.send(204)
			: utils.send(500, 'Error while destroying Space');
	}
);
| 24.170213 | 70 | 0.671655 | 3.1875 |
4cdf2f68940022bc418529909c08b2c4c4ee0f1c
| 1,513 |
py
|
Python
|
lib/python/treadmill/cli/scheduler/apps.py
|
drienyov/treadmill
|
ce21537cd9a2fdb0567ac2aa3de1afcb2f6861de
|
[
"Apache-2.0"
] | 2 |
2017-10-31T18:48:20.000Z
|
2018-03-04T20:35:20.000Z
|
lib/python/treadmill/cli/scheduler/apps.py
|
bretttegart/treadmill
|
812109e31c503a6eddaee2d3f2e1faf2833b6aaf
|
[
"Apache-2.0"
] | null | null | null |
lib/python/treadmill/cli/scheduler/apps.py
|
bretttegart/treadmill
|
812109e31c503a6eddaee2d3f2e1faf2833b6aaf
|
[
"Apache-2.0"
] | null | null | null |
"""Show apps report."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import click
import pandas as pd
from treadmill import cli
from treadmill.cli.scheduler import fetch_report, print_report
from treadmill import restclient
def init():
    """Return top level command handler."""

    @click.command()
    @cli.handle_exceptions(restclient.CLI_REST_EXCEPTIONS)
    @click.option('--match', help='Server name pattern match')
    @click.option('--partition', help='Partition name pattern match')
    @click.option('--full', is_flag=True, default=False)
    @click.pass_context
    def apps(ctx, match, partition, full):
        """View apps report."""
        report = fetch_report(ctx.obj.get('api'), 'apps', match, partition)

        # Integer columns use -1 as a sentinel for N/A; blank those out.
        for col in ('identity', 'expires', 'lease', 'data_retention'):
            report.loc[report[col] == -1, col] = ''

        # Epoch seconds -> datetimes; durations -> timedeltas.
        report['expires'] = pd.to_datetime(report['expires'], unit='s')
        for col in ('lease', 'data_retention'):
            report[col] = pd.to_timedelta(report[col], unit='s')

        report = report.fillna('')

        if not full:
            # Trim to the summary columns unless --full was requested.
            report = report[[
                'instance', 'allocation', 'partition', 'server',
                'mem', 'cpu', 'disk'
            ]]

        print_report(report)

    return apps
| 30.877551 | 75 | 0.631196 | 3.125 |
433aec82e7b9b6cc232f933a9adfab307e594f3e
| 3,777 |
tsx
|
TypeScript
|
src/BouncingBallsDiv.tsx
|
joejensen/react-bouncing-balls
|
2bdaaecad4ca9c6396fb93a3d335d75ac0c515b1
|
[
"Unlicense"
] | null | null | null |
src/BouncingBallsDiv.tsx
|
joejensen/react-bouncing-balls
|
2bdaaecad4ca9c6396fb93a3d335d75ac0c515b1
|
[
"Unlicense"
] | null | null | null |
src/BouncingBallsDiv.tsx
|
joejensen/react-bouncing-balls
|
2bdaaecad4ca9c6396fb93a3d335d75ac0c515b1
|
[
"Unlicense"
] | 1 |
2020-09-06T07:26:21.000Z
|
2020-09-06T07:26:21.000Z
|
/**
* @class BouncingBallsDivComponent
*/
import * as React from 'react';
import {RefObject} from "react";
import {PointCollection} from "./PointCollection";
// Props for BouncingBallsDivComponent.
export type BouncingBallsDivProps = {
  src: string; // source passed to PointCollection.loadFromSource — presumably an image URL; TODO confirm
  width: number; // container width in px (applied to the div's inline style)
  height: number; // container height in px (applied to the div's inline style)
  cellSize: number; // forwarded to PointCollection.loadFromSource — assumed sampling cell size; TODO confirm
}
/**
 * Renders a div that hosts a bouncing-balls simulation driven by
 * PointCollection. Mouse/touch position is fed into the simulation in
 * element-local coordinates; leaving the element parks the pointer
 * far off-screen.
 */
export default class BouncingBallsDivComponent extends React.Component<BouncingBallsDivProps> {
  static defaultProps: BouncingBallsDivProps = {
    src: '',
    width: 512,
    height: 512,
    cellSize: 20
  };

  private readonly containerRef: RefObject<HTMLDivElement>;
  private readonly pointCollection: PointCollection = new PointCollection();
  // Pending animation timer id, so it can be cancelled on unmount.
  private timerId: number | undefined;

  constructor(props: BouncingBallsDivProps) {
    super(props);
    this.containerRef = React.createRef();
  }

  render() {
    return (
      <div ref={this.containerRef} className="BouncingBallsDiv_container"/>
    )
  }

  public componentDidMount(): void {
    const containerEl: HTMLDivElement | null = this.containerRef.current;
    if( !containerEl) {
      return;
    }

    containerEl.ontouchstart = e => {
      e.preventDefault();
    };
    containerEl.ontouchmove = e => {
      e.preventDefault();
      const touch = e.targetTouches[0];
      let pageX = 0;
      let pageY = 0;
      // Legacy fallback: derive page coords from client coords + scroll offsets.
      if (touch.pageX || touch.pageY) {
        pageX = touch.pageX;
        pageY = touch.pageY;
      } else if (touch.clientX || touch.clientY) {
        pageX = touch.clientX + document.body.scrollLeft + document.documentElement.scrollLeft;
        pageY = touch.clientY + document.body.scrollTop + document.documentElement.scrollTop;
      }
      this.trackPointer(containerEl, pageX, pageY);
    };
    containerEl.ontouchend = e => {
      e.preventDefault();
      this.releasePointer();
    };
    containerEl.ontouchcancel = e => {
      e.preventDefault();
      this.releasePointer();
    };
    containerEl.onmousemove = e => {
      let pageX = 0;
      let pageY = 0;
      // Legacy fallback: derive page coords from client coords + scroll offsets.
      if (e.pageX || e.pageY) {
        pageX = e.pageX;
        pageY = e.pageY;
      } else if (e.clientX || e.clientY) {
        pageX = e.clientX + document.body.scrollLeft + document.documentElement.scrollLeft;
        pageY = e.clientY + document.body.scrollTop + document.documentElement.scrollTop;
      }
      this.trackPointer(containerEl, pageX, pageY);
    };
    containerEl.onmouseleave = _e => {
      this.releasePointer();
    };

    containerEl.setAttribute('style', `width:${this.props.width}px; height:${this.props.height}px`);
    this.pointCollection.loadFromSource( this.props.src, this.props.width, this.props.height, this.props.cellSize);
    this.timeout();
  }

  public componentWillUnmount(): void {
    // Fix: the original never cancelled the setTimeout loop, leaving a
    // pending timer after unmount.
    if (this.timerId !== undefined) {
      clearTimeout(this.timerId);
      this.timerId = undefined;
    }
  }

  /**
   * Feed a pointer position (page coordinates) into the simulation,
   * translated into element-local space via the offsetParent chain.
   * Extracted from the duplicated touch/mouse handler bodies.
   */
  private trackPointer(el: HTMLElement, pageX: number, pageY: number): void {
    let offsetX = 0;
    let offsetY = 0;
    // Walk the offsetParent chain to get the element's absolute page offset.
    let node: any = el;
    if ( node.offsetParent) {
      do {
        offsetX += node.offsetLeft;
        offsetY += node.offsetTop;
        node = node.offsetParent;
      } while ( node.offsetParent);
    }
    this.pointCollection.mousePos.setValue(pageX - offsetX, pageY - offsetY, 0);
  }

  /** Park the simulated pointer far off-screen (pointer left / touch ended). */
  private releasePointer(): void {
    this.pointCollection.mousePos.setValue(-999, -999, -999);
  }

  /** Draw one frame and schedule the next (every 30 ms). Stops once the container ref is gone. */
  private timeout(): void {
    const container: HTMLDivElement | null = this.containerRef.current;
    if( !container) {
      return;
    }
    this.pointCollection.drawDiv(container);
    this.pointCollection.update();
    this.timerId = window.setTimeout(() => this.timeout(), 30);
  }
}
| 29.27907 | 115 | 0.644427 | 3.015625 |
a5193594c5b20a70fcdadf248bf65c1783c0d666
| 1,311 |
sh
|
Shell
|
Services_dev/MonoApp/run_bash.sh
|
samuelxu999/Microservices_dev
|
70d5845fdedbcaecb7f7cf8bc7a623053c57b136
|
[
"MIT"
] | null | null | null |
Services_dev/MonoApp/run_bash.sh
|
samuelxu999/Microservices_dev
|
70d5845fdedbcaecb7f7cf8bc7a623053c57b136
|
[
"MIT"
] | 2 |
2021-03-17T23:27:00.000Z
|
2021-03-17T23:27:01.000Z
|
Services_dev/MonoApp/run_bash.sh
|
samuelxu999/Microservices_dev
|
70d5845fdedbcaecb7f7cf8bc7a623053c57b136
|
[
"MIT"
] | 2 |
2019-04-23T22:13:18.000Z
|
2019-08-19T01:39:51.000Z
|
#!/bin/bash
# Launch the mono-service container.
#
# -i sets up an interactive session; -t allocates a pseudo tty; --rm makes this container ephemeral
# --privileged grants extended privileges to the container
# -p maps host ports to container ports
# -v @volume:@docker path. use volume to save data
# -v /etc/localtime:/etc/localtime:ro make sure docker's time syncs with that of the host
# --name=@ specify the name of the container; then the image to run the
# container from and the process to run in it (here bash).
IMAGE_NAME="mono_node"
CONTAINER_NAME="mono-service"
VOLUME_ACCOUNT="gethAccount"

RPC_PORT=$1
PORT=$2

# Argument validation. Exit non-zero on misuse so callers/scripts can detect
# the failure (the original exited 0, signalling success).
if [[ 2 -ne $# ]]; then
	echo "Usage $0 -rpcport -port!"
	exit 1
fi

if ! [[ $RPC_PORT =~ ^[0-9]+$ ]]; then
	echo "Error: rpcport should be integer!"
	exit 1
fi

if ! [[ $PORT =~ ^[0-9]+$ ]]; then
	echo "Error: port should be integer!"
	exit 1
fi

# execute docker run command (expansions quoted to survive odd values/paths)
docker run -i -t --rm \
	-p 8080:80 \
	-p "$RPC_PORT":8042 \
	-p "$PORT":30303 \
	--privileged=true \
	-v /etc/localtime:/etc/localtime:ro \
	-v "$VOLUME_ACCOUNT":/home/docker/account \
	-v "$(pwd)"/node_data:/home/docker/node_data \
	--name="$CONTAINER_NAME" "$IMAGE_NAME" /bin/bash
| 31.214286 | 299 | 0.698703 | 3 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.