Dataset schema (one row per source file; ⌀ marks columns that may be empty):
hexsha: string, length 40
size: int64, 140 to 1.03M
ext: string, 94 distinct values
lang: string, 21 distinct values
max_stars_repo_path: string, length 3 to 663
max_stars_repo_name: string, length 4 to 120
max_stars_repo_head_hexsha: string, length 40 to 78
max_stars_repo_licenses: list, length 1 to 10
max_stars_count: int64, 1 to 368k (⌀)
max_stars_repo_stars_event_min_datetime: string, length 24 (⌀)
max_stars_repo_stars_event_max_datetime: string, length 24 (⌀)
max_issues_repo_path: string, length 3 to 663
max_issues_repo_name: string, length 4 to 120
max_issues_repo_head_hexsha: string, length 40 to 78
max_issues_repo_licenses: list, length 1 to 10
max_issues_count: int64, 1 to 116k (⌀)
max_issues_repo_issues_event_min_datetime: string, length 24 (⌀)
max_issues_repo_issues_event_max_datetime: string, length 24 (⌀)
max_forks_repo_path: string, length 3 to 663
max_forks_repo_name: string, length 4 to 135
max_forks_repo_head_hexsha: string, length 40 to 78
max_forks_repo_licenses: list, length 1 to 10
max_forks_count: int64, 1 to 105k (⌀)
max_forks_repo_forks_event_min_datetime: string, length 24 (⌀)
max_forks_repo_forks_event_max_datetime: string, length 24 (⌀)
content: string, length 140 to 1.03M
avg_line_length: float64, 2.32 to 23.1k
max_line_length: int64, 11 to 938k
alphanum_fraction: float64, 0.01 to 1
score: float32, 3 to 4.25
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
b0b628f835e24c6dec57b66bf8d5f70da3564481
| 2,344 |
py
|
Python
|
Scripts/Preprocessing/spectrogram.py
|
CotaCalin/AutomatedMusicTranscription
|
02ea0d2f48f614f8a929f687a112e8b309599d63
|
[
"MIT"
] | 1 |
2019-12-18T16:06:49.000Z
|
2019-12-18T16:06:49.000Z
|
Scripts/Preprocessing/spectrogram.py
|
CotaCalin/AutomatedMusicTranscription
|
02ea0d2f48f614f8a929f687a112e8b309599d63
|
[
"MIT"
] | null | null | null |
Scripts/Preprocessing/spectrogram.py
|
CotaCalin/AutomatedMusicTranscription
|
02ea0d2f48f614f8a929f687a112e8b309599d63
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
from scipy import signal
from scipy.io import wavfile
import os
import wave
import numpy as np
#from spectrogram2 import plotstft
# Generate and plot a constant-Q power spectrum
import librosa
import librosa.display
class SpectrogramBuilder():
    def __init__(self, WavPath, DestinationPath):
        self.__wavPath = WavPath
        self.__wav_files = self.get_wavs()
        self.__destinationPath = DestinationPath

    def get_wavs(self):
        # Collect the full paths of every .wav file in the source directory.
        ret = []
        for wav_file in os.listdir(self.__wavPath):
            if wav_file.endswith(".wav"):
                ret.append(os.path.join(self.__wavPath, wav_file))
        return ret

    def build_spectrograms(self):
        # The loop variable must not be called "wavfile", which would shadow
        # the imported scipy.io.wavfile module.
        for wav_path in self.__wav_files:
            self.graph_spectrogram(wav_path)
    def __build_spectrogram(self, filePath):
        # Linear-frequency spectrogram via scipy, plotted directly with pcolormesh.
        sample_rate, samples = wavfile.read(filePath)
        frequencies, times, spectrogram = signal.spectrogram(samples, sample_rate)
        plt.pcolormesh(times, frequencies, spectrogram)
        plt.ylabel('Frequency [Hz]')
        plt.xlabel('Time [sec]')
        plt.show()
    def graph_spectrogram(self, wav_file):
        # Q Transform: constant-Q spectrogram rendered with librosa and saved as a PNG.
        y, sr = librosa.load(wav_file)
        C = np.abs(librosa.cqt(y, sr=sr))
        librosa.display.specshow(librosa.amplitude_to_db(C, ref=np.max),
                                 sr=sr)  # , x_axis='time', y_axis='cqt_hz')
        #plt.colorbar(format='%+2.0f dB')
        #plt.title('spectrogram of %r' % wav_file)
        #plt.tight_layout()
        fileName = 'spectrogram_{0}.png'.format(os.path.basename(wav_file))
        plt.savefig(os.path.join(self.__destinationPath, fileName), bbox_inches="tight")
        plt.close('all')
    def get_wav_info(self, wav_file):
        # Read raw frames with the wave module; np.frombuffer replaces the
        # deprecated pylab.fromstring.
        wav = wave.open(wav_file, 'r')
        frames = wav.readframes(-1)
        sound_info = np.frombuffer(frames, dtype='int16')
        frame_rate = wav.getframerate()
        wav.close()
        return sound_info, frame_rate
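# --- Editor's usage sketch (not part of the original file) ---
# Minimal driver for the class above; the input and output directories are
# hypothetical placeholders, not paths from the original project.
if __name__ == "__main__":
    builder = SpectrogramBuilder("data/wav", "data/spectrograms")
    builder.build_spectrograms()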
| 33.485714 | 133 | 0.62628 | 3.25 |
1a8884148e176fea46e77a0ad39d5188f10274f7
| 499 |
py
|
Python
|
LeetCode/HashTable/594. Longest Harmonious Subsequence.py
|
thehanemperor/LeetCode
|
8d120162657a1e29c3e821b51ac4121300fc7a12
|
[
"MIT"
] | null | null | null |
LeetCode/HashTable/594. Longest Harmonious Subsequence.py
|
thehanemperor/LeetCode
|
8d120162657a1e29c3e821b51ac4121300fc7a12
|
[
"MIT"
] | null | null | null |
LeetCode/HashTable/594. Longest Harmonious Subsequence.py
|
thehanemperor/LeetCode
|
8d120162657a1e29c3e821b51ac4121300fc7a12
|
[
"MIT"
] | null | null | null |
# EASY
# count each element in array and store in dict{}
# loop through the array check if exist nums[i]+1 in dict{}
from typing import List

class Solution:
    def findLHS(self, nums: List[int]) -> int:
        n = len(nums)
        appear = {}
        # Count occurrences of each value.
        for i in range(n):
            appear[nums[i]] = appear.get(nums[i], 0) + 1
        result = 0
        # A harmonious subsequence uses exactly the values k and k+1.
        for k, v in appear.items():
            if k + 1 in appear:
                result = max(result, v + appear[k + 1])
        return result
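# --- Editor's usage sketch (not part of the original file) ---
# Worked example: in [1, 3, 2, 2, 5, 2, 3, 7] the counts are {1:1, 3:2, 2:3, 5:1, 7:1};
# the best (k, k+1) pair is (2, 3) with 3 + 2 = 5 elements.
if __name__ == "__main__":
    assert Solution().findLHS([1, 3, 2, 2, 5, 2, 3, 7]) == 5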
| 26.263158 | 59 | 0.503006 | 3.078125 |
1eb1d2d155823e1851c12bbc0f2be38e9ccbac68
| 2,672 |
ps1
|
PowerShell
|
scripts/windows-setup.ps1
|
kyranet/drakhtar
|
1d8d1ad84ae0fb953e813a761c17ff306ca5aac8
|
[
"MIT"
] | 7 |
2019-05-05T14:58:12.000Z
|
2019-11-11T21:36:57.000Z
|
scripts/windows-setup.ps1
|
kyranet/drakhtar
|
1d8d1ad84ae0fb953e813a761c17ff306ca5aac8
|
[
"MIT"
] | 17 |
2019-02-14T21:12:28.000Z
|
2019-05-13T23:28:26.000Z
|
scripts/windows-setup.ps1
|
kyranet/Drakhtar
|
1d8d1ad84ae0fb953e813a761c17ff306ca5aac8
|
[
"MIT"
] | 1 |
2021-04-27T21:06:53.000Z
|
2021-04-27T21:06:53.000Z
|
# Set up the submodules so Drakhtar i18n and Telemetry are properly included in the project:
git submodule init
git submodule update
# Define the hooks patch as $root/hooks over $root/.git/hooks so we can run our own hooks:
git config core.hooksPath hooks
$local:DependencyFolder = Join-Path -Path $(Split-Path $PSScriptRoot) -ChildPath "deps"
$local:BaseDomain = "https://www.libsdl.org/"
function Step-Download {
[CmdletBinding()]
param (
[string] $Output,
[string] $UriPath
)
process {
$private:OutputDirectory = Join-Path -Path $DependencyFolder -ChildPath $Output
if (Test-Path -Path $OutputDirectory) {
Write-Host "Skipping [" -ForegroundColor Green -NoNewline
Write-Host $Output -ForegroundColor Blue -NoNewline
Write-Host "] as it already exists in [" -ForegroundColor Green -NoNewline
Write-Host $OutputDirectory -ForegroundColor Blue -NoNewline
Write-Host "]." -ForegroundColor Green
} else {
Write-Host "Downloading [" -ForegroundColor DarkGray -NoNewline
Write-Host $Output -ForegroundColor Blue -NoNewline
Write-Host "] into [" -ForegroundColor DarkGray -NoNewline
Write-Host $OutputDirectory -ForegroundColor Blue -NoNewline
Write-Host "]." -ForegroundColor DarkGray
$private:DownloadUri = $BaseDomain + $UriPath
$private:File = New-TemporaryFile
Invoke-WebRequest -Uri $DownloadUri -OutFile $File
$File | Expand-Archive -DestinationPath $DependencyFolder -Force
$File | Remove-Item
}
}
}
function Remove-SafeItem([string] $Path) {
if (Test-Path -Path $Path) {
Write-Host "Deleting [" -ForegroundColor DarkGray -NoNewline
Write-Host $Path -ForegroundColor Blue -NoNewline
Write-Host "]." -ForegroundColor DarkGray
Remove-Item $Path
}
}
$private:Sdl2 = "SDL2-2.0.14"
$private:Sdl2Ttf = "SDL2_ttf-2.0.15"
$private:Sdl2Image = "SDL2_image-2.0.5"
$private:Sdl2Mixer = "SDL2_mixer-2.0.4"
# Download the dependencies:
Step-Download -Output $Sdl2 -UriPath "release/SDL2-devel-2.0.14-VC.zip"
Step-Download -Output $Sdl2Ttf -UriPath "projects/SDL_ttf/release/SDL2_ttf-devel-2.0.15-VC.zip"
Step-Download -Output $Sdl2Image -UriPath "projects/SDL_image/release/SDL2_image-devel-2.0.5-VC.zip"
Step-Download -Output $Sdl2Mixer -UriPath "projects/SDL_mixer/release/SDL2_mixer-devel-2.0.4-VC.zip"
# Remove SDL2 TTF's zlib1.dll files, as they are already included in SDL2 Image:
Remove-SafeItem $(Join-Path -Path $DependencyFolder -ChildPath "$Sdl2Ttf/lib/x64/zlib1.dll")
Remove-SafeItem $(Join-Path -Path $DependencyFolder -ChildPath "$Sdl2Ttf/lib/x86/zlib1.dll")
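# --- Editor's note (illustrative, not part of the original script) ---
# Additional SDL packages would follow the same Step-Download pattern; the
# folder name and URI below are placeholders, not verified release paths:
# $private:Sdl2Net = "SDL2_net-2.0.1"
# Step-Download -Output $Sdl2Net -UriPath "projects/SDL_net/release/SDL2_net-devel-2.0.1-VC.zip"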
| 40.484848 | 100 | 0.708832 | 3.03125 |
b24d53bb89f4beb4973066bc5471161bcf19e331
| 16,926 |
swift
|
Swift
|
Firefly Fixture/Firefly Fixture/Geometry.swift
|
denisbohm/firefly-production-tools
|
30b06b42e339456f9412e4be6bfbc2bc2392ec77
|
[
"Apache-2.0"
] | 1 |
2017-07-18T22:53:34.000Z
|
2017-07-18T22:53:34.000Z
|
Firefly Fixture/Firefly Fixture/Geometry.swift
|
denisbohm/firefly-production-tools
|
30b06b42e339456f9412e4be6bfbc2bc2392ec77
|
[
"Apache-2.0"
] | null | null | null |
Firefly Fixture/Firefly Fixture/Geometry.swift
|
denisbohm/firefly-production-tools
|
30b06b42e339456f9412e4be6bfbc2bc2392ec77
|
[
"Apache-2.0"
] | null | null | null |
//
// Geometry.swift
// Firefly Fixture
//
// Created by Denis Bohm on 1/31/17.
// Copyright © 2017 Firefly Design LLC. All rights reserved.
//
import Foundation
class Geometry {
struct Point3D {
let x: CGFloat
let y: CGFloat
let z: CGFloat
init(x: CGFloat, y: CGFloat, z: CGFloat) {
self.x = x
self.y = y
self.z = z
}
init(xy: NSPoint, z: CGFloat) {
self.x = xy.x
self.y = xy.y
self.z = z
}
}
class Path3D {
var points: [Point3D] = []
}
static func lastPoint(path: NSBezierPath) -> NSPoint {
var first: NSPoint? = nil
var last = NSPoint(x: 0, y: 0)
for i in 0 ..< path.elementCount {
var points: [NSPoint] = [NSPoint(x: 0, y: 0), NSPoint(x: 0, y: 0), NSPoint(x: 0, y: 0)]
let kind = path.element(at: i, associatedPoints: &points)
switch (kind) {
case .moveToBezierPathElement:
last = points[0]
first = last
case .lineToBezierPathElement:
last = points[0]
case .curveToBezierPathElement:
last = points[2]
case .closePathBezierPathElement:
if let first = first {
last = first
}
}
}
return last
}
static func firstPoint(path: NSBezierPath) -> NSPoint {
for i in 0 ..< path.elementCount {
var points: [NSPoint] = [NSPoint(x: 0, y: 0), NSPoint(x: 0, y: 0), NSPoint(x: 0, y: 0)]
let kind = path.element(at: i, associatedPoints: &points)
switch (kind) {
case .moveToBezierPathElement:
return points[0]
default:
break
}
}
return NSPoint(x: 0, y: 0)
}
static func equal(point1: NSPoint, point2: NSPoint) -> Bool {
return (point1.x == point2.x) && (point1.y == point2.y)
}
static func distance(point1: NSPoint, point2: NSPoint) -> CGFloat {
let dx = point1.x - point2.x
let dy = point1.y - point2.y
return sqrt(dx * dx + dy * dy)
}
static func canCombine(path1: NSBezierPath, path2: NSBezierPath) -> Bool {
let last1 = Geometry.lastPoint(path: path1)
let first2 = Geometry.firstPoint(path: path2)
return Geometry.equal(point1: last1, point2: first2)
}
static func combine(path1: NSBezierPath, path2: NSBezierPath) -> NSBezierPath {
let newPath = NSBezierPath()
for i in 0 ..< path1.elementCount {
var points: [NSPoint] = [NSPoint(x: 0, y: 0), NSPoint(x: 0, y: 0), NSPoint(x: 0, y: 0)]
let kind = path1.element(at: i, associatedPoints: &points)
switch (kind) {
case .moveToBezierPathElement:
newPath.move(to: points[0])
case .lineToBezierPathElement:
newPath.line(to: points[0])
case .curveToBezierPathElement:
newPath.curve(to: points[2], controlPoint1: points[0], controlPoint2: points[1])
case .closePathBezierPathElement:
newPath.close()
}
}
for i in 0 ..< path2.elementCount {
var points: [NSPoint] = [NSPoint(x: 0, y: 0), NSPoint(x: 0, y: 0), NSPoint(x: 0, y: 0)]
let kind = path2.element(at: i, associatedPoints: &points)
switch (kind) {
case .moveToBezierPathElement:
if i == 0 {
newPath.line(to: points[0])
} else {
newPath.move(to: points[0])
}
case .lineToBezierPathElement:
newPath.line(to: points[0])
case .curveToBezierPathElement:
newPath.curve(to: points[2], controlPoint1: points[0], controlPoint2: points[1])
case .closePathBezierPathElement:
newPath.close()
}
}
return newPath
}
static func combine(paths: [NSBezierPath]) -> [NSBezierPath] {
let path1 = paths[0]
let path2 = paths[paths.count - 1]
if canCombine(path1: path1, path2: path2) {
var newPaths: [NSBezierPath] = []
let combined = combine(path1: path1, path2: path2)
newPaths.append(combined)
if paths.count > 2 {
for i in 1 ... paths.count - 2 {
newPaths.append(paths[i])
}
}
return newPaths
} else
        if canCombine(path1: path2, path2: path1) {
            var newPaths: [NSBezierPath] = []
            let combined = combine(path1: path2, path2: path1)
            // Guard the range: with only two paths, 1 ... paths.count - 2 would form an
            // invalid range and trap (mirrors the paths.count > 2 check in the branch above).
            if paths.count > 2 {
                for i in 1 ... paths.count - 2 {
                    newPaths.append(paths[i])
                }
            }
            newPaths.append(combined)
            return newPaths
} else {
return paths
}
}
static func sortByX(paths: [NSBezierPath]) -> [NSBezierPath] {
return paths.sorted() {
let p0 = firstPoint(path: $0)
let p1 = firstPoint(path: $1)
return p0.x < p1.x
}
}
static func orderByY(paths: [NSBezierPath]) -> [NSBezierPath] {
var sortedPaths: [NSBezierPath] = []
for path in paths {
let first = firstPoint(path: path)
let last = lastPoint(path: path)
var sortedPath = path
if last.y < first.y {
sortedPath = path.reversed
}
sortedPaths.append(sortedPath)
}
return sortedPaths
}
static func join(path: NSBezierPath) -> NSBezierPath {
let epsilon: CGFloat = 0.001;
var last = NSPoint(x: 0.123456, y: 0.123456)
let newPath = NSBezierPath()
for i in 0 ..< path.elementCount {
var points: [NSPoint] = [NSPoint(x: 0, y: 0), NSPoint(x: 0, y: 0), NSPoint(x: 0, y: 0)]
let kind = path.element(at: i, associatedPoints: &points)
switch (kind) {
case .moveToBezierPathElement:
let p0 = points[0]
if (fabs(last.x - p0.x) > epsilon) || (fabs(last.y - p0.y) > epsilon) {
newPath.move(to: p0)
last = points[0]
// NSLog(@"keeping move to %0.3f, %0.3f", points[0].x, points[0].y);
} else {
// NSLog(@"discarding move to %0.3f, %0.3f", points[0].x, points[0].y);
}
case .lineToBezierPathElement:
let p0 = points[0]
if (fabs(last.x - p0.x) > epsilon) || (fabs(last.y - p0.y) > epsilon) {
newPath.line(to: p0)
last = p0
}
case .curveToBezierPathElement:
newPath.curve(to: points[2], controlPoint1: points[0], controlPoint2: points[1])
last = points[2]
case .closePathBezierPathElement:
newPath.close()
}
}
return newPath
}
static func intersection(p0_x: CGFloat, p0_y: CGFloat, p1_x: CGFloat, p1_y: CGFloat, p2_x: CGFloat, p2_y: CGFloat, p3_x: CGFloat, p3_y: CGFloat) -> NSPoint? {
let s1_x = p1_x - p0_x
let s1_y = p1_y - p0_y
let s2_x = p3_x - p2_x
let s2_y = p3_y - p2_y
let s = (-s1_y * (p0_x - p2_x) + s1_x * (p0_y - p2_y)) / (-s2_x * s1_y + s1_x * s2_y)
let t = ( s2_x * (p0_y - p2_y) - s2_y * (p0_x - p2_x)) / (-s2_x * s1_y + s1_x * s2_y)
if (s >= 0) && (s <= 1) && (t >= 0) && (t <= 1) {
let x = p0_x + (t * s1_x)
let y = p0_y + (t * s1_y)
return NSPoint(x: x, y: y)
}
return nil
}
static func intersection(p0: NSPoint, p1: NSPoint, x: CGFloat) -> NSPoint? {
let big: CGFloat = 1e20
return intersection(p0_x: p0.x, p0_y: p0.y, p1_x: p1.x, p1_y: p1.y, p2_x: x, p2_y: -big, p3_x: x, p3_y: big)
}
static func slice(p0: CGPoint, p1: CGPoint, x0: CGFloat, x1: CGFloat) -> NSBezierPath {
let newPath = NSBezierPath()
if (p1.x <= x0) || (p0.x >= x1) {
// line is completely outside slice area
newPath.move(to: p0)
newPath.line(to: p1)
} else
if (x0 <= p0.x) && (p1.x <= x1) {
// line is completely inside slice area
} else {
// only handle horizontal line splitting
if (p0.x < x0) && (p1.x > x0) && (p1.x <= x1) {
// line only crosses x0
// split line at x0. keep left segment
if let p = intersection(p0: p0, p1: p1, x: x0) {
newPath.move(to: p0)
newPath.line(to: p)
} else {
NSLog("should not happen")
}
} else
if (p0.x < x0) && (p1.x > x1) {
// line crosses both x0 and x1
// split line at x0 and x1. keep left and right segments
if
let pa = intersection(p0: p0, p1: p1, x: x0),
let pb = intersection(p0: p0, p1: p1, x: x1)
{
newPath.move(to: p0)
newPath.line(to: pa)
newPath.move(to: pb)
newPath.line(to: p1)
} else {
NSLog("should not happen")
}
} else
if (x0 <= p0.x) && (p0.x < x1) && (x1 < p1.x) {
// line only crosses x1
// split line at x1. keep right segment
if let p = intersection(p0: p0, p1: p1, x: x1) {
newPath.move(to: p)
newPath.line(to: p1)
} else {
NSLog("should not happen")
}
} else {
NSLog("should not happen")
}
}
return newPath
}
static func slice(pa: CGPoint, pb: CGPoint, x0: CGFloat, x1: CGFloat) -> NSBezierPath {
if pa.x < pb.x {
return Geometry.slice(p0: pa, p1: pb, x0: x0, x1: x1)
} else {
let path = Geometry.slice(p0: pb, p1: pa, x0: x0, x1: x1)
var subpaths = Geometry.segments(path: path)
for i in 0 ..< subpaths.count {
subpaths[i] = subpaths[i].reversed
}
subpaths.reverse()
let reversed = NSBezierPath()
for subpath in subpaths {
reversed.append(subpath)
}
return reversed
}
}
static func slice(path: NSBezierPath, x0: CGFloat, x1: CGFloat) -> NSBezierPath {
var last = NSPoint(x: 0, y: 0)
let newPath = NSBezierPath()
for i in 0 ..< path.elementCount {
var points: [NSPoint] = [NSPoint(x: 0, y: 0), NSPoint(x: 0, y: 0), NSPoint(x: 0, y: 0)]
let kind = path.element(at: i, associatedPoints: &points)
switch (kind) {
case .moveToBezierPathElement:
last = points[0]
case .lineToBezierPathElement:
let path = slice(pa: last, pb: points[0], x0: x0, x1: x1)
if !path.isEmpty {
newPath.append(path)
last = lastPoint(path: path)
} else {
last = points[0]
}
case .curveToBezierPathElement:
let p = points[2]
if (p.x < x0) || (p.x > x1) {
newPath.curve(to: points[2], controlPoint1: points[0], controlPoint2: points[1])
last = points[2]
}
case .closePathBezierPathElement:
break
}
}
return join(path: newPath)
}
static func segments(path: NSBezierPath) -> [NSBezierPath] {
var paths: [NSBezierPath] = []
var newPath = NSBezierPath()
for i in 0 ..< path.elementCount {
var points: [NSPoint] = [NSPoint(x: 0, y: 0), NSPoint(x: 0, y: 0), NSPoint(x: 0, y: 0)]
let kind = path.element(at: i, associatedPoints: &points)
switch (kind) {
case .moveToBezierPathElement:
if !newPath.isEmpty {
paths.append(newPath)
newPath = NSBezierPath()
}
newPath.move(to: points[0])
case .lineToBezierPathElement:
newPath.line(to: points[0])
case .curveToBezierPathElement:
newPath.curve(to: points[2], controlPoint1: points[0], controlPoint2: points[1])
case .closePathBezierPathElement:
newPath.close()
break
}
}
if !newPath.isEmpty {
paths.append(newPath)
}
return paths
}
// Take two concentric continuous curves and split off the left and right segments (removing the segments in the middle). Return:
// path - the complete path made by joining the resulting left and right segments (as two subpaths)
// leftOuterPath - the left outer path with points from min y to max y
// leftInnerPath - the left inner path with points from min y to max y
// rightInnerPath - the right inner path with points from min y to max y
// rightOuterPath - the right outer path with points from min y to max y
static func split(path1: NSBezierPath, path2: NSBezierPath, x0: CGFloat, x1: CGFloat) -> (path: NSBezierPath, leftOuterPath: NSBezierPath, leftInnerPath: NSBezierPath, rightInnerPath: NSBezierPath, rightOuterPath: NSBezierPath) {
let sliced1 = Geometry.slice(path: path1, x0: x0, x1: x1)
let segments1 = Geometry.segments(path: Geometry.join(path: sliced1))
let segs1 = Geometry.combine(paths: segments1)
let ordered1 = Geometry.orderByY(paths: segs1)
let sorted1 = Geometry.sortByX(paths: ordered1)
let sliced2 = Geometry.slice(path: path2, x0: x0, x1: x1)
let segments2 = Geometry.segments(path: Geometry.join(path: sliced2))
let segs2 = Geometry.combine(paths: segments2)
let ordered2 = Geometry.orderByY(paths: segs2)
let sorted2 = Geometry.sortByX(paths: ordered2)
let final = NSBezierPath()
for i in 0 ..< sorted1.count {
let a = sorted1[i].reversed
let b = sorted2[i]
let b0 = Geometry.firstPoint(path: b)
a.line(to: b0)
let path = Geometry.combine(paths: [a, b])[0]
path.close()
final.append(path)
}
return (path: final, leftOuterPath: sorted1[0], leftInnerPath: sorted2[0], rightInnerPath: sorted2[1], rightOuterPath: sorted1[1])
}
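    // --- Editor's usage sketch (hypothetical, not part of the original file) ---
    // Given two concentric closed outlines and a vertical band [x0, x1] to remove,
    // split(path1:path2:x0:x1:) returns the joined left/right rings plus the four
    // edge segments, each ordered from min y to max y:
    // let result = Geometry.split(path1: outerOutline, path2: innerOutline, x0: 10, x1: 30)
    // let leftWall = result.leftOuterPath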
static func bezierPathForWires(wires: [Board.Wire]) -> NSBezierPath {
let epsilon = CGFloat(0.001)
var remaining = wires
let path = NSBezierPath()
let current = remaining.removeFirst()
// NSLog(@"+ %0.3f, %0.3f - %0.3f, %0.3f", current.x1, current.y1, current.x2, current.y2);
path.append(current.bezierPath())
var cx = current.x2
var cy = current.y2
while remaining.count > 0 {
var found = false
for index in (0 ..< remaining.count).reversed() {
let candidate = remaining[index]
if ((fabs(candidate.x1 - cx) < epsilon) && (fabs(candidate.y1 - cy) < epsilon)) {
// NSLog(@"> %0.3f, %0.3f - %0.3f, %0.3f", candidate.x1, candidate.y1, candidate.x2, candidate.y2);
remaining.remove(at: index)
path.append(candidate.bezierPath())
cx = candidate.x2
cy = candidate.y2
found = true
break
}
if ((fabs(candidate.x2 - cx) < epsilon) && (fabs(candidate.y2 - cy) < epsilon)) {
// NSLog(@"< %0.3f, %0.3f - %0.3f, %0.3f", candidate.x1, candidate.y1, candidate.x2, candidate.y2);
remaining.remove(at: index)
path.append(candidate.bezierPath().reversed)
cx = candidate.x1
cy = candidate.y1
found = true
break
}
}
if (!found) {
break;
}
}
return Geometry.join(path: path)
}
}
| 39.271462 | 233 | 0.492201 | 3.046875 |
874dee4d2fc1b49048226f859c9a85c6bb003b38
| 969 |
swift
|
Swift
|
base-app-ios/presentation/foundation/SyncScreens.swift
|
FuckBoilerplate/base_app_ios
|
3d3ee8b9d0705b306547ab03487471cc930c6da5
|
[
"Apache-2.0"
] | 20 |
2016-04-11T19:40:08.000Z
|
2021-03-03T13:24:42.000Z
|
base-app-ios/presentation/foundation/SyncScreens.swift
|
FuckBoilerplate/base_app_ios
|
3d3ee8b9d0705b306547ab03487471cc930c6da5
|
[
"Apache-2.0"
] | 5 |
2016-09-19T15:18:29.000Z
|
2017-09-22T10:38:08.000Z
|
base-app-ios/presentation/foundation/SyncScreens.swift
|
FuckBoilerplate/base_app_ios
|
3d3ee8b9d0705b306547ab03487471cc930c6da5
|
[
"Apache-2.0"
] | 2 |
2016-08-03T06:28:35.000Z
|
2016-08-10T16:49:09.000Z
|
//
// SyncScreens.swift
// base-app-ios
//
// Created by Roberto Frontado on 4/22/16.
// Copyright © 2016 Roberto Frontado. All rights reserved.
//
protocol SyncScreensMatcher {
func matchesTarget(_ key: String) -> Bool
}
class SyncScreens {
private var pendingScreens: [String]!
init() {
pendingScreens = []
}
func addScreen(screen: String) {
if !pendingScreens.contains(screen) {
pendingScreens.append(screen)
}
}
func needToSync(matcher: SyncScreensMatcher) -> Bool {
var needToSync = false
var index = 0
for i in 0..<pendingScreens.count {
if matcher.matchesTarget(pendingScreens[i]) {
needToSync = true
index = i
break
}
}
        if needToSync {
            // Swift's Array has no removeObject(_:); remove(at:) drops the matched entry.
            pendingScreens.remove(at: index)
        }
return needToSync
}
}
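// --- Editor's usage sketch (hypothetical, not part of the original file) ---
// A minimal matcher conforming to SyncScreensMatcher; names and usage are
// illustrative only.
struct ExactScreenMatcher: SyncScreensMatcher {
    let target: String
    func matchesTarget(_ key: String) -> Bool { return key == target }
}
// let sync = SyncScreens()
// sync.addScreen(screen: "UserProfile")
// sync.needToSync(matcher: ExactScreenMatcher(target: "UserProfile"))  // true, and removes it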
| 20.617021 | 59 | 0.537668 | 3.015625 |
e4523a57de335a2e7452cfda50260fe370c24e67
| 1,833 |
sh
|
Shell
|
storage/scripts/displaywebpagescript.sh
|
IrisBroadcast/ophrys-signage
|
f7c4787794ab87960a0d07fb63e674c40bb88697
|
[
"BSD-3-Clause"
] | 8 |
2019-11-12T18:51:10.000Z
|
2020-12-21T22:26:30.000Z
|
storage/scripts/displaywebpagescript.sh
|
IrisBroadcast/OphrysSignage
|
0bec692479d93d769e46a81eb46841daaf67f68a
|
[
"BSD-3-Clause"
] | 4 |
2020-02-06T10:31:17.000Z
|
2020-02-26T20:55:41.000Z
|
storage/scripts/displaywebpagescript.sh
|
IrisBroadcast/OphrysSignage
|
0bec692479d93d769e46a81eb46841daaf67f68a
|
[
"BSD-3-Clause"
] | 2 |
2020-02-06T10:34:15.000Z
|
2020-03-08T01:24:26.000Z
|
#!/bin/bash
COMMONFILE=/usr/local/aloe/scripts/common.sh
. $COMMONFILE
STATEFILENODE="/usr/local/aloe/scripts/ophrys_state_node.json"
function cleanChromium
{
PREFERENCES="/home/pi/.config/chromium/Default/Preferences"
if [ -f $PREFERENCES ];then
sed -i 's/"exit_type":"Crashed"/"exit_type":"Normal"/g' $PREFERENCES
fi
}
function openUrl
{
DYNAMIC_URL=$(cat $STATEFILENODE | jq -r '.url')
BROWSERPARAMETER=$(cat $STATEFILENODE | jq -r '.browserparameter')
# check if this is boot or not - Show IP-adress if this is boot
TMPSPLASH="/tmp/splash.png"
if [ ! -f /tmp/online ];then
sudo cp $GRAPHICSFOLDER/splash.png $TMPSPLASH
IPv4=$(hostname -I)
HOST=$(hostname)
printf "convert -pointsize 40 -fill white -draw 'text 715,1000 \"IPv4: $IPv4\nHostname: $HOST\" ' /usr/local/aloe/graphics/embedip.png /usr/local/aloe/graphics/splash.png" > /tmp/temp
sudo bash /tmp/temp
sudo service lightdm restart
sleep 10
## Restore splashscreen to default
if [ -f $TMPSPLASH ];then
sudo cp $TMPSPLASH $GRAPHICSFOLDER/splash.png
fi
fi
# Make sure state file exists
if [ -e $STATEFILENODE ];then
DYNAMIC_URL=$(cat $STATEFILENODE | jq -r '.url')
BROWSERPARAMETER=$(cat $STATEFILENODE | jq -r '.browserparameter')
else
DYNAMIC_URL="http://localhost:82"
BROWSERPARAMETER=""
fi
if [ -z "$DYNAMIC_URL" ];then
DYNAMIC_URL="http://localhost:82"
fi
export DISPLAY=:0.0
chromium-browser $BROWSERPARAMETER --disable-site-isolation-trials --disable-web-security --user-data-dir=/tmp/temp/ --noerrdialogs --check-for-update-interval=1209600 --disable-session-crashed-bubble --disable-infobars --disable-restore-session-state --disable-features=TranslateUI --kiosk --disable-pinch --overscroll-history-navigation=0 --proxy-auto-detect $DYNAMIC_URL
}
cleanChromium
openUrl
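# --- Editor's note (illustrative, not part of the original script) ---
# openUrl reads the state file with jq, so ophrys_state_node.json is expected
# to look roughly like this (values are placeholders):
# {
#   "url": "http://localhost:82",
#   "browserparameter": "--incognito"
# }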
| 33.944444 | 374 | 0.716312 | 3.109375 |
e2483d05b04196ef8f605e8147da7946db65efd3
| 4,011 |
py
|
Python
|
pineboolib/application/database/tests/test_pnconnection_manager.py
|
Aulla/pineboo
|
3ad6412d365a6ad65c3bb2bdc03f5798d7c37004
|
[
"MIT"
] | 2 |
2017-12-10T23:06:16.000Z
|
2017-12-10T23:06:23.000Z
|
pineboolib/application/database/tests/test_pnconnection_manager.py
|
Aulla/pineboo
|
3ad6412d365a6ad65c3bb2bdc03f5798d7c37004
|
[
"MIT"
] | 36 |
2017-11-05T21:13:47.000Z
|
2020-08-26T15:56:15.000Z
|
pineboolib/application/database/tests/test_pnconnection_manager.py
|
Aulla/pineboo
|
3ad6412d365a6ad65c3bb2bdc03f5798d7c37004
|
[
"MIT"
] | 8 |
2017-11-05T15:56:31.000Z
|
2019-04-25T16:32:28.000Z
|
"""Test_pnconnection module."""
import unittest
from pineboolib.loader.main import init_testing, finish_testing
from pineboolib import application
from pineboolib.application.database import pnsqlcursor
from pineboolib.core.utils import logging
import time
LOGGER = logging.get_logger(__name__)
USER_ID: str
class TestPNConnectionManager(unittest.TestCase):
"""TestPNConnection Class."""
@classmethod
def setUp(cls) -> None:
"""Ensure pineboo is initialized for testing."""
init_testing()
def test_basic1(self) -> None:
"""Basic test 1."""
global USER_ID
USER_ID = "usu0"
application.PROJECT.set_session_function(self.user_id)
conn_manager = application.PROJECT.conn_manager
self.assertEqual(conn_manager.session_id(), USER_ID)
cursor_1 = pnsqlcursor.PNSqlCursor("flfiles") # noqa: F841
self.assertEqual(conn_manager.session_id(), "usu0")
self.assertEqual(conn_manager.active_pncursors(True), ["flfiles"])
def test_basic2(self) -> None:
"""Basic test 2."""
global USER_ID
USER_ID = "usu1"
conn_manager = application.PROJECT.conn_manager
self.assertEqual(conn_manager.session_id(), "usu1")
self.assertEqual(conn_manager.active_pncursors(True), [])
cursor_1 = pnsqlcursor.PNSqlCursor("flfiles") # noqa: F841
self.assertEqual(conn_manager.active_pncursors(True), ["flfiles"])
self.assertTrue("flfiles" in conn_manager.active_pncursors(True, True))
USER_ID = "usu2"
self.assertEqual(conn_manager.session_id(), "usu2")
self.assertEqual(conn_manager.active_pncursors(True), [])
cursor_2 = pnsqlcursor.PNSqlCursor("flfiles") # noqa: F841
cursor_3 = pnsqlcursor.PNSqlCursor("flareas") # noqa: F841
self.assertEqual(conn_manager.active_pncursors(True), ["flfiles", "flareas"])
self.assertTrue(len(conn_manager.active_pncursors(True, True)) > 2)
USER_ID = "usu1"
self.assertEqual(conn_manager.active_pncursors(True), ["flfiles"])
self.assertTrue(len(conn_manager.active_pncursors(True, True)) > 2)
def test_basic3(self) -> None:
"""Basic test 3."""
from PyQt6 import QtWidgets # type: ignore[import]
global USER_ID
USER_ID = "test3"
conn_manager = application.PROJECT.conn_manager
self.assertEqual(application.PROJECT.conn_manager.session_id(), "test3")
cur = pnsqlcursor.PNSqlCursor("flfiles")
cur.select()
time.sleep(1)
pnsqlcursor.CONNECTION_CURSORS[application.PROJECT.conn_manager.session_id()].pop()
while "flfiles" in conn_manager.active_pncursors(True):
QtWidgets.QApplication.processEvents()
while "flfiles" in conn_manager.active_pncursors(True):
QtWidgets.QApplication.processEvents()
cur = pnsqlcursor.PNSqlCursor("flfiles")
cur.select()
time.sleep(1)
pnsqlcursor.CONNECTION_CURSORS[application.PROJECT.conn_manager.session_id()].pop()
conn_manager.set_max_connections_limit(100)
conn_manager.set_max_idle_connections(50)
self.assertEqual(conn_manager.limit_connections, 100)
self.assertEqual(conn_manager.connections_time_out, 50)
while "flfiles" in conn_manager.active_pncursors(True):
QtWidgets.QApplication.processEvents()
def threaded_function(self) -> None:
"""Threaded function."""
try:
cur = pnsqlcursor.PNSqlCursor("flfiles")
cur.select()
except Exception:
time.sleep(1)
pnsqlcursor.CONNECTION_CURSORS[application.PROJECT.conn_manager.session_id()].pop()
def user_id(self) -> str:
"""Return user id."""
global USER_ID
return USER_ID
@classmethod
def tearDown(cls) -> None:
"""Ensure test clear all data."""
finish_testing()
if __name__ == "__main__":
unittest.main()
| 34.282051 | 95 | 0.671154 | 3.015625 |
bf8115a5ef12e4187e3bf26da305d9413026cf35
| 7,056 |
lua
|
Lua
|
rc-car/src/init.lua
|
henrythasler/nodemcu
|
e820be27e8f8feaa30ec0f9a26abcc3d740b61c2
|
[
"MIT"
] | 1 |
2020-04-17T06:34:32.000Z
|
2020-04-17T06:34:32.000Z
|
rc-car/src/init.lua
|
henrythasler/nodemcu
|
e820be27e8f8feaa30ec0f9a26abcc3d740b61c2
|
[
"MIT"
] | 1 |
2022-03-02T02:49:58.000Z
|
2022-03-02T02:49:58.000Z
|
rc-car/src/init.lua
|
henrythasler/nodemcu
|
e820be27e8f8feaa30ec0f9a26abcc3d740b61c2
|
[
"MIT"
] | 1 |
2018-09-28T17:20:11.000Z
|
2018-09-28T17:20:11.000Z
|
-- Compile additional modules
local files = {
"webserver-request.lua",
"webserver-header.lua",
"webserver-websocket.lua"
}
for i, f in ipairs(files) do
if file.exists(f) then
print("Compiling:", f)
node.compile(f)
file.remove(f)
collectgarbage()
end
end
files = nil
collectgarbage()
local function compile_lua(filename)
if file.exists(filename .. ".lua") then
node.compile(filename .. ".lua")
file.remove(filename .. ".lua")
collectgarbage()
return true
else
return false
end
end
local function run_lc(filename)
if file.exists(filename .. ".lc") then
dofile(filename .. ".lc")
return true
else
print("[init] - " .. filename .. ".lc not found.")
return false
end
end
local function start_runnables()
for _, item in ipairs(cfg.runnables.active) do
if pcall(run_lc, item) then
print("[init] - started " .. item)
else
print("![init] - Error running " .. item)
end
end
end
local function wifi_monitor()
local connected = false
local retry = 0
tmr.alarm(
0,
2000,
tmr.ALARM_AUTO,
function()
if wifi.sta.getip() == nil then
print("[init] - Waiting for WiFi connection to '" .. cfg.wifi.ssid .. "'")
retry = retry + 1
gpio.write(0, 1 - gpio.read(0))
if (retry > 10) then
node.restart()
end
if connected == true then
connected = false
node.restart()
end
else
print(string.format("[init] - %u Bytes free", node.heap()))
gpio.write(0, 1)
tmr.alarm(
3,
50,
tmr.ALARM_SINGLE,
function()
gpio.write(0, 0)
end
)
if connected ~= true then
connected = true
gpio.write(0, 0)
print("[init] - \tWiFi - connected")
print("[init] - \tIP: " .. wifi.sta.getip())
print("[init] - \tHostname: " .. wifi.sta.gethostname())
print("[init] - \tChannel: " .. wifi.getchannel())
print("[init] - \tSignal Strength: " .. wifi.sta.getrssi())
mdns.register(
cfg.hostname,
{description = "CDC rocks", service = "http", port = 80, location = "Earth"}
)
print("[init] - \tmDNS: " .. cfg.hostname .. ".local")
start_runnables()
end
if cfg.ntp.server and (cfg.ntp.synced == false) and not cfg.ntp.inProgress then
cfg.ntp.inProgress = true
sntp.sync(
cfg.ntp.server,
function(sec, usec, server)
local tm = rtctime.epoch2cal(rtctime.get())
local date =
string.format(
"%04d-%02d-%02d %02d:%02d:%02d",
tm["year"],
tm["mon"],
tm["day"],
tm["hour"],
tm["min"],
tm["sec"]
)
print(string.format("[init] - ntp sync with %s ok: %s UTC/GMT", server, date))
cfg.ntp.synced = true
cfg.ntp.inProgress = false
end,
function(err)
print("[init] - ntp sync failed")
cfg.ntp.synced = false
cfg.ntp.inProgress = false
end
)
end
end
end
)
end
-- ### main part
-- compile config file
compile_lua("config")
-- compile all user-scripts
local l = file.list("^usr/.+(%.lua)$")
for k, v in pairs(l) do
if file.exists(k) then
print("Compiling:", k)
node.compile(k)
--file.remove(k) -- do not remove file, might want to download into browser
collectgarbage()
end
end
-- load config from file
if run_lc("config") == false then
print("[init] - using default cfg")
cfg = {}
-- WIFI
cfg.wifi = {}
cfg.wifi.mode = wifi.SOFTAP
cfg.wifi.ssid = "CDC"
cfg.wifi.pwd = "00000000"
cfg.wifi.auth = wifi.OPEN
cfg.wifi.channel = 6
cfg.wifi.hidden = false
cfg.wifi.max = 4
cfg.wifi.save = false
cfg.net = {}
cfg.net.ip = "192.168.1.1"
cfg.net.netmask = "255.255.255.0"
cfg.net.gateway = "192.168.1.1"
-- nodemcu
-- hostname: name of this nodemcu
cfg.hostname = "car"
-- Runnables
cfg.runnables = {}
cfg.runnables.sources = {"flashdaemon", "webserver", "MPU6050"}
-- NTP
-- cfg.ntp.server: IP address of NTP provider. Set to 'false' to disable sync
cfg.ntp = {}
cfg.ntp.server = false
end
cfg.runnables.active = {}
cfg.ntp.synced = false
-- build runnables
for _, item in ipairs(cfg.runnables.sources) do
print("[init] - preparing " .. item)
local status, error = pcall(compile_lua, item)
if status == true then
table.insert(cfg.runnables.active, item)
else
print("[init] - Error compiling " .. item .. ": " .. error)
end
end
-- setup general configuration
wifi.sta.sethostname(cfg.hostname)
-- setup GPIO
gpio.mode(0, gpio.OUTPUT) -- LED, if mounted
-- Set-up Wifi AP
wifi.setmode(cfg.wifi.mode)
if cfg.wifi.mode == wifi.SOFTAP then
print("[init] - setting up SoftAP...")
wifi.ap.config(cfg.wifi)
wifi.ap.setip(cfg.net)
mdns.register(cfg.hostname, {description = "CDC rocks", service = "http", port = 80, location = "Earth"})
wifi.eventmon.register(
wifi.eventmon.AP_STACONNECTED,
function(T)
print("[init] - connected (" .. T.MAC .. ")")
end
)
wifi.eventmon.register(
wifi.eventmon.AP_STADISCONNECTED,
function(T)
print("[init] - disconnected (" .. T.MAC .. ")")
end
)
tmr.alarm(
0,
2000,
tmr.ALARM_AUTO,
function()
print(string.format("[init] - %u Bytes free", node.heap()))
gpio.write(0, 1)
tmr.alarm(
3,
50,
tmr.ALARM_SINGLE,
function()
gpio.write(0, 0)
end
)
end
)
start_runnables()
elseif cfg.wifi.mode == wifi.STATION then
print("[init] - Connecting to AP...")
wifi.sta.config(cfg.wifi)
wifi.sta.connect()
wifi_monitor()
end
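-- --- Editor's note (illustrative, not part of the original file) ---
-- A hypothetical config.lua matching the fallback cfg built above; every
-- value here is a placeholder.
-- cfg = {}
-- cfg.hostname = "car"
-- cfg.wifi = {mode = wifi.STATION, ssid = "MyNetwork", pwd = "secret", save = false}
-- cfg.net = {ip = "192.168.1.50", netmask = "255.255.255.0", gateway = "192.168.1.1"}
-- cfg.runnables = {sources = {"flashdaemon", "webserver", "MPU6050"}}
-- cfg.ntp = {server = "pool.ntp.org"}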
| 28.682927 | 109 | 0.474773 | 3.109375 |
43d53313944848a45d822612644192be0677615f
| 1,657 |
ts
|
TypeScript
|
packages/reflow-core/src/execute.ts
|
Yamsafer/reflow
|
32fdb54c4cbd4ef63683d2dbdff0ea83d5486524
|
[
"MIT"
] | 2 |
2019-05-15T21:15:48.000Z
|
2020-06-18T12:11:41.000Z
|
packages/reflow-core/src/execute.ts
|
Yamsafer/reflow
|
32fdb54c4cbd4ef63683d2dbdff0ea83d5486524
|
[
"MIT"
] | null | null | null |
packages/reflow-core/src/execute.ts
|
Yamsafer/reflow
|
32fdb54c4cbd4ef63683d2dbdff0ea83d5486524
|
[
"MIT"
] | null | null | null |
import * as path from 'path'
import Duration from 'duration';
import {threadPool} from './thread-pool'
import {analyzeCombination} from './analyze'
const executeMatrix = function(matrix, config) {
const {
mocha: mochaConfig,
jobDetails,
flowDetails,
connection,
capability,
customActions,
} = config;
const startTime = jobDetails.startTime;
const numberOfThreads = jobDetails.numberOfThreads;
const numberOfFlows = jobDetails.numberOfFlows;
const pool = threadPool({
workerPath: path.join(__dirname, './worker.js'),
threadsToSpawn: numberOfThreads,
});
const sendToPool = combination => pool.send({
DAG: analyzeCombination(combination),
combination,
mochaConfig,
jobDetails,
flowDetails,
connection,
capability,
customActions,
});
matrix.forEach(sendToPool);
let failures = 0;
let errored = false;
let done = false;
pool
.on('done', function(job, jobFailures) {
failures += jobFailures;
})
.on('error', function(job, error) {
errored = true;
console.log('Job errored:', error);
throw error;
})
.on('finished', function() {
console.log('Everything done, shutting down the thread pool.');
const duration = new Duration(startTime, new Date())
console.log(`Finished All ${numberOfFlows} Flows in ${duration.toString(1, 1)}`);
console.log(`${failures} total errors.`);
pool.killAll();
done = true;
});
process.on('exit', function() {
if(!done) console.log('Exited before done')
process.exit(+!!(errored || failures));
})
return pool
}
export default executeMatrix
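// --- Editor's usage sketch (hypothetical, not part of the original file) ---
// The config shape is inferred from the destructuring above; jobDetails must
// provide startTime, numberOfThreads and numberOfFlows. All values are placeholders.
// executeMatrix(matrix, {
//   mocha: { timeout: 30000 },
//   jobDetails: { startTime: new Date(), numberOfThreads: 4, numberOfFlows: matrix.length },
//   flowDetails: {},
//   connection: {},
//   capability: {},
//   customActions: {},
// })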
| 24.731343 | 87 | 0.654798 | 3.265625 |
301090c420513009b6fc25c447bc503c0276b665
| 1,431 |
sql
|
SQL
|
SQL/DB_Initial.sql
|
i-chi-li/micronaut-kotlin-blanco-sample
|
0fbff11f2495bac941eecbee4f4577d7ae9c2f98
|
[
"Apache-2.0"
] | null | null | null |
SQL/DB_Initial.sql
|
i-chi-li/micronaut-kotlin-blanco-sample
|
0fbff11f2495bac941eecbee4f4577d7ae9c2f98
|
[
"Apache-2.0"
] | null | null | null |
SQL/DB_Initial.sql
|
i-chi-li/micronaut-kotlin-blanco-sample
|
0fbff11f2495bac941eecbee4f4577d7ae9c2f98
|
[
"Apache-2.0"
] | null | null | null |
CREATE DATABASE IF NOT EXISTS `sample00`
DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
USE `sample00`;
CREATE TABLE IF NOT EXISTS `users`
(
`user_id` INTEGER UNSIGNED NOT NULL,
`user_name` VARCHAR(50) NOT NULL,
`password` VARCHAR(50) NOT NULL,
`email` VARCHAR(200),
`created_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`user_id`)
);
CREATE INDEX idx_users_user_name ON `users` (user_name);
CREATE INDEX idx_users_password ON `users` (password);
CREATE INDEX idx_users_email ON `users` (email);
CREATE INDEX idx_users_created_at ON `users` (created_at);
CREATE INDEX idx_users_updated_at ON `users` (updated_at);
INSERT INTO `users` (user_id, user_name, password, email)
SELECT 0, '桃太 郎', 'pass001', '[email protected]'
FROM dual
WHERE NOT EXISTS(SELECT * FROM users WHERE user_id = 0);
INSERT INTO `users` (user_id, user_name, password, email)
SELECT 1, 'い ぬ', 'pass002', NULL
FROM dual
WHERE NOT EXISTS(SELECT * FROM users WHERE user_id = 1);
INSERT INTO `users` (user_id, user_name, password, email)
SELECT 2, 'き じ', 'pass003', NULL
FROM dual
WHERE NOT EXISTS(SELECT * FROM users WHERE user_id = 2);
INSERT INTO `users` (user_id, user_name, password, email)
SELECT 3, 'さ る', 'pass004', NULL
FROM dual
WHERE NOT EXISTS(SELECT * FROM users WHERE user_id = 3);
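-- --- Editor's note (illustrative, not part of the original script) ---
-- The INSERT ... SELECT ... WHERE NOT EXISTS pattern above makes the seed
-- idempotent; a quick check after running the script:
-- SELECT user_id, user_name, email FROM sample00.users ORDER BY user_id;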
| 34.071429 | 89 | 0.730957 | 3.140625 |
79bbd0e88d20bff77a51d24a9a465a14edf40e4a
| 4,885 |
php
|
PHP
|
app/controllers/NewsController.php
|
idmaximum/majProject
|
53897f8a0514578f095bfb42388ba52b2225c78f
|
[
"MIT"
] | null | null | null |
app/controllers/NewsController.php
|
idmaximum/majProject
|
53897f8a0514578f095bfb42388ba52b2225c78f
|
[
"MIT"
] | null | null | null |
app/controllers/NewsController.php
|
idmaximum/majProject
|
53897f8a0514578f095bfb42388ba52b2225c78f
|
[
"MIT"
] | null | null | null |
<?php
class NewsController extends BaseController {
/*
|--------------------------------------------------------------------------
| Default Home Controller
|--------------------------------------------------------------------------
|
| You may wish to use controllers instead of, or in addition to, Closure
| based routes. That's great! Here is an example controller method to
| get you started. To route to this controller, just add the route:
|
| Route::get('/', 'HomeController@showWelcome');
|
*/
public function newsList(){
function str_replace_text($word){
$strwordArr = array("#",":" ,"'","\'","-","%",":") ;
$strCensor = "" ;
foreach ($strwordArr as $value) {
$word = str_replace($value,$strCensor ,$word);
}
$strwordArr_2 = array("(",")","/"," ") ;
$strCensor_2 = "-" ;
foreach ($strwordArr_2 as $value_2) {
$word = str_replace($value_2,$strCensor_2 ,$word);
}
$word = str_replace("_","" ,$word);
return ( $word) ;
}#end fn
$Selectnews = DB::table('movie_news')
->select('news_ID', 'news_title_en', 'news_abstract_en', 'news_imageThumb', 'news_datetime')
->where('news_publish' , '1')
->orderBy('orderBy', 'asc')
->paginate(6);
return View::make('frontend.event_activity')-> with('rowNews', $Selectnews);
}
public function newsListTH(){
function str_replace_text($word){
$strwordArr = array("#",":" ,"'","\'","-","%",":") ;
$strCensor = "" ;
foreach ($strwordArr as $value) {
$word = str_replace($value,$strCensor ,$word);
}
$strwordArr_2 = array("(",")","/"," ") ;
$strCensor_2 = "-" ;
foreach ($strwordArr_2 as $value_2) {
$word = str_replace($value_2,$strCensor_2 ,$word);
}
$word = str_replace("_","" ,$word);
return ( $word) ;
}#end fn
$Selectnews = DB::table('movie_news')
->select('news_ID', 'news_title_th', 'news_abstract_th', 'news_imageThumb', 'news_datetime')
->where('news_publish' , '1')
->orderBy('orderBy', 'asc')
->paginate(6);
return View::make('frontendTH.event_activity')-> with('rowNews', $Selectnews);
}
public function newsDetail($id = null){
function str_replace_text($word){
$strwordArr = array("#",":" ,"'","\'","-","%",":") ;
$strCensor = "" ;
foreach ($strwordArr as $value) {
$word = str_replace($value,$strCensor ,$word);
}
$strwordArr_2 = array("(",")","/"," ") ;
$strCensor_2 = "-" ;
foreach ($strwordArr_2 as $value_2) {
$word = str_replace($value_2,$strCensor_2 ,$word);
}
$word = str_replace("_","" ,$word);
return ( $word) ;
} #end fn str
$resultNews = DB::select("select news_ID, news_title_en, news_youtube,
news_detail_th, news_detail_en, news_detail_cn,
news_imageThumb,news_publish, news_datetime,
news_abstract_en
FROM movie_news
WHERE news_ID = '$id'");
$resultNewsGallery = DB::select('select *
FROM movie_image_gallery
WHERE news_ID ='.$id);
$resultNewsGalleryCount = DB::select('select count(*) as countGallery
FROM movie_image_gallery
WHERE news_ID ='.$id);
return View::make('frontend.event_activitydetail')-> with('rowNews', $resultNews)
-> with('rowNewsGallery', $resultNewsGallery)
-> with('rowCountGallery', $resultNewsGalleryCount);
}
public function newsDetailTH($id = null){
function str_replace_text($word){
$strwordArr = array("#",":" ,"'","\'","-","%",":") ;
$strCensor = "" ;
foreach ($strwordArr as $value) {
$word = str_replace($value,$strCensor ,$word);
}
$strwordArr_2 = array("(",")","/"," ") ;
$strCensor_2 = "-" ;
foreach ($strwordArr_2 as $value_2) {
$word = str_replace($value_2,$strCensor_2 ,$word);
}
$word = str_replace("_","" ,$word);
return ( $word) ;
} #end fn str
$resultNews = DB::select("select news_ID, news_title_th, news_youtube,
news_detail_th, news_detail_en, news_detail_cn,
news_imageThumb,news_publish, news_datetime,
news_abstract_en
FROM movie_news
WHERE news_ID = '$id'");
$resultNewsGallery = DB::select('select *
FROM movie_image_gallery
WHERE news_ID ='.$id);
$resultNewsGalleryCount = DB::select('select count(*) as countGallery
FROM movie_image_gallery
WHERE news_ID ='.$id);
return View::make('frontendTH.event_activitydetail')-> with('rowNews', $resultNews)
-> with('rowNewsGallery', $resultNewsGallery)
-> with('rowCountGallery', $resultNewsGalleryCount);
}#end fn
}
| 34.160839 | 98 | 0.550051 | 3.15625 |
ff4f3416b8a246203f7b0ae5f3a2505e4a5b91c1
| 21,907 |
py
|
Python
|
engine.py
|
craigvear/monks_mood
|
14864669830aca27393eb5e88d3d8cfb6b593846
|
[
"MIT"
] | null | null | null |
engine.py
|
craigvear/monks_mood
|
14864669830aca27393eb5e88d3d8cfb6b593846
|
[
"MIT"
] | null | null | null |
engine.py
|
craigvear/monks_mood
|
14864669830aca27393eb5e88d3d8cfb6b593846
|
[
"MIT"
] | null | null | null |
"""main server script
will sit onboard host and operate as Nebula --- its dynamic soul"""
# --------------------------------------------------
#
# Embodied AI Engine Prototype v0.10
# 2021/01/25
#
# © Craig Vear 2020
# [email protected]
#
# Dedicated to Fabrizio Poltronieri
#
# --------------------------------------------------
from random import randrange
from time import time
from tensorflow.keras.models import load_model
import pyaudio
import numpy as np
import concurrent.futures
from random import random
from time import sleep
from pydub import AudioSegment
from pydub.playback import play
# --------------------------------------------------
#
# instantiate an object for each neural net
#
# --------------------------------------------------
# v4 models were trained with 1st batch of Blue Haze datasets
class MoveRNN:
def __init__(self):
print('MoveRNN initialization')
self.move_rnn = load_model('models/EMR-v4_RNN_skeleton_data.nose.x.h5')
def predict(self, in_val):
# predictions and input with localval
self.pred = self.move_rnn.predict(in_val)
return self.pred
class AffectRNN:
def __init__(self):
print('AffectRNN initialization')
self.affect_rnn = load_model('models/EMR-v4_RNN_bitalino.h5')
def predict(self, in_val):
# predictions and input with localval
self.pred = self.affect_rnn.predict(in_val)
return self.pred
class MoveAffectCONV2:
def __init__(self):
print('MoveAffectCONV2 initialization')
self.move_affect_conv2 = load_model('models/EMR-v4_conv2D_move-affect.h5')
def predict(self, in_val):
# predictions and input with localval
self.pred = self.move_affect_conv2.predict(in_val)
return self.pred
class AffectMoveCONV2:
def __init__(self):
print('AffectMoveCONV2 initialization')
self.affect_move_conv2 = load_model('models/EMR-v4_conv2D_affect-move.h5')
def predict(self, in_val):
# predictions and input with localval
self.pred = self.affect_move_conv2.predict(in_val)
return self.pred
# --------------------------------------------------
#
# controls all thought-trains and affect responses
#
# --------------------------------------------------
class AiDataEngine():
def __init__(self, speed=1):
print('building engine server')
self.interrupt_bang = False
# self.running = False
# self.PORT = 8000
# self.IP_ADDR = "127.0.0.1"
self.global_speed = speed
self.rnd_stream = 0
# make a default dict for the engine
self.datadict = {'move_rnn': 0,
'affect_rnn': 0,
'move_affect_conv2': 0,
'affect_move_conv2': 0,
'master_output': 0,
'user_in': 0,
'rnd_poetry': 0,
'rhythm_rnn': 0,
'affect_net': 0,
'self_awareness': 0,
'affect_decision': 0,
'rhythm_rate': 0.1}
# name list for nets
self.netnames = ['move_rnn',
'affect_rnn',
'move_affect_conv2',
'affect_move_conv2',
'self_awareness', # Net name for self-awareness
'master_output'] # input for self-awareness
# names for affect listening
self.affectnames = ['user_in',
'rnd_poetry',
'affect_net',
'self_awareness']
self.rhythm_rate = 0.1
self.affect_listen = 0
# fill with random values
self.dict_fill()
print(self.datadict)
# instantiate nets as objects and make models
self.move_net = MoveRNN()
self.affect_net = AffectRNN()
self.move_affect_net = MoveAffectCONV2()
self.affect_move_net = AffectMoveCONV2()
self.affect_perception = MoveAffectCONV2()
# logging on/off switches
self.net_logging = False
self.master_logging = False
self.streaming_logging = False
self.affect_logging = False
# --------------------------------------------------
#
# prediction and rnd num gen zone
#
# --------------------------------------------------
# makes a prediction for a given net and defined input var
def make_data(self):
while True:
# calc rhythmic intensity based on self-awareness factor & global speed
intensity = self.datadict.get('self_awareness')
self.rhythm_rate = (self.rhythm_rate * intensity) * self.global_speed
self.datadict['rhythm_rate'] = self.rhythm_rate
# get input vars from dict (NB not always self)
in_val1 = self.get_in_val(0) # move RNN as input
in_val2 = self.get_in_val(1) # affect RNN as input
in_val3 = self.get_in_val(2) # move - affect as input
in_val4 = self.get_in_val(1) # affect RNN as input
# send in vals to net object for prediction
pred1 = self.move_net.predict(in_val1)
pred2 = self.affect_net.predict(in_val2)
pred3 = self.move_affect_net.predict(in_val3)
pred4 = self.affect_move_net.predict(in_val4)
# special case for self awareness stream
self_aware_input = self.get_in_val(5) # main movement as input
self_aware_pred = self.affect_perception.predict(self_aware_input)
if self.net_logging:
print(f" 'move_rnn' in: {in_val1} predicted {pred1}")
print(f" 'affect_rnn' in: {in_val2} predicted {pred2}")
print(f" move_affect_conv2' in: {in_val3} predicted {pred3}")
print(f" 'affect_move_conv2' in: {in_val4} predicted {pred4}")
print(f" 'self_awareness' in: {self_aware_input} predicted {self_aware_pred}")
# put predictions back into the dicts and master
self.put_pred(0, pred1)
self.put_pred(1, pred2)
self.put_pred(2, pred3)
self.put_pred(3, pred4)
self.put_pred(4, self_aware_pred)
# outputs a stream of random poetry
rnd_poetry = random()
self.datadict['rnd_poetry'] = random()
if self.streaming_logging:
print(f'random poetry = {rnd_poetry}')
sleep(self.rhythm_rate)
# function to get input value for net prediction from dictionary
def get_in_val(self, which_dict):
# get the current value and reshape ready for input for prediction
input_val = self.datadict.get(self.netnames[which_dict])
input_val = np.reshape(input_val, (1, 1, 1))
return input_val
# function to put prediction value from net into dictionary
def put_pred(self, which_dict, pred):
        # randomly chooses one of the 4 predicted outputs
out_pred_val = pred[0][randrange(4)]
if self.master_logging:
print(f"out pred val == {out_pred_val}, master move output == {self.datadict['master_output']}")
# save to data dict and master move out ONLY 1st data
self.datadict[self.netnames[which_dict]] = out_pred_val
self.datadict['master_output'] = out_pred_val
# fills the dictionary with rnd values for each key of data dictionary
def dict_fill(self):
for key in self.datadict.keys():
rnd = random()
self.datadict[key] = rnd
# --------------------------------------------------
#
# affect and streaming methods
#
# --------------------------------------------------
# define which feed to listen to, and duration
# and a course of affect response
def affect(self):
# daddy cycle = is the master running on?
while True:
if self.affect_logging:
print('\t\t\t\t\t\t\t\t=========HIYA - DADDY cycle===========')
# flag for breaking on big affect signal
self.interrupt_bang = True
# calc master cycle before a change
master_cycle = randrange(6, 26) * self.global_speed
loop_dur = time() + master_cycle
if self.affect_logging:
print(f" interrupt_listener: started! sleeping now for {loop_dur}...")
# refill the dicts?????
self.dict_fill()
# child cycle - waiting for interrupt from master clock
while time() < loop_dur:
if self.affect_logging:
print('\t\t\t\t\t\t\t\t=========Hello - child cycle 1 ===========')
# if a major break out then go to Daddy cycle
if not self.interrupt_bang:
break
# randomly pick an input stream for this cycle
rnd = randrange(4)
self.rnd_stream = self.affectnames[rnd]
self.datadict['affect_decision'] = rnd
print(self.rnd_stream)
if self.affect_logging:
print(self.rnd_stream)
# hold this stream for 1-4 secs, unless interrupt bang
end_time = time() + (randrange(1000, 4000) / 1000)
if self.affect_logging:
print('end time = ', end_time)
# baby cycle 2 - own time loops
while time() < end_time:
if self.affect_logging:
print('\t\t\t\t\t\t\t\t=========Hello - baby cycle 2 ===========')
# go get the current value from dict
affect_listen = self.datadict[self.rnd_stream]
if self.affect_logging:
print('current value =', affect_listen)
# make the master output the current value of the stream
self.datadict['master_output'] = affect_listen
if self.master_logging:
print(f'\t\t ============== master move output = {affect_listen}')
# calc affect on behaviour
# if input stream is LOUD then smash a random fill and break out to Daddy cycle...
if affect_listen > 0.50:
if self.affect_logging:
print('interrupt > HIGH !!!!!!!!!')
# A - refill dict with random
self.dict_fill()
# B - jumps out of this loop into daddy
self.interrupt_bang = False
if self.affect_logging:
print('interrupt bang = ', self.interrupt_bang)
# C break out of this loop, and next (cos of flag)
break
# if middle loud fill dict with random, all processes norm
elif 0.20 < affect_listen < 0.49:
if self.affect_logging:
print('interrupt MIDDLE -----------')
print('interrupt bang = ', self.interrupt_bang)
# refill dict with random
self.dict_fill()
elif affect_listen <= 0.20:
if self.affect_logging:
print('interrupt LOW_______________')
print('interrupt bang = ', self.interrupt_bang)
# and wait for a cycle
sleep(self.rhythm_rate)
def parse_got_dict(self, got_dict):
self.datadict['user_in'] = got_dict['mic_level']
# user change the overall speed of the engine
self.global_speed = got_dict['speed']
# user change tempo of outputs and parsing
self.rhythm_rate = got_dict['tempo']
# # stop start methods
# def go(self):
# # self.running = True
# trio.run(self.flywheel)
# print('I got here daddy')
def quit(self):
self.running = False
"""main client script
controls microphone stream and organise all audio responses"""
class Client:
def __init__(self, library):
self.running = True
self.connected = False
self.logging = False
# is the robot connected
self.robot_connected = True
self.direction = 1
if self.robot_connected:
# import robot scripts
from arm.arm import Arm
from robot.rerobot import Robot
# instantiate arm comms
self.arm_arm = Arm()
# self.robot_robot.reset_arm()
# prepare for movement
            # LEDs ready for drawing
self.arm_arm.led_blue()
# get arm into draw mode
self.arm_arm.draw_mode_status = True
self.arm_arm.first_draw_move = True
self.arm_arm.pen_drawing_status = False
# goto position
self.arm_arm.arm_reach_out()
# instantiate robot comms
self.robot_robot = Robot()
# move gripper arm up
for n in range(12):
self.robot_robot.gripper_up()
if library == 'jazz':
self.audio_file_sax = AudioSegment.from_mp3('assets/alfie.mp3')
self.audio_file_bass = AudioSegment.from_mp3('assets/bass.mp3') + 4
elif library == 'pop':
self.audio_file_sax = AudioSegment.from_wav('assets/vocals.wav')
self.audio_file_bass = AudioSegment.from_wav('assets/accompaniment.wav')
# robot instrument vars
# globs for sax
self.pan_law_sax = -0.5
self.audio_file_len_ms_sax = self.audio_file_sax.duration_seconds * 1000
# globs for bass
self.pan_law_bass = 0
self.audio_file_len_ms_bass = self.audio_file_bass.duration_seconds * 1000
# self.HOST = '127.0.0.1' # Client IP (this)
# self.PORT = 8000
# Port to listen on (non-privileged ports are > 1023)
self.CHUNK = 2 ** 11
self.RATE = 44100
self.p = pyaudio.PyAudio()
self.stream = self.p.open(format=pyaudio.paInt16,
channels=1,
rate=self.RATE,
input=True,
frames_per_buffer=self.CHUNK)
# build send data dict
self.send_data_dict = {'mic_level': 0,
'speed': 1,
'tempo': 0.1
}
# init got dict
self.got_dict = {}
# instantiate the server
self.engine = AiDataEngine()
# # set the ball rolling
# self.main()
def snd_listen(self):
print("mic listener: started!")
while True:
data = np.frombuffer(self.stream.read(self.CHUNK,exception_on_overflow = False),
dtype=np.int16)
peak = np.average(np.abs(data)) * 2
if peak > 2000:
bars = "#" * int(50 * peak / 2 ** 16)
print("%05d %s" % (peak, bars))
self.send_data_dict['mic_level'] = peak / 30000
def terminate(self):
self.stream.stop_stream()
self.stream.close()
self.p.terminate()
def data_exchange(self):
print("data exchange: started!")
while True:
# send self.send_data_dict
self.engine.parse_got_dict(self.send_data_dict)
# get self.datadict from engine
self.got_dict = self.engine.datadict
# sync with engine & stop freewheeling
sleep_dur = self.got_dict['rhythm_rate']
# print('data exchange')
sleep(sleep_dur)
    def engine(self):
        # set the engine off
        # NOTE: unused by main(); AiDataEngine.go() is commented out above, so this
        # shadowed helper would raise AttributeError if it were ever called.
        self.engine.go()
def main(self):
# snd_listen and client need dependent threads.
# All other IO is ok as a single Trio thread inside self.client
tasks = [self.engine.make_data,
self.engine.affect,
self.snd_listen,
self.data_exchange,
self.robot_sax,
self.robot_bass]
with concurrent.futures.ThreadPoolExecutor() as executor:
futures = {executor.submit(task): task for task in tasks}
def robot_sax(self):
# make a serial port connection here
print('im here SAX - sleeping for 3')
sleep(3)
# loop here
# while self.running:
# print('im here2')
# while not self.improv_go:
# print('im here3')
# sleep(1)
# print('sleeping robot')
# then start improvisers
while True:
print('im here4')
# grab raw data from engine stream
raw_data_from_dict = self.got_dict['master_output']
rhythm_rate = self.got_dict['rhythm_rate']
print('sax', raw_data_from_dict, rhythm_rate)
# add variability to the individual instrument
rnd_dur_delta = random()
rhythm_rate *= rnd_dur_delta * 8
print('sax', raw_data_from_dict, rhythm_rate)
# make a sound & move bot
self.make_sound('sax', raw_data_from_dict, rhythm_rate)
print('making a new one')
def robot_bass(self):
# make a serial port connection here
print('im here Bass - sleeping for 3')
sleep(3)
# loop here
# while self.running:
# print('im here2')
# while not self.improv_go:
# print('im here3')
# sleep(1)
# print('sleeping robot')
# then start improvisers
while True:
print('im here4')
# grab raw data from engine stream
raw_data_from_dict = self.got_dict['master_output']
# trying different part of the dict
raw_data_from_dict = self.got_dict['move_rnn']
rhythm_rate = self.got_dict['rhythm_rate']
print('bass', raw_data_from_dict, rhythm_rate)
# add variability to the individual instrument
rnd_dur_delta = random() * 4
rhythm_rate *= rnd_dur_delta
print('bass', raw_data_from_dict, rhythm_rate)
# make a sound & move bot
self.make_sound('bass', raw_data_from_dict, rhythm_rate)
print('making a new one')
def make_sound(self, instrument, incoming_raw_data, rhythm_rate):
# # temp random num gen
# rnd = randrange(self.audio_dir_len)
# print(self.audio_dir[rnd])
print('making sound')
if instrument == 'sax':
audio_file = self.audio_file_sax
audio_file_len_ms = self.audio_file_len_ms_sax
pan_law = self.pan_law_sax
len_delta = random() * 1000
elif instrument == 'bass':
audio_file = self.audio_file_bass
audio_file_len_ms = self.audio_file_len_ms_bass
pan_law = self.pan_law_bass
len_delta = random() * 1000
# rescale incoming raw data
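# assuming incoming_raw_data lies in 0-1, map it linearly onto the file length in milliseconds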
audio_play_position = int(((incoming_raw_data - 0) / (1 - 0)) * (audio_file_len_ms - 0) + 0)
duration = rhythm_rate * len_delta
if duration < 0.1:
duration = 0.1
end_point = audio_play_position + duration
print(audio_play_position, end_point, duration)
# make a sound from incoming data
snippet = audio_file[audio_play_position: end_point]
print('snippet')
# pan snippet
pan_snippet = snippet.pan(pan_law)
print('pan')
# move bot before making sound
if self.robot_connected:
if instrument == 'sax':
self.move_robot(incoming_raw_data, duration)
# get the robot to move with
play(pan_snippet)
print('play')
# sleep(duration/ 1000)
print('finished a play')
def move_robot(self, incoming_data, duration):
# stop previous movements
# self.robot_robot.gripper_stop()
# self.robot_robot.paddle_stop()
# self.robot_robot.stop()
# which movement
if duration > 0.2:
# pick a random action: an arm joint (0-15, mapped to joints 1-4),
# the gripper (16), the paddle (17),
# or stepping forward/backward (18-21)
rnd_joint = randrange(22)
rnd_direction = randrange(2)
if rnd_direction == 1:
direction = -20
else:
direction = 20
rnd_speed = randrange(3, 15)
rnd_speed *= 10
# move an arm joint
if rnd_joint <= 15:
joint = (rnd_joint % 4) + 1
self.arm_arm.move_joint_relative_speed(joint, direction, rnd_speed)
# move the gripper
elif rnd_joint == 16:
if rnd_direction == 1:
self.robot_robot.gripper_up()
else:
self.robot_robot.gripper_down()
# or open/close the paddle
elif rnd_joint == 17:
if rnd_direction == 1:
self.robot_robot.paddle_open()
else:
self.robot_robot.paddle_close()
# or move the wheels
elif rnd_joint >= 18:
if rnd_direction == 1:
self.robot_robot.step_forward()
else:
self.robot_robot.step_backward()
if __name__ == '__main__':
library = 'jazz'
# library = 'pop'
cl = Client(library)
# set the ball rolling
cl.main()
| 34.229688 | 110 | 0.540147 | 3.078125 |
8330b464e582767016af8d4734e10a6cae427420
| 2,007 |
ps1
|
PowerShell
|
Functions/New-OpsGenieConnction.ps1
|
skjaerhus/PS-OpsGenie
|
08070abcd25e66b8f7e85d322bcc0e4f2a8a3b81
|
[
"MIT"
] | null | null | null |
Functions/New-OpsGenieConnction.ps1
|
skjaerhus/PS-OpsGenie
|
08070abcd25e66b8f7e85d322bcc0e4f2a8a3b81
|
[
"MIT"
] | null | null | null |
Functions/New-OpsGenieConnction.ps1
|
skjaerhus/PS-OpsGenie
|
08070abcd25e66b8f7e85d322bcc0e4f2a8a3b81
|
[
"MIT"
] | null | null | null |
<#
.SYNOPSIS
Connects to an OpsGenie HTTP Integration.
.DESCRIPTION
Connects to OpsGenie using the API Key from an HTTP integration.
.INPUTS
None.
.OUTPUTS
None, saves script variables with needed information for the module.
.EXAMPLE
PS> New-OpsGenieConnection -APIKey xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
Connect without proxy and using default base url for OpsGenie EU Datacenter.
.EXAMPLE
PS> $mycred = get-credential
PS> New-OpsGenieConnection -APIKey xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -ProxyCredential $mycred -ProxyUrl "http://myproxy:8080"
Connect using proxy information.
.LINK
https://github.com/skjaerhus/PS-OpsGenie
#>
Function New-OpsGenieConnection {
Param(
[Parameter(Mandatory=$true)][string]$APIKey = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
[string]$BaseUrl = "https://api.eu.opsgenie.com",
[pscredential]$ProxyCredential,
[string]$ProxyUrl
)
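# Force TLS 1.2 so HTTPS calls to the OpsGenie API do not fall back to older protocols on hosts with legacy .NET defaults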
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
if($ProxyUrl){
$Script:proxyurl = $ProxyUrl
}
elseif(!$proxyurl -and !$ProxyCredential){
# No proxy defined and no credential; assume no proxy, but still check the system proxy settings.
$dest = $BaseUrl
$proxytest = ([System.Net.WebRequest]::GetSystemWebproxy()).GetProxy($dest)
if ($proxytest.OriginalString -ne $BaseUrl){
#Proxy detected, set details and use it.
$Script:ProxyUrl = $proxytest
}
}
else{
$dest = $BaseUrl
$Script:proxyurl = ([System.Net.WebRequest]::GetSystemWebproxy()).GetProxy($dest)
$Script:proxyurl
}
$Script:headers = New-Object "System.Collections.Generic.Dictionary[[String],[String]]"
$Script:headers.Add("Authorization", 'GenieKey ' + $($APIKey))
$Script:BaseUrl = $BaseUrl
if($ProxyCredential){
$Script:ProxyCredentials = $ProxyCredential
Write-Host "Will Connect using credentials..."
}
Write-Host $Script:BaseUrl
}
| 27.493151 | 128 | 0.684106 | 3.25 |
05a30d0dbff653653fc618df07ed749747a331ce
| 1,610 |
py
|
Python
|
plugin/settings.py
|
andykingking/sublime-format
|
d1d9e2192729ffdecf9f09e54bdfc2c13890542f
|
[
"MIT"
] | null | null | null |
plugin/settings.py
|
andykingking/sublime-format
|
d1d9e2192729ffdecf9f09e54bdfc2c13890542f
|
[
"MIT"
] | null | null | null |
plugin/settings.py
|
andykingking/sublime-format
|
d1d9e2192729ffdecf9f09e54bdfc2c13890542f
|
[
"MIT"
] | null | null | null |
import sublime
PLUGIN_NAME = 'Format'
PLUGIN_SETTINGS = '{}.sublime-settings'.format(PLUGIN_NAME)
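# Thin wrappers around Sublime Text's settings API: Settings handles the plugin-wide settings file, FormatterSettings scopes reads and writes to a single formatter's entry.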
class Settings:
@staticmethod
def load():
return sublime.load_settings(PLUGIN_SETTINGS)
@staticmethod
def save():
sublime.save_settings(PLUGIN_SETTINGS)
@staticmethod
def on_change(callback):
Settings.load().add_on_change(PLUGIN_NAME, callback)
@staticmethod
def stop_listening_for_changes():
Settings.load().clear_on_change(PLUGIN_NAME)
@staticmethod
def formatter(name):
return Settings.load().get('{}_formatter'.format(name), default={})
@staticmethod
def paths():
return Settings.load().get('paths', default=[])
@staticmethod
def update_formatter(name, value):
Settings.load().set('{}_formatter'.format(name), value)
Settings.save()
class FormatterSettings:
def __init__(self, name):
self.__name = name
self.__settings = Settings.formatter(name)
def get(self, value, default=None):
return self.__settings.get(value, default)
def set(self, key, value):
self.__settings[key] = value
Settings.update_formatter(self.__name, self.__settings)
@property
def format_on_save(self):
return self.get('format_on_save', default=False)
@format_on_save.setter
def format_on_save(self, value):
return self.set('format_on_save', value)
@property
def sources(self):
return self.get('sources', default=[])
@property
def options(self):
return self.get('options', default=[])
| 24.769231 | 75 | 0.660248 | 3.15625 |
93f5b5af82fac3028f59f2895ef1febd03c0cbc9
| 3,884 |
cs
|
C#
|
Black_Rabbit_Pro/Assets/BlackRabbitPro/Scripts/Editor/BuildTools/BuildTools.cs
|
Fungus-Light/Black-Rabbit-Pro
|
27e1e5e4d439033be9e494eba99460afa3bf254a
|
[
"Apache-2.0"
] | 3 |
2021-03-22T13:45:29.000Z
|
2021-11-05T10:01:03.000Z
|
Black_Rabbit_Pro/Assets/BlackRabbitPro/Scripts/Editor/BuildTools/BuildTools.cs
|
Fungus-Light/Black-Rabbit-Pro
|
27e1e5e4d439033be9e494eba99460afa3bf254a
|
[
"Apache-2.0"
] | null | null | null |
Black_Rabbit_Pro/Assets/BlackRabbitPro/Scripts/Editor/BuildTools/BuildTools.cs
|
Fungus-Light/Black-Rabbit-Pro
|
27e1e5e4d439033be9e494eba99460afa3bf254a
|
[
"Apache-2.0"
] | null | null | null |
using System.Collections;
using System.Collections.Generic;
using System.IO;
using UnityEngine;
using UnityEditor;
using Puerts;
public class BuildTools
{
[MenuItem("Build/Clear Persist")]
public static void ClearPersist()
{
DirectoryInfo info = new DirectoryInfo(Application.persistentDataPath);
info.Delete(true);
}
[MenuItem("Build/Init Packs")]
public static void InitPacks()
{
JsEnv env = new JsEnv();
env.Eval("require(\"PacksManager\").InitPacks()");
env.Dispose();
}
[MenuItem("Build/Auto Configure Packs")]
public static void ConfigurePacks()
{
JsEnv env = new JsEnv();
env.Eval("require(\"PacksManager\").ConfigurePacks()");
env.Dispose();
}
[MenuItem("Build/Build All Packs")]
public static void BuildAllPacks()
{
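// Builds one AssetBundle per game folder under Assets/Games (skipping scenes listed in each config's exclude list) plus one bundle for the shared UI prefabs, then writes them all to StreamingAssets/GamePacks.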
ConfigurePacks();
List<AssetBundleBuild> builds = new List<AssetBundleBuild>();
DirectoryInfo packDir = new DirectoryInfo(Path.Combine(Application.dataPath, "Games"));
foreach (DirectoryInfo game in packDir.GetDirectories())
{
string configFile = Path.Combine("Assets", "Games", game.Name, "config.asset");
if (File.Exists(configFile))
{
PackConfig config = AssetDatabase.LoadAssetAtPath<PackConfig>(configFile);
if (config.entrance == null)
{
Debug.LogError(configFile + " must assign entrance!!!");
return;
}
else
{
AssetBundleBuild build = new AssetBundleBuild();
build.assetBundleName = game.Name.ToLower();
List<string> Assets = new List<string>();
foreach (FileInfo file in game.GetFiles())
{
if (file.Extension == ".unity")
{
bool exclude = false;
foreach (SceneAsset s in config.exclude)
{
if (file.Name.ToLower().IndexOf(s.name.ToLower()) != -1)
{
exclude = true;
}
}
if (exclude == false)
{
Assets.Add(Path.Combine("Assets/Games", game.Name, file.Name).Replace("\\", "/"));
}
}
}
build.assetNames = Assets.ToArray();
builds.Add(build);
}
}
else
{
Debug.LogError("Please Init Packs First!!!");
return;
}
}
DirectoryInfo uiDir = new DirectoryInfo(Path.Combine(Application.dataPath, "UIs"));
AssetBundleBuild uibuild = new AssetBundleBuild();
uibuild.assetBundleName = uiDir.Name.ToLower();
List<string> UIAssets = new List<string>();
foreach (FileInfo file in uiDir.GetFiles())
{
if (file.Extension == ".prefab")
{
UIAssets.Add(Path.Combine("Assets", uiDir.Name, file.Name).Replace("\\", "/"));
}
}
uibuild.assetNames = UIAssets.ToArray();
builds.Add(uibuild);
string output = Path.Combine(Application.dataPath, "StreamingAssets", "GamePacks").Replace("\\", "/");
if (Directory.Exists(output) == false)
{
Directory.CreateDirectory(output);
}
BuildPipeline.BuildAssetBundles(output, builds.ToArray(), BuildAssetBundleOptions.None, EditorUserBuildSettings.activeBuildTarget);
AssetDatabase.Refresh();
}
}
| 32.099174 | 139 | 0.501545 | 3.203125 |
c68dc3037164a58e8972fb3c805e51e5b0b5198b
| 3,066 |
py
|
Python
|
test/test_derivatives.py
|
gelijergensen/Constrained-Neural-Nets-Workbook
|
d71049939ace04b8baa672c7a8f632f5e01da24b
|
[
"MIT"
] | 3 |
2019-09-25T10:04:46.000Z
|
2020-03-03T10:04:15.000Z
|
test/test_derivatives.py
|
gelijergensen/Constrained-Neural-Nets-Workbook
|
d71049939ace04b8baa672c7a8f632f5e01da24b
|
[
"MIT"
] | null | null | null |
test/test_derivatives.py
|
gelijergensen/Constrained-Neural-Nets-Workbook
|
d71049939ace04b8baa672c7a8f632f5e01da24b
|
[
"MIT"
] | 2 |
2019-09-21T21:27:04.000Z
|
2021-02-12T19:42:47.000Z
|
import numpy as np
import torch
from src.derivatives import jacobian, trace
def test_jacobian():
batchsize = int(np.random.randint(1, 10))
# vector * matrix --
# Unbatched
rand_lengths = np.random.randint(1, 10, 2)
ins = torch.rand(tuple(list(rand_lengths[-1:])), requires_grad=True)
factor = torch.rand(tuple(list(rand_lengths)))
out = factor @ ins
jac = jacobian(out, ins)
assert torch.allclose(jac, factor)
# Batched
ins = ins.unsqueeze(0).expand(batchsize, *ins.size())
out = torch.einsum("ij,kj->ki", factor, ins)
assert torch.allclose(torch.squeeze(out), out)
bat_jac = jacobian(out, ins, batched=True)
for i in range(batchsize):
assert torch.allclose(bat_jac[i], factor)
# test nonlinear case
rand_lengths = np.random.randint(1, 10, 2)
ins = torch.rand(
batchsize, *tuple(list(rand_lengths[-1:])), requires_grad=True
)
out = torch.sin(3.15 * ins + 2.91)
bat_jac = jacobian(out, ins, batched=True)
expected = torch.diag_embed(3.15 * torch.cos(3.15 * ins + 2.91))
assert torch.allclose(bat_jac, expected)
# matrix * matrix --
# Unbatched
rand_lengths = np.random.randint(1, 10, 3)
ins = torch.rand(tuple(list(rand_lengths[-2:])), requires_grad=True)
factor = torch.rand(tuple(list(rand_lengths[:-1])))
out = factor @ ins
jac = jacobian(out, ins)
ans = jac.new_zeros(jac.size())
for i in range(jac.size()[-1]):
ans[:, i, :, i] = factor
assert torch.allclose(jac, ans)
# Batched
ins = ins.unsqueeze(0).expand(batchsize, *ins.size())
out = torch.einsum("ij,kjl->kil", factor, ins)
bat_jac = jacobian(out, ins, batched=True)
ans = jac.new_zeros(bat_jac.size())
for b in range(batchsize):
for i in range(bat_jac.size()[-1]):
ans[b, :, i, :, i] = factor
assert torch.allclose(bat_jac, ans)
# Confirm agreement in complex case --
# Unbatched
rand_lengths = np.random.randint(1, 7, 5)
ins = torch.rand(tuple(list(rand_lengths)), requires_grad=True)
out = torch.relu(ins)
jac = jacobian(out, ins)
# Check that lists work correctly
out = torch.relu(ins)
list_jac = jacobian(out, [ins, ins])
assert all(torch.allclose(jac, list_jac[i]) for i in range(len(list_jac)))
# Batched
ins = ins.view(-1, *ins.size())
out = torch.relu(ins)
bat_jac = jacobian(out, ins, batched=True)
assert torch.allclose(jac, bat_jac[0])
def test_trace():
# Unbatched
rand_length = int(np.random.randint(1, 10, 1))
ins = torch.rand((rand_length, rand_length))
trc = trace(ins)
assert torch.allclose(trc, torch.trace(ins))
# Check that lists work correctly
list_trc = trace([ins, ins])
assert all(torch.allclose(list_trc[i], trc) for i in range(len(list_trc)))
# Batched
batchsize = int(np.random.randint(1, 10))
ins = ins.unsqueeze(0).expand(batchsize, *ins.size())
ans = trace(ins)
for b in range(batchsize):
assert torch.allclose(ans[b], trc)
| 29.2 | 78 | 0.634703 | 3.421875 |
8865c79a8dfcd8d324b605c12e0cde8181fc3629
| 1,101 |
psm1
|
PowerShell
|
lib/webserver/New-IcingaTCPSocket.psm1
|
moreamazingnick/icinga-powershell-framework
|
211d9d3a1d7e1672dbeafc00da2ea0860080d173
|
[
"MIT"
] | 45 |
2019-10-31T16:51:08.000Z
|
2022-01-28T13:17:14.000Z
|
lib/webserver/New-IcingaTCPSocket.psm1
|
moreamazingnick/icinga-powershell-framework
|
211d9d3a1d7e1672dbeafc00da2ea0860080d173
|
[
"MIT"
] | 210 |
2019-11-05T10:42:10.000Z
|
2022-03-31T15:51:23.000Z
|
lib/webserver/New-IcingaTCPSocket.psm1
|
moreamazingnick/icinga-powershell-framework
|
211d9d3a1d7e1672dbeafc00da2ea0860080d173
|
[
"MIT"
] | 31 |
2019-11-26T13:50:30.000Z
|
2022-03-25T14:53:18.000Z
|
function New-IcingaTCPSocket()
{
param (
[string]$Address = '',
[int]$Port = 0,
[switch]$Start = $FALSE
);
if ($Port -eq 0) {
throw 'Please specify a valid port to open a TCP socket for';
}
# Listen on localhost by default
$ListenAddress = New-Object System.Net.IPEndPoint([IPAddress]::Loopback, $Port);
if ([string]::IsNullOrEmpty($Address) -eq $FALSE) {
$ListenAddress = New-Object System.Net.IPEndPoint([IPAddress]::Parse($Address), $Port);
}
$TCPSocket = New-Object 'System.Net.Sockets.TcpListener' $ListenAddress;
Write-IcingaDebugMessage -Message (
[string]::Format(
'Creating new TCP socket on Port {0}. Endpoint configuration {1}',
$Port,
$TCPSocket.LocalEndpoint
)
);
if ($Start) {
Write-IcingaDebugMessage -Message (
[string]::Format(
'Starting TCP socket for endpoint {0}',
$TCPSocket.LocalEndpoint
)
);
$TCPSocket.Start();
}
return $TCPSocket;
}
| 26.214286 | 95 | 0.5604 | 3.125 |
0dc079c53443f6952f757ea2a4a9e2cdaa92a248
| 1,626 |
rb
|
Ruby
|
test/ractor/tvar_test.rb
|
ko1/ractor-tvar
|
e57244747ef75df0c71b223bb28585a67a643720
|
[
"MIT"
] | 54 |
2020-11-18T22:38:18.000Z
|
2022-03-18T18:31:12.000Z
|
test/ractor/tvar_test.rb
|
ko1/ractor_tvar
|
e57244747ef75df0c71b223bb28585a67a643720
|
[
"MIT"
] | 3 |
2020-11-20T09:24:48.000Z
|
2021-02-04T12:50:21.000Z
|
test/ractor/tvar_test.rb
|
ko1/ractor_tvar
|
e57244747ef75df0c71b223bb28585a67a643720
|
[
"MIT"
] | 1 |
2020-12-14T23:24:49.000Z
|
2020-12-14T23:24:49.000Z
|
# frozen_string_literal: true
require "test_helper"
class Ractor::TVarTest < Test::Unit::TestCase
test "VERSION" do
assert do
::Ractor::TVar.const_defined?(:VERSION)
end
end
test 'Ractor::TVar can has a value' do
tv = Ractor::TVar.new(1)
assert_equal 1, tv.value
end
test 'Ractor::TVar without initial value will return nil' do
tv = Ractor::TVar.new
assert_equal nil, tv.value
end
test 'Ractor::TVar can change the value' do
tv = Ractor::TVar.new
assert_equal nil, tv.value
Ractor::atomically do
tv.value = :ok
end
assert_equal :ok, tv.value
end
test 'Ractor::TVar update without atomically will raise an exception' do
tv = Ractor::TVar.new
assert_raise Ractor::TransactionError do
tv.value = :ng
end
end
test 'Ractor::TVar#increment increments the value' do
tv = Ractor::TVar.new(0)
tv.increment
assert_equal 1, tv.value
tv.increment 2
assert_equal 3, tv.value
Ractor::atomically do
tv.increment 3
end
assert_equal 6, tv.value
Ractor::atomically do
tv.value = 1.5
end
tv.increment(-1.5)
assert_equal 0.0, tv.value
end
test 'Ractor::TVar can not set the unshareable value' do
assert_raise ArgumentError do
Ractor::TVar.new [1]
end
end
## with Ractors
N = 10_000
test 'Ractor::TVar is consistent with other Ractors' do
tv = Ractor::TVar.new(0)
rs = 4.times.map{
Ractor.new tv do |tv|
N.times{ Ractor::atomically{ tv.increment } }
end
}
rs.each{|r| r.take}
assert_equal N * 4 , tv.value
end
end
| 21.116883 | 74 | 0.644526 | 3.03125 |
f0143bd3ed561d0dfb0348516e68bf8dbb769f62
| 5,723 |
lua
|
Lua
|
lib/shapes/pentagon.lua
|
cjmartin20/Learning-Modules
|
589588d3cbc8270f0412fccc3cc9a422a0eab8fa
|
[
"MIT"
] | 1 |
2020-09-13T21:37:19.000Z
|
2020-09-13T21:37:19.000Z
|
lib/shapes/pentagon.lua
|
cjmartin20/Learning-Modules
|
589588d3cbc8270f0412fccc3cc9a422a0eab8fa
|
[
"MIT"
] | 1 |
2020-07-09T11:06:24.000Z
|
2020-07-09T11:06:24.000Z
|
lib/shapes/pentagon.lua
|
cjmartin20/Learning-Modules
|
589588d3cbc8270f0412fccc3cc9a422a0eab8fa
|
[
"MIT"
] | null | null | null |
----------------------------------------------------------------------------------------
--pentagon.lua Creates Pentagon Object
----------------------------------------------------------------------------------------
local pentagon = {
object = nil,
originalColor = { Red = 0, Green = 0, Blue = 0},
hasAttribute = nil,
inPosition = false,
"pentagon",
"5 sides",
"5 vertices",
"polygon"
}
local useAttributes = require "attributes"
function pentagon.createPentagon( x, y, scaler, currentAttribute )
x = x or display.contentCenterX
y = y or display.contentCenterY
scaler = (scaler or 1) * 11
local pentagonShape = { 0,-6*scaler, -7*scaler,-2*scaler, -4*scaler,5*scaler, 4*scaler,5*scaler, 7*scaler,-2*scaler }
pentagon.object = display.newPolygon( x, y, pentagonShape )
Red = 0
Green = 5
Blue = 5
pentagon.originalColor.Red = Red
pentagon.originalColor.Green = Green
pentagon.originalColor.Blue = Blue
pentagon.object:setFillColor( Red, Green, Blue ) -- fill the pentagon with color
pentagon.object.strokeWidth = 0.016 * display.contentWidth -- Sets the width of the border of pentagon
--Set Stroke color
pentagon.object:setStrokeColor( 128, 0, 128 ) -- Sets the border color
pentagon.object:addEventListener( "touch", pentagon.move )
pentagon.object.alpha = 0.7 --pentagon opacity
--check if pentagon has attributes.currentAttribute (in attributes.lua table)
print( "Checking pentagon Attributes" )
local test = false
for index, attribute in ipairs(pentagon) do
print("checking ", index, attribute)
if attribute == currentAttribute then
test = true
print("pentagon Has Attribute")
end
end
pentagon.hasAttribute = test
--initialize attributes.hasAttribute if no value set it to true
return pentagon
end --createPentagon function
--Move shapes function
function pentagon.move( event )
--event.target comes from EventListener and is the object the "touch" is targeting
local object = event.target
local touchDistance = object.width
--Move shape
if math.abs( object.x - event.x ) < touchDistance and math.abs( object.y - event.y ) < touchDistance then
object.x = event.x
object.y = event.y
end
--Change color if pentagon is in position and has attribute
if useAttributes.isShapeWithinRadius( object, .85 * display.contentCenterX, display.contentCenterX, display.contentCenterY) then
if pentagon.hasAttribute then
--change color to green
object:setFillColor( 0, 128 , 0)
else
--change color to red
object:setFillColor( 128, 0 , 0 )
end
pentagon.inPosition = true
else
object:setFillColor( pentagon.originalColor.Red, pentagon.originalColor.Green, pentagon.originalColor.Blue )
pentagon.inPosition = false
end
end --end move function
return pentagon
| 38.153333 | 130 | 0.664162 | 3.515625 |
af0b79d44a1325389de6e3a681b3cb432b2b8a95
| 1,752 |
py
|
Python
|
src/models/categories_products/categories_products.py
|
nnecklace/webi-shoppi
|
140d1e6ea8d019aa10ee2104e1bbd2baf0b9aa0f
|
[
"MIT"
] | null | null | null |
src/models/categories_products/categories_products.py
|
nnecklace/webi-shoppi
|
140d1e6ea8d019aa10ee2104e1bbd2baf0b9aa0f
|
[
"MIT"
] | 2 |
2020-06-02T13:55:02.000Z
|
2020-06-16T17:58:55.000Z
|
src/models/categories_products/categories_products.py
|
nnecklace/webi-shoppi
|
140d1e6ea8d019aa10ee2104e1bbd2baf0b9aa0f
|
[
"MIT"
] | null | null | null |
from src.db import db
from sqlalchemy import exc
from src.models import Product, Category
import sys
class CategoryProduct(db.Model):
__tablename__ = "categories_products"
__table_args__ = (
db.PrimaryKeyConstraint("category_id", "product_id"),
)
category_id = db.Column(db.Integer, db.ForeignKey("categories.id"), nullable=False)
product_id = db.Column(db.Integer, db.ForeignKey("products.id", ondelete="CASCADE"), nullable=False)
def add_product_categories(self, product_id, categories):
curr_product = Product.query.get(product_id)
if not curr_product:
return False
categories = list(map(lambda cat: int(cat), categories))
# delete the categories not selected in update
CategoryProduct.query \
.filter(CategoryProduct.product_id == product_id) \
.filter(CategoryProduct.category_id.notin_(categories)) \
.delete(synchronize_session="fetch")
# every time we access the categories property a query is made
# this is why we save it in a variable
curr_categories = list(map(lambda cat: cat.id, curr_product.categories))
if len(categories) > 0:
db.session().bulk_insert_mappings(
CategoryProduct,
[
{"category_id": category, "product_id": product_id}
for category in [cat for cat in categories if cat not in curr_categories]
]
)
try:
db.session().commit()
except exc.SQLAlchemyError as err:
print("[ERROR] Batch insert product categories " + str(err), sys.stderr)
return False
return True
| 36.5 | 104 | 0.621575 | 3.1875 |
969fce68dbaae362f57029e5bccd0c5c5e32b7bf
| 3,242 |
dart
|
Dart
|
app14/lib/Home.dart
|
BryanSOliveira/flutter
|
d18d331868e4ebcf0d5e07f1047bd4f1d0e31f79
|
[
"MIT"
] | null | null | null |
app14/lib/Home.dart
|
BryanSOliveira/flutter
|
d18d331868e4ebcf0d5e07f1047bd4f1d0e31f79
|
[
"MIT"
] | null | null | null |
app14/lib/Home.dart
|
BryanSOliveira/flutter
|
d18d331868e4ebcf0d5e07f1047bd4f1d0e31f79
|
[
"MIT"
] | null | null | null |
import 'package:flutter/material.dart';
import 'package:shared_preferences/shared_preferences.dart';
class Home extends StatefulWidget {
@override
_HomeState createState() => _HomeState();
}
class _HomeState extends State<Home> {
@override
void initState() {
// TODO: implement initState
super.initState();
pegarEstadoDia();
pegarEstadoFonte();
}
String _textoSalvo = "Nada salvo!";
bool dia = true;
bool pequeno = true;
TextEditingController _controllerCampo = TextEditingController();
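// Persists the typed value under the "nome" key via shared_preferences.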
_salvar() async {
String valorDigitado = _controllerCampo.text;
final prefs = await SharedPreferences.getInstance();
await prefs.setString("nome", valorDigitado);
print("Método Salvar: $valorDigitado");
}
_recuperar() async {
final prefs = await SharedPreferences.getInstance();
setState(() {
_textoSalvo = prefs.getString("nome").toString();
});
print("Método Recuperar: $_textoSalvo");
}
_remover() async {
final prefs = await SharedPreferences.getInstance();
await prefs.remove("nome");
print("Método Remover");
}
_blocoFrase() {
return Container(
padding: EdgeInsets.all(10),
color: dia ? Colors.white : Colors.black26,
child: Text(
'"A vingança nunca é plena, mata a alma e envenena" (Seu Madruga)',
style: TextStyle(fontSize: pequeno ? 15 : 30),
),
);
}
Future<bool> salvarEstadoDia(bool estado) async {
final prefs = await SharedPreferences.getInstance();
return prefs.setBool("dia", estado);
}
Future<bool> pegarEstadoDia() async {
final prefs = await SharedPreferences.getInstance();
setState(() {
dia = prefs.getBool("dia") == true;
});
return dia;
}
Future<bool> salvarEstadoFonte(bool pequeno) async {
final prefs = await SharedPreferences.getInstance();
return prefs.setBool("pequeno", pequeno);
}
Future<bool> pegarEstadoFonte() async {
final prefs = await SharedPreferences.getInstance();
setState(() {
pequeno = prefs.getBool("pequeno") == true;
});
return pequeno;
}
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(
title: Text("Frases"),
centerTitle: true,
),
body: Container(
padding: EdgeInsets.all(32),
child: Column(
children: <Widget>[
Row(
children: <Widget>[
Text('Dia'),
Switch(
value: dia,
onChanged: (value) {
setState(() {
dia = value;
salvarEstadoDia(value);
});
},
),
Text('Pequeno'),
Switch(
value: pequeno,
onChanged: (value) {
setState(() {
pequeno = value;
salvarEstadoFonte(pequeno);
});
},
),
],
),
_blocoFrase(),
],
),
),
);
}
}
| 24.748092 | 75 | 0.553671 | 3.1875 |
674ca13dd53940fcd17ce3e33f535dba2d176c24
| 4,485 |
ps1
|
PowerShell
|
noiseTools/prependTimestamps.ps1
|
johnaho/Cloakify-Powershell
|
700ec8030c57ff6dc8e6d3be6627b3a65861047c
|
[
"MIT"
] | 33 |
2017-07-21T13:47:05.000Z
|
2020-05-10T07:11:25.000Z
|
noiseTools/prependTimestamps.ps1
|
dumpsterfirevip/Cloakify-Powershell
|
700ec8030c57ff6dc8e6d3be6627b3a65861047c
|
[
"MIT"
] | null | null | null |
noiseTools/prependTimestamps.ps1
|
dumpsterfirevip/Cloakify-Powershell
|
700ec8030c57ff6dc8e6d3be6627b3a65861047c
|
[
"MIT"
] | 8 |
2018-05-12T16:31:10.000Z
|
2020-10-18T04:28:21.000Z
|
#
# Filename: prependTimestamps.ps1
#
# Version: 1.0.1
#
# Author: Joe Gervais (TryCatchHCF)
#
# Ported to Powershell by: John Aho
#
# Summary: Inserts datetimestamps in front of each line of a file. Used to
# add noise to a cloaked file (see cloakify.py) in order to degrade frequency
# analysis attacks against the cloaked payload.
#
# Description:
# Takes current date and randomly subtracts 1011-1104 days to generate a
# starting date. Then starts randomly incrementing the datetimestamp (between
# 0-664 seconds) for each entry in the cloaked file. If the datetimestamp
# reaches the current date, repeats the above steps to avoid generating
# timestamps into the future.
#
# Example:
#
# $ ./prependTimestamps.ps1 cloaked.txt > exfiltrateMe.txt
#
# Remove timestamps before trying to decloak the file
#
# $ cat exfiltrateMe.txt | cut -d" " -f 3- > cloaked.txt
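#
# A possible PowerShell equivalent (assuming this script's single ISO-8601
# timestamp plus one space in front of each line):
#
# $ Get-Content exfiltrateMe.txt | ForEach-Object { ($_ -split ' ', 2)[1] } | Set-Content cloaked.txt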
param (
[Parameter(Mandatory=$false)][string]$cloakedFile
)
[void] (Invoke-Expression("chcp 65001")) #sets output of console to UTF-8
$minDaysBack = -1104
$maxDaysBack = -1011
$minSecondsStep = 0
$maxSecondsStep = 664
$minMilliseconds = 11
$maxMilliseconds = 9999
$minTick = 3
$maxTick = 9999
# Set the start date back roughly 3 years (1011-1104 days) from today for entropy range
# Randomize a little for each run to avoid a pattern in the first line of each file
$today = Get-Date
$startDate = $today.AddDays( (Get-Random -Minimum $minDaysBack -Maximum $maxDaysBack))
$step = (Get-Random -Minimum $minSecondsStep -Maximum $maxSecondsStep)
$startDate = [System.DateTime]::Parse(($startDate).ToString("yyyy.MM.dd"))
$toparse = ((Get-Random -Minimum 0 -Maximum 23).ToString()+":"+(Get-Random -Minimum 0 -Maximum 59).ToString()+":"+ (Get-Random -Minimum 0 -Maximum 59).ToString())
$t = [System.Timespan]::Parse($toparse)
$fakeDate = $startDate.Add($t)
$fakeDate = $fakeDate.AddMilliseconds((Get-Random -Minimum $minMilliseconds -Maximum $maxMilliseconds))
$fakeDate = $fakeDate.AddTicks((Get-Random -Minimum $minTick -Maximum $maxTick))
if ( $cloakedFile.Length -eq 0){
write-host("usage: prependTimestamps.ps1 <cloakedFilename>")
write-host("")
write-host("Strip the timestamps prior to decloaking the cloaked file.")
write-host("")
# Generate sample of noise generator output
$i = 0
while ( $i -lt 20 ){
Write-Host( Get-date $fakeDate -Format o)
$fakeDate = $fakeDate.AddSeconds( (Get-Random -Minimum $minSecondsStep -Maximum $maxSecondsStep))
$fakeDate = $fakeDate.AddMilliseconds((Get-Random -Minimum $minMilliseconds -Maximum $maxMilliseconds))
$fakeDate = $fakeDate.AddTicks((Get-Random -Minimum $minTick -Maximum $maxTick))
if($fakeDate -gt $today){
$startDate = $today.AddDays( (Get-Random -Minimum $minDaysBack -Maximum $maxDaysBack))
$step = (Get-Random -Minimum $minSecondsStep -Maximum $maxSecondsStep)
$startDate = [System.DateTime]::Parse(($startDate).ToString("yyyy.MM.dd"))
$toparse = ((Get-Random -Minimum 0 -Maximum 23).ToString()+":"+(Get-Random -Minimum 0 -Maximum 59).ToString()+":"+ (Get-Random -Minimum 0 -Maximum 59).ToString())
$t = [System.Timespan]::Parse($toparse)
$fakeDate = $startDate.Add($t)
$fakeDate = $fakeDate.AddMilliseconds((Get-Random -Minimum $minMilliseconds -Maximum $maxMilliseconds))
$fakeDate = $fakeDate.AddTicks((Get-Random -Minimum $minTick -Maximum $maxTick))
}
$i = $i+1
}
}
if( $cloakedFile.Length -gt 0){
if(Test-Path $cloakedFile){
# Generate a random with enough range to look good, scale with vals above
$clFile = Get-Content $cloakedFile -Encoding UTF8
$i = 0
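# Prepend an ISO-8601 ('o' format) timestamp to each line, stepping the fake clock forward by a random amount between lines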
while($i -lt $clFile.Length ){
$clFile[$i] = ((get-date $fakeDate -Format o ) + " " + $clFile[$i])
$fakeDate = $fakeDate.AddSeconds( (Get-Random -Minimum $minSecondsStep -Maximum $maxSecondsStep))
$fakeDate = $fakeDate.AddMilliseconds((Get-Random -Minimum 12 -Maximum 9999))
$fakeDate = $fakeDate.AddTicks((Get-Random -Minimum 12 -Maximum 9999))
$i = $i+1
}
Out-File -FilePath $cloakedFile -InputObject $clFile -Encoding UTF8
}else{
Write-Host "File not found!"
}
}
| 36.762295 | 182 | 0.6466 | 3.203125 |
65a3e5ef786bff02bcc771e0bae40fada80ebc31
| 14,973 |
lua
|
Lua
|
www/.lua/ZoneDB.lua
|
RealTimeLogic/SharkTrustEx
|
e68e228fd3d29b146498e141a0fdda0060277cbb
|
[
"MIT"
] | 2 |
2021-03-25T07:15:29.000Z
|
2021-03-26T18:29:30.000Z
|
www/.lua/ZoneDB.lua
|
RealTimeLogic/SharkTrustX
|
e68e228fd3d29b146498e141a0fdda0060277cbb
|
[
"MIT"
] | null | null | null |
www/.lua/ZoneDB.lua
|
RealTimeLogic/SharkTrustX
|
e68e228fd3d29b146498e141a0fdda0060277cbb
|
[
"MIT"
] | 1 |
2020-11-25T05:01:39.000Z
|
2020-11-25T05:01:39.000Z
|
local fmt, sbyte, tinsert, tunpack = string.format, string.byte, table.insert, table.unpack
local su = require "sqlutil"
local rcBridge = require "RevConnBridge"
-- random hex key: returns 2*bytes hex characters (64 characters when bytes = 32)
local function createHexKey(bytes)
return ba.rndbs(bytes):gsub(".", function(x) return fmt("%02x", sbyte(x)) end)
end
-- Encapsulation of the connection used exclusively for writing
-- dbExec(sql, noCommit, func) -- Async DB insert with optional callback
local env, dbExec =
(function()
local env, wconn = io:dofile(".lua/CreateDB.lua",_ENV)() -- Requires env:io
assert(env, fmt("Cannot open zones.db: %s", wconn))
assert(wconn:execute "PRAGMA foreign_keys = on;")
assert(wconn:setautocommit "IMMEDIATE")
local function commit()
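-- Retry until the commit on the exclusive write connection succeeds; SQLite reports BUSY while another connection holds the database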
while true do
local ok, err = wconn:commit "IMMEDIATE"
if ok then break end
if err ~= "BUSY" then
trace("ERROR: commit failed on exclusive connection:", err)
break
end
end
end
local function checkExec(sql, ok, err, err2)
if not ok then
trace("SQL err:", err2 or err, sql)
end
end
local dbthread = ba.thread.create()
local function dbExec(sql, noCommit, func)
tracep(9,sql)
dbthread:run(
function()
if sql then checkExec(sql, wconn:execute(sql)) end
if not noCommit then commit() end
if func then func() end
end
)
end
return env, dbExec
end)()
local quote = env.quotestr
local function quotedNowTime()
return quote(ba.datetime"NOW":tostring())
end
-- Open/close connections used exclusively for reading.
local function openConn()
local x, conn = su.open(env, "zones")
if conn then conn:setbusytimeout(10000) end
return conn
end
local function closeConn(conn)
conn:close()
end
-- Wraps around openConn/closeConn and "sqlutil.lua"'s iterator
-- Note, when tab=false: The iterator can at most return one element
local function sqlIter(sql,tab)
local conn = openConn()
local next = su.iter(conn, sql, tab)
return function()
local t, err = next()
if t then return t end
if err then trace("Err:", err, sql) end
closeConn(conn)
end
end
-- can return one element, one column or a table
local function dbFind(tab, sql)
local conn = openConn()
local x, err = true == tab and su.findt(conn, sql, {}) or su.find(conn, sql)
if not x and err then trace("Err:", err) end
closeConn(conn)
return x
end
------------------------------- READING ------------------------------------
local function getZoneKey(zname)
return dbFind(false, fmt("%s%s%s", "zkey FROM zones WHERE zname=", quote(zname), " COLLATE NOCASE"))
end
local function zidGetZoneT(zid)
return dbFind(true, fmt("%s%s", "* FROM zones WHERE zid=", zid))
end
local function znameGetZoneT(zname)
local t=dbFind(true, fmt("%s%s%s", "* FROM zones WHERE zname=", quote(zname), " COLLATE NOCASE"))
if t then
t.autoReg = t.autoReg == "1"
t.sso = t.sso == "1"
end
return t
end
local function zkeyGetZoneT(zkey)
return dbFind(true, fmt("%s%s%s", "* FROM zones WHERE zkey=", quote(zkey), " COLLATE NOCASE"))
end
local function getZoneName(zkey)
return dbFind(false, fmt("%s%s%s", "zname FROM zones WHERE zkey=", quote(zkey), " COLLATE NOCASE"))
end
local function getZid4Zone(zkey)
return dbFind(false, fmt("%s%s%s", "zid FROM zones WHERE zkey=", quote(zkey), " COLLATE NOCASE"))
end
-- Returns table with keys 'did,name,dkey,localAddr,wanAddr,dns,info,zid'
local function keyGetDeviceT(dkey)
return dbFind(true, fmt("%s%s%s", "* FROM devices WHERE dkey=", quote(dkey), " COLLATE NOCASE"))
end
local function nameGetDeviceT(zid, name)
return dbFind(true, fmt("%s%s and name=%s%s", "* FROM devices WHERE zid=", zid, quote(name), " COLLATE NOCASE"))
end
local function countDevices4Zone(zid)
return tonumber(dbFind(false, fmt("%s%s", "count(*) FROM devices WHERE zid=", zid)))
end
-- Returns iterator, which returns a table with all of the zone's keys/vals
-- zid can be the zone ID (zid) or the zone key (zkey)
local function getDevices4ZoneT(zid)
zid = "string" == type(zid) and #zid == 64 and getZid4Zone(zid) or zid
local sql = fmt("%s%s%s", "* FROM devices WHERE zid=", zid, " ORDER BY wanAddr,name ASC")
return sqlIter(sql, true)
end
-- Get all devices for zone that are part of the WAN "wanAddr"
--Returns iterator, which returns devT
local function getDevices4Wan(zid, wanAddr)
local sql = fmt("%s%s%s%s%s", "* FROM devices where wanAddr=", quote(wanAddr), " AND zid=", zid, " ORDER BY name ASC")
return sqlIter(sql,true)
end
-- Get all devices a regular user has access to
-- Returns:
-- if tab=true: a table where key=did,val=true
-- if not tab: an iterator, which returns a table with all of the zone's keys/vals
local function getDevices4User(uid,tab)
local sql =
fmt(
"%s%s%s%s",
tab and "devices.did" or "*",
" FROM devices INNER JOIN UsersDevAccess ON devices.did == UsersDevAccess.did WHERE UsersDevAccess.uid=",
uid,
" ORDER BY wanAddr,name ASC"
)
if tab then
local t={}
local conn = openConn()
local next = su.iter(conn, sql)
local did,err = next()
while did do
t[did] = true
did,err = next()
end
if err then trace("Err:", err, sql) end
closeConn(conn)
return t
end
return sqlIter(sql, true)
end
-- Returns iterator, which returns zid,zname,zkey
local function getZonesT()
local conn = openConn()
local sql = "zid,zname,zkey FROM zones"
local next = su.iter(conn, sql)
return function()
local zid, zname, zkey = next()
if zid then return zid, zname, zkey end
if zname then trace("Err:", zname, sql) end
closeConn(conn)
end
end
local function getAutoReg(zid)
local enabled = dbFind(false, fmt("%s%s", "autoReg FROM zones WHERE zid=",zid))
return enabled == "1"
end
local function getSsoEnabled(zid)
local enabled = dbFind(false, fmt("%s%s", "sso FROM zones WHERE zid=",zid))
return enabled == "1"
end
local function getSsoCfg(zid)
local cfg = dbFind(false, fmt("%s%s", "ssocfg FROM zones WHERE zid=",zid))
return cfg and ba.json.decode(cfg)
end
-- Returns table array with all wan addresses for zone ID
local function getWanL(zid)
local conn = openConn()
local list = {}
local sql = fmt("%s%s", "DISTINCT wanAddr FROM devices where zid=", zid)
for wanAddr in su.iter(conn, sql) do
tinsert(list, wanAddr)
end
closeConn(conn)
return list
end
-- Returns iterator, which returns uid,email,regTime,accessTime,poweruser
local function getUsers(zid)
local conn = openConn()
local sql = fmt("%s%s","uid,email,regTime,accessTime,poweruser FROM users WHERE zid=", zid)
local next = su.iter(conn, sql)
return function()
local uid,email,regTime,accessTime,poweruser = next()
if uid then return uid,email,regTime,accessTime,poweruser == "1" end
if email then trace("Err:", email, sql) end
closeConn(conn)
end
end
-- Get user info by email addr.
-- Returns userT
local function getUserT(zid, email)
local uT =
dbFind(true, fmt("%s%s%s%s%s", "* FROM users WHERE zid=", zid, " AND email=", quote(email), " COLLATE NOCASE"))
if uT then
uT.poweruser = uT.poweruser ~= "0"
end
return uT
end
------------------------------- WRITING ------------------------------------
local function addZone(zname, admEmail, admPwd, func)
if getZoneKey(zname) then
trace("Err: zone exists:", zname)
return
end
local zkey
while true do
zkey = createHexKey(32)
if not dbFind(false, fmt("%s%s%s", "zkey FROM zones WHERE zkey=", quote(zkey), " COLLATE NOCASE")) then
break
end
end
local now = quotedNowTime()
dbExec(
fmt(
"%s(%s,%s,%s,%s,%s,%s,%s,0)",
"INSERT INTO zones (zname,regTime,accessTime,admEmail,admPwd,zkey,zsecret,autoReg) VALUES",
quote(zname),
now,
now,
quote(admEmail),
quote(admPwd),
quote(zkey),
quote(createHexKey(32))
),
false,
func
)
-- Return a simplified zoneT. We just need these values for bindzonedb()
return {zname = zname, zkey = zkey, zid = 0}
end
local function updateAdmPwd(zid, admPwd)
dbExec(fmt("UPDATE zones SET admPwd=%s WHERE zid=%s", quote(admPwd), zid))
end
local function updateUSerPwd(zid, email, pwd)
dbExec(fmt("UPDATE users SET pwd=%s WHERE zid=%s AND email=%s COLLATE NOCASE", quote(pwd), zid, quote(email)))
end
local function setAutoReg(zid, enable)
dbExec(fmt("UPDATE zones SET autoReg=%d WHERE zid=%s", enable and 1 or 0, zid))
end
local function setSsoEnabled(zid, enable)
dbExec(fmt("UPDATE zones SET sso=%d WHERE zid=%s", enable and 1 or 0, zid))
end
local function setSsoCfg(zid, tab)
dbExec(fmt("UPDATE zones SET ssocfg=%s WHERE zid=%s", quote(ba.json.encode(tab)), zid))
end
local function removeZone(zkey,func)
local zid = getZid4Zone(zkey)
if not zid then
trace("Not found:", zname)
return
end
local devsL={}
for devT in getDevices4ZoneT(zid) do
tinsert(devsL, devT)
end
for _,devT in ipairs(devsL) do
dbExec(fmt("%s%s", "DELETE FROM UsersDevAccess WHERE did=", devT.did), true)
rcBridge.removeDevice(devT.dkey)
end
dbExec(fmt("%s%s", "DELETE FROM devices WHERE zid=", zid), true)
dbExec(fmt("%s%s", "DELETE FROM users WHERE zid=", zid), true)
dbExec(fmt("%s%s", "DELETE FROM zones WHERE zid=", zid),false,func)
end
local function removeUsers(uidL)
for _,uid in pairs(uidL) do
dbExec(fmt("%s%s", "DELETE FROM UsersDevAccess WHERE uid=", uid), true)
dbExec(fmt("%s%s", "DELETE FROM Users WHERE uid=", uid), true)
end
dbExec() -- Commit
end
local function addDevice(zkey, name, localAddr, wanAddr, dns, info, func)
local zid = getZid4Zone(zkey)
if not zid then
trace("zkey not found:", zkey)
return
end
if dbFind(false, fmt("%s%s AND name=%s%s", "dkey FROM devices WHERE zid=", zid, quote(name), " COLLATE NOCASE")) then
trace("Err: device exists:", name)
return
end
local dkey
while true do
dkey = createHexKey(10)
if not dbFind(false, fmt("%s%s%s", "dkey FROM devices WHERE dkey=", quote(dkey), " COLLATE NOCASE")) then
break
end
end
local now = quotedNowTime()
dbExec(
fmt(
"%s(%s,%s,%s,%s,%s,%s,%s,%s,%s)",
"INSERT INTO devices (name,dkey,localAddr,wanAddr,dns,info,regTime,accessTime,zid) VALUES",
quote(name),
quote(dkey),
quote(localAddr),
quote(wanAddr),
quote(dns),
quote(info),
now,
now,
zid
),
false,
func
)
return dkey
end
local function updateAddress4Device(dkey, localAddr, wanAddr, dns, func)
dbExec(
fmt(
"UPDATE devices SET localAddr=%s, wanAddr=%s, dns=%s, accessTime=%s WHERE dkey=%s",
quote(localAddr),
quote(wanAddr),
quote(dns),
quotedNowTime(),
quote(dkey)
),
false,
func
)
end
local function updateTime4Device(dkey)
dbExec(fmt("UPDATE devices SET accessTime=%s WHERE dkey=%s", quotedNowTime(), quote(dkey)))
end
local function removeDevice(dkey, func)
local t = keyGetDeviceT(dkey)
if t then
dbExec(fmt("%s%s", "DELETE FROM UsersDevAccess WHERE did=", t.did), true)
dbExec(fmt("%s%s%s", "DELETE FROM devices WHERE did=", t.did, " COLLATE NOCASE"), false, func)
rcBridge.removeDevice(dkey)
else
trace("Not found", dkey)
end
end
local function addUser(zid, email, pwd, poweruser, func)
local now=quotedNowTime()
dbExec(
fmt(
"%s(%s,%s,%s,%s,%d,%s)",
"INSERT INTO users (email,pwd,regTime,accessTime,poweruser,zid) VALUES",
quote(email),
quote(pwd),
now,
now,
poweruser and 1 or 0,
zid
),
false,
func
)
end
local function setUserAccessTime(uid)
dbExec(fmt("UPDATE users SET accessTime=%s WHERE uid=%s", quotedNowTime(), uid))
end
local function setPoweruser(uid, poweruser)
dbExec(fmt("UPDATE users SET poweruser=%d WHERE uid=%s", poweruser and 1 or 0, uid))
end
-- Create an entry in UserDevAccess if the entry does not exist
local function createUserDevAccess(uid,did,noCommit)
-- Execute UPSERT
dbExec(
fmt(
"%s(%s,%s)%s",
"INSERT INTO UsersDevAccess (did,uid) VALUES",
did,
uid,
"ON CONFLICT (did,uid) DO NOTHING"
),
noCommit
)
end
-- Auto set user access for all devices part of "wanAddr".
-- This function is called when user logs in.
local function setUserAccess4Wan(zid, uid, wanAddr)
for devT in getDevices4Wan(zid, wanAddr) do
createUserDevAccess(uid,devT.did, true)
end
dbExec() -- Commit
end
-- Create or delete an entry in UsersDevAccess
local function setDevAccess4User(uid,did,enable)
if enable then
createUserDevAccess(uid,did)
else
dbExec(fmt("%s uid=%s and did=%s", "DELETE FROM UsersDevAccess WHERE", uid,did))
end
end
return {
addDevice = addDevice, -- (zkey, name, localAddr, wanAddr, dns, info, func)
addUser = addUser, -- (zid, email, pwd, poweruser)
addZone = addZone, -- (zname, admEmail, admPwd, func)
countDevices4Zone = countDevices4Zone, -- (zid)
getAutoReg=getAutoReg, -- (zid)
getSsoEnabled=getSsoEnabled, -- (zid)
getSsoCfg=getSsoCfg, -- (zid)
getDevices4User=getDevices4User, -- (uid)
getDevices4Wan = getDevices4Wan, -- (zid, wanAddr)
getDevices4ZoneT = getDevices4ZoneT, -- (zid)
getUserT = getUserT, -- (zid, email)
getUsers=getUsers, -- (zid)
getWanL = getWanL, -- (zid)
getZid4Zone = getZid4Zone, -- (zkey)
getZoneKey = getZoneKey, -- (zname)
getZoneName = getZoneName, -- (zkey)
getZonesT = getZonesT, -- ()
keyGetDeviceT = keyGetDeviceT, -- (dkey)
nameGetDeviceT = nameGetDeviceT, -- (zid, name)
removeDevice = removeDevice, -- (dkey, func)
removeUsers=removeUsers, -- (uidL)
removeZone = removeZone, -- (zkey)
setAutoReg=setAutoReg, -- (zid, enable)
setSsoEnabled=setSsoEnabled, -- (zid, enable)
setSsoCfg=setSsoCfg, -- (zid, lua-table)
setDevAccess4User=setDevAccess4User, -- (uid,did,enable)
setUserAccessTime=setUserAccessTime , -- (uid)
setPoweruser=setPoweruser, -- (uid, poweruser)
setUserAccess4Wan = setUserAccess4Wan, -- (zid, uid, wanAddr)
updateAddress4Device = updateAddress4Device, -- (dkey, localAddr, wanAddr, dns, func)
updateAdmPwd = updateAdmPwd, -- (zid, admPwd)
updateTime4Device = updateTime4Device, -- (dkey)
updateUSerPwd = updateUSerPwd, -- (zid, email, pwd)
zidGetZoneT = zidGetZoneT, -- (zid)
zkeyGetZoneT = zkeyGetZoneT, -- (zkey)
znameGetZoneT = znameGetZoneT -- (zname)
}
| 30.494908 | 121 | 0.641288 | 3.515625 |
ce485d3945ec01938ce1cc2b0d888ebd8daff438
| 958 |
lua
|
Lua
|
config/neovim/lua/config/api/vimacs.lua
|
sei40kr/npm-aliases
|
c1dd180a277b6ddc27f751e60b5dbbacc697847a
|
[
"MIT"
] | null | null | null |
config/neovim/lua/config/api/vimacs.lua
|
sei40kr/npm-aliases
|
c1dd180a277b6ddc27f751e60b5dbbacc697847a
|
[
"MIT"
] | null | null | null |
config/neovim/lua/config/api/vimacs.lua
|
sei40kr/npm-aliases
|
c1dd180a277b6ddc27f751e60b5dbbacc697847a
|
[
"MIT"
] | null | null | null |
local M = {}
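-- Emacs-style insert-mode helpers: kill_word deletes forward to the end of the next word (joining lines at EOL), kill_line deletes to the end of the line or joins with the next line when already at EOL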
function M.kill_word()
local col = vim.api.nvim_win_get_cursor(0)[2]
local line = vim.api.nvim_get_current_line()
if #line <= col then
vim.api.nvim_feedkeys(
vim.api.nvim_replace_termcodes("<Del><C-o>dw", true, true, true),
"i",
true
)
else
vim.api.nvim_feedkeys(
vim.api.nvim_replace_termcodes("<C-o>dw", true, true, true),
"i",
true
)
end
end
function M.kill_line()
local col = vim.api.nvim_win_get_cursor(0)[2]
local line = vim.api.nvim_get_current_line()
if #line <= col then
vim.api.nvim_feedkeys(
vim.api.nvim_replace_termcodes("<Del>", true, true, true),
"i",
true
)
else
vim.api.nvim_feedkeys(
vim.api.nvim_replace_termcodes("<C-o>d$", true, true, true),
"i",
true
)
end
end
return M
| 22.809524 | 77 | 0.532359 | 3 |
05e42f315833cab5bc5272cbd2173ea8012ff7f5
| 979 |
py
|
Python
|
python/paddle/v2/fluid/tests/test_maxout_op.py
|
QingshuChen/Paddle
|
25a92be3e123ed21fd98c7be6bd7e3a6320756a3
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/v2/fluid/tests/test_maxout_op.py
|
QingshuChen/Paddle
|
25a92be3e123ed21fd98c7be6bd7e3a6320756a3
|
[
"Apache-2.0"
] | 9 |
2017-09-13T07:39:31.000Z
|
2017-10-18T05:58:23.000Z
|
python/paddle/v2/fluid/tests/test_maxout_op.py
|
QingshuChen/Paddle
|
25a92be3e123ed21fd98c7be6bd7e3a6320756a3
|
[
"Apache-2.0"
] | null | null | null |
import unittest
import numpy as np
from op_test import OpTest
def maxout_forward_naive(input, groups):
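# group the channel dimension into (channels // groups, groups) and take the max over each group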
s0, s1, s2, s3 = input.shape
return np.ndarray([s0, s1 // groups, groups, s2, s3], \
buffer = input, dtype=input.dtype).max(axis=(2))
class TestMaxOutOp(OpTest):
def setUp(self):
self.op_type = "maxout"
self.init_test_case()
input = np.random.random(self.shape).astype("float32")
output = self.MaxOut_forward_naive(input, self.groups).astype("float32")
self.inputs = {'X': input}
self.attrs = {'groups': self.groups}
self.outputs = {'Out': output.astype('float32')}
def test_check_output(self):
self.check_output()
def test_check_grad(self):
self.check_grad(['X'], 'Out')
def init_test_case(self):
self.MaxOut_forward_naive = maxout_forward_naive
self.shape = [100, 6, 2, 2]
self.groups=2
if __name__ == '__main__':
unittest.main()
| 24.475 | 80 | 0.630235 | 3.15625 |
ff3e7479d936c4d7528161ea01f28f7b8c279b1f
| 3,322 |
py
|
Python
|
test/distributed/fsdp/test_fsdp_param_exec_order_wrap.py
|
YifanShenSZ/pytorch
|
b4232f7cbe407909f9d95b91304c73fdc4c66a50
|
[
"Intel"
] | null | null | null |
test/distributed/fsdp/test_fsdp_param_exec_order_wrap.py
|
YifanShenSZ/pytorch
|
b4232f7cbe407909f9d95b91304c73fdc4c66a50
|
[
"Intel"
] | null | null | null |
test/distributed/fsdp/test_fsdp_param_exec_order_wrap.py
|
YifanShenSZ/pytorch
|
b4232f7cbe407909f9d95b91304c73fdc4c66a50
|
[
"Intel"
] | null | null | null |
# Owner(s): ["oncall: distributed"]
import torch
from torch.testing._internal.common_fsdp import FSDPTest
from torch.testing._internal.common_distributed import skip_if_lt_x_gpu
from torch.distributed.fsdp.wrap import ParamExecOrderWrapPolicy, always_wrap_policy
from torch.distributed.fsdp.fully_sharded_data_parallel import ShardingStrategy
from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
from torch.testing._internal.common_utils import (
instantiate_parametrized_tests,
parametrize,
run_tests,
)
class Model(torch.nn.Module):
def __init__(self) -> None:
super().__init__()
self.layer0 = torch.nn.Linear(6, 6)
self.layer1 = torch.nn.Linear(6, 6, bias=False)
self.layer2 = torch.nn.Sequential(
torch.nn.Linear(6, 3, bias=False),
torch.nn.ReLU(),
torch.nn.Linear(3, 6, bias=False),
)
self.relu = torch.nn.ReLU()
def forward(self, x):
# `layer0` -> `layer2` -> `layer1`
# the forward execution order is NOT consistent with the model definition order.
z = self.relu(self.layer0(x))
z = self.relu(self.layer2(z))
z = self.relu(self.layer1(z))
return z
def get_input(self, device: torch.device):
return (torch.randn((8, 6)).to(device), )
def get_loss(self, input, output):
return (output - input[0]).sum()
@staticmethod
def wrap(sharding_strategy: ShardingStrategy, device: torch.device, init_policy=always_wrap_policy):
model = Model()
wrap_policy = ParamExecOrderWrapPolicy(init_policy=init_policy)
fsdp_model = FSDP(model, auto_wrap_policy=wrap_policy, sharding_strategy=sharding_strategy)
return fsdp_model.to(device)
class TestFSDPExecOrder(FSDPTest):
@property
def device(self):
return torch.device("cuda")
@skip_if_lt_x_gpu(2)
@parametrize(
"sharding_strategy",
[ShardingStrategy.FULL_SHARD, ShardingStrategy.SHARD_GRAD_OP],
)
@parametrize("iters", [1, 3])
def test_fsdp_flatten_params_exec_order(self, sharding_strategy: ShardingStrategy, iters: int):
"""Tests the basic APIs of FSDP with ParamExecOrderWrapPolicy"""
fsdp_model = Model.wrap(sharding_strategy, self.device)
self.assertTrue(fsdp_model._is_param_exec_order_prep_stage())
for _ in range(iters):
input = fsdp_model.module.get_input(self.device)
output = fsdp_model(*input)
loss = fsdp_model.module.get_loss(input, output).to(self.device)
loss.backward()
params_list = list(fsdp_model.parameters())
# Since the forward execution order is NOT consistent with the model definition order,
# the ordering in flatten_named_params_exec_order should be different from named_parameters
self.assertEqual(
fsdp_model._fsdp_params_exec_order,
[
params_list[0],
params_list[2],
params_list[3],
params_list[1]
]
)
self.assertTrue(fsdp_model._use_param_exec_order_policy())
self.assertTrue(not fsdp_model._is_param_exec_order_prep_stage())
instantiate_parametrized_tests(TestFSDPExecOrder)
if __name__ == "__main__":
run_tests()
| 36.911111 | 104 | 0.675196 | 3.28125 |
c9a454f53a892538c50bf713a7dd698d086eab90
| 1,482 |
ts
|
TypeScript
|
packages/object/src/set-props.test.ts
|
devdigital/utilz
|
6936f55a4d3869184e84e62c9a746f39cbcea172
|
[
"MIT"
] | null | null | null |
packages/object/src/set-props.test.ts
|
devdigital/utilz
|
6936f55a4d3869184e84e62c9a746f39cbcea172
|
[
"MIT"
] | 10 |
2020-09-14T14:45:19.000Z
|
2021-08-04T07:21:23.000Z
|
packages/object/src/set-props.test.ts
|
devdigital/utilz
|
6936f55a4d3869184e84e62c9a746f39cbcea172
|
[
"MIT"
] | null | null | null |
import { setProps } from './set-props'
describe('setProps', () => {
it('throws exception given undefined object', () => {
expect(setProps()).toThrow('No object specified.')
})
it('returns empty object given empty object', () => {
expect(setProps()({})).toEqual({})
})
it('returns props set to undefined when value is undefined', () => {
expect(setProps()({ foo: 'bar' })).toEqual({ foo: undefined })
})
it('returns props set to null when value is null', () => {
expect(setProps(null)({ foo: 'bar' })).toEqual({ foo: null })
})
it('returns props set to value', () => {
expect(setProps(0)({ foo: 'bar', baz: 'foo' })).toEqual({ foo: 0, baz: 0 })
})
it('returns nested props set to value', () => {
expect(setProps(0)({ foo: 'bar', baz: { foo: { bar: 'foo' } } })).toEqual({
foo: 0,
baz: { foo: { bar: 0 } },
})
})
it('does not modify the object', () => {
const value = {
foo: 'bar',
}
setProps(0)(value)
expect(value).toEqual({ foo: 'bar' })
})
it('should pass key to provided function', () => {
const value = {
foo: 'bar',
}
expect(setProps(({ key }: { key: any }) => key)(value)).toEqual({
foo: 'foo',
})
})
it('should pass value to provided function', () => {
const value = {
foo: 'bar',
}
expect(
setProps(({ value }: { value: any }) => `updated-${value}`)(value)
).toEqual({
foo: 'updated-bar',
})
})
})
| 23.903226 | 79 | 0.521592 | 3.140625 |
43aa6ad66ac9a1c19676c72a29d9838781420356
| 4,516 |
ts
|
TypeScript
|
src/engine/dataset_fakes_test.ts
|
rouismia/tfjs-layers
|
c8f12d3ae460685a66b9a7df960194a0bd77bf3b
|
[
"Apache-2.0"
] | null | null | null |
src/engine/dataset_fakes_test.ts
|
rouismia/tfjs-layers
|
c8f12d3ae460685a66b9a7df960194a0bd77bf3b
|
[
"Apache-2.0"
] | null | null | null |
src/engine/dataset_fakes_test.ts
|
rouismia/tfjs-layers
|
c8f12d3ae460685a66b9a7df960194a0bd77bf3b
|
[
"Apache-2.0"
] | null | null | null |
/**
* @license
* Copyright 2018 Google LLC
*
* Use of this source code is governed by an MIT-style
* license that can be found in the LICENSE file or at
* https://opensource.org/licenses/MIT.
* =============================================================================
*/
import {Tensor} from '@tensorflow/tfjs-core';
import {describeMathCPUAndGPU} from '../utils/test_utils';
import {FakeNumericDataset} from './dataset_fakes';
import {TensorMap} from './dataset_stub';
describeMathCPUAndGPU('FakeNumericDataset', () => {
it('1D features, 1D targets', async () => {
const dataset = new FakeNumericDataset(
{xShape: [3], yShape: [1], batchSize: 8, numBatches: 5});
for (let k = 0; k < 2; ++k) {
// Run twice to make sure that calling iterator() multiple times works.
const iterator = await dataset.iterator();
for (let i = 0; i < 5; ++i) {
const result = await iterator.next();
expect(result.value.length).toEqual(2);
expect((result.value[0] as Tensor).shape).toEqual([8, 3]);
expect((result.value[1] as Tensor).shape).toEqual([8, 1]);
expect(result.done).toEqual(false);
}
for (let i = 0; i < 3; ++i) {
const result = await iterator.next();
expect(result.value).toBeNull();
expect(result.done).toEqual(true);
}
}
});
it('2D features, 1D targets', async () => {
const dataset = new FakeNumericDataset(
{xShape: [3, 4], yShape: [2], batchSize: 8, numBatches: 5});
for (let k = 0; k < 2; ++k) {
// Run twice to make sure that calling iterator() multiple times works.
const iterator = await dataset.iterator();
for (let i = 0; i < 5; ++i) {
const result = await iterator.next();
expect(result.value.length).toEqual(2);
expect((result.value[0] as Tensor).shape).toEqual([8, 3, 4]);
expect((result.value[1] as Tensor).shape).toEqual([8, 2]);
expect(result.done).toEqual(false);
}
for (let i = 0; i < 3; ++i) {
const result = await iterator.next();
expect(result.value).toBeNull();
expect(result.done).toEqual(true);
}
}
});
it('Multiple 2D features, 1D targets', async () => {
const dataset = new FakeNumericDataset({
xShape: {'input1': [3, 4], 'input2': [2, 3]},
yShape: [2],
batchSize: 8,
numBatches: 5
});
for (let k = 0; k < 2; ++k) {
// Run twice to make sure that calling iterator() multiple times works.
const iterator = await dataset.iterator();
for (let i = 0; i < 5; ++i) {
const result = await iterator.next();
expect(result.value.length).toEqual(2);
const xs = result.value[0] as TensorMap;
expect(xs['input1'].shape).toEqual([8, 3, 4]);
expect(xs['input2'].shape).toEqual([8, 2, 3]);
expect((result.value[1] as Tensor).shape).toEqual([8, 2]);
expect(result.done).toEqual(false);
}
for (let i = 0; i < 3; ++i) {
const result = await iterator.next();
expect(result.value).toBeNull();
expect(result.done).toEqual(true);
}
}
});
it('Invalid batchSize leads to Error', () => {
expect(
() => new FakeNumericDataset(
{xShape: [3], yShape: [1], batchSize: -8, numBatches: 5}))
.toThrow();
expect(
() => new FakeNumericDataset(
{xShape: [3], yShape: [1], batchSize: 8.5, numBatches: 5}))
.toThrow();
expect(
() => new FakeNumericDataset(
{xShape: [3], yShape: [1], batchSize: 0, numBatches: 5}))
.toThrow();
expect(
() => new FakeNumericDataset(
// tslint:disable-next-line:no-any
{xShape: [3], yShape: [1], batchSize: 'foo' as any, numBatches: 5}))
.toThrow();
});
it('Invalid numBatches leads to Error', () => {
expect(
() => new FakeNumericDataset(
{xShape: [3], yShape: [1], batchSize: 8, numBatches: -5}))
.toThrow();
expect(
() => new FakeNumericDataset(
{xShape: [3], yShape: [1], batchSize: 8, numBatches: 5.5}))
.toThrow();
expect(
() => new FakeNumericDataset(
{xShape: [3], yShape: [1], batchSize: 8, numBatches: 0}))
.toThrow();
expect(
() => new FakeNumericDataset(
// tslint:disable-next-line:no-any
{xShape: [3], yShape: [1], batchSize: 8, numBatches: 'foo' as any}))
.toThrow();
});
});
| 35.559055 | 80 | 0.547166 | 3.125 |
d67baa0325bb20fe3127fe952bd69d28a8dcd3e5
| 1,485 |
cs
|
C#
|
test/CollectionJson.Client.Tests/CollectionJsonContentTest.cs
|
WebApiContrib/CollectionJson.Net
|
03321523e92dce0929902e4ba5ca241946a40afa
|
[
"Apache-2.0"
] | 36 |
2015-01-13T18:51:12.000Z
|
2021-09-08T10:13:26.000Z
|
test/CollectionJson.Client.Tests/CollectionJsonContentTest.cs
|
WebApiContrib/CollectionJson.Net
|
03321523e92dce0929902e4ba5ca241946a40afa
|
[
"Apache-2.0"
] | 9 |
2015-01-22T15:29:44.000Z
|
2017-04-04T16:36:00.000Z
|
test/CollectionJson.Client.Tests/CollectionJsonContentTest.cs
|
WebApiContrib/CollectionJson.Net
|
03321523e92dce0929902e4ba5ca241946a40afa
|
[
"Apache-2.0"
] | 10 |
2015-02-24T23:41:27.000Z
|
2017-07-05T20:23:36.000Z
|
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Should;
using Xunit;
namespace CollectionJson.Client.Tests
{
public class CollectionJsonContentTest
{
[Fact]
        public async Task WhenCreatingCollectionJsonContentObjectIsSerializedToCollectionJson()
{
var coll = new Collection();
var content = new CollectionJsonContent(coll);
var stream = new MemoryStream();
await content.CopyToAsync(stream);
var reader = new StreamReader(stream);
stream.Position = 0;
var json = reader.ReadToEnd();
json.ShouldContain("\"collection\"");
}
[Fact]
        public async Task WhenCreatingCollectionJsonContentWithErrorObjectIsSerializedWithError()
{
var coll = new Collection()
{
Error = new Error()
{
Code = "1234",
Message = "Hello world",
Title = "An error occurred"
}
};
var content = new CollectionJsonContent(coll);
var stream = new MemoryStream();
await content.CopyToAsync(stream);
var reader = new StreamReader(stream);
stream.Position = 0;
var json = reader.ReadToEnd();
json.ShouldContain("\"error\"");
}
}
}
| 30.306122 | 97 | 0.562963 | 3 |
daea0a2da08dea345382a6a4577d6c7ca409c105
| 1,328 |
tsx
|
TypeScript
|
packages/preview/src/components/pages/search/search-iconset.tsx
|
coreyward/meronex-icons
|
8412306ee3ef85ad8da3a0fe55181b3a2b7b307b
|
[
"MIT"
] | 41 |
2020-08-05T22:26:10.000Z
|
2022-03-03T04:11:02.000Z
|
packages/preview/src/components/pages/search/search-iconset.tsx
|
coreyward/meronex-icons
|
8412306ee3ef85ad8da3a0fe55181b3a2b7b307b
|
[
"MIT"
] | 6 |
2020-07-31T17:23:42.000Z
|
2021-11-25T12:19:39.000Z
|
packages/preview/src/components/pages/search/search-iconset.tsx
|
coreyward/meronex-icons
|
8412306ee3ef85ad8da3a0fe55181b3a2b7b307b
|
[
"MIT"
] | 4 |
2020-07-31T07:37:59.000Z
|
2021-01-21T22:00:11.000Z
|
import Icon from "@components/@core/icon";
import loadable from "@loadable/component";
import React from "react";
import { getIcons } from "@utils/getIcons";
import SearchPageIconLoading from "./loading";
declare global {
interface Window {
iconSetCache: any;
}
}
export default function SearchIconSet({ icon, query, setResults }) {
let iconSetCache = window.iconSetCache;
if (!iconSetCache) {
window.iconSetCache = new Map();
iconSetCache = window.iconSetCache;
}
let IconSet = iconSetCache.get(icon.id);
if (!IconSet) {
IconSet = loadable.lib(() => getIcons(icon.id));
iconSetCache.set(icon.id, IconSet);
}
return (
<IconSet fallback={<SearchPageIconLoading />}>
{({ default: icons }) => {
const found = Object.keys(icons).filter((name) =>
name.toLowerCase().includes(query)
);
return (
<>
{found.map((name) => (
<Icon key={name} icon={icons[name]} name={name} />
))}
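            {/* Side effect while rendering: record this icon set's match count in the
                parent's results state (left unchanged once the key exists); setResults
                returns undefined, so nothing is rendered by this expression. */}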
{setResults((prevResults) => {
return prevResults.hasOwnProperty(icon.id)
? prevResults
: {
...prevResults,
[icon.id]: found.length,
};
})}
</>
);
}}
</IconSet>
);
}
| 26.039216 | 68 | 0.545181 | 3.078125 |
4fa957f635ab169788e53dfff1024e1f7a785c3c
| 1,649 |
rb
|
Ruby
|
simple_es.rb
|
zephraph/peer-learning-event-sourcing
|
6f40fc384b627b66903fc0a18c41ff3e323b01a2
|
[
"MIT"
] | 1 |
2021-04-07T15:22:01.000Z
|
2021-04-07T15:22:01.000Z
|
simple_es.rb
|
zephraph/peer-learning-event-sourcing
|
6f40fc384b627b66903fc0a18c41ff3e323b01a2
|
[
"MIT"
] | 5 |
2021-04-16T13:43:26.000Z
|
2021-04-16T16:01:09.000Z
|
simple_es.rb
|
zephraph/peer-learning-event-sourcing
|
6f40fc384b627b66903fc0a18c41ff3e323b01a2
|
[
"MIT"
] | 5 |
2021-04-06T17:24:35.000Z
|
2021-04-16T17:03:13.000Z
|
# rubocop:disable
class AccountAggregate
attr_reader :user, :amount
def initialize(user:, amount:)
raise ArgumentError, 'user must be a string' unless user.instance_of?(String)
    raise ArgumentError, 'amount must be a number' unless amount.is_a?(Numeric)
@user = user
@amount = amount.to_f
end
def to_s
"Account: `#{user}` | Balance: #{amount}"
end
def copy(**args)
new_args = { user: user, amount: amount }.merge(args)
    self.class.new(**new_args)
end
end
class BaseEvent
def apply
raise 'not implemented'
end
end
class CreateAccount < BaseEvent
attr_reader :user
STARTING_AMOUNT = 0
def initialize(user)
@user = user
end
def apply(_aggregate = nil)
puts "opening a new account for #{user}"
AccountAggregate.new(user: user, amount: STARTING_AMOUNT)
end
end
class AddToAccount < BaseEvent
attr_reader :amount
def initialize(amount)
@amount = amount
end
def apply(aggregate)
puts "adding #{amount} to #{aggregate.user}'s account"
aggregate.copy(amount: aggregate.amount + amount)
end
end
class SubtractFromAccount < BaseEvent
attr_reader :amount
def initialize(amount)
@amount = amount
end
def apply(aggregate)
puts "subtracting #{amount} from #{aggregate.user}'s account"
aggregate.copy(amount: aggregate.amount - amount)
end
end
# require('./simple_es.rb')
# => true
# => #<CreateAccount:0x00007f8bb11675d0 @user="erik">
e1 = CreateAccount.new('erik')
e2 = AddToAccount.new(50)
e3 = SubtractFromAccount.new(42)
account = [e1, e2, e3].inject(nil) do |aggregate, event|
event.apply(aggregate)
end
puts account
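# Expected output from replaying the events above:
#   opening a new account for erik
#   adding 50 to erik's account
#   subtracting 42 from erik's account
#   Account: `erik` | Balance: 8.0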
| 20.6125 | 89 | 0.699212 | 3.28125 |
6dab9326812d9bdad372f2f07f2c38bf5b88db76
| 4,041 |
ts
|
TypeScript
|
client/app/signup/signup.component.ts
|
pontsoleil/wuwei
|
54c4997a95fbd3ae8e97a6a73534288c65b34611
|
[
"MIT"
] | null | null | null |
client/app/signup/signup.component.ts
|
pontsoleil/wuwei
|
54c4997a95fbd3ae8e97a6a73534288c65b34611
|
[
"MIT"
] | 4 |
2015-03-12T23:48:09.000Z
|
2020-09-01T22:23:27.000Z
|
client/app/signup/signup.component.ts
|
pontsoleil/WuWei
|
54c4997a95fbd3ae8e97a6a73534288c65b34611
|
[
"MIT"
] | null | null | null |
import { Component, OnInit } from '@angular/core';
import { FormBuilder, FormGroup, Validators, FormControl } from '@angular/forms';
import { Router } from '@angular/router';
import { CognitoUserService } from '../services';
import { ToastService } from '../mdb-type/pro/alerts';
@Component({
selector: 'app-signup',
templateUrl: './signup.component.html',
styleUrls: ['./signup.component.scss']
})
export class SignupComponent implements OnInit {
public signupForm: FormGroup;
public successfullySignup: boolean;
public name = new FormControl('', [
Validators.required,
]);
public nickname = new FormControl('', [
Validators.required,
]);
public email = new FormControl('', [
Validators.required,
    Validators.pattern('^[a-zA-Z0-9.!#$%&\'*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\\.[a-zA-Z0-9-]+)*$')
]);
public password = new FormControl('', [
Validators.required,
// Validators.minLength(8), // minLength doesn't work
    Validators.pattern('^(?=.*[a-z])(?=.*[A-Z])(?!.*\\s).{8,}$') // MUST contain both upper and lower case letters and no whitespace
]);
// see https://www.concretepage.com/angular-2/angular-2-4-pattern-validation-example#formControl
// and https://regex101.com/r/uE5lT4/4 for testing
get name_() { return this.signupForm.get('name'); }
get nickname_() { return this.signupForm.get('nickname'); }
get email_() { return this.signupForm.get('email'); }
get password_() { return this.signupForm.get('password'); }
public confirmationForm: FormGroup;
public name2 = new FormControl('', [
Validators.required,
]);
public confirmationCode = new FormControl('', [
Validators.required,
]);
get name2_() { return this.signupForm.get('name2'); }
get confirmationCode_() { return this.signupForm.get('confirmationCode'); }
constructor(
private fb: FormBuilder,
private router: Router,
private toast: ToastService,
private auth: CognitoUserService
) { }
ngOnInit() {
this.initForm();
const tab1 = document.querySelector('.nav-item a[href="#panel1"]');
const tab2 = document.querySelector('.nav-item a[href="#panel2"]');
const panel1 = document.getElementById('panel1');
const panel2 = document.getElementById('panel2');
tab1.addEventListener('click', (evt) => {
evt.preventDefault();
panel1.classList.add('active');
panel2.classList.remove('active');
}, false);
tab2.addEventListener('click', (evt) => {
evt.preventDefault();
panel1.classList.remove('active');
panel2.classList.add('active');
}, false);
}
initForm() {
this.signupForm = this.fb.group({
'name': this.name,
'nickname': this.nickname,
'email': this.email,
'password': this.password
});
this.confirmationForm = this.fb.group({
'name2': this.name2,
'confirmationCode': this.confirmationCode
});
}
onSubmitSignup(value: any) {
const
name = value.name,
nickname = value.nickname,
email = value.email,
password = value.password;
this.auth
.signUp(name, nickname, email, password)
.then((result) => {
console.log(result);
this.toastMessage('Register finished.', 'success');
this.successfullySignup = true;
})
.catch((err) => {
this.toastMessage(err.message, 'error');
console.log(err);
});
}
onSubmitConfirmation(value: any) {
const
name2 = value.name2,
confirmationCode = value.confirmationCode;
console.log(name2);
this.auth
.confirmation(name2, confirmationCode)
.then((result) => {
return console.log(result) || this.router.navigate(['/signin']);
})
.catch((err) => {
console.log(err);
});
}
close() {
return this.router.navigate(['/']);
}
toastMessage(message: string, action: string) {
const
options = {
closeButton: true,
positionClass: 'toast-bottom-center'
};
this.toast[action](message, action.toUpperCase(), options);
}
}
| 30.383459 | 112 | 0.62831 | 3.046875 |
39b054c57e5e13c4f2a2570f32079c3f9d312e67
| 739 |
ps1
|
PowerShell
|
data/test/powershell/39b054c57e5e13c4f2a2570f32079c3f9d312e67Invoke-ZapApiRequest.ps1
|
aliostad/deep-learning-lang-detection
|
d6b031f3ebc690cf2ffd0ae1b08ffa8fb3b38a62
|
[
"MIT"
] | 84 |
2017-10-25T15:49:21.000Z
|
2021-11-28T21:25:54.000Z
|
data/test/powershell/39b054c57e5e13c4f2a2570f32079c3f9d312e67Invoke-ZapApiRequest.ps1
|
vassalos/deep-learning-lang-detection
|
cbb00b3e81bed3a64553f9c6aa6138b2511e544e
|
[
"MIT"
] | 5 |
2018-03-29T11:50:46.000Z
|
2021-04-26T13:33:18.000Z
|
data/test/powershell/39b054c57e5e13c4f2a2570f32079c3f9d312e67Invoke-ZapApiRequest.ps1
|
vassalos/deep-learning-lang-detection
|
cbb00b3e81bed3a64553f9c6aa6138b2511e544e
|
[
"MIT"
] | 24 |
2017-11-22T08:31:00.000Z
|
2022-03-27T01:22:31.000Z
|
<#
.Synopsis
    This script invokes the ZAP API and is called by various other scripts that need to access the ZAP API
.Example
.\Invoke-ZapApiRequest [URL to attack] [Proxy from ZAP]
.Notes
Author: Mrityunjaya Pathak
Date : March 2015
#>
param(
#Url on which ZAP operation will be performed(only http url)
$URL =$(throw "Missing URL value"),
#ZAP Proxy URI
$proxy="http://localhost:8080",
#stop ZAP if any error occurs
[switch]$KillZapOnError
)
try{
Invoke-WebRequest -uri $url -Proxy $proxy
# write-host $s
}
catch{
Write-Warning "$(get-date) $PSCommandPath : Invoke-Webrequest failed. $_"
if($KillZapOnError.IsPresent){
Stop-Zap -killonly
}
}
| 26.392857 | 111 | 0.648173 | 3.109375 |
7cd24044fb4b6191c30724087c87ddb621625914
| 13,007 |
ps1
|
PowerShell
|
function_Get-VtGroup.ps1
|
kmsigma/vtPowerShell
|
b5329057ecb1b8654925a824beecbd2bc3e5fec5
|
[
"MIT"
] | 1 |
2021-11-12T14:51:00.000Z
|
2021-11-12T14:51:00.000Z
|
function_Get-VtGroup.ps1
|
kmsigma/vtPowerShell
|
b5329057ecb1b8654925a824beecbd2bc3e5fec5
|
[
"MIT"
] | null | null | null |
function_Get-VtGroup.ps1
|
kmsigma/vtPowerShell
|
b5329057ecb1b8654925a824beecbd2bc3e5fec5
|
[
"MIT"
] | null | null | null |
function Get-VtGroup {
<#
.Synopsis
Get groups from Verint / Telligent communities
.DESCRIPTION
    Retrieves one or more groups from a Verint/Telligent community, either by name
    (optionally requiring an exact match) or by group id, using a stored connection
    file, a connection profile object, or an explicit community URL plus an
    authentication header.
.EXAMPLE
    Get-VtGroup -GroupName 'Support' -ExactMatch
    Uses the default connection file to return the group named 'Support'.
.EXAMPLE
    Get-VtGroup -GroupId 42 -VtCommunity 'https://mycommunity.telligenthosted.net/' -VtAuthHeader $VtAuthHeader
    Returns the group with id 42 using an explicit community URL and authentication header.
.INPUTS
    None. This cmdlet does not accept pipeline input.
.OUTPUTS
    PSCustomObject with GroupId, Name, Key, Description, DateCreated, Url, GroupType,
    and ParentGroupId (or the raw API response when -ReturnDetails is specified).
.NOTES
    https://community.telligent.com/community/11/w/api-documentation/64702/list-group-rest-endpoint
#>
[CmdletBinding(DefaultParameterSetName = 'Group by Name with Connection File',
SupportsShouldProcess = $true,
PositionalBinding = $false,
HelpUri = 'https://community.telligent.com/community/11/w/api-documentation/64699/group-rest-endpoints',
ConfirmImpact = 'Medium')]
[Alias()]
[OutputType()]
Param
(
# Get group by name
[Parameter(Mandatory = $true, ParameterSetName = 'Group by Name with Authentication Header')]
[Parameter(Mandatory = $true, ParameterSetName = 'Group by Name with Connection Profile')]
[Parameter(Mandatory = $true, ParameterSetName = 'Group by Name with Connection File')]
[Alias("Name")]
[string[]]$GroupName,
# Group name exact match
[Parameter(Mandatory = $false, ParameterSetName = 'Group by Name with Authentication Header')]
[Parameter(Mandatory = $false, ParameterSetName = 'Group by Name with Connection Profile')]
[Parameter(Mandatory = $false, ParameterSetName = 'Group by Name with Connection File')]
[switch]$ExactMatch,
# Get group by id number
[Parameter(Mandatory = $true, ParameterSetName = 'Group by Id with Authentication Header')]
[Parameter(Mandatory = $true, ParameterSetName = 'Group by Id with Connection Profile')]
[Parameter(Mandatory = $true, ParameterSetName = 'Group by Id with Connection File')]
[Alias("Id")]
[int[]]$GroupId,
# Community Domain to use (include trailing slash) Example: [https://yourdomain.telligenthosted.net/]
[Parameter(Mandatory = $true, ParameterSetName = 'Group by Name with Authentication Header')]
[Parameter(Mandatory = $true, ParameterSetName = 'Group by Id with Authentication Header')]
[ValidateNotNull()]
[ValidateNotNullOrEmpty()]
[ValidatePattern('^(http:\/\/|https:\/\/)(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])\/$')]
[Alias("Community")]
[string]$VtCommunity,
# Authentication Header for the community
[Parameter(Mandatory = $true, ParameterSetName = 'Group by Name with Authentication Header')]
[Parameter(Mandatory = $true, ParameterSetName = 'Group by Id with Authentication Header')]
[ValidateNotNull()]
[ValidateNotNullOrEmpty()]
[System.Collections.Hashtable]$VtAuthHeader,
[Parameter(Mandatory = $true, ParameterSetName = 'Group by Name with Connection Profile')]
[Parameter(Mandatory = $true, ParameterSetName = 'Group by Id with Connection Profile')]
[ValidateNotNull()]
[ValidateNotNullOrEmpty()]
[System.Management.Automation.PSObject]$Connection,
# File holding credentials. By default is stores in your user profile \.vtPowerShell\DefaultCommunity.json
        [Parameter(ParameterSetName = 'Group by Name with Connection File')]
[Parameter(ParameterSetName = 'Group by Id with Connection File')]
[string]$ProfilePath = ( $env:USERPROFILE ? ( Join-Path -Path $env:USERPROFILE -ChildPath ".vtPowerShell\DefaultCommunity.json" ) : ( Join-Path -Path $env:HOME -ChildPath ".vtPowerShell/DefaultCommunity.json" ) ),
# Should we return all details?
[Parameter()]
[switch]$ReturnDetails,
# Number of entries to get per batch (default of 20)
[Parameter()]
[ValidateRange(1, 100)]
[int]$BatchSize = 20,
# Get all groups
[Parameter()]
[ValidateSet("Joinless", "PublicOpen", "PublicClosed", "PrivateUnlisted", "PrivateListed", "All")]
[Alias("Type")]
[string]$GroupType = "All",
# Should I recurse into child groups? Default is false
[Parameter(ParameterSetName = 'Group by Id with Authentication Header')]
[Parameter(ParameterSetName = 'Group by Id with Connection Profile')]
[Parameter(ParameterSetName = 'Group by Id with Connection File')]
[switch]$Recurse,
# Sort By
[Parameter(
Mandatory = $false,
ValueFromPipeline = $false,
ValueFromPipelineByPropertyName = $false,
ValueFromRemainingArguments = $false
)]
[ValidateSet('Name', 'SortOrder', 'LastUpdate', 'ContentIdsOrder')]
[string]$SortBy = 'Name',
# Sort Order
[Parameter(
Mandatory = $false,
ValueFromPipeline = $false,
ValueFromPipelineByPropertyName = $false,
ValueFromRemainingArguments = $false
)]
[ValidateSet('Ascending', 'Descending')]
[string]$SortOrder = 'Ascending'
)
BEGIN {
switch -wildcard ( $PSCmdlet.ParameterSetName ) {
'* Connection File' {
Write-Verbose -Message "Getting connection information from Connection File ($ProfilePath)"
$VtConnection = Get-Content -Path $ProfilePath | ConvertFrom-Json
$Community = $VtConnection.Community
# Check to see if the VtAuthHeader is empty
$AuthHeaders = @{ }
$VtConnection.Authentication.PSObject.Properties | ForEach-Object { $AuthHeaders[$_.Name] = $_.Value }
}
'* Connection Profile' {
Write-Verbose -Message "Getting connection information from Connection Profile"
$Community = $Connection.Community
$AuthHeaders = $Connection.Authentication
}
'* Authentication Header' {
Write-Verbose -Message "Getting connection information from Parameters"
$Community = $VtCommunity
$AuthHeaders = $VtAuthHeader
}
}
# Check the authentication header for any 'Rest-Method' and revert to a traditional "get"
$VtAuthHeader = $AuthHeaders | Update-VtAuthHeader -RestMethod Get -Verbose:$false -WhatIf:$false
$UriParameters = @{}
$UriParameters['PageSize'] = $BatchSize
$UriParameters['PageIndex'] = 0
$UriParameters['GroupTypes'] = $GroupType
if ( $ParentGroupId ) {
$UriParameters['ParentGroupId'] = $ParentGroupId
}
if ( $Recurse ) {
$UriParameters['IncludeAllSubGroups'] = 'true'
}
if ( $SortOrder ) {
$UriParameters['SortOrder'] = $SortOrder
}
if ( $SortBy ) {
$UriParameters['SortBy'] = $SortBy
}
$PropertiesToReturn = @(
@{ Name = "GroupId"; Expression = { $_.Id } },
@{ Name = "Name"; Expression = { [System.Web.HttpUtility]::HtmlDecode( $_.Name ) } },
"Key",
@{ Name = "Description"; Expression = { [System.Web.HttpUtility]::HtmlDecode( $_.Description ) } },
"DateCreated",
"Url",
"GroupType",
"ParentGroupId"
)
}
PROCESS {
switch -Wildcard ( $PSCmdlet.ParameterSetName ) {
'Group by Name *' {
ForEach ( $Name in $GroupName ) {
$Uri = 'api.ashx/v2/groups.json'
$UriParameters['GroupNameFilter'] = $Name
$GroupCount = 0
do {
Write-Verbose -Message "Making call with '$Uri'"
# Get the list of groups with matching name from the call
$GroupsResponse = Invoke-RestMethod -Uri ( $Community + $Uri + '?' + ( $UriParameters | ConvertTo-QueryString ) ) -Headers $AuthHeaders
if ( $GroupsResponse ) {
$GroupCount += $GroupsResponse.Groups.Count
# If we need an exact response on the name, then filter for only that exact group
if ( $ExactMatch ) {
$GroupsResponse.Groups = $GroupsResponse.Groups | Where-Object { [System.Web.HttpUtility]::HtmlDecode( $_.Name ) -eq $Name }
}
# Should we return everything?
if ( $ReturnDetails ) {
$GroupsResponse.Groups
}
else {
$GroupsResponse.Groups | Select-Object -Property $PropertiesToReturn
}
}
$UriParameters['PageIndex']++
Write-Verbose -Message "Incrementing Page Index :: $( $UriParameters['PageIndex'] )"
} while ( $GroupCount -lt $GroupsResponse.TotalCount )
}
}
'Group by Id *' {
ForEach ( $Id in $GroupId ) {
# Setup the URI - depends on if we are using a parent ID or not
if ( $ParentGroupId ) {
$Uri = "api.ashx/v2/groups/$ParentGroupId/groups/$Id.json"
}
else {
$Uri = "api.ashx/v2/groups/$Id.json"
}
# Because everything is encoded in the URI, we don't need to send any $UriParameters
Write-Verbose -Message "Making call with '$Uri'"
$GroupsResponse = Invoke-RestMethod -Uri ( $Community + $Uri ) -Headers $AuthHeaders -Verbose:$false
# Filter if we are using the parent group id
if ( $ParentGroupId ) {
$GroupsResponse.Group = $GroupsResponse.Group | Where-Object { $_.ParentGroupId -eq $ParentGroupId }
}
if ( $GroupsResponse.Group ) {
if ( $ReturnDetails ) {
$GroupsResponse.Group
}
else {
$GroupsResponse.Group | Select-Object -Property $PropertiesToReturn
}
}
else {
Write-Warning -Message "No matching groups found for ID: $Id"
}
}
}
'Default' {
# No ForEach loop needed here because we are pulling all groups
$Uri = 'api.ashx/v2/groups.json'
$GroupCount = 0
do {
$GroupsResponse = Invoke-RestMethod -Uri ( $Community + $Uri + '?' + ( $UriParameters | ConvertTo-QueryString ) ) -Headers $AuthHeaders -Verbose:$false
if ( $ResolveParentName ) {
# This calls itself to get the parent group name
$GroupsResponse.Groups | Add-Member -MemberType ScriptProperty -Name "ParentGroupName" -Value { Get-VtGroup -GroupId $this.ParentGroupId | Select-Object -Property @{ Name = "Name"; Expression = { [System.Web.HttpUtility]::HtmlDecode( $_.Name ) } } | Select-Object -ExpandProperty Name } -Force
}
if ( $GroupsResponse ) {
$GroupCount += $GroupsResponse.Groups.Count
if ( $ParentGroupId ) {
$GroupsResponse.Groups = $GroupsResponse.Groups | Where-Object { $_.ParentGroupId -eq $ParentGroupId }
}
if ( $ReturnDetails ) {
$GroupsResponse.Groups
}
else {
$GroupsResponse.Groups | Select-Object -Property $PropertiesToReturn
}
}
$UriParameters['PageIndex']++
Write-Verbose -Message "Incrementing Page Index :: $( $UriParameters['PageIndex'] )"
} while ( $GroupCount -lt $GroupsResponse.TotalCount )
}
}
}
END {
# Nothing to see here
}
}
| 45.320557 | 317 | 0.538018 | 3.234375 |
7cfe753ad2505203fd1195f5deb762869fba9b5e
| 4,354 |
lua
|
Lua
|
nvim/session.lua
|
LuaDist-testing/nvim-client
|
c0ff82c80d9bc3024b3ccaae7d96d89ee195a713
|
[
"Apache-2.0"
] | null | null | null |
nvim/session.lua
|
LuaDist-testing/nvim-client
|
c0ff82c80d9bc3024b3ccaae7d96d89ee195a713
|
[
"Apache-2.0"
] | null | null | null |
nvim/session.lua
|
LuaDist-testing/nvim-client
|
c0ff82c80d9bc3024b3ccaae7d96d89ee195a713
|
[
"Apache-2.0"
] | null | null | null |
require('coxpcall')
local uv = require('luv')
local MsgpackRpcStream = require('nvim.msgpack_rpc_stream')
local Session = {}
Session.__index = Session
local function resume(co, ...)
local status, result = coroutine.resume(co, ...)
if coroutine.status(co) == 'dead' then
if not status then
error(result)
end
return
end
assert(coroutine.status(co) == 'suspended')
result(co)
end
local function coroutine_exec(func, ...)
local args = {...}
local on_complete
if #args > 0 and type(args[#args]) == 'function' then
-- completion callback
on_complete = table.remove(args)
end
resume(coroutine.create(function()
local status, result = copcall(func, unpack(args))
if on_complete then
coroutine.yield(function()
-- run the completion callback on the main thread
on_complete(status, result)
end)
end
end))
end
function Session.new(stream)
return setmetatable({
_msgpack_rpc_stream = MsgpackRpcStream.new(stream),
_pending_messages = {},
_prepare = uv.new_prepare(),
_timer = uv.new_timer(),
_is_running = false
}, Session)
end
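-- Usage sketch (assumes the nvim-client transport helpers; module paths may differ per install):
--   local ChildProcessStream = require('nvim.child_process_stream')
--   local session = Session.new(ChildProcessStream.spawn({'nvim', '-u', 'NONE', '--embed'}))
--   local ok, result = session:request('nvim_eval', '1 + 1')  -- ok == true, result == 2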
function Session:next_message(timeout)
local function on_request(method, args, response)
table.insert(self._pending_messages, {'request', method, args, response})
uv.stop()
end
local function on_notification(method, args)
table.insert(self._pending_messages, {'notification', method, args})
uv.stop()
end
if self._is_running then
error('Event loop already running')
end
if #self._pending_messages > 0 then
return table.remove(self._pending_messages, 1)
end
self:_run(on_request, on_notification, timeout)
return table.remove(self._pending_messages, 1)
end
function Session:notify(method, ...)
self._msgpack_rpc_stream:write(method, {...})
end
function Session:request(method, ...)
local args = {...}
local err, result
if self._is_running then
err, result = self:_yielding_request(method, args)
else
err, result = self:_blocking_request(method, args)
end
if err then
return false, err
end
return true, result
end
function Session:run(request_cb, notification_cb, setup_cb, timeout)
local function on_request(method, args, response)
coroutine_exec(request_cb, method, args, function(status, result)
if status then
response:send(result)
else
response:send(result, true)
end
end)
end
local function on_notification(method, args)
coroutine_exec(notification_cb, method, args)
end
self._is_running = true
if setup_cb then
coroutine_exec(setup_cb)
end
while #self._pending_messages > 0 do
local msg = table.remove(self._pending_messages, 1)
if msg[1] == 'request' then
on_request(msg[2], msg[3], msg[4])
else
on_notification(msg[2], msg[3])
end
end
self:_run(on_request, on_notification, timeout)
self._is_running = false
end
function Session:stop()
uv.stop()
end
function Session:close(signal)
if not self._timer:is_closing() then self._timer:close() end
if not self._prepare:is_closing() then self._prepare:close() end
self._msgpack_rpc_stream:close(signal)
end
function Session:_yielding_request(method, args)
return coroutine.yield(function(co)
self._msgpack_rpc_stream:write(method, args, function(err, result)
resume(co, err, result)
end)
end)
end
function Session:_blocking_request(method, args)
local err, result
local function on_request(method, args, response)
table.insert(self._pending_messages, {'request', method, args, response})
end
local function on_notification(method, args)
table.insert(self._pending_messages, {'notification', method, args})
end
self._msgpack_rpc_stream:write(method, args, function(e, r)
err = e
result = r
uv.stop()
end)
self:_run(on_request, on_notification)
return err, result
end
function Session:_run(request_cb, notification_cb, timeout)
if type(timeout) == 'number' then
self._prepare:start(function()
self._timer:start(timeout, 0, function()
uv.stop()
end)
self._prepare:stop()
end)
end
self._msgpack_rpc_stream:read_start(request_cb, notification_cb, uv.stop)
uv.run()
self._prepare:stop()
self._timer:stop()
self._msgpack_rpc_stream:read_stop()
end
return Session
| 23.408602 | 77 | 0.702343 | 3.40625 |
cd19aa42ebe4810bd5dbd37bf3fcc3fba32fbbbd
| 2,447 |
rb
|
Ruby
|
factbook-importers/lib/factbook-importers/importer.rb
|
eUgEntOptIc44/factbook
|
5a3894a34ba5273d74282c9703536cab424a5d46
|
[
"CC0-1.0"
] | 94 |
2016-11-05T15:51:50.000Z
|
2022-02-04T14:48:58.000Z
|
factbook-importers/lib/factbook-importers/importer.rb
|
eUgEntOptIc44/factbook
|
5a3894a34ba5273d74282c9703536cab424a5d46
|
[
"CC0-1.0"
] | 5 |
2021-01-22T14:38:12.000Z
|
2022-03-25T16:50:23.000Z
|
factbook-importers/lib/factbook-importers/importer.rb
|
eUgEntOptIc44/factbook
|
5a3894a34ba5273d74282c9703536cab424a5d46
|
[
"CC0-1.0"
] | 15 |
2016-12-14T21:08:42.000Z
|
2022-03-31T23:04:51.000Z
|
# encoding: utf-8
module Factbook
class Importer
def import( page )
## note: assumes active connection
code = page.info.country_code
name = page.info.country_name
attribs = {
name: name,
area: sq_km( page.area ), # e.g. 83,871 sq km
area_land: sq_km( page.area_land ), # e.g. 82,445 sq km
area_water: sq_km( page.area_water ), # e.g. 1,426 sq km
population: num( page.population ), # e.g. 8,665,550 (July 2015 est.)
population_growth: percent( page.population_growth ), # e.g. 0.55% (2015 est.)
birth_rate: rate_per_thousand( page.birth_rate ), # e.g. 9.41 births/1,000 population (2015 est.)
death_rate: rate_per_thousand( page.death_rate ), # e.g. 9.42 deaths/1,000 population (2015 est.)
migration_rate: rate_per_thousand( page.migration_rate ), # e.g. 5.56 migrant(s)/1,000 population (2015 est.)
}
rec = Fact.find_by( code: code )
if rec.nil? ## create (new) record
rec = Fact.new
attribs[ :code ] = code
puts "create fact record #{code}/#{name}:"
      else ## update (existing) record
puts "update fact record #{code}/#{name}:"
end
puts " #{attribs.inspect}"
rec.update_attributes!( attribs )
end
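  ## Usage sketch (assumes the factbook gem's page reader and an active DB connection):
  ##   page = Factbook::Page.new( 'au' )
  ##   Factbook::Importer.new.import( page )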
def rate_per_thousand( text )
# e.g. 9.41 births/1,000 population (2015 est.)
# 9.42 deaths/1,000 population (2015 est.)
# 5.56 migrant(s)/1,000 population (2015 est.)
if text =~/([0-9\.]+) [a-z\(\)]+\/1,000/
$1.to_f
else
puts "*** warn: unknown rate <name>/1,000 format (no match): >#{text}<"
nil
end
end
def num( text )
# e.g. 8,665,550 (July 2015 est.)
if text =~/([0-9,\.]+)/
$1.gsub(',', '').to_i ## note: remove commas (,) if present
else
puts "*** warn: unknown number format (no match): >#{text}<"
nil ## return nil
end
end
def percent( text )
# e.g. 0.55% (2015 est.)
if text =~/([0-9\.]+)%/
$1.to_f
else
puts "*** warn: unknown percent format (no match): >#{text}<"
nil ## return nil
end
end
def sq_km( text )
# e.g. 83,871 sq km
## todo - check vatican - uses float e.g. 0.44 ?? add support?
if text =~/([0-9,\.]+) sq km/
$1.gsub(',', '').to_i ## note: remove commas (,) if present
else
puts "*** warn: unknown sq km format (no match): >#{text}<"
nil ## return nil
end
end
end # class Importer
end # module Factbook
| 26.311828 | 117 | 0.575398 | 3 |
80fcdc5e9bd67ad198e94fafad5d98aaa402af6d
| 8,544 |
lua
|
Lua
|
lua/mediaplayer/utils.lua
|
kevinhenschen/gm-mediaplayer
|
61636f52349b8ad08acec95f7e24bc69092d10b0
|
[
"MIT"
] | null | null | null |
lua/mediaplayer/utils.lua
|
kevinhenschen/gm-mediaplayer
|
61636f52349b8ad08acec95f7e24bc69092d10b0
|
[
"MIT"
] | null | null | null |
lua/mediaplayer/utils.lua
|
kevinhenschen/gm-mediaplayer
|
61636f52349b8ad08acec95f7e24bc69092d10b0
|
[
"MIT"
] | null | null | null |
if SERVER then AddCSLuaFile() end
local file = file
local math = math
local urllib = url
local ceil = math.ceil
local floor = math.floor
local Round = math.Round
local log = math.log
local pow = math.pow
local format = string.format
local tostring = tostring
local IsValid = IsValid
local utils = {}
---
-- Ceil the given number to the largest power of two.
--
function utils.CeilPower2(n)
return pow(2, ceil(log(n) / log(2)))
end
---
-- Method for easily grabbing a value from a table without checking that each
-- fragment exists.
--
-- @param tbl Table
-- @param key e.g. "json.key.fragments"
--
function utils.TableLookup( tbl, key )
local fragments = string.Split(key, '.')
local value = tbl
for _, fragment in ipairs(fragments) do
value = value[fragment]
if not value then
return nil
end
end
return value
end
---
-- Formats the number of seconds to a string.
-- e.g. 3612 => 1:00:12
--
function utils.FormatSeconds(sec)
sec = Round(sec)
local hours = floor(sec / 3600)
local minutes = floor((sec % 3600) / 60)
local seconds = sec % 60
if minutes < 10 then
minutes = "0" .. tostring(minutes)
end
if seconds < 10 then
seconds = "0" .. tostring(seconds)
end
if hours > 0 then
return format("%s:%s:%s", hours, minutes, seconds)
else
return format("%s:%s", minutes, seconds)
end
end
-- https://github.com/xfbs/PiL3/blob/master/18MathLibrary/shuffle.lua
function utils.Shuffle(list)
-- make and fill array of indices
local indices = {}
for i = 1, #list do
indices[#indices+1] = i
end
-- create shuffled list
local shuffled = {}
for i = 1, #list do
-- get a random index
local index = math.random(#indices)
-- get the value
local value = list[indices[index]]
-- remove it from the list so it won't be used again
table.remove(indices, index)
-- insert into shuffled array
shuffled[#shuffled+1] = value
end
return shuffled
end
function utils.Retry( func, success, error, maxAttempts )
maxAttempts = maxAttempts or 3
local attempts = 1
local function callback( value )
if value then
success( value )
elseif attempts < maxAttempts then
attempts = attempts + 1
func( callback )
else
error()
end
end
func( callback )
end
local function setTimeout( func, wait )
local timerID = tostring( func )
timer.Create( timerID, wait, 1, func )
timer.Start( timerID )
return timerID
end
local function clearTimeout( timerID )
if timer.Exists( timerID ) then
timer.Destroy( timerID )
end
end
-- based on underscore.js' _.throttle function
function utils.Throttle( func, wait, options )
wait = wait or 1
options = options or {}
local timeout, args, result
local previous
local function later()
previous = (options.leading == false) and 0 or RealTime()
timeout = nil
result = func( unpack(args) )
if not timeout then
args = nil
end
end
local function throttled(...)
local now = RealTime()
if not previous then
previous = now
end
local remaining = wait - (now - previous)
args = {...}
if remaining <= 0 or remaining > wait then
if timeout then
clearTimeout(timeout)
timeout = nil
end
previous = now
result = func( unpack(args) )
if not timeout then
args = nil
end
elseif not timeout and options.trailing ~= false then
timeout = setTimeout(later, remaining)
end
return result
end
return throttled
end
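-- Usage sketch (GMod): limit an expensive callback to at most once per second.
--   local throttled = MediaPlayerUtils.Throttle(function() print("tick") end, 1)
--   hook.Add("Think", "MP_ThrottleExample", throttled)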
if CLIENT then
local CeilPower2 = utils.CeilPower2
local SetDrawColor = surface.SetDrawColor
local SetMaterial = surface.SetMaterial
local DrawTexturedRect = surface.DrawTexturedRect
local DrawRect = surface.DrawRect
local color_white = color_white
function utils.DrawHTMLPanel( panel, w, h )
if not (IsValid( panel ) and w and h) then return end
panel:UpdateHTMLTexture()
local pw, ph = panel:GetSize()
-- Convert to scalar
w = w / pw
h = h / ph
-- Fix for non-power-of-two html panel size
pw = CeilPower2(pw)
ph = CeilPower2(ph)
SetDrawColor( color_white )
local mat = panel:GetHTMLMaterial()
if mat then
SetMaterial( mat )
DrawTexturedRect( 0, 0, w * pw, h * ph )
else
DrawRect( 0, 0, w * pw, h * ph )
end
end
function utils.ParseHHMMSS( time )
local tbl = {}
-- insert fragments in reverse
for fragment, _ in string.gmatch(time, ":?(%d+)") do
table.insert(tbl, 1, tonumber(fragment) or 0)
end
if #tbl == 0 then
return nil
end
local seconds = 0
for i = 1, #tbl do
seconds = seconds + tbl[i] * math.max(60 ^ (i-1), 1)
end
return seconds
end
---
-- Attempts to play uri from stream or local file and returns channel in
-- callback.
--
function utils.LoadStreamChannel(uri, options, callback)
local isLocalFile = false
-- Play uri from a local file if:
-- 1. Windows OS and path contains drive letter
-- 2. Linux or OS X and path starts with a single '/'
--
-- We can't check this using file.Exists since GMod only allows checking
-- within the GMod directory. However, sound.PlayFile will still load
-- a file from any directory.
if (system.IsWindows() and uri:find("^%w:/")) or (not system.IsWindows() and uri:find("^/[^/]")) then
isLocalFile = true
local success, decoded = pcall(urllib.unescape, uri)
if success then
uri = decoded
end
end
local playFunc = isLocalFile and sound.PlayFile or sound.PlayURL
playFunc(uri, options or "noplay", function(channel)
if IsValid(channel) then
local channel_ensured = utils.BassEnsurePlayback(channel)
assert(IsValid(channel_ensured))
callback(channel_ensured)
else
callback(nil)
end
end)
end
local DEBUG = false
local channels = utils.GetEnsuredPlayers and utils.GetEnsuredPlayers() or setmetatable({}, {
__mode = 'k'
})
function utils.GetEnsuredPlayers()
return channels
end
if DEBUG then
table.Empty(utils.GetEnsuredPlayers())
end
local WRAPPERS = {}
local MT = {
__tostring = function(self) return "EnsureWrapped: " .. tostring(self.__channel__) end
}
local IGModAudioChannel = FindMetaTable"IGModAudioChannel"
function MT:__index(k)
local f = WRAPPERS[k]
if f ~= nil then return f end
local function wrapper(me, ...)
return IGModAudioChannel[k](me.__channel__, ...)
end
WRAPPERS[k] = wrapper
return wrapper
end
function WRAPPERS:Play(...)
local channel = self.__channel__
channels[channel] = true
return IGModAudioChannel.Play(channel, ...)
end
function WRAPPERS:Stop(...)
local channel = self.__channel__
channels[channel] = nil
return IGModAudioChannel.Stop(channel, ...)
end
function WRAPPERS:Pause(...)
local channel = self.__channel__
channels[channel] = nil
return IGModAudioChannel.Pause(channel, ...)
end
--MediaPlayer.Cvars.EnsurePlayback
local EnsurePlayback = CreateClientConVar("mediaplayer_force_playback", '1', true, false)
local function process()
if not EnsurePlayback:GetBool() then return end
if not next(channels) then
			timer.Remove("MP_BassEnsurePlayback") -- stop polling once no channels remain; recreated on the next ensured channel
end
for channel, inited in pairs(channels) do
if channel:IsValid() then
local stopped = channel:GetState() == GMOD_CHANNEL_STOPPED
if DEBUG then
print(channel, "inited=", inited, "stopped=", stopped)
end
if not inited and not stopped then
channels[channel] = true
elseif inited and stopped then
channel:Play()
print("Forcing playback for", channel)
end
else
channels[channel] = nil
end
end
end
concommand.Add("stopsound_mediplayer_bass", function()
for channel, inited in pairs(channels) do
if channel:IsValid() then
channels[channel] = nil
channel:Stop()
print("Stopped", channel)
else
channels[channel] = nil
end
end
end)
function utils.BassEnsurePlayback(channel, remove)
if remove == true then
channels[channel] = nil
return
end
channels[channel] = false
if DEBUG or not timer.Exists"MP_BassEnsurePlayback" then
timer.Create("MP_BassEnsurePlayback", DEBUG and 3.123 or 0.351, 0, process)
end
local wrapped = setmetatable({
__channel__ = channel,
channel = channel,
__wrappers = WRAPPERS
}, MT)
if DEBUG then
print("Wrapping to ensure playback", channel, "->", wrapped)
end
return wrapped
end
if DEBUG then
sound.PlayURL("https://www.happyhardcore.com/livestreams/p/u9/", "noplay", function(a, ...)
local asd = MediaPlayerUtils.BassEnsurePlayback(a)
print(asd == nil, asd)
_G.asd = asd
end)
end
end
_G.MediaPlayerUtils = utils
| 20.839024 | 103 | 0.683052 | 3.578125 |
4461c66e66f845a8cf08dff5ed1aaa965d0c416a
| 1,466 |
py
|
Python
|
da_monitor.py
|
linjin1130/DA_monitor_websocket
|
d592559a95225d3d8f6d96055918d15162c26c9b
|
[
"MIT"
] | null | null | null |
da_monitor.py
|
linjin1130/DA_monitor_websocket
|
d592559a95225d3d8f6d96055918d15162c26c9b
|
[
"MIT"
] | null | null | null |
da_monitor.py
|
linjin1130/DA_monitor_websocket
|
d592559a95225d3d8f6d96055918d15162c26c9b
|
[
"MIT"
] | null | null | null |
import time
from socket import *
# from config_util import *
class DAbroaddata(object):
dabroad_host=''
dabroad_port=6789
bufsize = 1024
def __init__(self):
# self.confobj = Config()
# self.dabroad_host = self.confobj.dabroad_host
# self.dabroad_port = self.confobj.dabroad_port
self.bufsize = self.bufsize
    # Establish a connection with the DA board
    def connectDAboard(self):
        addr = (self.dabroad_host, self.dabroad_port)
        self.udpServer = socket(AF_INET, SOCK_DGRAM) # create a server-side UDP socket
        self.udpServer.bind(addr) # start listening
    # Get the data returned by the DA board
    def dabroad_datareq(self):
        data, addr = self.udpServer.recvfrom(self.bufsize) # receive data and the sender's address
return data,addr
def udp_uncon(self):
self.udpServer.close()
return
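# Module-level UDP receiver, shared by the echo loop below and by tailfLog()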
udp_rc = DAbroaddata()
udp_rc.connectDAboard()
# for i in range(10000):
while(1):
data, addr = udp_rc.dabroad_datareq()
print(addr, data)
# time.sleep(0.01)
#!/usr/bin/python
# encoding=utf-8
import time
import websocket
# Configure the remote server's IP, account, password, port, etc.; key-based trust is set up between the two machines, so no password is needed
# WebSocket server address
ws_server = "ws://127.0.0.1:8002/websocket/"
def tailfLog():
"""获取远程服务器实时日志,并发送到websocket服务端"""
    ws = websocket.create_connection(ws_server)  # create the WebSocket connection (module-level helper in websocket-client)
while True:
data, addr = udp_rc.dabroad_datareq()
if addr[0]:
            ws.send(addr[0]) # forward to the WebSocket server
print(time.time())
if __name__ == '__main__':
tailfLog()
| 23.269841 | 75 | 0.658254 | 3.25 |
82e808875da5d8cb965f413e3b1cbe7c08a3cfdb
| 2,649 |
rs
|
Rust
|
src/cargo-tizen/commands/config.rs
|
Palladinium/cargo-rutin-tizen
|
c51ee225104a44d797fdc8c60c9cd4aa97c5854b
|
[
"Apache-2.0"
] | 2 |
2021-04-07T01:07:15.000Z
|
2021-05-05T13:57:09.000Z
|
src/cargo-tizen/commands/config.rs
|
Palladinium/cargo-rutin-tizen
|
c51ee225104a44d797fdc8c60c9cd4aa97c5854b
|
[
"Apache-2.0"
] | 1 |
2021-05-05T14:52:44.000Z
|
2021-05-05T22:59:55.000Z
|
src/cargo-tizen/commands/config.rs
|
Palladinium/cargo-rutin-tizen
|
c51ee225104a44d797fdc8c60c9cd4aa97c5854b
|
[
"Apache-2.0"
] | 1 |
2021-11-22T11:58:27.000Z
|
2021-11-22T11:58:27.000Z
|
use crate::error::TizenError;
use crate::tizen_env::{ConfigFrom, TizenEnv};
use clap::ArgMatches;
use cli_table::{print_stdout, Cell, Table};
use colored::*;
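/// Entry point for the `cargo tizen config` subcommand: with an `env_key`
/// argument it prints a detail table for that single value; otherwise it lists
/// every configurable value plus the additional build environment variables.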
pub fn run(tizen_env: &TizenEnv, args: &ArgMatches) -> Result<i32, TizenError> {
match args.value_of("env_key") {
Some(str_value) => show_detail(tizen_env, String::from(str_value)),
None => list_configs(&tizen_env),
}
}
fn show_detail(tizen_env: &TizenEnv, env_key: String) -> Result<i32, TizenError> {
let config_value = tizen_env
.raw_config_values
.iter()
.find(|v| v.env_key == env_key);
match config_value {
Some(config_value) => {
let table = vec![
vec!["env key".cell(), config_value.env_key.clone().cell()],
vec!["value".cell(), config_value.value.clone().cell()],
vec!["from".cell(), from_to_s(&config_value.from).cell()],
vec![
"cargo key".cell(),
config_value
.cargo_key
.clone()
.unwrap_or_else(|| "".to_string())
.cell(),
],
vec![
"manifest key".cell(),
config_value
.manifest_key
.clone()
.unwrap_or_else(|| "".to_string())
.cell(),
],
]
.table();
assert!(print_stdout(table).is_ok());
Ok(0)
}
None => Err(TizenError {
message: format!("No config named {}", env_key),
}),
}
}
fn list_configs(tizen_env: &TizenEnv) -> Result<i32, TizenError> {
println!("{}", "Configurable values:".green().bold());
for raw_value in tizen_env.raw_config_values.iter() {
println!("{}={}", raw_value.env_key, raw_value.value);
}
println!(
"{} {} {}",
"Run".green(),
"cargo tizen config NAME_OF_CONFIG".yellow().bold(),
"to see more info".green()
);
println!("\n{}", "Other env variables:".green().bold());
for (key, value) in tizen_env.get_additional_build_env() {
println!("{}={}", &key, &value);
}
Ok(0)
}
fn from_to_s(config_from: &ConfigFrom) -> String {
match config_from {
ConfigFrom::Env => "env".to_string(),
ConfigFrom::Arg => "cli args".to_string(),
ConfigFrom::Cargo => "cargo file".to_string(),
ConfigFrom::Manifest => "manifest".to_string(),
ConfigFrom::Default => "default".to_string(),
}
}
| 30.448276 | 82 | 0.505851 | 3.03125 |
05c6d77e7667cad697221a4964d241fc05ccc199
| 10,713 |
py
|
Python
|
tests/core/test_view.py
|
STejas6/core
|
0576d50093b4520915636a216ebb26aa72aea9d5
|
[
"MIT"
] | 95 |
2018-02-22T23:54:00.000Z
|
2021-04-17T03:39:21.000Z
|
tests/core/test_view.py
|
STejas6/core
|
0576d50093b4520915636a216ebb26aa72aea9d5
|
[
"MIT"
] | 840 |
2018-01-27T04:26:20.000Z
|
2021-01-24T12:28:58.000Z
|
tests/core/test_view.py
|
STejas6/core
|
0576d50093b4520915636a216ebb26aa72aea9d5
|
[
"MIT"
] | 100 |
2018-02-23T00:19:55.000Z
|
2020-08-28T07:59:31.000Z
|
import glob
import time
from jinja2 import FileSystemLoader
from config import cache
from masonite.app import App
from masonite.drivers import CacheDiskDriver
from masonite.exceptions import RequiredContainerBindingNotFound, ViewException
from masonite.managers.CacheManager import CacheManager
from masonite.view import View
import unittest
class TestView(unittest.TestCase):
def setUp(self):
self.container = App()
view = View(self.container)
self.container.bind('View', view.render)
self.container.bind('ViewClass', view)
def test_view_extends_dictionary(self):
view = self.container.make('View')
self.assertEqual(view('test', {'test': 'test'}).rendered_template, 'test')
def test_view_exists(self):
view = self.container.make('ViewClass')
        self.assertTrue(view.exists('index'))
self.assertFalse(view.exists('not_available'))
def test_view_render_does_not_keep_previous_variables(self):
view = self.container.make('ViewClass')
view.render('test', {'var1': 'var1'})
view.render('test', {'var2': 'var2'})
self.assertNotIn('var1', view.dictionary)
self.assertIn('var2', view.dictionary)
def test_global_view_exists(self):
view = self.container.make('ViewClass')
self.assertTrue(view.exists('/resources/templates/index'))
self.assertFalse(view.exists('/resources/templates/not_available'))
def test_view_gets_global_template(self):
view = self.container.make('View')
self.assertEqual(view('/templates/test', {'test': 'test'}).rendered_template, 'test')
def test_view_extends_without_dictionary_parameters(self):
view = self.container.make('ViewClass')
view.share({'test': 'test'})
view = self.container.make('View')
self.assertEqual(view('test').rendered_template, 'test')
def test_render_from_container_as_view_class(self):
self.container.make('ViewClass').share({'test': 'test'})
view = self.container.make('View')
self.assertEqual(view('test').rendered_template, 'test')
def test_composers(self):
self.container.make('ViewClass').composer('test', {'test': 'test'})
view = self.container.make('View')
self.assertEqual(self.container.make('ViewClass').composers, {'test': {'test': 'test'}})
self.assertEqual(view('test').rendered_template, 'test')
def test_composers_load_all_views_with_astericks(self):
self.container.make('ViewClass').composer('*', {'test': 'test'})
self.assertEqual(self.container.make('ViewClass').composers, {'*': {'test': 'test'}})
view = self.container.make('View')
self.assertEqual(view('test').rendered_template, 'test')
def test_composers_with_wildcard_base_view(self):
self.container.make('ViewClass').composer('mail*', {'to': 'test_user'})
self.assertEqual(self.container.make('ViewClass').composers, {'mail*': {'to': 'test_user'}})
view = self.container.make('View')
self.assertIn('test_user', view('mail/welcome').rendered_template)
def test_composers_with_wildcard_base_view_route(self):
self.container.make('ViewClass').composer('mail*', {'to': 'test_user'})
self.assertEqual(self.container.make('ViewClass').composers, {'mail*': {'to': 'test_user'}})
view = self.container.make('View')
self.assertIn('test_user', view('mail/welcome').rendered_template)
def test_render_deep_in_file_structure_with_package_loader(self):
self.container.make('ViewClass').add_environment('storage')
view = self.container.make('View')
self.assertEqual(view('/templates/tests/test', {'test': 'testing'}).rendered_template, 'testing')
def test_composers_with_wildcard_lower_directory_view(self):
self.container.make('ViewClass').composer(
'mail/welcome*', {'to': 'test_user'})
self.assertEqual(self.container.make('ViewClass').composers, {'mail/welcome*': {'to': 'test_user'}})
view = self.container.make('View')
self.assertIn('test_user', view('mail/welcome').rendered_template)
def test_composers_with_wildcard_lower_directory_view_and_incorrect_shortend_wildcard(self):
self.container.make('ViewClass').composer(
'mail/wel*', {'to': 'test_user'})
self.assertEqual(self.container.make('ViewClass').composers, {'mail/wel*': {'to': 'test_user'}})
view = self.container.make('View')
        self.assertNotIn('test_user', view('mail/welcome').rendered_template)
def test_composers_load_all_views_with_list(self):
self.container.make('ViewClass').composer(
['home', 'test'], {'test': 'test'})
self.assertEqual(self.container.make('ViewClass').composers, {'home': {'test': 'test'}, 'test': {'test': 'test'}})
view = self.container.make('View')
self.assertEqual(view('test').rendered_template, 'test')
def test_view_share_updates_dictionary_not_overwrite(self):
viewclass = self.container.make('ViewClass')
viewclass.share({'test1': 'test1'})
viewclass.share({'test2': 'test2'})
self.assertEqual(viewclass._shared, {'test1': 'test1', 'test2': 'test2'})
viewclass.render('test', {'var1': 'var1'})
self.assertEqual(viewclass.dictionary, {'test1': 'test1', 'test2': 'test2', 'var1': 'var1'})
def test_adding_environment(self):
viewclass = self.container.make('ViewClass')
viewclass.add_environment('storage', loader=FileSystemLoader)
self.assertEqual(viewclass.render('test_location', {'test': 'testing'}).rendered_template, 'testing')
def test_view_throws_exception_without_cache_binding(self):
view = self.container.make('View')
with self.assertRaises(RequiredContainerBindingNotFound):
view('test_cache').cache_for('5', 'seconds')
def test_view_can_add_custom_filters(self):
view = self.container.make('ViewClass')
view.filter('slug', self._filter_slug)
self.assertEqual(view._filters, {'slug': self._filter_slug})
self.assertEqual(view.render('filter', {'test': 'test slug'}).rendered_template, 'test-slug')
@staticmethod
def _filter_slug(item):
return item.replace(' ', '-')
def test_view_cache_caches_files(self):
self.container.bind('CacheConfig', cache)
self.container.bind('CacheDiskDriver', CacheDiskDriver)
self.container.bind('CacheManager', CacheManager(self.container))
self.container.bind('Application', self.container)
self.container.bind('Cache', self.container.make(
'CacheManager').driver('disk'))
view = self.container.make('View')
self.assertEqual(view('test_cache', {'test': 'test'}).cache_for(1, 'second').rendered_template, 'test')
self.assertEqual(open(glob.glob('bootstrap/cache/test_cache:*')[0]).read(), 'test')
time.sleep(2)
self.assertEqual(view('test_cache', {'test': 'macho'}).cache_for(5, 'seconds').rendered_template, 'macho')
time.sleep(2)
self.assertEqual(open(glob.glob('bootstrap/cache/test_cache:*')[0]).read(), 'macho')
self.assertEqual(view('test_cache', {'test': 'macho'}).cache_for(1, 'second').rendered_template, 'macho')
time.sleep(1)
self.assertEqual(open(glob.glob('bootstrap/cache/test_cache:*')[0]).read(), 'macho')
self.assertEqual(view('test_cache', {'test': 'macho'}).cache_for('1', 'second').rendered_template, 'macho')
def test_cache_throws_exception_with_incorrect_cache_type(self):
self.container.bind('CacheConfig', cache)
self.container.bind('CacheDiskDriver', CacheDiskDriver)
self.container.bind('CacheManager', CacheManager(self.container))
self.container.bind('Application', self.container)
self.container.bind('Cache', self.container.make(
'CacheManager').driver('disk'))
view = self.container.make('View')
with self.assertRaises(ValueError):
view(
'test_exception', {'test': 'test'}
).cache_for(1, 'monthss')
def test_view_can_change_template_splice(self):
self.container.make('ViewClass').set_splice('.')
view = self.container.make('View')
self.container.make('ViewClass').composer(
'mail/welcome', {'test': 'test'})
self.container.make('ViewClass').share(
{'test': 'John'})
self.assertIn('John', view('mail.welcome', {'to': 'John'}).rendered_template)
self.assertEqual(view('mail.composers', {'test': 'John'}).rendered_template, 'John')
self.assertEqual(view('mail.share').rendered_template, 'John')
self.assertIn('John', view('mail/welcome', {'to': 'John'}).rendered_template)
self.container.make('ViewClass').set_splice('@')
self.assertIn('John', view('mail@welcome', {'to': 'John'}).rendered_template)
self.assertIn('John', view('mail@composers', {'test': 'John'}).rendered_template)
self.assertIn('John', view('mail/welcome', {'to': 'John'}).rendered_template)
def test_can_add_tests_to_view(self):
view = self.container.make('ViewClass')
view.test('admin', self._is_admin)
self.assertEqual(view._tests, {'admin': self._is_admin})
user = MockAdminUser
self.assertEqual(view.render('admin_test', {'user': user}).rendered_template, 'True')
user.admin = 0
self.assertEqual(view.render('admin_test', {'user': user}).rendered_template, 'False')
def _is_admin(self, obj):
return obj.admin == 1
def test_can_render_pubjs(self):
view = self.container.make('ViewClass')
view.add_extension('pypugjs.ext.jinja.PyPugJSExtension')
self.assertEqual(view._jinja_extensions, ['jinja2.ext.loopcontrols', 'pypugjs.ext.jinja.PyPugJSExtension'])
self.assertEqual(view.render('pug/hello.pug', {'name': 'Joe'}).rendered_template, '<p>hello Joe</p>')
def test_throws_exception_on_incorrect_type(self):
view = self.container.make('ViewClass')
with self.assertRaises(ViewException):
assert view.render('test', {'', ''})
def test_can_use_dot_templates(self):
view = self.container.make('ViewClass')
self.assertEqual(view.render('mail.share', {'test': 'test'}).rendered_template, 'test')
def test_can_use_at_line_statements(self):
view = self.container.make('ViewClass')
self.assertIn('test this string', view.render('line-statements', {'test': 'test this string'}).rendered_template)
class MockAdminUser:
admin = 1
| 39.09854 | 122 | 0.662279 | 3.40625 |
ff33fba7d42263a1ab60715f05be4517834edd1a
| 10,139 |
py
|
Python
|
src/vsc/model/solvegroup_swizzler_range.py
|
fvutils/pyvsc
|
a828f3c779e3af7dbe30e873010615b8ab0ef488
|
[
"Apache-2.0"
] | 54 |
2020-03-28T17:54:00.000Z
|
2022-03-27T08:53:13.000Z
|
src/vsc/model/solvegroup_swizzler_range.py
|
fvutils/pyvsc
|
a828f3c779e3af7dbe30e873010615b8ab0ef488
|
[
"Apache-2.0"
] | 124 |
2020-04-10T03:06:03.000Z
|
2022-03-24T18:35:46.000Z
|
src/vsc/model/solvegroup_swizzler_range.py
|
fvutils/pyvsc
|
a828f3c779e3af7dbe30e873010615b8ab0ef488
|
[
"Apache-2.0"
] | 17 |
2020-04-09T21:47:58.000Z
|
2022-02-23T19:37:37.000Z
|
'''
Created on Sep 18, 2021
@author: mballance
'''
import random
from vsc.model.bin_expr_type import BinExprType
from vsc.model.expr_bin_model import ExprBinModel
from vsc.model.expr_fieldref_model import ExprFieldRefModel
from vsc.model.expr_literal_model import ExprLiteralModel
from vsc.model.expr_model import ExprModel
from vsc.model.field_scalar_model import FieldScalarModel
from vsc.model.variable_bound_model import VariableBoundModel
class SolveGroupSwizzlerRange(object):
def __init__(self, solve_info):
self.debug = 0
self.solve_info = solve_info
pass
def swizzle(self,
btor,
rs,
bound_m):
if self.debug > 0:
print("--> swizzle_randvars")
swizzled_field = False
        # For each random variable, select a partition with its known
# domain and add the corresponding constraint
field_l = rs.rand_fields()
if self.debug > 0:
print(" " + str(len(field_l)) + " fields in randset")
if rs.rand_order_l is not None:
# Perform an ordered randomization
for ro_l in rs.rand_order_l:
swizzled_field |= self.swizzle_field_l(ro_l, rs, bound_m, btor)
else:
swizzled_field |= self.swizzle_field_l(rs.rand_fields(), rs, bound_m, btor)
# x += 1
if not swizzled_field:
if self.solve_info is not None:
self.solve_info.n_sat_calls += 1
btor.Sat()
if self.debug > 0:
print("<-- swizzle_randvars")
def swizzle_field_l(self, field_l, rs, bound_m, btor):
e = None
if len(field_l) > 0:
# Make a copy of the field list so we don't
# destroy the original
field_l = field_l.copy()
swizzle_node_l = []
swizzle_expr_l = []
max_swizzle = 4
# Select up to `max_swizzle` fields to swizzle
for i in range(max_swizzle):
if len(field_l) > 0:
field_idx = self.randint(0, len(field_l)-1)
f = field_l.pop(field_idx)
e = self.swizzle_field(f, rs, bound_m)
if e is not None:
swizzle_expr_l.append(e)
swizzle_node_l.append(e.build(btor))
else:
break
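            # Assume all collected swizzle constraints at once; on UNSAT, drop the
            # most recently added constraint and retry until a satisfiable subset
            # (possibly empty) remains, then assert whatever survived.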
while len(swizzle_node_l) > 0:
# Start by assuming all
for n in swizzle_node_l:
btor.Assume(n)
if self.solve_info is not None:
self.solve_info.n_sat_calls += 1
if btor.Sat() != btor.SAT:
e = swizzle_expr_l.pop()
n = swizzle_node_l.pop()
if self.debug > 0:
print("Randomization constraint failed. Removing last: %s" %
self.pretty_printer.print(e))
else:
# Randomization constraints succeeded. Go ahead and assert
for n in swizzle_node_l:
btor.Assert(n)
break
if self.solve_info is not None:
self.solve_info.n_sat_calls += 1
if btor.Sat() != btor.SAT:
raise Exception("failed to add in randomization (2)")
return True
else:
return False
def swizzle_field(self, f, rs, bound_m) -> ExprModel:
ret = None
if self.debug > 0:
print("Swizzling field %s" % f.name)
if f in rs.dist_field_m.keys():
if self.debug > 0:
print("Note: field %s is in dist map" % f.name)
for d in rs.dist_field_m[f]:
print(" Target interval %d" % d.target_range)
if len(rs.dist_field_m[f]) > 1:
target_d = self.randint(0, len(rs.dist_field_m[f])-1)
dist_scope_c = rs.dist_field_m[f][target_d]
else:
dist_scope_c = rs.dist_field_m[f][0]
target_w = dist_scope_c.dist_c.weights[dist_scope_c.target_range]
if target_w.rng_rhs is not None:
# Dual-bound range
val_l = target_w.rng_lhs.val()
val_r = target_w.rng_rhs.val()
val = self.randint(val_l, val_r)
if self.debug > 0:
print("Select dist-weight range: %d..%d ; specific value %d" % (
int(val_l), int(val_r), int(val)))
ret = ExprBinModel(
ExprFieldRefModel(f),
BinExprType.Eq,
ExprLiteralModel(val, f.is_signed, f.width))
else:
# Single value
val = target_w.rng_lhs.val()
ret = ExprBinModel(
ExprFieldRefModel(f),
BinExprType.Eq,
ExprLiteralModel(int(val), f.is_signed, f.width))
else:
if f in bound_m.keys():
f_bound = bound_m[f]
if not f_bound.isEmpty():
ret = self.create_rand_domain_constraint(f, f_bound)
return ret
def create_rand_domain_constraint(self,
f : FieldScalarModel,
bound_m : VariableBoundModel)->ExprModel:
e = None
range_l = bound_m.domain.range_l
range_idx = self.randint(0, len(range_l)-1)
range = range_l[range_idx]
domain = range[1]-range[0]
if self.debug > 0:
print("create_rand_domain_constraint: " + f.name + " range_idx=" + str(range_idx) + " range=" + str(range))
if domain > 64:
r_type = self.randint(0, 3)
            r_type = 3 # Note: hard-coded to selecting a single value for now
single_val = self.randint(range[0], range[1])
if r_type >= 0 and r_type <= 2: # range
# Pretty simple. Partition and randomize
# bin_sz_h = 1 if int(domain/128) == 0 else int(domain/128)
bin_sz_h = 1 if int(domain/128) == 0 else int(domain/128)
if r_type == 0:
# Center value in bin
if single_val+bin_sz_h > range[1]:
max = range[1]
min = range[1]-2*bin_sz_h
elif single_val-bin_sz_h < range[0]:
max = range[0]+2*bin_sz_h
min = range[0]
else:
max = single_val+bin_sz_h
min = single_val-bin_sz_h
if self.debug > 0:
print("rand_domain range-type is bin center value: center=%d => %d..%d" % (single_val,min,max))
elif r_type == 1:
# Bin starts at value
if single_val+2*bin_sz_h > range[1]:
max = range[1]
min = range[1]-2*bin_sz_h
elif single_val-2*bin_sz_h < range[0]:
max = range[0]+2*bin_sz_h
min = range[0]
else:
max = single_val+2*bin_sz_h
min = single_val
if self.debug > 0:
print("rand_domain range-type is bin left-target value: left=%d %d..%d" % (single_val, min,max))
elif r_type == 2:
# Bin ends at value
if single_val+2*bin_sz_h > range[1]:
max = range[1]
min = range[1]-2*bin_sz_h
elif single_val-2*bin_sz_h < range[0]:
max = range[0]+2*bin_sz_h
min = range[0]
else:
max = single_val
min = single_val-2*bin_sz_h
if self.debug > 0:
print("rand_domain range-type is bin right-target value: left=%d %d..%d" % (single_val, min,max))
e = ExprBinModel(
ExprBinModel(
ExprFieldRefModel(f),
BinExprType.Ge,
ExprLiteralModel(
min,
f.is_signed,
f.width)
),
BinExprType.And,
ExprBinModel(
ExprFieldRefModel(f),
BinExprType.Le,
ExprLiteralModel(
max,
f.is_signed,
f.width)
)
)
elif r_type == 3: # Single value
if self.debug > 0:
print("rand_domain range-type is single value: %d" % single_val)
e = ExprBinModel(
ExprFieldRefModel(f),
BinExprType.Eq,
ExprLiteralModel(single_val, f.is_signed, f.width))
else:
val = self.randint(range[0], range[1])
if self.debug > 0:
print("rand_domain on small domain [%d..%d] => %d" % (range[0], range[1], val))
e = ExprBinModel(
ExprFieldRefModel(f),
BinExprType.Eq,
ExprLiteralModel(val, f.is_signed, f.width))
return e
def randint(self, low, high):
if low > high:
tmp = low
low = high
high = tmp
return random.randint(low,high)
| 39.146718 | 121 | 0.458921 | 3.0625 |
b071d52b58cd51d0b1382d454b90f109ba99f3dd
| 9,437 |
py
|
Python
|
tictactoe.py
|
karthikayan4u/Tic-Tac-Toe-with-AI
|
b377d213024f149f65518d150cf183a4a81a5f98
|
[
"MIT"
] | null | null | null |
tictactoe.py
|
karthikayan4u/Tic-Tac-Toe-with-AI
|
b377d213024f149f65518d150cf183a4a81a5f98
|
[
"MIT"
] | null | null | null |
tictactoe.py
|
karthikayan4u/Tic-Tac-Toe-with-AI
|
b377d213024f149f65518d150cf183a4a81a5f98
|
[
"MIT"
] | null | null | null |
import random
# Minimax: an AI algorithm for two-player games, used here for the "Hard" option.
def minimax(predictive_cells, playing_player, play_decide, action):
# Checking for win.
if check(playing_player, predictive_cells):
return 10
# Checking for lose i.e opposition's win.
elif check('O' if playing_player == 'X' else 'X', predictive_cells):
return -10
# Checking for draw.
elif [j for i in predictive_cells for j in i].count(' ') == 0:
return 0
choices = []
for i in range(3):
for j in range(3):
# Player's turn.
if predictive_cells[i][j] == ' ' and play_decide:
predictive_cells[i][j] = playing_player
play_decide = False
choices.append(minimax(predictive_cells, playing_player, play_decide,
"minimize" if action == 'maximize' else "maximize"))
play_decide = True
action = "minimize" if action == 'maximize' else "maximize"
predictive_cells[i][j] = ' '
# opponent's turn.
elif predictive_cells[i][j] == ' ' and not play_decide:
predictive_cells[i][j] = 'O' if playing_player == 'X' else 'X'
play_decide = True
choices.append(minimax(predictive_cells, playing_player, play_decide,
"minimize" if action == 'maximize' else "maximize"))
play_decide = False
action = "minimize" if action == 'maximize' else "maximize"
predictive_cells[i][j] = ' '
if action == 'maximize':
return min(choices)
else:
return max(choices)
# Medium option
def medium(symbol, formatted_cells):
cols = [[1 if j == symbol else j for j in i] for i in formatted_cells]
rows = [[1 if f == symbol else f for f in k] for k in
[[formatted_cells[j][lk] for j in range(3)] for lk in range(3)]]
leading_diagonal = [1 if formatted_cells[i][j] == symbol else formatted_cells[i][j] for i in range(3) for j in
range(3) if i == j]
leading_diagonal_dict = {0: (1, 1), 1: (2, 2), 2: (3, 3)}
opp_diagonal = [1 if formatted_cells[i][j] == symbol else formatted_cells[i][j] for i in range(3) for j in range(3)
if i == 2 - j]
opp_diagonal_dict = {0: (1, 3), 1: (2, 2), 2: (3, 1)}
broke = False
coordinates_i, coordinates_j = -1, -1
for i in range(3):
#Player's chance of winning through rows and columns.
if 1 in cols[i] and cols[i].count(1) == 2 and ' ' in cols[i]:
coordinates_i, coordinates_j = i + 1, cols[i].index(' ') + 1
broke = True
break
elif 1 in rows[i] and rows[i].count(1) == 2 and ' ' in rows[i]:
coordinates_i, coordinates_j = rows[i].index(' ') + 1, i + 1
broke = True
break
#Opponent's chance of winning through rows and columns.
elif 3 - cols[i].count(' ') == 2 and 1 not in cols[i]:
coordinates_i, coordinates_j = i + 1, cols[i].index(' ') + 1
broke = True
break
elif 3 - rows[i].count(' ') == 2 and 1 not in rows[i]:
coordinates_i, coordinates_j = rows[i].index(' ') + 1, i + 1
broke = True
break
if not broke:
#Player's chance of winning through leading and opposite diagonals.
if leading_diagonal.count(1) == 2 and leading_diagonal.count(' ') == 1:
coordinates_i, coordinates_j = leading_diagonal_dict[leading_diagonal.index(' ')]
elif 1 not in leading_diagonal and 3 - leading_diagonal.count(' ') == 2 and leading_diagonal.count(' ') == 1:
coordinates_i, coordinates_j = leading_diagonal_dict[leading_diagonal.index(' ')]
#Opponent's chance of winning through leading and opposite diagonals.
elif opp_diagonal.count(1) == 2 and opp_diagonal.count(' ') == 1:
coordinates_i, coordinates_j = opp_diagonal_dict[opp_diagonal.index(' ')]
elif 1 not in opp_diagonal and 3 - opp_diagonal.count(' ') == 2 and opp_diagonal.count(' ') == 1:
coordinates_i, coordinates_j = opp_diagonal_dict[opp_diagonal.index(' ')]
return coordinates_i, coordinates_j
def printing(formatted_cells):
print(*['-' for _ in range(9)], sep='')
for i in reversed(range(3)):
print("|", end=' ')
for j in range(3):
print(formatted_cells[j][i], end=' ')
print("|", end=' ')
print()
print(*['-' for _ in range(9)], sep='')
#Checking for win.
def check(arg, formatted_cells_check):
#Column-wise check.
if any(i.count(arg) == 3 for i in formatted_cells_check):
return True
#Row-wise check.
elif any(k.count(arg) == 3 for k in [[formatted_cells_check[j][lk] for j in range(3)] for lk in range(3)]):
return True
#Leading and Opposite diagonals check.
elif all(
True if formatted_cells_check[i][j] == arg else False for i in range(3) for j in range(3) if i == j) or all(
[True if formatted_cells_check[i][j] == arg else False for i in range(3) for j in range(3) if i == 2 - j]):
return True
else:
return False
def check_inputs(coordinates_i, coordinates_j, player, formatted_cells):
    if not (0 <= coordinates_i <= 2) or not (0 <= coordinates_j <= 2):
if player == 'user':
print("Coordinates should be from 1 to 3!")
return False
#Checking whether the given input position is empty.
elif formatted_cells[coordinates_i][coordinates_j] != ' ':
if player == 'user':
print("This cell is occupied! Choose another one!")
return False
return True
def menu(option):
formatted_cells = [[" " for _ in range(3)] for _ in range(3)]
player = option[1]
printing(formatted_cells)
if player == 'easy' or player == 'medium' or player == 'hard':
print('Making move level "{}"'.format(player))
player_option = True
while True:
if player == 'user':
try:
coordinates_i, coordinates_j = map(int, input("Enter the coordinates: >").split())
except ValueError:
print("You should enter numbers!")
continue
elif player == 'medium':
if option[1] == 'medium' and player_option:
symbol = 'X'
else:
symbol = 'O'
coordinates_i, coordinates_j = medium(symbol, formatted_cells)
if coordinates_i == -1 and coordinates_j == -1:
coordinates_i = random.choice([1, 2, 3])
coordinates_j = random.choice([1, 2, 3])
elif player == 'hard':
if option[1] == 'hard' and player_option:
symbol = 'X'
else:
symbol = 'O'
max_choice = -10
predicting_cells = [[j for j in i] for i in formatted_cells]
if predicting_cells == [[" ", " ", " "], [" ", " ", " "], [" ", " ", " "]]:
coordinates_i = random.choice([1, 2, 3])
coordinates_j = random.choice([1, 2, 3])
else:
choices = []
for i in range(3):
for j in range(3):
if predicting_cells[i][j] == ' ':
predicting_cells[i][j] = symbol
choice = minimax(predicting_cells, symbol, False, "maximize")
if choice >= max_choice:
max_choice = choice
choices.append((i + 1, j + 1))
predicting_cells[i][j] = ' '
coordinates_i, coordinates_j = medium(symbol, formatted_cells)
if coordinates_i == -1 and coordinates_j == -1:
coordinates_i, coordinates_j = random.choice(choices)
else:
coordinates_i = random.choice([1, 2, 3])
coordinates_j = random.choice([1, 2, 3])
if check_inputs(coordinates_i - 1, coordinates_j - 1, player, formatted_cells):
formatted_cells[coordinates_i - 1][coordinates_j - 1] = 'X' if player_option else 'O'
printing(formatted_cells)
player_option = False if player_option else True
player = option[1] if player_option else option[2]
if check("X", formatted_cells):
print("X wins")
exit()
elif check("O", formatted_cells):
print("O wins")
exit()
elif not any(True if j == ' ' else False for i in formatted_cells for j in i):
print("Draw")
exit()
if player == 'easy' or player == 'medium' or player == 'hard':
print('Making move level "{}"'.format(player))
if __name__ == '__main__':
while True:
inp = input("Input command: >").strip()
if len(inp.split()) == 1 and inp == 'exit':
exit()
elif len(inp.split()) == 3 and inp.split()[0] == 'start' and inp.split()[1] in ['user', 'easy', 'medium',
'hard'] and inp.split()[2] in [
'user', 'easy', 'medium', 'hard']:
menu(inp.split())
else:
print("Bad parameters!")
| 45.370192 | 120 | 0.540956 | 3.25 |
0ac56e10d9db393efb0c8773114230f35b1ecb89
| 1,046 |
cs
|
C#
|
FlatBuffers-net.Tests/AssertExtensions.cs
|
evolutional/flatbuffers-net
|
27e0fd14cba9e9afc740a6c360f1879034b3e072
|
[
"Apache-2.0"
] | 46 |
2016-01-19T23:28:53.000Z
|
2022-01-12T13:11:32.000Z
|
FlatBuffers-net.Tests/AssertExtensions.cs
|
evolutional/flatbuffers-net
|
27e0fd14cba9e9afc740a6c360f1879034b3e072
|
[
"Apache-2.0"
] | 57 |
2016-01-03T17:03:50.000Z
|
2019-01-21T13:06:26.000Z
|
FlatBuffers-net.Tests/AssertExtensions.cs
|
evolutional/flatbuffers-net
|
27e0fd14cba9e9afc740a6c360f1879034b3e072
|
[
"Apache-2.0"
] | 7 |
2016-09-15T18:22:50.000Z
|
2019-03-08T10:18:13.000Z
|
using System.Text;
using NUnit.Framework;
namespace FlatBuffers.Tests
{
public static class AssertExtensions
{
private static string NormalizeWhitespace(string value)
{
var lastChar = '\0';
var sb = new StringBuilder();
for (var i = 0; i < value.Length; ++i)
{
var c = value[i];
if (c == '\n' || c == '\r')
{
c = ' ';
}
if (char.IsWhiteSpace(lastChar) && char.IsWhiteSpace(c))
{
lastChar = c;
continue;
}
sb.Append(c);
lastChar = c;
}
return sb.ToString().Trim();
}
public static void AreEquivalent(string expected, string actual)
{
expected = NormalizeWhitespace(expected);
actual = NormalizeWhitespace(actual);
Assert.AreEqual(expected, actual);
}
}
}
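// Illustrative usage (an added sketch, not part of the original file; the test
// name and schema strings are hypothetical). It relies on AreEquivalent
// collapsing line breaks and runs of whitespace, as implemented above:
//
//     [Test]
//     public void Schema_Comparison_Ignores_Whitespace()
//     {
//         AssertExtensions.AreEquivalent("table Monster {\n    hp: int;\n}",
//                                        "table Monster { hp: int; }");
//     }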
| 26.820513 | 72 | 0.438815 | 3.0625 |
41092ad1802fd1f0fd703493a7d868e30952e547
| 1,389 |
lua
|
Lua
|
Screensavers/Lines.lua
|
veso266/MineOS
|
a2d2c749141b49807602869d549f171dc0aa4602
|
[
"MIT"
] | null | null | null |
Screensavers/Lines.lua
|
veso266/MineOS
|
a2d2c749141b49807602869d549f171dc0aa4602
|
[
"MIT"
] | null | null | null |
Screensavers/Lines.lua
|
veso266/MineOS
|
a2d2c749141b49807602869d549f171dc0aa4602
|
[
"MIT"
] | null | null | null |
local buffer = require("doubleBuffering")
local event = require("event")
-------------------------------------------------------------------------------------
local lineCount = 10
local backgroundColor = 0x0
local lineColor = 0xFFFFFF
local bufferWidth, bufferHeight = buffer.getResolution()
-------------------------------------------------------------------------------------
local t = {}
function rnd()
if math.random(0,1) == 0 then
return -1
else
return 1
end
end
for i = 1, lineCount do
t[i] = {
x = math.random(1, bufferWidth),
y = math.random(1, bufferHeight * 2),
dx = rnd(),
dy = rnd()
}
end
-------------------------------------------------------------------------------------
buffer.clear(backgroundColor)
buffer.draw(true)
while true do
local eventType = event.pull(0.0001)
if eventType == "touch" or eventType == "key_down" then
break
end
for i = 1, lineCount do
t[i].x = t[i].x + t[i].dx
t[i].y = t[i].y + t[i].dy
if t[i].x > bufferWidth then t[i].dx = -1 end
if t[i].y > bufferHeight * 2 then t[i].dy = -1 end
if t[i].x < 1 then t[i].dx = 1 end
if t[i].y < 1 then t[i].dy = 1 end
end
buffer.clear(backgroundColor)
for i = 1, lineCount - 1 do
buffer.semiPixelLine(t[i].x, t[i].y, t[i + 1].x, t[i + 1].y, lineColor)
end
buffer.semiPixelLine(t[1].x, t[1].y, t[lineCount].x, t[lineCount].y, lineColor)
buffer.draw()
end
| 22.403226 | 85 | 0.516919 | 3.453125 |
a3344bdab18db1e190a71c1fe002d58a022d64cd
| 1,060 |
ts
|
TypeScript
|
problemset/gas-station/index.ts
|
OUDUIDUI/leet-code
|
50e61ce16d1c419ccefc075ae9ead721cdd1cdbb
|
[
"MIT"
] | 6 |
2022-01-17T03:19:56.000Z
|
2022-01-17T05:45:39.000Z
|
problemset/gas-station/index.ts
|
OUDUIDUI/algorithm-brushing
|
61a1b26dd46f2d9f4f90572e66475a52a18ec4d5
|
[
"MIT"
] | null | null | null |
problemset/gas-station/index.ts
|
OUDUIDUI/algorithm-brushing
|
61a1b26dd46f2d9f4f90572e66475a52a18ec4d5
|
[
"MIT"
] | null | null | null |
/**
 * Brute-force solution
 * @desc Time complexity O(N^2), space complexity O(1)
* @param gas
* @param cost
*/
export function canCompleteCircuit(gas: number[], cost: number[]): number {
const len = gas.length
for (let i = 0; i < len; i++) {
if (drive(i))
return i
}
return -1
function drive(startIndex: number) {
let oil = 0
let count = 0
while (count < len) {
const idx = (count + startIndex) % len
oil += gas[idx] - cost[idx]
if (oil < 0)
return false
count++
}
return true
}
}
/**
 * Single-pass solution
 * @desc Time complexity O(N), space complexity O(1)
* @param gas
* @param cost
*/
export function canCompleteCircuit2(gas: number[], cost: number[]): number {
const len = gas.length
let i = 0
while (i < len) {
let oil = 0
let count = 0
while (count < len) {
const idx = (i + count) % len
oil += gas[idx] - cost[idx]
if (oil < 0)
break
count++
}
if (count === len) {
return i
}
else {
      // skip ahead and restart just past the last station that could not be reached
i = i + count + 1
}
}
return -1
}
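// Illustrative check (an added sketch, not part of the original solution): for
// the classic example gas = [1,2,3,4,5], cost = [3,4,5,1,2], the only feasible
// starting station is index 3, and both implementations agree:
//
//   console.log(canCompleteCircuit([1, 2, 3, 4, 5], [3, 4, 5, 1, 2]))  // 3
//   console.log(canCompleteCircuit2([1, 2, 3, 4, 5], [3, 4, 5, 1, 2])) // 3
//   console.log(canCompleteCircuit2([2, 3, 4], [3, 4, 3]))             // -1 (no valid start)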
| 16.060606 | 76 | 0.510377 | 3.609375 |
1968ab3d3e6b2fbab03e36426d644267de5bf5b9
| 1,843 |
lua
|
Lua
|
examples/udppingpong/system.lua
|
stetre/lunasdl
|
32e98fd9ebb030edd23659bb2bb12f0ad9f8a436
|
[
"MIT"
] | 2 |
2015-05-03T06:06:07.000Z
|
2020-07-03T04:48:05.000Z
|
examples/udppingpong/system.lua
|
stetre/lunasdl
|
32e98fd9ebb030edd23659bb2bb12f0ad9f8a436
|
[
"MIT"
] | null | null | null |
examples/udppingpong/system.lua
|
stetre/lunasdl
|
32e98fd9ebb030edd23659bb2bb12f0ad9f8a436
|
[
"MIT"
] | null | null | null |
-- System agent script: system.lua
socket = require("socket")
local T1 = sdl.timer(10,"T1")
local player
local s, peerip, peerport, role
local function udprecv() -- socket 'read' callback
-- get the UDP datagram and the sender's address
local msg, fromip, fromport = s:receivefrom()
sdl.printf("received '%s' from %s:%s",msg,fromip,fromport)
-- check that it is an expected message
assert(msg == "PING" or msg == "PONG" or msg == "STOP")
-- send the corresponding signal to the local player agent
sdl.send({ msg, self_ }, player)
if msg == "STOP" then
sdl.stop()
end
end
function Start(ip, port, remip, remport, duration, ping_interval)
peerip = remip
peerport = remport
role = ping_interval and "initiator" or "responder"
sdl.printf("starting %s at %s:%s (peer system is at %s:%s)",
role,ip,port,peerip,peerport)
-- create a UDP socket and bind it to ip:port
s = assert(socket.udp())
assert(s:setsockname(ip,port))
assert(s:setoption("reuseaddr",true))
-- register the socket in the event loop
sdl.register(s, udprecv)
-- create the player agent
player=sdl.create("player","pingpong.player", ping_interval) --
-- send it the start signal (initiator side only)
if role == "initiator" then
sdl.send({ "START", self_ }, player )
end
-- start the overall timer
sdl.set(T1,sdl.now()+duration)
sdl.nextstate("Active")
end
function Active_T1Expired()
sdl.send({ "STOP" }, player )
s:sendto("STOP",peerip,peerport)
sdl.stop(function () sdl.deregister(s) s:close() end)
end
function Active_Any()
-- signal from local player, redirect signal name to peer system
s:sendto(signame_,peerip,peerport)
end
sdl.start(Start)
sdl.transition("Active","T1",Active_T1Expired)
sdl.transition("Active","*",Active_Any)
| 24.905405 | 67 | 0.670646 | 3.734375 |
9fe1830e7049b67549fc4cf3828a43204d454c07
| 5,703 |
py
|
Python
|
modeling_wDolly.py
|
mfatihaktas/deep-scheduler
|
ad567465399620ec379cfdaa67fbcd94ded03c75
|
[
"MIT"
] | 11 |
2018-03-28T02:55:12.000Z
|
2021-07-12T15:21:38.000Z
|
modeling_wDolly.py
|
mfatihaktas/deep-scheduler
|
ad567465399620ec379cfdaa67fbcd94ded03c75
|
[
"MIT"
] | null | null | null |
modeling_wDolly.py
|
mfatihaktas/deep-scheduler
|
ad567465399620ec379cfdaa67fbcd94ded03c75
|
[
"MIT"
] | 5 |
2018-03-16T01:36:46.000Z
|
2019-10-17T03:23:20.000Z
|
import numpy as np
from rvs import Dolly
from math_utils import *
from plot_utils import *
def Pr_Xnk_leq_x(X, n, k, x):
# log(INFO, "x= {}".format(x) )
cdf = 0
for i in range(k, n+1):
cdf += binom_(n, i) * X.cdf(x)**i * X.tail(x)**(n-i)
return cdf
def EXnk(X, n, k, m=1):
if k == 0:
return 0
if m == 1:
# EXnk, abserr = scipy.integrate.quad(lambda x: 1 - Pr_Xnk_leq_x(X, n, k, x), 0.0001, np.Inf) # 2*X.u_l
EXnk = float(mpmath.quad(lambda x: 1 - Pr_Xnk_leq_x(X, n, k, x), [0.0001, 10*X.u_l] ) )
else:
# EXnk, abserr = scipy.integrate.quad(lambda x: m*x**(m-1) * (1 - Pr_Xnk_leq_x(X, n, k, x)), 0.0001, np.Inf)
EXnk = float(mpmath.quad(lambda x: m*x**(m-1) * (1 - Pr_Xnk_leq_x(X, n, k, x) ), [0.0001, 10*X.u_l] ) )
return EXnk
def ECnk(X, n, k):
if k == 0:
return 0
EC = 0
for i in range(1, k):
EC += EXnk(X, n, i)
EC += (n-k+1)*EXnk(X, n, k)
return EC
def plot_cdf_X(X):
x_l, Pr_X_leq_x_l = [], []
for x in np.linspace(0, 30, 100):
x_l.append(x)
Pr_X_leq_x_l.append(X.cdf(x) )
plot.plot(x_l, Pr_X_leq_x_l, c='blue', marker='x', ls=':', mew=0.1, ms=8)
fontsize = 20
plot.legend(loc='best', framealpha=0.5, fontsize=14, numpoints=1)
plot.xlabel(r'$x$', fontsize=fontsize)
plot.ylabel(r'$\Pr\{X \leq x\}$', fontsize=fontsize)
plot.title(r'$X \sim {}$'.format(X.to_latex() ), fontsize=fontsize)
fig = plot.gcf()
fig.set_size_inches(4, 4)
plot.savefig('plot_cdf_X.png', bbox_inches='tight')
fig.clear()
log(INFO, "done.")
def redsmall_ES_wSl(k, r, D, Sl, d=None, red='coding'):
if d is None:
return D.mean()*sum([EXnk(Sl, i, i)*k.pdf(i) for i in k.v_l] )
ED_given_D_leq_doverk = lambda k: D.mean_given_leq_x(d/k)
return redsmall_ES_wSl(k, r, D, Sl, d=None, red=red) \
+ sum([(EXnk(Sl, i*r, i) - EXnk(Sl, i, i) )*ED_given_D_leq_doverk(i)*D.cdf(d/i)*k.pdf(i) for i in k.v_l] )
# + sum([(ES_k_n_pareto(i, i*r, a, alpha) - ES_k_n_pareto(i, i, a, alpha) )*ED_given_D_leq_doverk(i)*D.cdf(d/i)*k.pdf(i) for i in k.v_l] )
def redsmall_ES2_wSl(k, r, D, Sl, d=None, red='coding'):
if d is None:
return D.moment(2)*sum([EXnk(Sl, i, i, m=2)*k.pdf(i) for i in k.v_l] )
ED2_given_D_leq_doverk = lambda k: moment(D, 2, given_X_leq_x=True, x=d/k)
return redsmall_ES2_wSl(k, r, D, Sl, d=None, red=red) \
+ sum([(EXnk(Sl, i*r, i, m=2) - EXnk(Sl, i, i, m=2) )*ED2_given_D_leq_doverk(i)*D.cdf(d/i)*k.pdf(i) for i in k.v_l] )
def redsmall_EC_wSl(k, r, D, Sl, d=None, red='coding'):
if d is None:
return k.mean()*D.mean()*Sl.mean()
ED_given_D_leq_doverk = lambda k: D.mean_given_leq_x(d/k)
return redsmall_EC_wSl(k, r, D, Sl, d=None, red=red) \
+ sum([(ECnk(Sl, i*r, i) - i*Sl.mean())*ED_given_D_leq_doverk(i)*D.cdf(d/i)*k.pdf(i) for i in k.v_l] )
def ar_for_ro0(ro0, N, Cap, k, r, D, Sl):
return ro0*N*Cap/k.mean()/D.mean()/Sl.mean()
def redsmall_ET_EW_Prqing_wMGc_wSl(ro0, N, Cap, k, r, D, Sl, d, red='coding'):
'''Using the result for M/M/c to approximate E[T] in M/G/c.
[https://en.wikipedia.org/wiki/M/G/k_queue]
'''
ar = ar_for_ro0(ro0, N, Cap, k, r, D, Sl)
ES = redsmall_ES_wSl(k, r, D, Sl, d, red)
ES2 = redsmall_ES2_wSl(k, r, D, Sl, d, red)
EC = redsmall_EC_wSl(k, r, D, Sl, d, red)
log(INFO, "d= {}".format(d), ES=ES, ES2=ES2, EC=EC)
EW, Prqing = MGc_EW_Prqing(ar, N*Cap*ES/EC, ES, ES2)
if EW < 0:
# log(ERROR, "!!!", EW=EW, Prqing=Prqing, ES=ES, ES2=ES2, EC=EC)
# return None, None, None
# return (ES + abs(EW))**2, None, None
return 10**6, None, None
ET = ES + EW
# log(INFO, "d= {}, ro= {}, ES= {}, EW= {}, ET= {}".format(d, ro, ES, EW, ET) )
# log(INFO, "d= {}, ro= {}".format(d, ro) )
# return round(ET, 2), round(EW, 2), round(Prqing, 2)
return ET, EW, Prqing
def redsmall_approx_ET_EW_Prqing_wMGc_wSl(ro0, N, Cap, k, r, D, Sl, d, red='coding'):
ar = ar_for_ro0(ro0, N, Cap, k, r, D, Sl)
ro = ro0
ES = redsmall_ES_wSl(k, r, D, Sl, d, red)
# ES2 = redsmall_ES2_wSl(k, r, D, Sl, d, red)
# EC = redsmall_EC_wSl(k, r, D, Sl, d, red)
log(INFO, "d= {}".format(d), ar=ar, ES=ES) # , ES2=ES2, EC=EC
EW = 1/ar * ro**2/(1 - ro)
ET = ES + EW
return ET, EW, ro
def plot_ET(N, Cap, k, r, D, Sl, red='coding'):
def plot_(ro0):
log(INFO, "ro0= {}".format(ro0) )
d_l, ET_l = [], []
for d in np.linspace(D.l_l, D.mean()*15, 7):
ET, EW, Prqing = redsmall_ET_EW_Prqing_wMGc_wSl(ro0, N, Cap, k, r, D, Sl, d, red='coding') # redsmall_ES_wSl(k, r, D, Sl, d, red)
log(INFO, "d= {}, ET= {}, EW= {}, Prqing= {}".format(d, ET, EW, Prqing) )
if ET > 150:
break
d_l.append(d)
ET_l.append(ET)
plot.plot(d_l, ET_l, label=r'$\rho_0= {}$'.format(ro0), c=next(darkcolor_c), marker=next(marker_c), ls=':', mew=0.1, ms=8)
plot_(ro0=0.8)
# plot_(ro0=0.9)
fontsize = 20
plot.legend(loc='best', framealpha=0.5, fontsize=14, numpoints=1)
plot.xlabel(r'$d$', fontsize=fontsize)
plot.ylabel(r'$E[T]$', fontsize=fontsize)
plot.title(r'$r= {}$, $k \sim {}$'.format(r, k.to_latex() ) + "\n" \
+ r'$D \sim {}$, $Sl \sim {}$'.format(D.to_latex(), Sl.to_latex() ), fontsize=fontsize)
fig = plot.gcf()
fig.set_size_inches(4, 4)
plot.savefig('plot_ET.png', bbox_inches='tight')
fig.clear()
log(INFO, "done.")
if __name__ == "__main__":
X = Dolly()
print("EX= {}".format(X.mean() ) )
def EXnk_(n, k):
EX_ = EXnk(X, n, k)
print("n= {}, k= {}, EXnk= {}".format(n, k, EX_) )
# EXnk_(n=10, k=10)
# EXnk_(n=10, k=8)
# EXnk_(n=10, k=5)
N, Cap = 20, 10
k = BZipf(1, 10)
r = 2
D = Pareto(10, 3)
Sl = Dolly()
plot_ET(N, Cap, k, r, D, Sl)
| 33.745562 | 148 | 0.573032 | 3.015625 |
455a586b1f80c0f5f7820245d613a93732cd8a39
| 1,167 |
py
|
Python
|
docs/examples/api/modifying_values_test.py
|
tomkralidis/mappyfile
|
4313f9e52f5c54198988e62c3e2ebc9223f174ef
|
[
"MIT"
] | 48 |
2017-02-07T23:37:37.000Z
|
2021-12-28T12:56:37.000Z
|
docs/examples/api/modifying_values_test.py
|
tomkralidis/mappyfile
|
4313f9e52f5c54198988e62c3e2ebc9223f174ef
|
[
"MIT"
] | 135 |
2017-03-16T08:54:59.000Z
|
2022-03-30T20:00:22.000Z
|
docs/examples/api/modifying_values_test.py
|
tomkralidis/mappyfile
|
4313f9e52f5c54198988e62c3e2ebc9223f174ef
|
[
"MIT"
] | 23 |
2017-01-31T08:46:48.000Z
|
2021-07-08T15:28:49.000Z
|
import mappyfile
def test():
mapfile = mappyfile.open("./docs/examples/raster.map")
# START OF API EXAMPLE
# update the map name
mapfile["name"] = "MyNewMap"
# update a layer name
layers = mapfile["layers"]
layer = layers[0]
layer["name"] = "MyLayer"
# update the error file path in the map config section
# note key names can be lower or upper case
mapfile["config"]["ms_errorfile"] = "/ms4w/tmp/ms_error.txt"
# update the web metadata settings
mapfile["web"]["metadata"]["wms_format"] = "image/png"
print(mappyfile.dumps(mapfile["web"])) # print out just the WEB section
# alternatively we can parse the Mapfile syntax and load it directly
s = """
METADATA
'wms_enable_request' '*'
'wms_feature_info_mime_type' 'text/html'
'wms_format' 'image/jpg'
END"""
metadata = mappyfile.loads(s)
mapfile["web"]["metadata"] = metadata
print(mappyfile.dumps(mapfile))
# END OF API EXAMPLE
assert(layer["name"] == "MyLayer")
assert(mapfile["web"]["metadata"]["wms_format"] == "image/jpg")
if __name__ == "__main__":
test()
| 27.139535 | 75 | 0.624679 | 3.125 |
a3c9f19b8a9b0ecc262dd6efe615b7e25fceed86
| 2,811 |
java
|
Java
|
src/main/java/neqsim/processSimulation/util/example/simpleTopSideProcess2.java
|
EvenSol/neqsim
|
d81df26b2937d4a6665e3c28626721aa55667a51
|
[
"Apache-2.0"
] | null | null | null |
src/main/java/neqsim/processSimulation/util/example/simpleTopSideProcess2.java
|
EvenSol/neqsim
|
d81df26b2937d4a6665e3c28626721aa55667a51
|
[
"Apache-2.0"
] | null | null | null |
src/main/java/neqsim/processSimulation/util/example/simpleTopSideProcess2.java
|
EvenSol/neqsim
|
d81df26b2937d4a6665e3c28626721aa55667a51
|
[
"Apache-2.0"
] | null | null | null |
package neqsim.processSimulation.util.example;
import neqsim.processSimulation.processEquipment.compressor.Compressor;
import neqsim.processSimulation.processEquipment.heatExchanger.Heater;
import neqsim.processSimulation.processEquipment.mixer.Mixer;
import neqsim.processSimulation.processEquipment.separator.GasScrubberSimple;
import neqsim.processSimulation.processEquipment.separator.Separator;
import neqsim.processSimulation.processEquipment.separator.ThreePhaseSeparator;
import neqsim.processSimulation.processEquipment.stream.Stream;
import neqsim.processSimulation.processEquipment.util.Recycle;
import neqsim.processSimulation.processEquipment.valve.ThrottlingValve;
public class simpleTopSideProcess2{
private static final long serialVersionUID = 1000;
/** This method is just meant to test the thermo package.
*/
public static void main(String args[]){
neqsim.thermo.Fluid.setHasWater(true);
neqsim.thermo.system.SystemInterface fluid = neqsim.thermo.Fluid.create("gas condensate");
fluid.setTemperature(45.0, "C");
fluid.setPressure(5.0, "bara");
Stream stream_inlet = new Stream("Stream1", fluid);
Mixer mixer_inlet = new neqsim.processSimulation.processEquipment.mixer.StaticMixer("Mixer HP");
mixer_inlet.addStream(stream_inlet);
ThreePhaseSeparator separator_inlet = new ThreePhaseSeparator("Separator 1", mixer_inlet.getOutStream());
Stream stream_gasFromSep = new Stream(separator_inlet.getGasOutStream());
Heater cooler1 = new Heater(stream_gasFromSep);
cooler1.setOutTemperature(285.25);
Separator scrubber = new Separator("Scrubber 1", cooler1.getOutStream());
Recycle recyleOp =new Recycle("resyc");
recyleOp.addStream(scrubber.getLiquidOutStream());
mixer_inlet.addStream(recyleOp.getOutStream());
neqsim.processSimulation.processSystem.ProcessSystem operations = new neqsim.processSimulation.processSystem.ProcessSystem();
operations.add(stream_inlet);
operations.add(mixer_inlet);
operations.add(separator_inlet);
operations.add(stream_gasFromSep);
operations.add(cooler1);
operations.add(scrubber);
operations.add(recyleOp);
operations.run();
scrubber.displayResult();
stream_inlet.getThermoSystem().setTemperature(273.15+35.0);
operations.run();
// scrubber.displayResult();
stream_inlet.getThermoSystem().setTemperature(273.15+30.0);
operations.run();
// scrubber.displayResult();
stream_inlet.getThermoSystem().setTemperature(273.15+16.0);
operations.run();
// scrubber.displayResult();
}
}
| 41.338235 | 133 | 0.71576 | 3.015625 |
2c6ba217b4bd5b741db7f37ab8667f532f5d4dbc
| 869 |
py
|
Python
|
14_Day_Higher_order_functions/examples/single decorator.py
|
diegofregolente/30-Days-Of-Python
|
e0cad31f6d5ab1384ad6fa5a5d24a84771d6c267
|
[
"Apache-2.0"
] | null | null | null |
14_Day_Higher_order_functions/examples/single decorator.py
|
diegofregolente/30-Days-Of-Python
|
e0cad31f6d5ab1384ad6fa5a5d24a84771d6c267
|
[
"Apache-2.0"
] | null | null | null |
14_Day_Higher_order_functions/examples/single decorator.py
|
diegofregolente/30-Days-Of-Python
|
e0cad31f6d5ab1384ad6fa5a5d24a84771d6c267
|
[
"Apache-2.0"
] | null | null | null |
# def welcome():
# return 'Welcome to the Python' # basic function
#
# def uppercase_decorator(function): # parameter function
# def wrapper(): # function inside of uppercase
# func = function() # variable == function()
#         make_uppercase = func.upper() # convert the called function's return value to uppercase
#         return make_uppercase # return the function's result in CAPS
#     return wrapper # return the wrapper function, which will produce make_uppercase above
#
#
# g = uppercase_decorator(welcome) # call the decorator with another function and assign the result to 'g'
# print(g())
# Example below with decorator
def uppercase_decorator(function):
def wrapper():
func = function()
make_upper = func.upper()
return make_upper
return wrapper
@uppercase_decorator
def welcome():
return 'Hello Friend.'
print(welcome())
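# For reference (added note): the decorator upper-cases the wrapped function's
# return value, so the call above prints:
#   HELLO FRIEND.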
| 29.965517 | 92 | 0.686997 | 3.265625 |
cd5b23d5dd15c6a313c2d5ad1299c6084801122d
| 1,893 |
cs
|
C#
|
MultiModelObject.cs
|
KampinKarl1/MultiModelObjectShower
|
c18ec23ad0b9dd30275248a81c1b906ad97cb272
|
[
"MIT"
] | null | null | null |
MultiModelObject.cs
|
KampinKarl1/MultiModelObjectShower
|
c18ec23ad0b9dd30275248a81c1b906ad97cb272
|
[
"MIT"
] | null | null | null |
MultiModelObject.cs
|
KampinKarl1/MultiModelObjectShower
|
c18ec23ad0b9dd30275248a81c1b906ad97cb272
|
[
"MIT"
] | null | null | null |
using UnityEngine;
using System.Collections;
/// <summary>
/// Gives an object a bit of visual variety by randomly changing which model is shown at start.
/// Example: Synty and PolyPerfect have characters with lots of gameobjects/character models; you can randomly choose those models with this script.
/// </summary>
public class MultiModelObject : MonoBehaviour
{
[SerializeField] GameObject[] models = new GameObject[0];
[SerializeField, Tooltip("Keep the currently shown gameobject as the active model")]
private bool keepCurrentModel = false;
[SerializeField] private bool doFashionShow = false;
[SerializeField] private float timeBetweenChanges = .25f;
WaitForSeconds changeWait;
void Start()
{
if (models.Length == 0 || keepCurrentModel)
return;
for (int i = 0; i < models.Length; i++)
{
if (models[i].activeSelf)
models[i].SetActive(false);
}
ActivateModelAt(Random.Range(0, models.Length));
if (doFashionShow)
{
changeWait = new WaitForSeconds(timeBetweenChanges);
StartCoroutine(DisplayModels());
}
}
private int currentActive = 0;
public void ActivateModelAt(int modelIndex)
{
models[currentActive].SetActive(false);
currentActive = modelIndex;
models[modelIndex].SetActive(true);
}
IEnumerator DisplayModels()
{
while (Application.isPlaying)
{
yield return changeWait;
int randModelInd = RandomIndex();
ActivateModelAt(randModelInd);
}
}
private int RandomIndex()
{
int index = Random.Range(0, models.Length);
        if (index == currentActive && models.Length > 1) // don't create an infinite loop
return RandomIndex();
return index;
}
}
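// Illustrative usage from another component (an added sketch; the call below is
// hypothetical and not part of the original script):
//
//     var multiModel = GetComponent<MultiModelObject>();
//     multiModel.ActivateModelAt(0); // force-show the first registered model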
| 28.681818 | 149 | 0.621236 | 3.21875 |
05e7e76d6e1fca6bc6bee14678cf8bc1619a9c09
| 1,484 |
py
|
Python
|
pirate/symbolic_regression/util.py
|
212726320/PIRATE-1
|
eac2d090286e0a5c13be4829259ea12cbda2f75c
|
[
"MIT"
] | null | null | null |
pirate/symbolic_regression/util.py
|
212726320/PIRATE-1
|
eac2d090286e0a5c13be4829259ea12cbda2f75c
|
[
"MIT"
] | null | null | null |
pirate/symbolic_regression/util.py
|
212726320/PIRATE-1
|
eac2d090286e0a5c13be4829259ea12cbda2f75c
|
[
"MIT"
] | 1 |
2022-01-27T22:34:45.000Z
|
2022-01-27T22:34:45.000Z
|
# File: util
# File Created: Wednesday, 20th November 2019 3:56:01 pm
# Author: Steven Atkinson ([email protected])
from typing import Callable
import deap.gp
import torch
from ..data.experiment import Experiment
from ..function import Function
def get_residual_function(
op: Callable, experiment: Experiment, pset: deap.gp.PrimitiveSet
) -> Function:
"""
Create the parametric residual function r(x; Theta)
:param op: Operator over functions, aka a graph as a compiled function
:return: Callable with signature r(x, theta_0=val, ...theta_m-1=val)
"""
# TODO would like to make it easier to see how many parameters "op" expects
def residual(x, **parameters):
# First, evaluate the operator over functions and parameters:
func = op(
*[experiment.left_hand_side[key] for key in pset.arguments], **parameters
)
# Then, subtract the inhomogeneous function
if experiment.inhomogeneous is not None:
func = func - experiment.inhomogeneous
return func(x)
return residual
def tensor_to_parameter_dict(x: torch.Tensor) -> dict:
"""
Take an array of parameter values and restructure it as a dict that's a
valid input for the **parameters kwarg for a residual function returned by
`get_residual_function()`
:param x: 1D array of parameters
:return: (dict) parameter specification
"""
return {"theta_%i" % i: val for i, val in enumerate(x)}
| 28.538462 | 85 | 0.686658 | 3.328125 |
43a2c364a7f5d2c6db467a2674dd2b95829e0a87
| 3,164 |
tsx
|
TypeScript
|
contexts/AuthContext.tsx
|
ElisioWander/nextAuth
|
c39ab25894dc5783b476720b656e73cfdfab904b
|
[
"MIT"
] | null | null | null |
contexts/AuthContext.tsx
|
ElisioWander/nextAuth
|
c39ab25894dc5783b476720b656e73cfdfab904b
|
[
"MIT"
] | null | null | null |
contexts/AuthContext.tsx
|
ElisioWander/nextAuth
|
c39ab25894dc5783b476720b656e73cfdfab904b
|
[
"MIT"
] | null | null | null |
import { createContext, ReactNode, useEffect, useState } from "react";
import { api } from "../services/apiClient";
import { setCookie, parseCookies, destroyCookie } from "nookies";
import Router from "next/router";
type User = {
email: string;
permissions: string[];
roles: string[];
};
// data that will be passed to the signIn function
type SignInCredentials = {
email: string;
password: string;
};
// data that will be shared through the context
type AuthContextData = {
signIn: (credentials: SignInCredentials) => Promise<void>;
signOut: () => void;
user: User;
isAuthenticated: boolean;
};
// ReactNode is the type that accepts any React element
type AuthProviderProps = {
children: ReactNode;
};
// the context itself
export const AuthContext = createContext({} as AuthContextData);
let authChannel: BroadcastChannel
export function signOut() {
destroyCookie(undefined, "nextauth.token");
destroyCookie(undefined, "nextauth.refreshToken");
authChannel.postMessage('signOut')
Router.push("/");
}
// the provider that will wrap the whole application, making the
// information available everywhere
export function AuthProvider({ children }: AuthProviderProps) {
  // store the user data
const [user, setUser] = useState<User>();
const isAuthenticated = !!user;
useEffect(() => {
authChannel = new BroadcastChannel('auth')
authChannel.onmessage = (message) => {
switch (message.data) {
case 'signOut':
signOut();
break;
default:
break;
}
}
}, [])
useEffect(() => {
    // get all cookies with parseCookies
    // get the token from inside the cookies
const { "nextauth.token": token } = parseCookies();
if (token) {
api
.get("/me")
.then((response) => {
const { email, permissions, roles } = response.data;
setUser({
email,
permissions,
roles,
});
}).catch(() => {
signOut()
});
}
}, []);
async function signIn({ email, password }: SignInCredentials) {
try {
const response = await api.post("/sessions", {
email,
password,
});
const { permissions, roles, token, refreshToken } = response.data;
      // save the token and the refreshToken in cookies
setCookie(undefined, "nextauth.token", token, {
        maxAge: 30 * 24 * 60 * 60, // 30 days
        path: "/", // every route in the application will have access to this token
});
setCookie(undefined, "nextauth.refreshToken", refreshToken, {
maxAge: 30 * 24 * 60 * 60,
path: "/",
});
//atualizar os dados do usuário após o logIn
setUser({
email,
permissions,
roles,
});
api.defaults.headers.common["Authorization"] = `Bearer ${token}`;
Router.push("/dashboard");
// console.log(response.data);
} catch (error) {
console.log(error);
}
}
return (
<AuthContext.Provider value={{ signIn, isAuthenticated, user, signOut }}>
{children}
</AuthContext.Provider>
);
}
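// Illustrative consumption of the context (an added sketch; the component name
// and import path are hypothetical, not part of the original file):
//
//   import { useContext } from "react";
//   import { AuthContext } from "../contexts/AuthContext";
//
//   function Profile() {
//     const { user, isAuthenticated, signOut } = useContext(AuthContext);
//     if (!isAuthenticated) return <p>Not signed in</p>;
//     return <button onClick={signOut}>Sign out ({user.email})</button>;
//   }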
| 23.789474 | 94 | 0.609039 | 3.1875 |
b328efc2e5e2255ab68e10b3d063a09ee14a42e8
| 6,173 |
py
|
Python
|
ElectreCredibilityWithCounterVeto/ElectreCredibilityWithCounterVeto.py
|
sbigaret/electre_diviz
|
ccfb990a2543799acdf3bb21cf8b0c7499a5a3b5
|
[
"MIT"
] | 5 |
2018-01-13T16:19:04.000Z
|
2020-11-28T22:09:49.000Z
|
ElectreCredibilityWithCounterVeto/ElectreCredibilityWithCounterVeto.py
|
sbigaret/electre_diviz
|
ccfb990a2543799acdf3bb21cf8b0c7499a5a3b5
|
[
"MIT"
] | 4 |
2015-02-22T14:01:20.000Z
|
2019-01-02T18:45:04.000Z
|
ElectreCredibilityWithCounterVeto/ElectreCredibilityWithCounterVeto.py
|
sbigaret/electre_diviz
|
ccfb990a2543799acdf3bb21cf8b0c7499a5a3b5
|
[
"MIT"
] | 4 |
2015-01-08T23:04:11.000Z
|
2018-11-06T08:07:34.000Z
|
#!/usr/bin/env python
"""
ElectreCredibilityWithCounterVeto - computes credibility matrix using procedure
which is common to the most methods from the Electre family.
This module is an extended version of 'ElectreCredibility' in that it is
designed to work with the 'counter-veto' concept - i.e. it requires an
additional input file ('counter_veto_crossed.xml') produced by
'ElectreDiscordance' module, which contains the information for which pairs of
variants and on which criteria the 'counter-veto' threshold has been crossed.
Please note that unlike 'ElectreCredibility', this module can accept
discordance indices only in non-aggregated form (i.e. one index per criterion).
Usage:
ElectreCredibilityWithCounterVeto.py -i DIR -o DIR
Options:
-i DIR Specify input directory. It should contain the following files:
alternatives.xml
classes_profiles.xml (optional)
concordance.xml
counter_veto_crossed.xml
discordance.xml
method_parameters.xml
-o DIR Specify output directory. Files generated as output:
credibility.xml
messages.xml
--version Show version.
-h --help Show this screen.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import sys
import traceback
from docopt import docopt
from common import comparisons_to_xmcda, create_messages_file, get_dirs, \
get_error_message, get_input_data, write_xmcda, Vividict
__version__ = '0.1.0'
def get_credibility(comparables_a, comparables_b, concordance, discordance,
with_denominator, only_max_discordance, cv_crossed):
def _get_credibility_idx(x, y, num_crossed, only_max_discordance):
discordance_values = discordance[x][y].values()
if set(discordance_values) == set([0]): # only zeros
c_idx = concordance[x][y]
elif 1 in discordance_values: # at least one '1'
if not concordance[x][y] < 1:
raise RuntimeError("When discordance == 1, "
"concordance must be < 1.")
c_idx = 0.0
elif only_max_discordance and not with_denominator:
c_idx = concordance[x][y] * (1 - max(discordance_values))
else:
factors = []
for d in discordance_values:
if with_denominator:
if d > concordance[x][y]:
factor = (1 - d) / (1 - concordance[x][y])
else:
factor = None
else:
factor = (1 - d)
if factor:
factors.append(factor)
if factors == []:
c_idx = concordance[x][y]
else:
discordance_aggr = reduce(lambda f1, f2: f1 * f2, factors)
c_idx = (concordance[x][y] *
discordance_aggr ** (1 - num_crossed / num_total))
return c_idx
two_way_comparison = True if comparables_a != comparables_b else False
# 'num_total' == total number of criteria.
# Instead of this monstrosity below, maybe it would be better to provide
# 'criteria.xml' as another input..?
num_total = len(discordance.values()[0].values()[0].keys())
credibility = Vividict()
for a in comparables_a:
for b in comparables_b:
num_crossed = len(cv_crossed[a][b])
credibility[a][b] = _get_credibility_idx(a, b, num_crossed,
only_max_discordance)
if two_way_comparison:
credibility[b][a] = _get_credibility_idx(b, a, num_crossed,
only_max_discordance)
return credibility
def main():
try:
args = docopt(__doc__, version=__version__)
output_dir = None
input_dir, output_dir = get_dirs(args)
filenames = [
# every tuple below == (filename, is_optional)
('alternatives.xml', False),
('classes_profiles.xml', True),
('concordance.xml', False),
('counter_veto_crossed.xml', False),
('discordance.xml', False),
('method_parameters.xml', False),
]
params = [
'alternatives',
'categories_profiles',
'comparison_with',
'concordance',
'cv_crossed',
'discordance',
'only_max_discordance',
'with_denominator',
]
d = get_input_data(input_dir, filenames, params, use_partials=True)
# getting the elements to compare
comparables_a = d.alternatives
if d.comparison_with in ('boundary_profiles', 'central_profiles'):
# central_profiles is a dict, so we need to get the keys
comparables_b = [i for i in d.categories_profiles]
else:
comparables_b = d.alternatives
credibility = get_credibility(comparables_a, comparables_b,
d.concordance, d.discordance,
d.with_denominator,
d.only_max_discordance, d.cv_crossed)
# serialization etc.
if d.comparison_with in ('boundary_profiles', 'central_profiles'):
mcda_concept = 'alternativesProfilesComparisons'
else:
mcda_concept = None
comparables = (comparables_a, comparables_b)
xmcda = comparisons_to_xmcda(credibility, comparables,
mcda_concept=mcda_concept)
write_xmcda(xmcda, os.path.join(output_dir, 'credibility.xml'))
create_messages_file(None, ('Everything OK.',), output_dir)
return 0
except Exception, err:
err_msg = get_error_message(err)
log_msg = traceback.format_exc()
print(log_msg.strip())
create_messages_file((err_msg, ), (log_msg, ), output_dir)
return 1
if __name__ == '__main__':
sys.exit(main())
| 38.823899 | 82 | 0.590151 | 3.015625 |
0871d44931864ffd1969e1f28d502b2257ae9711
| 2,469 |
swift
|
Swift
|
Sources/Cocoa/Components/UIImage/ImageFetcher/CompositeImageFetcher.swift
|
mstana/xcore.swift
|
d637b9827b65149968d93cc910059853b62842fb
|
[
"MIT"
] | null | null | null |
Sources/Cocoa/Components/UIImage/ImageFetcher/CompositeImageFetcher.swift
|
mstana/xcore.swift
|
d637b9827b65149968d93cc910059853b62842fb
|
[
"MIT"
] | 1 |
2020-03-02T19:43:19.000Z
|
2020-03-02T19:43:19.000Z
|
Sources/Cocoa/Components/UIImage/ImageFetcher/CompositeImageFetcher.swift
|
mstana/xcore.swift
|
d637b9827b65149968d93cc910059853b62842fb
|
[
"MIT"
] | null | null | null |
//
// Xcore
// Copyright © 2018 Xcore
// MIT license, see LICENSE file for details
//
import UIKit
final class CompositeImageFetcher: ImageFetcher, ExpressibleByArrayLiteral {
/// The registered list of fetchers.
private var fetchers: [ImageFetcher] = []
init(_ fetchers: [ImageFetcher]) {
self.fetchers = fetchers
}
init(arrayLiteral elements: ImageFetcher...) {
self.fetchers = elements
}
/// Add given fetcher if it's not already included in the collection.
///
/// - Note: This method ensures there are no duplicate fetchers.
func add(_ fetcher: ImageFetcher) {
guard !fetchers.contains(where: { $0.id == fetcher.id }) else {
return
}
fetchers.append(fetcher)
}
/// Add list of given fetchers if they are not already included in the
/// collection.
///
/// - Note: This method ensures there are no duplicate fetchers.
func add(_ fetchers: [ImageFetcher]) {
fetchers.forEach(add)
}
/// Removes the given fetcher.
func remove(_ fetcher: ImageFetcher) {
let ids = fetchers.map { $0.id }
guard let index = ids.firstIndex(of: fetcher.id) else {
return
}
fetchers.remove(at: index)
}
}
extension CompositeImageFetcher {
var id: String {
fetchers.map { $0.id }.joined(separator: "_")
}
func canHandle(_ image: ImageRepresentable) -> Bool {
image.imageSource.isValid
}
func fetch(_ image: ImageRepresentable, in imageView: UIImageView?, _ callback: @escaping ResultBlock) {
guard image.imageSource.isValid else {
#if DEBUG
Console.error("Unable to fetch image because of invalid image source.")
#endif
callback(.failure(ImageFetcherError.notFound))
return
}
// 1. Reverse fetchers so the third-party fetchers are always prioritized over
// built-in ones.
// 2. Find the first one that can handle the request.
// 3. Fetch the requested image.
guard let fetcher = fetchers.reversed().first(where: { $0.canHandle(image) }) else {
callback(.failure(ImageFetcherError.notFound))
return
}
imageView?.imageRepresentableSource = image.imageSource
fetcher.fetch(image, in: imageView, callback)
}
func removeCache() {
fetchers.forEach { $0.removeCache() }
}
}
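// Illustrative wiring (an added sketch; `LocalImageFetcher` and
// `RemoteImageFetcher` are placeholder names for concrete ImageFetcher
// implementations, not types defined in this file):
//
//     let fetcher: CompositeImageFetcher = [LocalImageFetcher(), RemoteImageFetcher()]
//     fetcher.fetch(image, in: imageView) { result in
//         // handle .success(image) or .failure(error)
//     }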
| 28.37931 | 108 | 0.616849 | 3.125 |
0d74b97a64d91ec85993269235d6459b66ff6e3b
| 2,276 |
kt
|
Kotlin
|
Lecture 3/githubclient/app/src/main/java/com/avast/android/lecture/github/Settings.kt
|
LaudateCorpus1/android-lectures
|
edafe7bf572df354cb0de98da1abdb14268f939a
|
[
"Apache-2.0"
] | 28 |
2015-11-12T20:30:14.000Z
|
2022-01-05T05:37:31.000Z
|
Lecture 3/githubclient/app/src/main/java/com/avast/android/lecture/github/Settings.kt
|
LaudateCorpus1/android-lectures
|
edafe7bf572df354cb0de98da1abdb14268f939a
|
[
"Apache-2.0"
] | null | null | null |
Lecture 3/githubclient/app/src/main/java/com/avast/android/lecture/github/Settings.kt
|
LaudateCorpus1/android-lectures
|
edafe7bf572df354cb0de98da1abdb14268f939a
|
[
"Apache-2.0"
] | 11 |
2016-09-15T21:35:45.000Z
|
2022-02-02T13:36:14.000Z
|
package com.avast.android.lecture.github
import android.content.Context
import android.content.SharedPreferences
import android.util.Log
import androidx.datastore.core.DataStore
import androidx.datastore.preferences.core.Preferences
import androidx.datastore.preferences.core.edit
import androidx.datastore.preferences.core.intPreferencesKey
import androidx.datastore.preferences.core.stringPreferencesKey
import androidx.datastore.preferences.preferencesDataStore
import com.avast.android.lecture.github.utils.SingletonHolder
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.first
import kotlinx.coroutines.flow.map
/**
* Wraps access to [SharedPreferences].
*/
class Settings private constructor(private val context: Context) {
//TODO: exercise 6,7
private val Context.dataStore: DataStore<Preferences> by preferencesDataStore(name = "settings")
/**
* Get how many times the application was launched.
*/
suspend fun getAppLaunchesCount(): Int = context.dataStore.data.map { preferences ->
preferences[KEY_APP_LAUNCH_COUNT] ?: 0
}.first()
/**
* Increase app launched counter.
*/
suspend fun increaseAppLaunchesCount() {
context.dataStore.edit { preferences ->
val actualCount = preferences[KEY_APP_LAUNCH_COUNT] ?: 0
preferences[KEY_APP_LAUNCH_COUNT] = (actualCount + 1).also {
Log.d(App::class.java.simpleName, "Increased app count: $it")
}
Log.d(App::class.java.simpleName, "Increased app launch")
}
}
/**
* Read last username from preferences
*/
fun getLastUsername(): Flow<String> {
return context.dataStore.data.map { preferences ->
preferences[KEY_USERNAME].orEmpty()
}
}
/**
* Set username to preferences
*/
suspend fun setLastUsername(username: String) {
context.dataStore.edit { preferences ->
preferences[KEY_USERNAME] = username
}
}
companion object: SingletonHolder<Settings, Context>(::Settings) {
const val APP_PREFERENCES = "github_client"
val KEY_APP_LAUNCH_COUNT = intPreferencesKey("app_launches")
val KEY_USERNAME = stringPreferencesKey("username")
}
}
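// Illustrative call site (an added sketch; it assumes the project's
// SingletonHolder exposes a getInstance(context) accessor, which is the common
// pattern but is not shown in this file):
//
//   lifecycleScope.launch {
//       val settings = Settings.getInstance(applicationContext)
//       settings.increaseAppLaunchesCount()
//       settings.getLastUsername().collect { name -> Log.d("Settings", name) }
//   }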
| 32.056338 | 100 | 0.699912 | 3.109375 |
eee158e5dd153c87f7445449e061f1bd39bb5598
| 6,891 |
go
|
Go
|
pkg/install.go
|
borkod/kindly
|
f6de65b1fe94a266e5669c7e2f99cb512f5b0642
|
[
"Apache-2.0"
] | null | null | null |
pkg/install.go
|
borkod/kindly
|
f6de65b1fe94a266e5669c7e2f99cb512f5b0642
|
[
"Apache-2.0"
] | 7 |
2021-04-25T15:43:32.000Z
|
2021-04-25T15:54:09.000Z
|
pkg/install.go
|
borkod/kindly
|
f6de65b1fe94a266e5669c7e2f99cb512f5b0642
|
[
"Apache-2.0"
] | null | null | null |
package pkg
import (
"bufio"
"bytes"
"context"
"crypto/sha256"
"encoding/hex"
"errors"
"io"
"io/ioutil"
"net"
"net/http"
"os"
"path/filepath"
"strings"
"text/template"
"time"
)
// Install function implements install command
func (k Kindly) Install(ctx context.Context, p string, f bool, u bool) (err error) {
if f && u {
return errors.New("Only one of 'file' or 'url' flags can be set.")
}
if u {
if !isValidUrl(p) {
return errors.New("Invalid URL.")
}
}
// Create a temporary directory where files will be downloaded
tmpDir, err := ioutil.TempDir("", "kindly_")
if err != nil {
k.logger.Println(err)
os.Exit(1)
}
// Clean up temporary directory
defer os.RemoveAll(tmpDir)
var tmpFile string
var yc KindlyStruct
var dl dlInfo
if dl, yc, err = k.getValidYConfig(ctx, p, f, u); err != nil {
return err
}
// Applies Version values to the URL template
if dl.URL, dl.URLSHA, err = executeURL(dl, yc); err != nil {
return err
}
// Downloads package file and package SHA file.
// Calculates package SHA value
// Compares package SHA value to SHA value in the SHA file
if tmpFile, err = k.processFile(ctx, dl, tmpDir); err != nil {
return err
}
// decompress tmpFile into tmpDir
if strings.Contains(tmpFile, "tar.gz") {
if err = decompress(tmpDir, tmpFile); err != nil {
return err
}
}
if strings.Contains(tmpFile, "zip") {
if _, err = unzip(tmpFile, tmpDir); err != nil {
return err
}
}
var l pkgManifest
l.Name = dl.Name
l.Date = time.Now().Format("2006-01-02 15:04:05")
l.Version = dl.Version
l.Source = dl.Source
// Copy all extracted bin files from tmpDir into OutBinDir
for _, n := range yc.Spec.Bin {
if strings.Contains(strings.ReplaceAll(n, " ", ""), "{{.OS}}") ||
strings.Contains(strings.ReplaceAll(n, " ", ""), "{{.Arch}}") {
if n, err = executeBin(n, k.cfg.OS, k.cfg.Arch); err != nil {
k.logger.Println("ERROR")
k.logger.Println(err)
continue
}
}
if k.cfg.OS == "windows" {
n = n + ".exe"
}
cpBool := false
if cpBool, err = copyFile(k.cfg.OutBinDir, tmpDir, n); err != nil {
k.logger.Println("ERROR")
k.logger.Println(err)
}
if cpBool {
l.Bin = append(l.Bin, n)
}
}
// Copy all extracted completion files from tmpDir into OutCompletionDir
for _, n := range yc.Spec.Completion[k.cfg.Completion] {
cpBool := false
if cpBool, err = copyFile(k.cfg.OutCompletionDir, tmpDir, n); err != nil {
k.logger.Println("ERROR")
k.logger.Println(err)
}
if cpBool {
l.Completion = append(l.Completion, n)
}
}
// Copy all extracted man pages files from tmpDir into OutManDir
for _, n := range yc.Spec.Man {
cpBool := false
if cpBool, err = copyFile(k.cfg.OutManDir, tmpDir, n); err != nil {
k.logger.Println("ERROR")
k.logger.Println(err)
}
if cpBool {
l.Man = append(l.Man, n)
}
}
// Write the package manifest file
if err = writeManifest(l, k.cfg.ManifestDir); err != nil {
k.logger.Println(("ERROR"))
k.logger.Println(err)
}
return nil
}
// Downloads package file and package SHA file.
// Calculates package SHA value
// Compares package SHA value to SHA value in the SHA file
func (k Kindly) processFile(ctx context.Context, dl dlInfo, tmpDir string) (string, error) {
// Get the data
if k.cfg.Verbose {
k.logger.Println("Downloading file: ", dl.URL)
}
const ConnectMaxWaitTime = 1 * time.Second
const RequestMaxWaitTime = 5 * time.Second
client := http.Client{
Transport: &http.Transport{
DialContext: (&net.Dialer{
Timeout: ConnectMaxWaitTime,
}).DialContext,
},
}
ctx, cancel := context.WithTimeout(ctx, RequestMaxWaitTime)
defer cancel()
req, err := http.NewRequestWithContext(ctx, http.MethodGet, dl.URL, nil)
if err != nil {
return "", err
}
resp, err := client.Do(req)
if err != nil {
return "", err
}
defer resp.Body.Close()
// DO I REALLY NEED TWO COPIES!?
var buf1, buf2 bytes.Buffer
w := io.MultiWriter(&buf1, &buf2)
if _, err := io.Copy(w, resp.Body); err != nil {
return "", err
}
if k.cfg.Verbose {
k.logger.Println("Download finished.")
}
// Calculate SHA256 of downloaded file
hash := sha256.New()
if _, err := io.Copy(hash, &buf1); err != nil {
return "", err
}
sum := hex.EncodeToString(hash.Sum(nil))
if k.cfg.Verbose {
k.logger.Println("Calculated SHA256 value: ", sum)
}
// Get the sha file
if len(dl.URLSHA) > 1 {
if k.cfg.Verbose {
k.logger.Println("Downloading SHA256 file: ", dl.URLSHA)
}
req, err := http.NewRequestWithContext(ctx, http.MethodGet, dl.URLSHA, nil)
if err != nil {
return "", err
}
respSha, err := client.Do(req)
if err != nil {
return "", err
}
defer respSha.Body.Close()
//buf := new(bytes.Buffer)
newStr := ""
//buf.ReadFrom(respSha.Body)
scanner := bufio.NewScanner(respSha.Body)
for scanner.Scan() {
shaLine := strings.SplitN(scanner.Text(), " ", 2)
if len(shaLine) > 1 {
if strings.Contains(shaLine[1], k.cfg.OS) && strings.Contains(shaLine[1], k.cfg.Arch) {
newStr = shaLine[0]
}
} else {
newStr = shaLine[0]
}
}
// Get the sha file
if k.cfg.Verbose {
k.logger.Println("SHA256 file hash value: ", newStr)
}
// Check if SHA256 values match
if newStr != sum {
return "", errors.New("SHA MISMATCH")
}
} else if k.cfg.Verbose {
k.logger.Println("NO SHA FILE PROVIDED. SKIPPING SHA VALUE CHECK")
}
// Create the output file in temporary
urlPath := strings.Split(dl.URL, "/")
filepath := filepath.Join(tmpDir, urlPath[len(urlPath)-1])
if k.cfg.Verbose {
k.logger.Println("Writing output file: ", filepath)
}
out, err := os.Create(filepath)
if err != nil {
return "", err
}
defer out.Close()
// Write the body to file
_, err = io.Copy(out, &buf2)
return filepath, err
}
// Applies OS and Architecture values to the binary file names template
func executeBin(n string, os string, arch string) (string, error) {
binT, err := template.New("bin").Parse(n)
if err != nil {
return "", err
}
type binS struct {
OS string
Arch string
}
nS := binS{os, arch}
var buf bytes.Buffer
if err = binT.Execute(&buf, nS); err != nil {
return "", err
}
newStr := buf.String()
if os == "windows" {
newStr = newStr + ".exe"
}
return newStr, nil
}
// Applies Version values to the URL template
func executeURL(dl dlInfo, yc KindlyStruct) (string, string, error) {
urlT, err := template.New("url").Parse(yc.Spec.Assets[dl.osArch].URL)
if err != nil {
return "", "", err
}
urlShaT, err := template.New("urlSha").Parse(yc.Spec.Assets[dl.osArch].ShaURL)
if err != nil {
return "", "", err
}
var buf bytes.Buffer
if err = urlT.Execute(&buf, dl); err != nil {
return "", "", err
}
url := buf.String()
buf.Reset()
if err = urlShaT.Execute(&buf, dl); err != nil {
return "", "", err
}
urlSha := buf.String()
return url, urlSha, nil
}
| 21.73817 | 92 | 0.639385 | 3.15625 |
b0c631245ca3559a2ae221817f1108e7fe23e99c
| 3,948 |
lua
|
Lua
|
tests/unit/high_level_database.lua
|
LuaDist-testing/luchia
|
0887d079a2cbf1f0225d391cfae06fbdf3f610f3
|
[
"BSD-3-Clause"
] | 11 |
2015-08-13T23:43:50.000Z
|
2016-11-07T19:13:40.000Z
|
tests/unit/high_level_database.lua
|
LuaDist-testing/luchia
|
0887d079a2cbf1f0225d391cfae06fbdf3f610f3
|
[
"BSD-3-Clause"
] | 2 |
2015-01-17T19:02:42.000Z
|
2019-01-03T16:12:11.000Z
|
tests/unit/high_level_database.lua
|
LuaDist-testing/luchia
|
0887d079a2cbf1f0225d391cfae06fbdf3f610f3
|
[
"BSD-3-Clause"
] | 1 |
2015-02-24T21:18:00.000Z
|
2015-02-24T21:18:00.000Z
|
local lunatest = require "lunatest"
local assert_equal = lunatest.assert_equal
local assert_function = lunatest.assert_function
local assert_table = lunatest.assert_table
local common = require "common_test_functions"
local database = require "luchia.database"
local good_protocol = common.server_good_protocol
local good_host = common.server_good_host
local good_port = common.server_good_port
local user = common.server_user
local password = common.server_password
local request_function = common.server_request
local example_database = common.server_example_database
local conf = {
default = {
server = {
protocol = good_protocol,
host = good_host,
port = good_port,
},
},
}
local tests = {}
local function new_with_default_server_params()
local params = {
custom_configuration = conf,
custom_request_function = request_function,
}
local db = database:new(params)
assert_table(db, "db")
return db
end
function tests.test_new_with_default_server_params_returns_valid_server()
local db = new_with_default_server_params()
assert_table(db.server, "db.server")
assert_function(db.server.request, "db.server:request")
end
function tests.test_new_with_default_server_params_returns_only_server()
local db = new_with_default_server_params()
assert_equal(1, common.table_length(db), "db length")
end
local function new_with_custom_server_params()
local db = database:new(conf.default.server)
assert_table(db, "db")
return db
end
function tests.test_new_with_custom_server_params_returns_valid_server()
local db = new_with_custom_server_params()
assert_table(db.server, "db.server")
assert_function(db.server.request, "db.server:request")
end
function tests.test_new_with_custom_server_params_returns_only_server()
local db = new_with_custom_server_params()
assert_equal(1, common.table_length(db), "db length")
end
function tests.test_list_databases_returns_valid_list()
local db = new_with_default_server_params()
local response, response_code, headers, status = db:list()
assert_table(response)
end
function tests.test_info_with_database_name_returns_valid_database_info()
local db = new_with_default_server_params()
local response, response_code, headers, status = db:info(example_database)
assert_table(response)
assert_equal(response.db_name, example_database)
end
function tests.test_info_with_no_database_name_returns_nil()
local db = new_with_default_server_params()
local response, response_code, headers, status = db:info()
assert_equal(response, nil)
end
function tests.test_create_with_database_name_returns_valid_response()
local db = new_with_default_server_params()
local response, response_code, headers, status = db:create(example_database)
assert_table(response)
assert_equal(response.ok, true)
end
function tests.test_create_with_no_database_name_returns_nil()
local db = new_with_default_server_params()
local response, response_code, headers, status = db:create()
assert_equal(response, nil)
end
function tests.test_delete_with_database_name_returns_valid_response()
local db = new_with_default_server_params()
local response, response_code, headers, status = db:delete(example_database)
assert_table(response)
assert_equal(response.ok, true)
end
function tests.test_delete_with_no_database_name_returns_nil()
local db = new_with_default_server_params()
local response, response_code, headers, status = db:delete()
assert_equal(response, nil)
end
function tests.test_response_ok_with_ok_response_returns_true()
local db = new_with_default_server_params()
local response = {ok = true}
local bool = db:response_ok(response)
assert_equal(bool, true)
end
function tests.test_response_ok_with_not_ok_response_returns_false()
local db = new_with_default_server_params()
local response = {ok = false}
local bool = db:response_ok(response)
assert_equal(bool, false)
end
return tests
| 31.086614 | 78 | 0.802178 | 3.140625 |
641f05b97594a95090cc684863a9544b61050717
| 2,483 |
py
|
Python
|
fastapi+filesUpdate/main.py
|
DeSireFire/Fastapi-sample
|
a7999aedfc25494f5fd6120c7b8042aa8d130693
|
[
"MIT"
] | null | null | null |
fastapi+filesUpdate/main.py
|
DeSireFire/Fastapi-sample
|
a7999aedfc25494f5fd6120c7b8042aa8d130693
|
[
"MIT"
] | null | null | null |
fastapi+filesUpdate/main.py
|
DeSireFire/Fastapi-sample
|
a7999aedfc25494f5fd6120c7b8042aa8d130693
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Author : RaXianch
# CreatDATE : 2021/8/21
# CreatTIME : 20:15
# Blog : https://blog.raxianch.moe/
# Github : https://github.com/DeSireFire
__author__ = 'RaXianch'
from typing import List
import uvicorn
import time
from fastapi import FastAPI, File, UploadFile
from starlette.responses import HTMLResponse
app = FastAPI()
# file: bytes = File(...), # converts the uploaded file to bytes; in this form it is not handled as a file object
# fileb: UploadFile = File(...), # UploadFile yields a file-like object, which can be saved to local disk
# notes: str = Form(...) # reads an ordinary key-value form field
@app.post("/file/")
async def create_files(file: bytes = File(...)):
with open('./base.jpg', 'wb') as f:
f.write(file)
return {"fileSize": len(file)}
@app.post('/uploadFile')
async def uploadFile(file: UploadFile = File(...)):
"""缺少验证是否上传文件"""
content = await file.read()
with open('./test.jpg', 'wb') as f:
f.write(content)
return {"filename": file.filename}
@app.post("/files/")
async def create_files(
files: List[bytes] = File(...)
):
print(type(files))
print(files)
for i in files:
with open(f'./{time.time()}.jpg', 'wb') as f:
f.write(i)
return {"file_sizes": [len(file) for file in files]}
@app.post("/uploadfiles/")
async def create_upload_files(
files: List[UploadFile] = File(...)
):
for i in files:
content = await i.read()
if content:
print(content)
with open(f'./{time.time()*1000}.jpg', 'wb') as f:
f.write(content)
return {"filenames": [file.filename for file in files]}
@app.get("/")
async def main():
content = """
<body>
<form action="/file/" enctype="multipart/form-data" method="post">
<input name="file" type="file">
<input type="submit" value="file上传">
</form>
<form action="/files/" enctype="multipart/form-data" method="post">
<input name="files" type="file" multiple>
<input type="submit" value="files上传">
</form>
<form action="/uploadFile/" enctype="multipart/form-data" method="post">
<input name="file" type="file">
<input type="submit" value="uploadFile上传">
</form>
<form action="/uploadfiles/" enctype="multipart/form-data" method="post">
<input name="files" type="file" multiple>
<input type="submit" value="uploadfiles上传">
</form>
</body>
"""
return HTMLResponse(content=content)
if __name__ == '__main__':
uvicorn.run(app)
| 26.136842 | 77 | 0.604913 | 3.25 |
2c7e801237ea626ae741f5008f2cad677b7389ef
| 6,536 |
cpp
|
C++
|
Homeworks/miscellaneous/LW15/grade_calculator.cpp
|
sameer-h/CSCE121
|
b6a302bfc248bfaedd03ab38060e12163458c7f8
|
[
"MIT"
] | 1 |
2020-11-01T21:02:03.000Z
|
2020-11-01T21:02:03.000Z
|
Homeworks/miscellaneous/LW15/grade_calculator.cpp
|
sameer-h/CSCE121
|
b6a302bfc248bfaedd03ab38060e12163458c7f8
|
[
"MIT"
] | null | null | null |
Homeworks/miscellaneous/LW15/grade_calculator.cpp
|
sameer-h/CSCE121
|
b6a302bfc248bfaedd03ab38060e12163458c7f8
|
[
"MIT"
] | null | null | null |
#include <vector>
#include <string>
#include <fstream>
#include <stdexcept>
#include "grade_calculator.h"
////////////////////////////////////////////////////////////////////////////////
// TODO(student): implement these methods
double GradeCalculator::exam_average() const {
// TODO(student)
double grade = 0;
int countGrades = 0;
double average;
for (size_t i = 0; i < exam_grades.size(); ++i) {
grade += exam_grades.at(i);
countGrades++;
}
grade += final_exam;
countGrades++;
double examAvg = grade / countGrades;
if (final_exam > examAvg) {
return final_exam;
}
average = grade / countGrades;
return average;
}
double GradeCalculator::zybook_average() const {
// TODO(student)
double grade = 0;
int countGrades = 0;
double average;
for (size_t i = 0; i < zybook_participation_grades.size(); ++i) {
grade += zybook_participation_grades.at(i);
countGrades++;
}
for (size_t i = 0; i < zybook_challenge_grades.size(); ++i) {
grade += zybook_challenge_grades.at(i);
countGrades++;
}
double zybookAvg = grade / countGrades;
if (zybookAvg >= 85) {
return 100;
}
average = zybookAvg + 15;
return average;
}
double GradeCalculator::hw_average() const {
// TODO(student)
double grade = 0;
int countGrades = 0;
double average;
for (size_t i = 0; i < hw_grades.size(); ++i) {
if (hw_redemption_grades.size() > i) {
if (hw_redemption_grades.at(i) > hw_grades.at(i)) {
grade += (hw_redemption_grades.at(i) - hw_grades.at(i)) / 2;
}
}
grade += hw_grades.at(i);
countGrades++;
}
average = grade / countGrades;
return average;
}
double GradeCalculator::lw_average() const {
// TODO(student)
double grade = 0;
int countGrades = 0;
double average;
for (size_t i = 0; i < lw_grades.size(); ++i) {
grade += lw_grades.at(i);
countGrades++;
}
average = 100 * (grade / countGrades);
return average;
}
double GradeCalculator::final_grade_numeric() const {
// TODO(student)
double grade = exam_average() * (0.5) +
zybook_average() * (0.07) +
hw_average() * (0.35) +
lw_average() * (0.08);
return grade;
}
char GradeCalculator::final_grade_letter() const {
// TODO(student)
double course_avg = final_grade_numeric();
if (!has_syllabus_ack || course_avg < 60) { // no syllabus acknowledgement
return 'F';
} else if (exam_average() < 60 || course_avg < 70) {
return 'D';
} else if (course_avg < 80) {
return 'C';
} else if (course_avg < 90) {
return 'B';
} else {
return 'A';
}
}
////////////////////////////////////////////////////////////////////////////////
void GradeCalculator::read_final_exam(std::ifstream& fin) {
has_final_exam = true;
fin >> final_exam;
if (fin.fail()) {
throw std::runtime_error("failed to read final exam grade");
}
}
void GradeCalculator::read_exam_hw_redemption_zybook(std::ifstream& fin, const std::string& category) {
unsigned number;
fin >> number;
if (fin.fail()) {
throw std::runtime_error("failed to read " + category + " number");
}
std::vector<double>* vec;
if (category == "exam") {
vec = &exam_grades;
} else if (category == "hw") {
vec = &hw_grades;
} else if (category == "hw-redemption") {
vec = &hw_redemption_grades;
} else if (category == "zybook") {
char type;
fin >> type;
if (fin.fail()) {
// HOW TO REACH THIS?
throw std::runtime_error("failed to read " + category + " type");
}
switch (type) {
case 'p': vec = &zybook_participation_grades; break;
case 'c': vec = &zybook_challenge_grades; break;
default: throw std::runtime_error("unrecognized zybook assignment type: " + type);
}
} else {
throw std::runtime_error("unrecognized category: " + category);
}
double grade;
fin >> grade;
if (fin.fail()) {
throw std::runtime_error("failed to read "+category+" grade");
}
while (number > vec->size()) {
vec->push_back(0);
}
vec->at(number-1) = grade;
}
void GradeCalculator::read_lw(std::ifstream& fin) {
unsigned number;
fin >> number;
if (fin.fail()) {
throw std::runtime_error("failed to read lw number");
}
std::string str;
fin >> str;
if (fin.fail()) {
// HOW TO REACH THIS?
throw std::runtime_error("failed to read lw grade");
}
while (number > lw_grades.size()) {
lw_grades.push_back(false);
}
if (str == "0" || str == "false") {
lw_grades.at(number-1) = false;
} else if (str == "1" || str == "true") {
lw_grades.at(number-1) = true;
} else {
throw std::runtime_error("invalid lw grade value: " + str);
}
}
void GradeCalculator::read_syllabus_ack(std::ifstream& fin) {
std::string str;
fin >> str;
if (fin.fail()) {
// HOW TO REACH THIS?
throw std::runtime_error("failed to read syllabus-ack type");
}
if (str == "0" || str == "false") {
has_syllabus_ack = false;
} else if (str == "1" || str == "true") {
has_syllabus_ack = true;
} else {
throw std::runtime_error("invalid syllabus-ack grade value: " + str);
}
}
void GradeCalculator::load_grades(const std::string& filename) {
std::ifstream fin(filename);
if (!fin.is_open()) {
throw std::runtime_error("could not open file");
}
while (!fin.eof()) {
std::string category;
fin >> category;
if (fin.fail()) {
if (fin.eof()) {
continue;
}
// HOW TO REACH THIS?
throw std::runtime_error("failed to read category");
}
if (category == "final-exam") {
read_final_exam(fin);
} else if (category == "exam" || category == "hw" || category == "hw-redemption" || category == "zybook") {
read_exam_hw_redemption_zybook(fin, category);
} else if (category == "lw") {
read_lw(fin);
} else if (category == "syllabus-ack") {
read_syllabus_ack(fin);
} else {
throw std::runtime_error("invalid category: " + category);
}
}
}
| 25.333333 | 116 | 0.544829 | 3.15625 |
21d245e04d1dbd2970c466e0da6a84a87325274b
| 7,015 |
js
|
JavaScript
|
src/parser.js
|
ogonkov/djanjucks
|
36c0e58b0ae8805f6a4a31fbde36e880ee30f2cc
|
[
"MIT"
] | 6 |
2019-07-15T09:40:14.000Z
|
2021-07-01T15:29:01.000Z
|
src/parser.js
|
ogonkov/djanjucks
|
36c0e58b0ae8805f6a4a31fbde36e880ee30f2cc
|
[
"MIT"
] | 5 |
2019-09-10T20:32:43.000Z
|
2021-05-10T01:38:05.000Z
|
src/parser.js
|
ogonkov/djanjucks
|
36c0e58b0ae8805f6a4a31fbde36e880ee30f2cc
|
[
"MIT"
] | 2 |
2019-09-10T20:16:16.000Z
|
2019-10-27T22:45:26.000Z
|
import lexer from 'nunjucks/src/lexer';
import { Parser } from 'nunjucks/src/parser';
import nodes from './nodes';
class DjangoParser extends Parser {
// Nunjucks wants kwargs in tags to be comma separated.
// However this is not required in django.
// This parserSignature bypasses that requirement
parseSignature(tolerant, noParens) {
let tok = this.peekToken();
if (!noParens && tok.type !== lexer.TOKEN_LEFT_PAREN) {
if (tolerant) {
return null;
} else {
this.fail('expected arguments', tok.lineno, tok.colno);
}
}
if (tok.type === lexer.TOKEN_LEFT_PAREN) {
tok = this.nextToken();
}
const args = new nodes.NodeList(tok.lineno, tok.colno);
const kwargs = new nodes.KeywordArgs(tok.lineno, tok.colno);
    while (1) { // eslint-disable-line no-constant-condition
tok = this.peekToken();
if (!noParens && tok.type === lexer.TOKEN_RIGHT_PAREN) {
this.nextToken();
break;
} else if (noParens && tok.type === lexer.TOKEN_BLOCK_END) {
break;
}
const arg = this.parseExpression();
if (this.skipValue(lexer.TOKEN_OPERATOR, '=')) {
kwargs.addChild(
new nodes.Pair(arg.lineno, arg.colno, arg, this.parseExpression())
);
} else {
args.addChild(arg);
}
}
if (kwargs.children.length) {
args.addChild(kwargs);
}
return args;
}
// A variation of parseSignature that expects a single argument
parseFilterSignature() {
let tok = this.peekToken();
if (tok.type === lexer.TOKEN_COLON) {
tok = this.nextToken();
}
tok = this.peekToken();
const args = new nodes.NodeList(tok.lineno, tok.colno);
const arg = this.parsePrimary();
args.addChild(arg);
return args;
}
// Django Filters take a single argument
parseFilter(node) {
while (this.skip(lexer.TOKEN_PIPE)) {
const name = this.parseFilterName();
node = new nodes.Filter(
name.lineno,
name.colno,
name,
new nodes.NodeList(
name.lineno,
name.colno,
[node].concat(this.parseFilterArgs(node))
)
);
}
return node;
}
parseFilterPostfix(node) {
let tok = this.peekToken();
if (tok.type === lexer.TOKEN_COLON) {
// Function call
node = new nodes.FunCall(
tok.lineno,
tok.colno,
node,
this.parseFilterSignature()
);
}
return node;
}
parseInclude() {
const tagName = 'include';
const token = this.peekToken();
if (!this.skipSymbol(tagName)) {
this.fail('parseInclude: expected ' + tagName);
}
const node = new nodes.Include(token.lineno, token.colno);
node.template = this.parsePrimary();
if (this.skipSymbol('with')) {
// Cheekily use parseSignature here.
// Arg should be compared to "only"
const kwargs = this.parseSignature(null, true);
if (kwargs.children[0].value === 'only') {
node.only = true;
}
if (kwargs.children[kwargs.children.length - 1]) {
node.kwargs = kwargs.children[kwargs.children.length - 1];
}
}
this.advanceAfterBlockEnd(token.value);
return node;
}
parseFilterArgs(node) {
if (this.peekToken().type === lexer.TOKEN_COLON) {
// Get a FunCall node and add the parameters to the
// filter
const call = this.parseFilterPostfix(node);
return call.args.children;
}
return [];
}
parseFor() {
// Taken verbatim from Nunjucks.
// Add support for empty & reversed.
var forTok = this.peekToken();
var node;
var endBlock;
if (this.skipSymbol('for')) {
node = new nodes.For(forTok.lineno, forTok.colno);
endBlock = 'endfor';
} else if (this.skipSymbol('asyncEach')) {
node = new nodes.AsyncEach(forTok.lineno, forTok.colno);
endBlock = 'endeach';
} else if (this.skipSymbol('asyncAll')) {
node = new nodes.AsyncAll(forTok.lineno, forTok.colno);
endBlock = 'endall';
} else {
this.fail('parseFor: expected for{Async}', forTok.lineno, forTok.colno);
}
node.name = this.parsePrimary();
if (!(node.name instanceof nodes.Symbol)) {
this.fail('parseFor: variable name expected for loop');
}
const type = this.peekToken().type;
if (type === lexer.TOKEN_COMMA) {
// key/value iteration
const key = node.name;
node.name = new nodes.Array(key.lineno, key.colno);
node.name.addChild(key);
while (this.skip(lexer.TOKEN_COMMA)) {
const prim = this.parsePrimary();
node.name.addChild(prim);
}
}
if (!this.skipSymbol('in')) {
this.fail(
'parseFor: expected "in" keyword for loop',
forTok.lineno,
forTok.colno
);
}
node.arr = this.parseExpression();
node.reversed = this.skipSymbol('reversed');
this.advanceAfterBlockEnd(forTok.value);
node.body = this.parseUntilBlocks(endBlock, 'empty');
if (this.skipSymbol('empty')) {
this.advanceAfterBlockEnd('empty');
node.empty = this.parseUntilBlocks(endBlock);
}
this.advanceAfterBlockEnd();
return node;
}
parseStatement() {
var tok = this.peekToken();
var node;
if (tok.type !== lexer.TOKEN_SYMBOL) {
this.fail('tag name expected', tok.lineno, tok.colno);
}
if (this.breakOnBlocks && this.breakOnBlocks.indexOf(tok.value) !== -1) {
return null;
}
switch (tok.value) {
case 'raw':
return this.parseRaw();
case 'verbatim':
return this.parseRaw('verbatim');
case 'if':
case 'ifAsync':
return this.parseIf();
case 'for':
case 'asyncEach':
case 'asyncAll':
return this.parseFor();
case 'block':
return this.parseBlock();
case 'extends':
return this.parseExtends();
case 'set':
return this.parseSet();
case 'macro':
return this.parseMacro();
case 'call':
return this.parseCall();
case 'import':
return this.parseImport();
case 'from':
return this.parseFrom();
case 'filter':
return this.parseFilterStatement();
case 'switch':
return this.parseSwitch();
default:
if (this.extensions.length) {
for (let i = 0; i < this.extensions.length; i++) {
const ext = this.extensions[i];
const tags = ext.tags || [];
if (tags.indexOf(tok.value) !== -1) {
return ext.parse(this, nodes, lexer);
}
}
}
this.fail('unknown block tag: ' + tok.value, tok.lineno, tok.colno);
}
return node;
}
}
export default {
parse(src, extensions, opts) {
var p = new DjangoParser(lexer.lex(src, opts));
if (extensions !== undefined) {
p.extensions = extensions;
}
return p.parseAsRoot();
},
Parser: DjangoParser
};
| 25.233813 | 78 | 0.586458 | 3.09375 |
b0e16130d9096e5db8578a68e90406cbba2777c3
| 9,485 |
py
|
Python
|
tests/integration/test_integration.py
|
windies21/loopchain
|
6e96c8a7e006747af04187155678f2fae59e1389
|
[
"Apache-2.0"
] | 105 |
2018-04-03T05:29:08.000Z
|
2022-01-28T17:33:20.000Z
|
testcase/integration/test_integration.py
|
laurenceyoon/loopchain
|
e87032779be4715c135c2c91d2757d9c63bf4e31
|
[
"Apache-2.0"
] | 135 |
2018-09-04T07:11:02.000Z
|
2021-12-15T06:25:47.000Z
|
testcase/integration/test_integration.py
|
laurenceyoon/loopchain
|
e87032779be4715c135c2c91d2757d9c63bf4e31
|
[
"Apache-2.0"
] | 46 |
2018-05-07T09:12:07.000Z
|
2022-02-23T09:58:37.000Z
|
import random
import time
import pytest
from iconsdk.builder.transaction_builder import MessageTransactionBuilder
from iconsdk.icon_service import IconService
from iconsdk.providers.http_provider import HTTPProvider
from iconsdk.signed_transaction import SignedTransaction
from iconsdk.wallet.wallet import KeyWallet
from loopchain import conf
from loopchain import utils
from loopchain.blockchain.blocks import Block, BlockSerializer
from loopchain.blockchain.transactions import TransactionVerifier
from loopchain.blockchain.transactions import TransactionVersioner
from loopchain.blockchain.types import VarBytes
from . import conftest
from .conftest import Loopchain
# Global variables
peer_conf_path_list = [] # Peer config file path list. Needed to query peers' information.
genesis_data: dict = {} # Genesis tx content. Compared with tx in genesis block.
@pytest.fixture(scope="class", autouse=True)
def loopchain_proc(xprocess, request, generate_peer_conf_path_list_extended):
"""Set up loopchain launcher for integration test"""
# Define test environment
global peer_conf_path_list
global genesis_data
proc_info_list = [] # Loopchain process info. Needed to tear down processes.
peer_type = request.config.getoption("--peer-type")
peer_count = int(request.config.getoption("--peer-count"))
channel_count = int(request.config.getoption("--channel-count"))
channel_list = [f"channel_{i}" for i in range(channel_count)]
print(f"\n*--- Test env:\n Peer Type: {peer_type}, Peer Count: {peer_count}, Channel Count: {channel_count}")
Loopchain.pattern = fr"BroadcastScheduler process\(channel_{channel_count - 1}\) start"
Loopchain.end_line = 80 * peer_count * channel_count
# Generate configure files. Run only one time at the beginning of the test.
print("*--- Generate peer configure path list...")
peer_conf_path_list, channel_manage_data_path = \
generate_peer_conf_path_list_extended(peer_count=peer_count, channel_list=channel_list)
print("> peer_conf_path: ", peer_conf_path_list)
print("> channel_manage_data_path: ", channel_manage_data_path)
genesis_data = conftest.get_genesis_data(conf_path_list=peer_conf_path_list)
# Run Each peer
for peer_order in range(peer_count):
peer_conf_path = peer_conf_path_list[peer_order]
Loopchain.args = ["loop", peer_type, "-d", "-o", peer_conf_path]
proc_name = f"peer{peer_order}"
print(f"==========PEER_{peer_order} READY TO START ==========")
xprocess.ensure(proc_name, Loopchain)
# Store process information for terminate processes at the end of the test
proc_info = xprocess.getinfo(proc_name)
proc_info_list.append(proc_info)
print(f"==========ALL GREEN ==========")
time.sleep(0.5 * peer_count * channel_count)
yield
# Executed here after this fixture's scope ends.
for proc_info in proc_info_list:
proc_info.terminate()
time.sleep(3) # For additional tests, wait for a moment to cool down.
class TestLoopchain:
sent_tx_data = {} # Sent tx data. Needed to be compared whether it equals with the queried one.
tx_hash_by_channel = {} # Tx hashes. It collects return values of 'send_transaction'.
@pytest.mark.parametrize("port, channel_name", conftest.port_channel_list)
def test_health_check_before_test(self, port, channel_name):
"""Health check before test starts
**Assertion Tests**:
- Compare values of `accounts`, `message` and `nid` between queried genesis tx and origin data
"""
global genesis_data
expected_data = genesis_data["transaction_data"]
url = utils.normalize_request_url(str(port), conf.ApiVersion.v3, channel_name)
print("Req url: ", url)
icon_service = IconService(HTTPProvider(url))
genesis_block: dict = icon_service.get_block(0)
# TODO: dummy data to deserialize block. Fix in iconsdk
genesis_block["commit_state"] = None
genesis_block["confirmed_transaction_list"][0]["nid"] = "0x3"
tx_versioner = TransactionVersioner()
block_serializer = BlockSerializer.new("0.1a", TransactionVersioner())
genesis_block: Block = block_serializer.deserialize(block_dumped=genesis_block)
genesis_tx = list(genesis_block.body.transactions.values())[0]
print("genesis_tx: ", genesis_tx)
tv = TransactionVerifier.new("genesis", genesis_tx.type(), tx_versioner)
tv.verify(genesis_tx)
assert expected_data["accounts"] == genesis_tx.raw_data["accounts"]
assert expected_data["message"] == genesis_tx.raw_data["message"]
assert expected_data["nid"] == genesis_tx.raw_data["nid"]
@pytest.mark.parametrize("port, channel_name", conftest.port_channel_list)
def test_get_lastest_block_has_no_error(self, port, channel_name):
"""Test that getLastBlock API has no issue"""
url = utils.normalize_request_url(str(port), conf.ApiVersion.v3, channel_name)
icon_service = IconService(HTTPProvider(url))
block = icon_service.get_block("latest")
print("REQ url: ", url)
print("RES block: ", block)
assert "error" not in block
def test_send_tx_message(self, request):
"""Test for 'send_transaction'
.. note::
Test steps:
1. Get peer info from first peer
2. Extract key and password and make wallet
3. Build message transaction and sign it
4. Send Tx to first channel.
5. Await consensus time(currently 0.5 * <<Number of peers>> 'sec')
6. Repeat from '3'
        .. warning:: The interval await time is essential, since consensus must complete between steps.
**Assertion Test**:
- Check that return value of send_transaction has valid tx hash format.
"""
global peer_conf_path_list
channel_count = int(request.config.getoption("--channel-count"))
from_peer = conftest.get_peer_info(conf_path_list=peer_conf_path_list, order=0)
key_path = from_peer["PRIVATE_PATH"]
key_pass = from_peer["PRIVATE_PASSWORD"]
wallet = KeyWallet.load(key_path, key_pass)
for channel_order in range(channel_count):
# Create message
byte_msg = f"test_msg on {random.randint(0, 44444)}".encode("utf-8")
msg = VarBytes(byte_msg).hex_0x()
# Address
from_to_address = wallet.get_address()
# Store tx data to compare with queried one later.
channel_name = f"channel_{channel_order}"
TestLoopchain.sent_tx_data[channel_name] = {
"from": from_to_address,
"to": from_to_address,
"msg": msg
}
# Build transaction and sign it with wallet
transaction_data = {
"from": from_to_address,
"to": from_to_address,
"step_limit": 100000000,
"nid": 3,
"nonce": 100,
"data": msg,
}
transaction = MessageTransactionBuilder().from_dict(transaction_data).build()
signed_transaction = SignedTransaction(transaction, wallet)
# Send tx
url = utils.normalize_request_url("9000", conf.ApiVersion.v3, channel_name)
print("Req url: ", url)
icon_service = IconService(HTTPProvider(url))
tx_hash = icon_service.send_transaction(signed_transaction)
print("Tx hash: ", tx_hash)
assert tx_hash.startswith("0x")
TestLoopchain.tx_hash_by_channel[channel_name] = tx_hash
await_sec = 0.5 * len(peer_conf_path_list)
print(f"Await consensus...({await_sec})")
time.sleep(await_sec)
print("ALL TXs by channel: ", TestLoopchain.tx_hash_by_channel)
final_await_sec = 1 * channel_count
print(f"Await consensus final...({final_await_sec})")
time.sleep(final_await_sec)
@pytest.mark.parametrize("port, channel_name", conftest.port_channel_list)
def test_sent_tx_is_synced(self, port, channel_name):
"""Following test of 'test_send_tx_message'
Check that send_transaction is successfully completed.
**Test steps**:
1. Get tx_hash from previous test
2. Query tx_hash to first channel
3. Compare queried tx with original data
4. Repeat until the channel order reaches to the end
**Assertion Tests**:
Check Tx values below
1. From address
2. To address
3. Data (message)
"""
print("sent_tx_data: ", TestLoopchain.sent_tx_data)
url = utils.normalize_request_url(str(port), conf.ApiVersion.v3, channel_name)
print("Req url: ", url)
icon_service = IconService(HTTPProvider(url))
tx_hash = TestLoopchain.tx_hash_by_channel[channel_name]
print("Tx hash to be queried: ", tx_hash)
queried_tx = icon_service.get_transaction(tx_hash)
print("Tx result: ", queried_tx)
assert queried_tx["from"] == TestLoopchain.sent_tx_data[channel_name]["from"]
assert queried_tx["to"] == TestLoopchain.sent_tx_data[channel_name]["to"]
assert queried_tx["data"] == TestLoopchain.sent_tx_data[channel_name]["msg"]
time.sleep(0.5)
| 41.060606 | 113 | 0.66758 | 3.046875 |
2cc90b7810bdaed73909a516e876826a4a64bb4c
| 2,104 |
cpp
|
C++
|
uppdev/SvoValue/Checks.cpp
|
dreamsxin/ultimatepp
|
41d295d999f9ff1339b34b43c99ce279b9b3991c
|
[
"BSD-2-Clause"
] | 2 |
2016-04-07T07:54:26.000Z
|
2020-04-14T12:37:34.000Z
|
uppdev/SvoValue/Checks.cpp
|
dreamsxin/ultimatepp
|
41d295d999f9ff1339b34b43c99ce279b9b3991c
|
[
"BSD-2-Clause"
] | null | null | null |
uppdev/SvoValue/Checks.cpp
|
dreamsxin/ultimatepp
|
41d295d999f9ff1339b34b43c99ce279b9b3991c
|
[
"BSD-2-Clause"
] | null | null | null |
#include "SvoValue.h"
void DumpNumber(const Value& v)
{
RDUMP((int)v);
RDUMP((double)v);
RDUMP((int64)v);
RDUMP((bool)v);
}
int xx;
Value Opt0();
void Opt() {
Value v = Opt0();
xx = v;
}
void CheckString()
{
Value v = "ahoj";
for(int i = 0; i < 2; i++) {
String s = v;
RDUMP(s);
ASSERT(s == "ahoj");
WString ws = v;
RDUMP(ws);
ASSERT(ws == WString("ahoj"));
v = ws;
}
v = String("ahoj");
Value w = WString("ahoj");
ASSERT(v == w);
RDUMP(GetHashValue(v));
RDUMP(GetHashValue(w));
ASSERT(GetHashValue(v) == GetHashValue(w));
}
void CheckDateTime()
{
Time tm = GetSysTime();
Date dt = tm;
Value c;
Value v = tm;
RDUMP(v);
ASSERT(v == dt);
Date xx = v;
ASSERT(xx == dt);
c = v;
RDUMP(c);
ASSERT(c == dt);
Value cv = v;
RDUMP(cv);
ASSERT(cv == dt);
Value v2 = tm;
RDUMP(v2);
ASSERT(v2 == v);
c = v;
RDUMP(c);
ASSERT(c == dt);
ASSERT(c == tm);
v = dt;
v2 = ToTime(v);
ASSERT(v == v2);
ASSERT(GetHashValue(v) == GetHashValue(v2));
}
void CheckValueMap()
{
RLOG("------------------------------");
RLOG("CheckValueMap");
Value x = 123;
Value y = x;
ValueMap h;
h.Add("0", 123);
RDUMP(h["0"]);
h.Add("1", Date(2001, 12, 1));
h.Add("2", "test");
Value v = h;
ASSERT(v.GetCount() == 3);
RDUMP(v["0"]);
ASSERT(v["0"] == 123);
ASSERT(v["1"] == Date(2001, 12, 1));
ASSERT(v["2"] == "test");
ValueMap hh = v;
ASSERT(hh == h);
}
void OtherChecks()
{
Value c;
ASSERT(c.IsVoid());
RDUMP(c.IsVoid());
Value x = "Ahoj";
String xx = x;
RDUMP(xx);
ASSERT(xx == "Ahoj");
Value xw = WString("Ahoj");
RDUMP(xw);
RDUMP(xw == x);
Value xc = x;
RDUMP(xc);
c = xc;
RDUMP(c);
Value y = 123;
int yy = y;
RDUMP(yy);
Value xn = (int)Null;
RDUMP(IsNull(xn));
RDUMP(IsNull(yy));
Value yc = y;
RDUMP(y);
c = y;
RDUMP(c);
Value v2 = 123.0;
Value v3 = 123;
Value v4 = 125;
RDUMP(v2 == y);
RDUMP(v3 == y);
RDUMP(v4 == y);
RDUMP(v4 == v2);
Value uu = Uuid::Create();
RDUMP(uu);
Value uuc = uu;
RDUMP(uuc);
{
Color c = Blue;
Value v = c;
RDUMP(v);
Value v2 = v;
c = v2;
RDUMP(c);
}
}
| 13.662338 | 45 | 0.532795 | 3.296875 |
235bce3c095bf341878bf93bc6fcb08c02f765e4
| 8,390 |
lua
|
Lua
|
Milo Yip's ray-tracing bencmark/smallpt_lua/smallpt.lua
|
THISISAGOODNAME/miniRayTracing
|
48b8fc54df0f11d784bcd783885bc9b39d5ae7b1
|
[
"MIT"
] | 2 |
2017-07-13T08:06:32.000Z
|
2018-11-22T05:04:44.000Z
|
Milo Yip's ray-tracing bencmark/smallpt_lua/smallpt.lua
|
THISISAGOODNAME/miniRayTracing
|
48b8fc54df0f11d784bcd783885bc9b39d5ae7b1
|
[
"MIT"
] | null | null | null |
Milo Yip's ray-tracing bencmark/smallpt_lua/smallpt.lua
|
THISISAGOODNAME/miniRayTracing
|
48b8fc54df0f11d784bcd783885bc9b39d5ae7b1
|
[
"MIT"
] | null | null | null |
function RandomLCG(seed)
return function ()
seed = (214013 * seed + 2531011) % 4294967296
return seed * (1.0 / 4294967296.0)
end
end
---------------------------------------
Vec = {}
Vec.__index = Vec
function Vec.new(x_, y_, z_)
local self = { x = x_, y = y_, z = z_}
setmetatable(self, Vec)
return self
end
function Vec.__add(a, b)
return Vec.new(a.x + b.x, a.y + b.y, a.z + b.z)
end
function Vec.__sub(a, b)
return Vec.new(a.x - b.x, a.y - b.y, a.z - b.z)
end
function Vec.__mul(a, b)
return Vec.new(a.x * b, a.y * b, a.z * b)
end
-- component-wise multiplication
function Vec:mult(b)
return Vec.new(self.x * b.x, self.y * b.y, self.z * b.z)
end
function Vec:norm()
return self * (1.0 / math.sqrt(self.x * self.x + self.y * self.y + self.z * self.z))
end
function Vec:dot(b)
return self.x * b.x + self.y * b.y + self.z * b.z
end
-- cross product
function Vec.__mod(a, b)
return Vec.new(a.y * b.z - a.z * b.y, a.z * b.x - a.x * b.z, a.x * b.y - a.y * b.x)
end
Vec.Zero = Vec.new(0, 0, 0)
Vec.XAxis = Vec.new(1, 0, 0)
Vec.YAxis = Vec.new(0, 1, 0)
Vec.ZAxis = Vec.new(0, 0, 1)
---------------------------------------
Refl =
{
DIFF = 0,
SPEC = 1,
REFR = 2
}
---------------------------------------
Ray = {}
Ray.__index = Ray
function Ray.new(o_, d_)
local self = { o = o_, d = d_ }
setmetatable(self, Ray)
return self
end
---------------------------------------
Sphere = {}
Sphere.__index = Sphere
function Sphere.new(rad_, p_, e_, c_, refl_)
local self = { rad = rad_, p = p_, e = e_, c = c_, refl = refl_ }
self.sqRad = rad_ * rad_
self.maxC = math.max(math.max(c_.x, c_.y), c_.z)
self.cc = c_ * (1.0 / self.maxC)
setmetatable(self, Sphere)
return self
end
function Sphere:intersect(r)
-- Solve t^2*d.d + 2*t*(o-p).d + (o-p).(o-p)-R^2 = 0
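   -- With b = op.d the roots are t = b +/- sqrt(b^2 - op.op + R^2); `det` below is that
   -- discriminant, and `eps` rejects hits so close to the origin that they are self-intersections.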
local op = self.p - r.o
local b = op:dot(r.d)
local det = b * b - op:dot(op) + self.sqRad
local eps = 1e-4
if det < 0 then
return 0
else
local dets = math.sqrt(det)
if b - dets > eps then
return b - dets
elseif b + dets > eps then
return b + dets
else
return 0
end
end
end
---------------------------------------
-- Scene: radius, position, emission, color, material
spheres =
{
Sphere.new(1e5, Vec.new( 1e5+1,40.8,81.6), Vec.Zero, Vec.new(.75,.25,.25), Refl.DIFF), --Left
Sphere.new(1e5, Vec.new(-1e5+99,40.8,81.6), Vec.Zero, Vec.new(.25,.25,.75), Refl.DIFF), --Rght
Sphere.new(1e5, Vec.new(50,40.8, 1e5), Vec.Zero, Vec.new(.75,.75,.75), Refl.DIFF), --Back
Sphere.new(1e5, Vec.new(50,40.8,-1e5+170), Vec.Zero, Vec.Zero, Refl.DIFF), --Frnt
Sphere.new(1e5, Vec.new(50, 1e5, 81.6), Vec.Zero, Vec.new(.75,.75,.75), Refl.DIFF), --Botm
Sphere.new(1e5, Vec.new(50,-1e5+81.6,81.6), Vec.Zero, Vec.new(.75,.75,.75), Refl.DIFF), --Top
Sphere.new(16.5, Vec.new(27,16.5,47), Vec.Zero, Vec.new(1,1,1)*.999, Refl.SPEC), --Mirr
Sphere.new(16.5, Vec.new(73,16.5,78), Vec.Zero, Vec.new(1,1,1)*.999, Refl.REFR), --Glas
Sphere.new(600, Vec.new(50,681.6-.27,81.6), Vec.new(12,12,12), Vec.Zero, Refl.DIFF) --Lite
}
rand = RandomLCG(0)
function clamp(x)
if x < 0 then
return 0
elseif x > 1 then
return 1
else
return x
end
end
function toInt(x)
return (clamp(x) ^ (1 / 2.2)) * 255 + .5
end
function intersect(r)
local t = 1e20
local obj
for i, s in ipairs(spheres) do
local d = s:intersect(r)
if d ~= 0 and d < t then
t = d
obj = s
end
end
return obj, t
end
function radiance(r, depth)
local obj, t
obj, t = intersect(r)
if obj == nil then
return Vec.Zero
else
local newDepth = depth + 1
local isMaxDepth = newDepth > 100
-- Russian roulette for path termination
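      -- Beyond depth 5 a path survives with probability maxC (the brightest colour component);
      -- surviving paths use obj.cc = c / maxC so the estimate stays unbiased.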
local isUseRR = newDepth > 5
local isRR = isUseRR and rand() < obj.maxC
if isMaxDepth or (isUseRR and not isRR) then
return obj.e
else
local f = (isUseRR and isRR) and obj.cc or obj.c
local x = r.o + r.d * t
local n = (x - obj.p):norm()
local nl = (n:dot(r.d) < 0) and n or (n * -1)
if obj.refl == Refl.DIFF then -- Ideal DIFFUSE reflection
local r1 = 2 * math.pi * rand()
local r2 = rand()
local r2s = math.sqrt(r2)
local w = nl
local wo = (math.abs(w.x) > .1) and Vec.YAxis or Vec.XAxis
local u = (wo % w):norm()
local v = w % u
local d = (u * math.cos(r1) * r2s + v * math.sin(r1) * r2s + w * math.sqrt(1 - r2)):norm()
return obj.e + f:mult(radiance(Ray.new(x, d), newDepth))
elseif obj.refl == Refl.SPEC then -- Ideal SPECULAR reflection
return obj.e + f:mult(radiance(Ray.new(x, r.d - n * 2 * n:dot(r.d)), newDepth))
else -- Ideal dielectric REFRACTION
local reflRay = Ray.new(x, r.d - n * (2 * n:dot(r.d)))
local into = n:dot(nl) > 0 -- Ray from outside going in?
local nc = 1
local nt = 1.5
local nnt = into and (nc / nt) or (nt / nc)
local ddn = r.d:dot(nl)
local cos2t = 1 - nnt * nnt * (1 - ddn * ddn)
if cos2t < 0 then -- Total internal reflection
return obj.e + f:mult(radiance(reflRay, newDepth))
else
local tdir = (r.d * nnt - n * ((into and 1 or -1) * (ddn * nnt + math.sqrt(cos2t)))):norm()
local a = nt - nc
local b = nt + nc
local R0 = (a * a) / (b * b)
local c = 1 - (into and -ddn or tdir:dot(n))
local Re = R0 + (1 - R0) * c * c * c * c * c
local Tr = 1 - Re
local P = .25 + .5 * Re
local RP = Re / P
local TP = Tr / (1 - P)
local result
if newDepth > 2 then
-- Russian roulette and splitting for selecting reflection and/or refraction
if rand() < P then
result = radiance(reflRay, newDepth) * RP
else
result = radiance(Ray.new(x, tdir), newDepth) * TP
end
else
result = radiance(reflRay, newDepth) * Re + radiance(Ray.new(x, tdir), newDepth) * Tr
end
return obj.e + f:mult(result)
end
end
end
end
end
local start = os.clock()
local w = 256
local h = 256
local samps = 25
-- cam pos, dir
local cam = Ray.new(Vec.new(50, 52, 295.6), Vec.new(0, -0.042612, -1):norm())
local cx = Vec.new(w * .5135 / h, 0, 0)
local cy = (cx % cam.d):norm() * .5135
-- final color buffer
local c = {}
-- Loop over image rows
for y = 0, h - 1 do
io.stderr:write(string.format("\rRendering (%d spp) %5.2f%%", samps * 4, 100 * y / (h - 1)))
-- Loop cols
for x = 0, w - 1 do
local i = (h - y - 1) * w + x
c[i] = Vec.Zero
-- 2x2 subpixel rows
for sy = 0, 1 do
-- 2x2 subpixel cols
for sx = 0, 1 do
local r = Vec.Zero
for s = 1, samps do
local r1 = 2 * rand()
local r2 = 2 * rand()
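               -- Tent filter: warp the two uniform samples in [0,2) into [-1,1) with a
               -- triangular density, which antialiases the subpixel sample positions.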
local dx = (r1 < 1) and (math.sqrt(r1) - 1) or (1 - math.sqrt(2 - r1))
local dy = (r2 < 1) and (math.sqrt(r2) - 1) or (1 - math.sqrt(2 - r2))
local d = cx * (((sx + .5 + dx) / 2 + x) / w - .5) +
cy * (((sy + .5 + dy) / 2 + y) / h - .5) + cam.d
-- Camera rays are pushed forward to start in interior
local camRay = Ray.new(cam.o + d * 140, d:norm())
               -- Accumulate radiance
r = r + radiance(camRay, 0) * (1.0 / samps)
end
-- Convert radiance to color
c[i] = c[i] + Vec.new(clamp(r.x), clamp(r.y), clamp(r.z)) * .25
end
end
end
end
print(string.format("\n%f sec", os.clock() - start))
local f = io.open("image.ppm", "w")
f:write(string.format("P3\n%d %d\n%d\n", w, h, 255))
for i = 0, w * h -1 do
f:write(string.format("%d %d %d\n", toInt(c[i].x), toInt(c[i].y), toInt(c[i].z)))
end
| 28.831615 | 111 | 0.493325 | 3.046875 |
89baae91903b47687d2513bcff5a7ea50e7738cc
| 10,440 |
swift
|
Swift
|
ApplepieDemo/ApplepieTests/UI/Router/APRouterTests.swift
|
cdtschange/ios-applepie
|
e0db0cb5b6564011e4f55342a701909e876853e3
|
[
"MIT"
] | 3 |
2018-11-14T10:08:40.000Z
|
2021-02-03T07:20:49.000Z
|
ApplepieDemo/ApplepieTests/UI/Router/APRouterTests.swift
|
cdtschange/ios-applepie
|
e0db0cb5b6564011e4f55342a701909e876853e3
|
[
"MIT"
] | null | null | null |
ApplepieDemo/ApplepieTests/UI/Router/APRouterTests.swift
|
cdtschange/ios-applepie
|
e0db0cb5b6564011e4f55342a701909e876853e3
|
[
"MIT"
] | 1 |
2018-11-17T18:08:49.000Z
|
2018-11-17T18:08:49.000Z
|
//
// APRouterTests.swift
// ApplepieTests
//
// Created by 山天大畜 on 2018/12/13.
// Copyright © 2018 山天大畜. All rights reserved.
//
import XCTest
import Applepie
import PromiseKit
class APRouterTests: BaseTestCase {
@objc(ViewController1)
private class ViewController1: UIViewController {}
@objc(ViewController2)
private class ViewController2: UIViewController, APRouterProtocol {
var params: [String : Any] = [:]
}
@objc(ViewController3)
private class ViewController3: UIViewController {}
override func setUp() {
// Put setup code here. This method is called before the invocation of each test method in the class.
}
override func tearDown() {
// Put teardown code here. This method is called after the invocation of each test method in the class.
}
func testRouter() {
// This is an example of a functional test case.
// Use XCTAssert and related functions to verify your tests produce the correct results.
let expectation = XCTestExpectation(description: "Complete")
let root = UIViewController.topMostViewController()
var v2: ViewController2?
let delayTime = 1000
firstly {
assert(root != nil)
root?.tabBarController?.selectedIndex = 1
return after(.seconds(1))
}.then { () -> Guarantee<Void> in
root?.tabBarController?.selectedIndex = 0
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
APRouter.route(toName: "ListTypeViewController", params: ["type": "both"], animation: false, pop: false)
return after(.seconds(2))
}.then { () -> Guarantee<Void> in
APRouter.routeBack()
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
APRouter.route(toName: "BaseWebViewController", params: ["url": "https://www.baidu.com"], storyboardName: nil, animation: false, pop: false)
return after(.seconds(2))
}.then { () -> Guarantee<Void> in
APRouter.routeBack()
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(APRouter.route(toName: "None", animation: false, pop: true) == false)
assert(APRouter.route(toName: "routeToLogin", animation: false, pop: true) == true)
assert(NSStringFromClass(UIViewController.topMostViewController()!.classForCoder) == "ApplepieDemo.ViewController")
assert(APRouter.routeBack(animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(UIViewController.topMostViewController() == root)
let nav = UINavigationController(rootViewController: ViewController1())
root?.present(nav, animated: false, completion: nil)
assert(NSStringFromClass(UIViewController.topMostViewController()!.classForCoder) == "ViewController1")
assert(APRouter.route(toUrl: "http://test", name: "ViewController2", params: ["a": "1", "b": 2, "c": true, "d": 1.2], animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(NSStringFromClass(UIViewController.topMostViewController()!.classForCoder) == "ViewController2")
v2 = UIViewController.topMostViewController() as? ViewController2
assert((v2?.params["a"] as? String) == "1")
assert((v2?.params["b"] as? Int) == 2)
assert((v2?.params["c"] as? Bool) == true)
assert((v2?.params["d"] as? Double) == 1.2)
assert((v2?.params["url"] as? String) == "http://test")
assert(UIViewController.topMostViewController()?.navigationController?.viewControllers.count == 2)
assert(APRouter.route(toName: "ViewController3", animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(NSStringFromClass(UIViewController.topMostViewController()!.classForCoder) == "ViewController3")
assert(UIViewController.topMostViewController()?.ap.containsViewControllerInNavigation("ViewController2") == true)
assert(UIViewController.topMostViewController()?.ap.containsViewControllerInNavigation("ViewController1") == true)
assert(UIViewController.topMostViewController()?.ap.containsViewControllerInNavigation("ViewController3") == true)
assert(UIViewController.topMostViewController()?.ap.containsViewControllerInNavigation("ViewController4") == false)
assert(APRouter.routeBack(params: ["a": "2"], animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(NSStringFromClass(UIViewController.topMostViewController()!.classForCoder) == "ViewController2")
assert((v2?.params["a"] as? String) == "2")
assert(APRouter.route(toName: "ViewController3", animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(APRouter.routeBack(toName: "ViewController4", animation: false) == false)
assert(APRouter.routeBack(toName: "ViewController2", params: ["a": "3"], animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(NSStringFromClass(UIViewController.topMostViewController()!.classForCoder) == "ViewController2")
assert((v2?.params["a"] as? String) == "3")
assert(APRouter.routeBack(toName: "ViewController1", animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(NSStringFromClass(UIViewController.topMostViewController()!.classForCoder) == "ViewController1")
assert(APRouter.route(toName: "ViewController2", animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(APRouter.route(toName: "ViewController3", animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(APRouter.routeBack(skip: 0, animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(NSStringFromClass(UIViewController.topMostViewController()!.classForCoder) == "ViewController2")
assert(APRouter.route(toName: "ViewController3", animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(APRouter.routeBack(skip: 1, animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(NSStringFromClass(UIViewController.topMostViewController()!.classForCoder) == "ViewController1")
assert(APRouter.route(toName: "ViewController2", animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(APRouter.route(toName: "ViewController3", animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(APRouter.route(toName: "ViewController2", animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(APRouter.route(toName: "ViewController3", animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(APRouter.route(toName: "ViewController2", animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(UIViewController.topMostViewController()?.navigationController?.viewControllers.count == 6)
assert(APRouter.route(toName: "ViewController3", animation: false, pop: true) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(NSStringFromClass(UIViewController.topMostViewController()!.classForCoder) == "ViewController3")
assert(APRouter.routeBack(toName: "ViewController2", animation: false) == false)
assert(APRouter.routeBack(animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(NSStringFromClass(UIViewController.topMostViewController()!.classForCoder) == "ViewController2")
assert(APRouter.route(toName: "ViewController3", animation: false, pop: true) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(NSStringFromClass(UIViewController.topMostViewController()!.classForCoder) == "ViewController3")
assert(APRouter.routeBack(skip: 1, animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(NSStringFromClass(UIViewController.topMostViewController()!.classForCoder) == "ViewController2")
assert(APRouter.routeBack(skip: 5, animation: false) == false)
assert(APRouter.routeToRoot(animation: false) == true)
return after(.milliseconds(delayTime))
}.then { () -> Guarantee<Void> in
assert(NSStringFromClass(UIViewController.topMostViewController()!.classForCoder) == "ViewController1")
assert(APRouter.routeBack(animation: false) == true)
return after(.milliseconds(delayTime))
}.done { _ in
expectation.fulfill()
}
wait(for: [expectation], timeout: 60)
}
}
| 60.697674 | 160 | 0.609291 | 3.09375 |
7904924e644f6b006781706d6816f56a48b198c7
| 2,607 |
lua
|
Lua
|
test_scripts/API/ButtonSubscription/Versioning/007_UnsubscribeButton_PLAY_PAUSE_majorVersion_5.lua
|
LuxoftSDL/sdl_atf_test_scripts
|
2594ac912640008fb990594f377068d8e9e2e785
|
[
"BSD-3-Clause"
] | 3 |
2016-03-17T02:26:56.000Z
|
2021-11-06T08:04:19.000Z
|
test_scripts/API/ButtonSubscription/Versioning/007_UnsubscribeButton_PLAY_PAUSE_majorVersion_5.lua
|
smartdevicelink/sdl_atf_test_scripts
|
2594ac912640008fb990594f377068d8e9e2e785
|
[
"BSD-3-Clause"
] | 1,335 |
2016-03-14T18:29:40.000Z
|
2022-03-30T10:40:28.000Z
|
test_scripts/API/ButtonSubscription/Versioning/007_UnsubscribeButton_PLAY_PAUSE_majorVersion_5.lua
|
smartdevicelink/sdl_atf_test_scripts
|
2594ac912640008fb990594f377068d8e9e2e785
|
[
"BSD-3-Clause"
] | 72 |
2016-03-30T13:44:17.000Z
|
2021-07-26T06:48:24.000Z
|
------------------------------------------------------------------------------------------------------------------------
-- Proposal:
-- https://github.com/smartdevicelink/sdl_evolution/blob/master/proposals/0192-button_subscription_response_from_hmi.md
------------------------------------------------------------------------------------------------------------------------
-- Description: Check processing of UnsubscribeButton request with 'PLAY_PAUSE' parameter in case
-- mobile app is registered with syncMsgVersion (5.0)
------------------------------------------------------------------------------------------------------------------------
-- In case:
-- 1. Mobile app is registered with major version=5.0
-- 2. Mobile app requests SubscribeButton(PLAY_PAUSE)
-- 3. Mobile app requests UnsubscribeButton(PLAY_PAUSE)
-- SDL does:
-- - send Buttons.UnsubscribeButton(PLAY_PAUSE, appId) to HMI
-- - wait response from HMI
-- - receive Buttons.UnsubscribeButton(SUCCESS)
-- - respond UnsubscribeButton(SUCCESS) to mobile app
-- - send OnHashChange with updated hashId to mobile app
-- In case:
-- 4. HMI sends OnButtonEvent and OnButtonPress notifications for "PLAY_PAUSE"
-- SDL does:
-- - not transfer OnButtonEvent and OnButtonPress to App
------------------------------------------------------------------------------------------------------------------------
--[[ Required Shared libraries ]]
local common = require('test_scripts/API/ButtonSubscription/commonButtonSubscription')
--[[ Test Configuration ]]
config.application1.registerAppInterfaceParams.syncMsgVersion.majorVersion = 5
--[[ Local Variables ]]
local appSessionId1 = 1
local buttonName = "PLAY_PAUSE"
--[[ Scenario ]]
common.runner.Title("Preconditions")
common.runner.Step("Clean environment", common.preconditions)
common.runner.Step("Start SDL, HMI, connect Mobile, start Session", common.start)
common.runner.Step("App registration", common.registerAppWOPTU)
common.runner.Step("App activation", common.activateApp)
common.runner.Step("SubscribeButton " .. buttonName, common.rpcSuccess,
{ appSessionId1, "SubscribeButton", buttonName })
common.runner.Step("On Button Press " .. buttonName, common.buttonPress, { appSessionId1, buttonName })
common.runner.Title("Test")
common.runner.Step("UnsubscribeButton " .. buttonName, common.rpcSuccess,
{ appSessionId1, "UnsubscribeButton", buttonName })
common.runner.Step("Check unsubscribe " .. buttonName, common.buttonPress,
{ appSessionId1, buttonName, common.isNotExpected })
common.runner.Title("Postconditions")
common.runner.Step("Stop SDL", common.postconditions)
| 51.117647 | 120 | 0.633679 | 3.125 |
dd8174ef4e34df65403db322b19a98fe82588a84
| 737 |
java
|
Java
|
IO/Output.java
|
himanshugawari/java-code
|
115ca4c35e7797b6c11fef9bf041350dbad89357
|
[
"MIT"
] | null | null | null |
IO/Output.java
|
himanshugawari/java-code
|
115ca4c35e7797b6c11fef9bf041350dbad89357
|
[
"MIT"
] | null | null | null |
IO/Output.java
|
himanshugawari/java-code
|
115ca4c35e7797b6c11fef9bf041350dbad89357
|
[
"MIT"
] | null | null | null |
package IO;
import java.lang.Math;
class Output {
// println , print
// public static void main(String[] args) {
// int x = 10, y = 20;
// char z = 'a';
// String str = "GFG";
// System.out.println(x);
// System.out.println(x + y);
// System.out.println(x + " " + y);
// System.out.print(str + " ");
// System.out.print("Courses");
// System.out.print("\n");
// System.out.println(z);
// System.out.println(z + 1);
// System.out.println(z + 'a');
// }
// printf, format
public static void main(String[] args) {
int x = 100, y = 200;
double z = Math.PI;
System.out.format("Value of x is %d\n", x);
System.out.println(z);
System.out.printf("x=%d and y=%d and z=%f", x, y, z);
}
}
| 23.774194 | 57 | 0.552239 | 3.140625 |
c6b93c77cca2e59db6740fac50f93884f8354fd1
| 1,757 |
py
|
Python
|
tools/ipgeo/ipgeo.py
|
onurrozkaan/interrogator
|
799613bdb39cfdf00ad19381a8121be2a7291c2b
|
[
"MIT"
] | 5 |
2019-03-09T09:45:58.000Z
|
2021-06-27T10:18:24.000Z
|
tools/ipgeo/ipgeo.py
|
ozkanonur/interrogator
|
799613bdb39cfdf00ad19381a8121be2a7291c2b
|
[
"MIT"
] | null | null | null |
tools/ipgeo/ipgeo.py
|
ozkanonur/interrogator
|
799613bdb39cfdf00ad19381a8121be2a7291c2b
|
[
"MIT"
] | null | null | null |
import urllib.request
import sys
import json
from assets.styling.colors import *
def getOutput():
print(purple + "Enter the Target IP:")
sys.stdout.write(green + "> ")
ip_input = input()
with urllib.request.urlopen("http://ip-api.com/json/" + ip_input) as url:
def is_input_ip(checker_value):
data = checker_value.split('.')
if len(data) != 4:
return False
for x in data:
if not x.isdigit():
return False
i = int(x)
if i < 0 or i > 255:
return False
return True
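        # e.g. is_input_ip("8.8.8.8") -> True; is_input_ip("example.com") or is_input_ip("999.1.2.3") -> False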
if is_input_ip(ip_input) == True:
data = url.read().decode()
output = data.strip('"').split(",")
print("\r")
for value in output:
print(red + "--> " + value.replace('"', "").replace("}", "").replace("{", "").replace("as:", yellow + "AS: "+ green).replace("city:", yellow + "CITY: "+ green).replace("country:", yellow + "COUNTRY: " + green).replace("isp:", yellow + "ISP: " +green ).replace("lat:", yellow + "LAT: " +green ).replace("countryCode:", yellow + "COUNTRY CODE: " +green ).replace(
"lon:", yellow + "LON: " +green ).replace("org:", yellow + "ORG: " +green ).replace("query:", yellow + "QUERY: " +green ).replace("region:", yellow + "REGION: " +green ).replace("regionName:", yellow + "REGION NAME: " +green ).replace("status:", yellow + "STATUS: " +green ).replace("timezone:", yellow + "TIMEZONE: " +green ).replace("zip:", yellow + "ZIP: " +green ))
else:
print("\r")
print(red + "Value looks like not an Ip Address, please enter a correct Ip Address.")
| 45.051282 | 389 | 0.525327 | 3.40625 |
b36e1c2289027ada956cc59ba2f5a5624dfdd969
| 870 |
py
|
Python
|
arborlife/utils.py
|
rogerhurwitz/arborlife
|
36777200940a0be22340bea938cdad6b45c9ab6e
|
[
"MIT"
] | 1 |
2020-11-17T17:16:20.000Z
|
2020-11-17T17:16:20.000Z
|
arborlife/utils.py
|
rogerhurwitz/arborlife
|
36777200940a0be22340bea938cdad6b45c9ab6e
|
[
"MIT"
] | 6 |
2019-11-30T18:19:36.000Z
|
2019-12-16T06:50:53.000Z
|
arborlife/utils.py
|
rogerhurwitz/arborlife
|
36777200940a0be22340bea938cdad6b45c9ab6e
|
[
"MIT"
] | null | null | null |
import math
import numpy as np
import scipy.stats as stats
def calc_cubic(a, b, c, d):
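    # Returns one real root of a*x^3 + b*x^2 + c*x + d = 0 via the closed-form (Cardano-style)
    # substitution below; only valid while the expressions under the square roots stay
    # non-negative, since math.pow raises ValueError for a negative base with a fractional exponent.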
p = -b / (3 * a)
q = math.pow(p, 3) + (b * c - 3 * a * d) / (6 * math.pow(a, 2))
r = c / (3 * a)
x = (
math.pow(
q + (math.pow(math.pow(q, 2) + (math.pow(r - math.pow(p, 2), 3)), 0.5)), 1 / 3, # noqa: E501
)
+ math.pow(
q - (math.pow(math.pow(q, 2) + (math.pow(r - math.pow(p, 2), 3)), 0.5)), 1 / 3, # noqa: E501
)
+ p
)
return x
def calc_truncnorm(mean, sd, *, clip_a=-np.inf, clip_b=np.inf):
"""Returns a float in a normal distribution (mean, sd) clipped (clip_a, clip_b)."""
# https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.truncnorm.html
return stats.truncnorm.rvs(
a=(clip_a - mean) / sd, b=(clip_b - mean) / sd, loc=mean, scale=sd
)
| 29 | 108 | 0.514943 | 3.109375 |
2003c19caf4c21741f7293766b0ae3d9eef44f56
| 11,896 |
py
|
Python
|
byconeer/biosamplesInserter.py
|
sofiapfund/bycon
|
d7993eaf99cfce46f3025718ab3aa3c0f812badd
|
[
"CC0-1.0"
] | null | null | null |
byconeer/biosamplesInserter.py
|
sofiapfund/bycon
|
d7993eaf99cfce46f3025718ab3aa3c0f812badd
|
[
"CC0-1.0"
] | 1 |
2021-03-18T12:17:59.000Z
|
2021-03-18T12:19:24.000Z
|
byconeer/biosamplesInserter.py
|
sofiapfund/bycon
|
d7993eaf99cfce46f3025718ab3aa3c0f812badd
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/local/bin/python3
import re, argparse
import datetime, time
import sys, base36
import json
import pandas as pd
from pymongo import MongoClient
import random
from os import path, environ, pardir
from progress.bar import Bar
from pydoc import locate
# from jsonschema import validate
# local
dir_path = path.dirname( path.abspath(__file__) )
pkg_path = path.join( dir_path, pardir )
sys.path.append( pkg_path )
from beaconServer.lib.read_specs import *
from beaconServer.lib.parse_filters import *
from beaconServer.lib.service_utils import *
from lib.table_tools import *
"""
## `newSampleInserter`
"""
################################################################################
################################################################################
def _get_args(byc):
parser = argparse.ArgumentParser()
parser.add_argument('-t', '--test', help='No. of samples for test run')
parser.add_argument('-d', '--output_db', help='the database to write into.')
parser.add_argument('-s', '--source', help='which repo is input data from')
byc.update({"args": parser.parse_args() })
return byc
################################################################################
def main():
biosamples_inserter()
################################################################################
def biosamples_inserter():
curr_time = datetime.datetime.now()
byc = initialize_service()
_get_args(byc)
if not byc['args'].source in byc["these_prefs"]['data_sources']:
print( 'No accepted "--source" data source has been provided...')
exit()
table_prefs = read_local_prefs( "datatables", dir_path )
for d_k, d_v in table_prefs.items():
byc.update( { d_k: d_v } )
# TODO: check for import_path
# ... prompt for continuation w/ Q "new dataset ??? etc."
################################################################################
### read in meta table
metapath = path.join(byc['import_path'])
mytable = pd.read_csv(metapath, sep = '\t', dtype = str)
mytable = mytable.where((pd.notnull(mytable)), None) ## convert pd.nan to None
### define list/dictionary of objects to insert in 4 collections
variants_list = []
callsets_dict = {}
biosamples_dict = {}
individuals_dict = {}
### find all existing ids in each output database and collections.
exist_callset_id = {}
exist_biosample_id = {}
exist_individual_id = {}
mongo_client = MongoClient( )
ds_id = byc['args'].output_db
data_db = mongo_client[ds_id]
exist_callset_id[ds_id] = data_db.callsets.distinct('info.legacy_id')
exist_biosample_id[ds_id] = data_db.biosamples.distinct('info.legacy_id')
exist_individual_id[ds_id] = data_db.individuals.distinct('info.legacy_id')
no_row = mytable.shape[0]
if byc['args'].test:
test = int(byc['args'].test)
bar_length = test
rdm_row = random.sample(range(no_row), test)
mytable = mytable.iloc[rdm_row, :]
print( f"TEST MODE for {test} of samples.")
else:
bar_length = no_row
bar = Bar("Reading in metadata table", max = bar_length, suffix="%(percent)d%%"+" of "+str(bar_length) )
    for _, row in mytable.iterrows():  # iterrows yields a Series per row, so the row.loc[...] lookups below work
if row.loc['status'].startswith('excluded'):
continue
### assign variables from info fields
info_field = {}
column_names = io_table_header( **byc )
field_to_db = io_map_to_db( **byc )
bs = 'biosamples'
cs = 'callsets'
ind = 'individuals'
for field in column_names:
if field in row:
if field in field_to_db:
                info_field.setdefault(field_to_db[field][0], {})[field] = row.loc[field]
else:
info_field[field] = row.loc[field]
if byc['args'].source == 'arrayexpress':
info_field['uid'] = 'AE-'+ info_field['experiment'].replace('E-','').replace('-','_') + '-' +\
info_field['uid'].replace("-", "_").replace(' ', '_').replace('.CEL','') + '-' +\
byc['platform_rename'][info_field['platform_id']] ## initial rename
# TODO: default should **not** be blood but rather icdot-C76.9: ill-defined
if not info_field[bs]['icdot::id']:
info_field[bs]['icdot::id'] = 'C42.0'
if not info_field[bs]['icdot::label']:
info_field[bs]['icdot::label'] = 'Blood'
if info_field[bs]['icdom::id']:
info_field[bs]['icdom::id'] = info_field[bs]['icdom::id'].replace('/','')
else:
info_field[bs]['icdom::id'] = '00000'
info_field[bs]['icdom::label'] = 'Normal'
if not 'description' in info_field:
info_field[bs]['description'] = ''
if 'age_iso' not in info_field[bs] and info_field['age']:
try:
# TODO: separate method; also e.g. using a split or regex after stringify
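                # e.g. '62' -> 'P62Y'; '62.5' -> 'P62Y6M' (ISO 8601 durations)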
age = int(info_field['age'])
                info_field[bs]['age_iso'] = 'P'+str(age)+'Y'
except ValueError:
age = float(info_field['age'])
rem_age = age - int(age)
info_field[bs]['age_iso'] = 'P'+str(int(age)) +'Y'+ str(round(rem_age*12)) + 'M'
if 'PATO::id' not in info_field[ind] and info_field['sex']:
sex = info_field['sex'].lower()
if sex == 'female':
info_field[ind]['PATO::id'] = 'PATO:0020002'
info_field[ind]['PATO::label'] = 'female genotypic sex'
elif sex == 'male':
info_field[ind]['PATO::id'] = 'PATO:0020001'
info_field[ind]['PATO::label'] = 'male genotypic sex'
### derived attributes that are shared by collections
info_field[bs]['legacy_id'] = 'PGX_AM_BS_' + info_field['uid']
info_field[cs]['legacy_id'] = 'pgxcs::{}::{}'.format(info_field['experiment'], info_field['uid'])
info_field[ind]['legacy_id'] = 'PGX_IND_' + info_field['uid']
info_field[bs]['id'] = _generate_id('pgxbs')
info_field[cs]['id'] = _generate_id('pgxcs')
info_field[cs]['biosample_id'] = info_field[bs]['id']
info_field[ind]['id'] = _generate_id('pgxind')
info_field[bs]['individual_id'] = info_field[ind]['id']
info_field[bs]['EFO::id'] = 'EFO:0009654' if info_field[bs]['icdom::id'] == '00000' else 'EFO:0009656'
info_field[bs]['EFO::label'] = 'reference sample' if info_field[bs]['icdom::id'] == '00000' else 'neoplastic sample'
info_field[ind]['NCBITaxon::id'] = 'NCBITaxon:9606'
info_field[ind]['NCBITaxon::label'] = 'Homo sapiens'
for collection in [bs, cs, ind]:
info_field[collection]['DUO::id'] = 'DUO:0000004'
info_field[collection]['DUO::label'] = 'no restriction'
############################
## variants & callsets ##
############################
variants, callset = _initiate_vs_cs(info_field['experiment'], info_field['uid'], **byc)
## variants
for variant in variants:
variant['callset_id'] = info_field[cs]['id']
variant['biosample_id'] = info_field[bs]['id']
variant['updated'] = curr_time
variants_list.append(variants)
## callsets
for k,v in info_field[cs].items():
db_key, attr_type = field_to_db['.'.join([cs,k])]
assign_nested_value(callset, db_key, locate(attr_type)(v))
if info_field['platform_id']:
callset['description'] = _retrievePlatformLabel(mongo_client, info_field['platform_id'])
callsets_dict[info_field[cs]['legacy_id']] = callset
############################
###### biosamples #######
############################
biosample= {
'updated': curr_time,
}
        if byc['args'].source == 'arrayexpress':
            info_field[bs]['arrayexpress::id'] = 'arrayexpress:' + info_field['experiment']
biosample['project_id'] = 'A' + info_field['experiment']
for k,v in info_field[bs].items():
db_key, attr_type = field_to_db['.'.join([bs,k])]
assign_nested_value(biosample, db_key, locate(attr_type)(v))
biosamples_dict[info_field[bs]['legacy_id']] = biosample
############################
###### individuals ######
############################
individual = {
'updated': curr_time
}
for k,v in info_field[ind].items():
db_key, attr_type = field_to_db['.'.join([ind,k])]
assign_nested_value(individual, db_key, locate(attr_type)(v))
individuals_dict[info_field[ind]['legacy_id']] = individual
bar.next()
bar.finish()
############################
### database write-in ###
############################
confirm = input("""Processed {} variants, {} callsets, {} biosamples and {} individuals for update.
Do you want to continue? [y/n]""".format(sum([len(v) for v in variants_list]), len(callsets_dict), len(biosamples_dict),
len(individuals_dict)))
    update = input("""In case of an existing record (matching info.legacy_id), do you want to update it? [y/n] """).strip().lower() == 'y'
if confirm == 'y':
for variant_obj in variants_list:
try:
data_db.variants.insert_many(variant_obj)
except TypeError:
pass
for callset_id_leg, callset_obj in callsets_dict.items():
if (not update) and (callset_id_leg in exist_callset_id[ds_id]):
continue
data_db.callsets.insert_one(callset_obj)
for biosample_id_leg, biosample_obj in biosamples_dict.items():
if (not update) and (biosample_id_leg in exist_biosample_id[ds_id]):
continue
data_db.biosamples.insert_one(biosample_obj)
for individual_id_leg, individual_obj in individuals_dict.items():
if (not update) and (individual_id_leg in exist_individual_id[ds_id]):
continue
data_db.individuals.insert_one(individual_obj)
################################################################################
################################################################################
def _generate_id(prefix):
time.sleep(.001)
return '{}-{}'.format(prefix, base36.dumps(int(time.time() * 1000))) ## for time in ms
################################################################################
def _initiate_vs_cs(ser, arr, **byc):
## variant collections
    v_j_p = path.join(byc['json_file_root'], ser, arr, "variants.json")
with open(v_j_p) as json_data:
variants_json = json.load(json_data)
variant_obj = []
for v in variants_json:
v.pop('no', None)
v['info']['cnv_value'] = v['info'].pop('value')
v['info']['var_length'] = v['info'].pop('svlen')
v['info'].pop('probes', None)
v['variantset_id'] = 'AM_VS_GRCH38'
variant_obj.append(v)
## callset collections
    cs_j_p = path.join(byc['json_file_root'], ser, arr, "callset.json")
with open(cs_j_p) as json_data:
callset_json = json.load(json_data)
callset_json.pop('callset_id', None)
callset_obj = callset_json
return variant_obj, callset_obj
################################################################################
################################################################################
################################################################################
if __name__ == '__main__':
main()
| 37.05919 | 124 | 0.534633 | 3.046875 |
4587b24935f24ffdcda299047c2da705ecf90d2c
| 840 |
py
|
Python
|
dicotomia.py
|
lauralardies/ordenar
|
f1f926b4fc17a4ed798c0c0880ccac581bfa0d22
|
[
"Apache-2.0"
] | null | null | null |
dicotomia.py
|
lauralardies/ordenar
|
f1f926b4fc17a4ed798c0c0880ccac581bfa0d22
|
[
"Apache-2.0"
] | null | null | null |
dicotomia.py
|
lauralardies/ordenar
|
f1f926b4fc17a4ed798c0c0880ccac581bfa0d22
|
[
"Apache-2.0"
] | null | null | null |
class Dicotomia():
def __init__(self, tabla) -> None:
self.fin = len(tabla) - 1
self.tabla = tabla
self.tablaordenada =[]
def bubbleSort(self):
for i in range(0, self.fin):
for j in range(0, self.fin - i):
if self.tabla[j] > self.tabla[j + 1]:
temp = self.tabla[j]
self.tabla[j] = self.tabla[j+1]
self.tabla[j+1] = temp
def insercion (self):
for i in range (0,self.fin+1):
self.tablaordenada.append (self.tabla[i])
for j in range (i,0,-1):
if self.tablaordenada[j-1]>self.tablaordenada[j]:
aux = self.tablaordenada[j]
self.tablaordenada[j]=self.tablaordenada[j-1]
self.tablaordenada[j-1]=aux
| 36.521739 | 65 | 0.495238 | 3.078125 |
2c4be5b8c216efbf134873ad26befbe21f48a89b
| 9,195 |
py
|
Python
|
server/trydocpie.py
|
TylerTemp/trydocpie
|
4d48255cb05178c8d636fb085f69943dd87ea67f
|
[
"MIT"
] | null | null | null |
server/trydocpie.py
|
TylerTemp/trydocpie
|
4d48255cb05178c8d636fb085f69943dd87ea67f
|
[
"MIT"
] | 6 |
2021-03-09T01:55:42.000Z
|
2022-02-26T10:12:14.000Z
|
server/trydocpie.py
|
TylerTemp/trydocpie
|
4d48255cb05178c8d636fb085f69943dd87ea67f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
#-*- coding: utf-8 -*-
"""
Usage:
trydocpie web [<port>]
trydocpie gen
"""
import logging
# import time
import json
import shlex
import sys
import os
import re
import textwrap
# import inspect
import html
try:
from io import StringIO
except ImportError:
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
from urllib.parse import urlparse, urlunparse
except ImportError:
from urlparse import urlparse, urlunparse
import flask
import markdown
# import markdown2
from docutils import core
from docutils.writers.html4css1 import Writer,HTMLTranslator
from bs4 import BeautifulSoup
import docpie
logging.getLogger('docpie').setLevel(logging.CRITICAL)
logger = logging.getLogger('trydocpie')
app = flask.Flask(__name__)
class StdoutRedirect(StringIO):
if sys.hexversion >= 0x03000000:
def u(self, string):
return string
else:
def u(self, string):
return unicode(string)
def write(self, s):
super(StdoutRedirect, self).write(self.u(s))
def __enter__(self):
self.real_out = sys.stdout
sys.stdout = self
return super(StdoutRedirect, self).__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
sys.stdout = self.real_out
return super(StdoutRedirect, self).__exit__(exc_type, exc_val, exc_tb)
def trydocpie(doc, argv, *a, **k):
with StdoutRedirect() as stdout:
error = False
try:
pie = docpie.docpie(doc, argv, *a, **k)
except (docpie.DocpieExit, SystemExit) as e:
error = True
output = str(e)
else:
output = str(pie)
if not output.strip():
output = stdout.getvalue()
return error, output
@app.route('/', methods=('POST',))
def trydocpiehandler():
body = flask.request.get_data().decode('utf-8')
args = json.loads(body)
argvstr = args.pop('argvnofilestr')
argv = shlex.split('pie.py ' + argvstr)
args['argv'] = argv
unexpected_error = False
try:
expected_error, output = trydocpie(**args)
except BaseException as e:
logger.error(e, exc_info=True)
unexpected_error = True
output = '{}: {}'.format(e.__class__.__name__, (e.args[0] if e.args else '') or '')
if unexpected_error:
code = 500
resp = {
'message': output
}
else:
code = 200
resp = {
'ok': (not expected_error),
'result': output
}
return flask.Response(json.dumps(resp), status=code, mimetype='application/json')
@app.route('/', methods=('GET',))
def trydocpieinfohandler():
info = {
'version_time': docpie.__timestamp__,
'version': docpie.__version__,
}
return flask.Response(json.dumps(info), mimetype='application/json')
class HTMLFragmentTranslator( HTMLTranslator ):
def __init__( self, document ):
HTMLTranslator.__init__( self, document )
self.head_prefix = ['','','','','']
self.body_prefix = []
self.body_suffix = []
self.stylesheet = []
def astext(self):
return ''.join(self.body)
def gen_folder():
project_root = os.path.normpath(os.path.join(__file__, '..', '..'))
codebase = os.path.join(project_root, 'server', 'codebase')
configs = (
{
'source': os.path.join(codebase, 'docpie'),
'target': os.path.join(project_root, 'build', 'static', 'docpie'),
},
{
'source': os.path.join(codebase, 'docpie.wiki'),
'target': os.path.join(project_root, 'build', 'static', 'docpie-wiki'),
},
)
fenced_code_re = re.compile(r'(?P<indent>\s+)```(?P<lang>[\w\ \-_]*)(?P<content>.*?)\ +```', re.DOTALL)
for config in configs:
source_folder = config['source']
target_folder = config['target']
if not os.path.isdir(target_folder):
os.makedirs(target_folder)
_dirpath, _dirnames, filenames = next(os.walk(source_folder))
for filename in filenames:
print('processing {}'.format(filename))
filebase, fileext = os.path.splitext(filename)
fileext_lower = fileext.lower()
if fileext_lower == '.md':
filetype = 'md'
elif fileext_lower == '.rst':
filetype = 'rst'
else:
continue
with open(os.path.join(source_folder, filename), 'r', encoding='utf-8') as f:
content_raw = f.read()
if '```' in content_raw:
# middle_parts = []
# content_parts = content_raw.split('```')
# # first_part = content_parts.pop(0)
# last_part = content_parts.pop(-1)
# for content, codepart in zip(content_parts[::2], content_parts[1::2]):
# middle_parts.append(content)
#
# print(codepart)
# code_parts = codepart.splitlines()
# language = code_parts.pop(0)
# code_rejoined = textwrap.dedent('\n'.join(code_parts)).replace('\n','<br />').rstrip()
#
# middle_parts.append("""
# <div class="codehilite">
# <pre class="language-{lang}"><code>{content}</code></pre>
# </div>
# """.format(lang=language, content=code_rejoined)
# )
# content = '\n'.join(middle_parts) + last_part
content = fenced_code_re.sub(lambda matchobj: """
{indent}<div class="codehilite"><pre class="language-{lang}"><code>{content}</code></pre></div>
""".format(
lang=matchobj.groupdict()['lang'],
indent=matchobj.groupdict()['indent'].replace('\n', ''),
content=html.escape(textwrap.dedent(matchobj.groupdict()['content']).rstrip())[1:]
).replace('\n', '<br />'),
content_raw
)
# print(content)
# assert False
else:
content = content_raw
if filetype == 'md':
if filebase == 'Usage-Format':
# content = content.replace('\\<argument\\>', '<argument>')
content_body = content.split('\n\n', 1)[1].replace('\\<argument\\>', '<argument>')
content = '[TOC]\n\n' + content_body
html_content = markdown.markdown(content, extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.footnotes',
'markdown.extensions.codehilite',
'markdown.extensions.toc',
])
# html_content = content
# html = markdown2.markdown(content, extras=[
# 'toc',
# 'fenced-code-blocks',
# 'footnotes',
# ])
elif filetype == 'rst':
html_fragment_writer = Writer()
html_fragment_writer.translator_class = HTMLFragmentTranslator
html_content = core.publish_string(content, writer=html_fragment_writer).decode('utf-8')
if filebase in ('Home', '_Sidebar'):
html_content = re.sub('\\[\\[(.*?)\\]\\]', lambda matchobj: '<a href="/document/{link}">{linkname}</a>'.format(link=matchobj.group(1).replace(' ', '-'), linkname=matchobj.group(1)), html_content)
soup = BeautifulSoup(html_content, 'html5lib')
for link in soup.find_all('a'):
href = link.get('href')
if href and (href.startswith('http://') or href.startswith('https://')):
url_obj = urlparse(href)
if url_obj.hostname in ('docpie.comes.today', 'docpie.notexists.top'):
url = urlunparse(('', '', url_obj.path, url_obj.params, url_obj.query, url_obj.fragment))
link['href'] = url
for pre in soup.find_all('pre'):
# break
inner_pre = pre.decode_contents()
pre_class = pre.get('class') or []
inner_break = inner_pre.replace('\n', '<br />')
pre_soup = BeautifulSoup('<pre class="{classes}">{content}</pre>'.format(classes=' '.join(pre_class), content=inner_break), 'html5lib')
pre.replace_with(pre_soup.find('pre'))
body = soup.body.decode_contents()
target_filename = filebase + '.html'
logger.info('saving %s', target_filename)
with open(os.path.join(target_folder, target_filename), 'w', encoding='utf-8') as t:
t.write(body)
if __name__ == '__main__':
args = docpie.docpie(__doc__)
logging.basicConfig()
logger.setLevel(logging.DEBUG)
if args['web']:
args_port = args['<port>']
port = int(args_port) if args_port is not None else 8080
app.run(debug=False, port=port)
elif args['gen']:
# folder = args['<folder>']
gen_folder()
| 34.438202 | 211 | 0.554214 | 3.125 |
af8a5e60b53f239fae73143bff4ffaa80fca775c
| 5,023 |
py
|
Python
|
core/randomdata.py
|
ducanh-99/project_design
|
9f59aa4a0748a26d9c58dab8f69fce22e372ecb0
|
[
"MIT"
] | null | null | null |
core/randomdata.py
|
ducanh-99/project_design
|
9f59aa4a0748a26d9c58dab8f69fce22e372ecb0
|
[
"MIT"
] | null | null | null |
core/randomdata.py
|
ducanh-99/project_design
|
9f59aa4a0748a26d9c58dab8f69fce22e372ecb0
|
[
"MIT"
] | null | null | null |
from hashlib import new
import random
import bisect
import csv
import datetime
import time
class StartDate:
year = 2020
month = 1
date = 1
class EndDate:
year = 2021
month = 1
date = 1
class graph:
    def __init__(self, gdict=None, res=None):
        if gdict is None:
            gdict = {}
        self.gdict = gdict
        # avoid a shared mutable default argument for the visited-vertex list
        self.res = res if res is not None else []
def getVertices(self):
return list(self.gdict.keys())
def addVertex(self, v):
if v not in self.gdict:
self.gdict[v] = {}
def addEdge(self, v1, v2, w):
if v1 in self.gdict:
self.gdict[v1][v2] = w
else:
            self.gdict[v1] = {v2: w}  # keep the {vertex: weight} dict structure used elsewhere
def printGraph(self):
for vertex in self.gdict:
for edge in self.gdict[vertex]:
print(str(vertex) + " -> " + str(edge) +
", edge weight: " + str(self.gdict[vertex][edge]))
def getSubGraph(self, vertexSet):
res = graph(None)
for v1 in vertexSet:
#print("First vertex is :", v1)
res.addVertex(v1)
for v2 in vertexSet:
#print("Second vertex is :", v2)
if v2 in self.gdict[v1]:
#print(v1, " --> ", v2)
res.addEdge(v1, v2, self.gdict[v1][v2])
#print ("-----")
return res
def getRandomWeightedVertex(self, v):
sums = {}
S = 0
for vertex in self.gdict[v]:
if vertex not in self.res:
# print "Adding " + str(self.gdict[v][vertex])
S += self.gdict[v][vertex]
sums[vertex] = S
# print sums
r = random.uniform(0, S)
for k in sums.keys():
if (r <= sums[k]):
return k
def randomWeightedPath(self, first_edge, max_len):
self.res = []
        prev_vertex = first_edge  # start the walk from the chosen first vertex
self.res.append(first_edge)
weight_value = 0
while(len(self.res) < len(self.gdict.keys())):
new_vertex = self.getRandomWeightedVertex(prev_vertex)
if len(self.res) >= max_len:
break
if new_vertex not in self.res:
self.res.append(new_vertex)
weight_value += self.gdict[prev_vertex][new_vertex]
prev_vertex = new_vertex
else:
continue
return self.res, weight_value
def generateTime(self, duration):
rtime = int(random.random()*86400)
hours = 0
        while hours < 7 or 17 < hours:  # keep generated times within 07:00-17:59
rtime = int(random.random()*86400)
hours = int(rtime/3600)
minutes = int((rtime - hours*3600)/60)
seconds = rtime - hours*3600 - minutes*60
time_string = datetime.time(hour=hours, minute=minutes, second=seconds)
return time_string
def generateDate(self):
start_date = datetime.date(
StartDate.year, StartDate.month, StartDate.date)
end_date = datetime.date(EndDate.year, EndDate.month, EndDate.date)
time_between_dates = end_date - start_date
days_between_dates = time_between_dates.days
random_number_of_days = random.randrange(days_between_dates)
random_date = start_date + \
datetime.timedelta(days=random_number_of_days)
print(random_date)
def autoPlus(self, duration, start_time):
minutes_added = datetime.timedelta(minutes=duration)
end_time = start_time + minutes_added
print(end_time)
return end_time
testgraph = graph({0: {1: 5, 2: 0, 3: 5, 4: 0, 5: 1, 6: 1, 7: 4, 8: 2, 9: 0, 10: 12},
1: {0: 0, 2: 2, 3: 8, 4: 0, 5: 1, 6: 1, 7: 15, 8: 0, 9: 4, 10: 1},
2: {0: 0, 1: 5, 3: 5, 4: 0, 5: 0, 6: 4, 7: 3, 8: 2, 9: 1, 10: 1},
3: {0: 0, 1: 4, 2: 6, 4: 0, 5: 1, 6: 3, 7: 2, 8: 0, 9: 1, 10: 6},
4: {0: 3, 1: 4, 2: 3, 5: 2, 6: 0, 7: 0, 8: 0, 9: 0, 10: 0},
5: {0: 2, 1: 4, 2: 10, 4: 0, 6: 0, 7: 0, 8: 0, 9: 0, 10: 0},
6: {0: 2, 1: 4, 2: 10, 4: 0, 5: 0, 7: 1, 8: 0, 9: 1, 10: 0},
7: {0: 2, 1: 4, 2: 10, 4: 0, 5: 0, 6: 0, 8: 0, 9: 1, 10: 0},
8: {0: 2, 1: 4, 2: 10, 4: 0, 5: 0, 6: 10, 7: 0, 9: 1, 10: 2},
9: {0: 2, 1: 4, 2: 10, 4: 0, 5: 0, 6: 3, 7: 4, 8: 0, 10: 2},
10: {0: 2, 1: 4, 2: 10, 4: 0, 5: 0, 6: 2, 7: 8, 8: 5, 9: 3}
})
res = []
for i in range(5):
max_len = random.randrange(1, 10)
first_edge = random.randrange(0, 9)
# print(testgraph.randomWeightedPath(first_edge, max_len))
a = testgraph.generateTime(5)
# testgraph.generateDate()
testgraph.autoPlus(start_time=a, duration=4)
# res.append(testgraph.randomWeightedPath(
# first_edge=first_edge, max_len=max_len)[0])
# f = open('data.csv', 'a')
# with f:
# writer = csv.writer(f)
# for row in res:
# writer.writerow(row)
| 31.198758 | 85 | 0.506868 | 3.15625 |
438c0ac979a808d7c4c58dec4b12844640066890
| 2,201 |
ts
|
TypeScript
|
ngx-tl-common-sample/src/app/services/component-preferences.service.ts
|
vbeauplet/ngx-tl-common
|
14975f1ba5afddbd61097e86496cceb265204828
|
[
"MIT"
] | 4 |
2021-03-13T18:03:55.000Z
|
2021-05-26T10:45:34.000Z
|
ngx-tl-common-sample/src/app/services/component-preferences.service.ts
|
vbeauplet/ngx-tl-common
|
14975f1ba5afddbd61097e86496cceb265204828
|
[
"MIT"
] | null | null | null |
ngx-tl-common-sample/src/app/services/component-preferences.service.ts
|
vbeauplet/ngx-tl-common
|
14975f1ba5afddbd61097e86496cceb265204828
|
[
"MIT"
] | null | null | null |
import { Injectable } from '@angular/core';
import { Subject } from 'rxjs';
import { ITlSelectProposal } from 'ngx-tl-common';
export interface IComponentStyle{
  tlStyle: string;
  subTlStyle: string;
  size: string;
  shape: string;
  subShape: string;
  color: string;
}
@Injectable({
providedIn: 'root'
})
export class ComponentPreferencesService {
/**
   * Bound basic custom style of the component sample
*/
public style: IComponentStyle;
/**
   * Subject that emits every time the component style is modified
*/
public styleSubject: Subject<IComponentStyle> = new Subject<IComponentStyle>();
/**
   * Bound size property
   * 'tl-half-responsive' by default
*/
public size: string = 'tl-half-responsive';
constructor() {
this.reset();
}
/**
* Resets component to initial values
*/
reset(){
this.style = {
tlStyle: 'tl-neumorphic',
subTlStyle: 'tl-soft-transparent',
size: 'tl-half-responsive',
shape: 'round',
subShape: 'round',
color: 'tl-outline'
}
}
/**
* Handles selection of a tlStyle
*/
public onSelectStyle(tlStyleProposal: ITlSelectProposal){
this.style.tlStyle = tlStyleProposal.name;
this.styleSubject.next();
}
/**
* Handles selection of a sub-tlStyle
*/
public onSelectSubStyle(tlStyleProposal: ITlSelectProposal){
this.style.subTlStyle = tlStyleProposal.name;
this.styleSubject.next();
}
/**
* Handles selection of a size
*/
public onSelectSize(sizeProposal: ITlSelectProposal){
this.style.size = sizeProposal.name;
this.styleSubject.next();
}
/**
* Handles selection of a shape
*/
public onSelectShape(shapeProposal: ITlSelectProposal){
this.style.shape = shapeProposal.name;
this.styleSubject.next();
}
/**
* Handles selection of a sub-shape
*/
public onSelectSubShape(shapeProposal: ITlSelectProposal){
this.style.subShape = shapeProposal.name;
this.styleSubject.next();
}
/**
* Handles selection of a color
*/
public onSelectColor(colorProposal: ITlSelectProposal){
this.style.color = colorProposal.name;
this.styleSubject.next();
}
}
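// Illustrative usage sketch (not part of the original file); the consuming
// component and the injected property name below are assumptions:
//
//   constructor(private preferences: ComponentPreferencesService) {
//     this.preferences.styleSubject.subscribe(() => {
//       // re-read this.preferences.style and refresh the sample component
//     });
//   }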
| 21.163462 | 81 | 0.6597 | 3 |
4ce359d2d70da6141d571bfc5f73cf5429be8923
| 7,174 |
py
|
Python
|
tests/api/config_test.py
|
Cal-CS-61A-Staff/templar
|
2c2a5a544da377e3c311750fadd6606bba3b7e75
|
[
"MIT"
] | 6 |
2015-05-04T06:17:49.000Z
|
2021-09-11T00:13:14.000Z
|
tests/api/config_test.py
|
albert12132/templar
|
39851c89730ab69e5c73d0a46adca2a44ecc4165
|
[
"MIT"
] | 17 |
2015-01-07T03:25:47.000Z
|
2016-10-08T03:53:44.000Z
|
tests/api/config_test.py
|
Cal-CS-61A-Staff/templar
|
2c2a5a544da377e3c311750fadd6606bba3b7e75
|
[
"MIT"
] | 6 |
2015-10-08T11:37:17.000Z
|
2019-06-03T21:27:52.000Z
|
"""Tests templar/api/config.py"""
from templar.api.config import ConfigBuilder
from templar.api.config import ConfigBuilderError
from templar.api.rules.core import Rule
import unittest
import mock
class ConfigBuilderTest(unittest.TestCase):
def testEmptyBuilder(self):
config = ConfigBuilder().build()
self.assertSequenceEqual([], config.template_dirs)
self.assertDictEqual({}, config.variables)
self.assertSequenceEqual([], config.preprocess_rules)
self.assertSequenceEqual([], config.postprocess_rules)
def testTemplateDirs(self):
with mock.patch('os.path.isdir', lambda s: True):
builder = ConfigBuilder().add_template_dirs('template/path1', 'template/path2')
self.assertSequenceEqual(
['template/path1', 'template/path2'],
builder.build().template_dirs)
builder.clear_template_dirs()
self.assertSequenceEqual([], builder.build().template_dirs)
def testTemplateDirs_preventNonStrings(self):
with self.assertRaises(ConfigBuilderError) as cm:
ConfigBuilder().add_template_dirs(4)
self.assertEqual(
'template_dir must be a string, but instead was: 4',
str(cm.exception))
def testTemplateDirs_preventNonExistentPath(self):
with mock.patch('os.path.isdir', lambda s: False):
with self.assertRaises(ConfigBuilderError) as cm:
ConfigBuilder().add_template_dirs('no/such/path')
self.assertEqual(
'template_dir path is not a directory: no/such/path',
str(cm.exception))
def testVariables_addVariable(self):
builder = ConfigBuilder().add_variable('var1', 'val1').add_variable('var2', 'val2')
self.assertDictEqual({'var1': 'val1', 'var2': 'val2'}, builder.build().variables)
builder.clear_variables()
self.assertDictEqual({}, builder.build().variables)
def testVariables_preventNonStrings_addVariable(self):
with self.assertRaises(ConfigBuilderError) as cm:
ConfigBuilder().add_variable(4, 'val1')
self.assertEqual('variable must be a string, but instead was: 4', str(cm.exception))
def testVariables_addVariables(self):
builder = ConfigBuilder().add_variables({
'var1': 'val1',
'var2': 'val2',
})
self.assertDictEqual({'var1': 'val1', 'var2': 'val2'}, builder.build().variables)
def testVariables_preventNonStrings_addVariables(self):
with self.assertRaises(ConfigBuilderError) as cm:
ConfigBuilder().add_variables({
4: 'val1',
'var2': 'val2',
})
self.assertEqual('variable must be a string, but instead was: 4', str(cm.exception))
def testRecursiveEvaluateJinjaExpressions(self):
builder = ConfigBuilder()
# Default should be False.
self.assertFalse(builder.build().recursively_evaluate_jinja_expressions)
builder.set_recursively_evaluate_jinja_expressions(True)
self.assertTrue(builder.build().recursively_evaluate_jinja_expressions)
def testTemplateDirs_preventNonBooleans(self):
with self.assertRaises(ConfigBuilderError) as cm:
ConfigBuilder().set_recursively_evaluate_jinja_expressions(4)
self.assertEqual(
'recursively_evaluate_jinja_expressions must be a boolean, '
'but instead was: 4',
str(cm.exception))
def testCompilerRules(self):
rule1, rule2 = Rule(), Rule()
builder = ConfigBuilder().append_compiler_rules(rule1, rule2)
self.assertSequenceEqual([rule1, rule2], builder.build().compiler_rules)
rule3, rule4 = Rule(), Rule()
builder.prepend_compiler_rules(rule3, rule4)
self.assertSequenceEqual([rule3, rule4, rule1, rule2], builder.build().compiler_rules)
builder.clear_compiler_rules()
self.assertSequenceEqual([], builder.build().compiler_rules)
def testCompilerRules_preventNonRules(self):
with self.assertRaises(ConfigBuilderError) as cm:
ConfigBuilder().append_compiler_rules(4)
self.assertEqual(
'compiler_rule must be a Rule, but instead was: 4',
str(cm.exception))
def testPreprocessRules(self):
rule1, rule2 = Rule(), Rule()
builder = ConfigBuilder().append_preprocess_rules(rule1, rule2)
self.assertSequenceEqual([rule1, rule2], builder.build().preprocess_rules)
rule3, rule4 = Rule(), Rule()
builder.prepend_preprocess_rules(rule3, rule4)
self.assertSequenceEqual([rule3, rule4, rule1, rule2], builder.build().preprocess_rules)
builder.clear_preprocess_rules()
self.assertSequenceEqual([], builder.build().preprocess_rules)
def testPreprocessRules_preventNonRules(self):
with self.assertRaises(ConfigBuilderError) as cm:
ConfigBuilder().append_preprocess_rules(4)
self.assertEqual(
'preprocess_rule must be a Rule object, but instead was: 4',
str(cm.exception))
def testPostprocessRules(self):
rule1, rule2 = Rule(), Rule()
builder = ConfigBuilder().append_postprocess_rules(rule1, rule2)
self.assertSequenceEqual([rule1, rule2], builder.build().postprocess_rules)
rule3, rule4 = Rule(), Rule()
builder.prepend_postprocess_rules(rule3, rule4)
self.assertSequenceEqual([rule3, rule4, rule1, rule2], builder.build().postprocess_rules)
builder.clear_postprocess_rules()
self.assertSequenceEqual([], builder.build().postprocess_rules)
def testPostprocessRules_preventNonRules(self):
with self.assertRaises(ConfigBuilderError) as cm:
ConfigBuilder().append_postprocess_rules(4)
self.assertEqual(
'postprocess_rule must be a Rule object, but instead was: 4',
str(cm.exception))
def testRules(self):
rule1, rule2, rule3, = Rule(), Rule(), Rule()
builder = ConfigBuilder().append_preprocess_rules(rule1)
builder.append_compiler_rules(rule2)
builder.append_postprocess_rules(rule3)
self.assertSequenceEqual([rule1, rule2, rule3], builder.build().rules)
def testConfigIsImmutable(self):
with mock.patch('os.path.isdir', lambda s: True):
builder = ConfigBuilder().add_template_dirs('template/path1', 'template/path2')
builder.add_variable('var1', 'val1')
config = builder.build()
# Verify config was constructed correctly.
self.assertSequenceEqual(['template/path1', 'template/path2'], config.template_dirs)
self.assertDictEqual({'var1': 'val1'}, config.variables)
new_builder = config.to_builder()
new_builder.clear_template_dirs()
new_builder.add_variable('var2', 'val2')
# Verify previously built config was not affected by changes to new_builder.
self.assertSequenceEqual(['template/path1', 'template/path2'], config.template_dirs)
self.assertDictEqual({'var1': 'val1'}, config.variables)
| 42.702381 | 97 | 0.668943 | 3.046875 |
731eed7f95af2126220c5d491028ce3efe2ddb19
| 6,495 |
lua
|
Lua
|
assets/codebase/core/game.lua
|
drikdrok/Ludum-Dare41
|
4d56d5aaf51b1e592a30bf8474ce23991f875880
|
[
"MIT"
] | null | null | null |
assets/codebase/core/game.lua
|
drikdrok/Ludum-Dare41
|
4d56d5aaf51b1e592a30bf8474ce23991f875880
|
[
"MIT"
] | null | null | null |
assets/codebase/core/game.lua
|
drikdrok/Ludum-Dare41
|
4d56d5aaf51b1e592a30bf8474ce23991f875880
|
[
"MIT"
] | null | null | null |
game = class("game")
local heartImage = love.graphics.newImage("assets/gfx/hud/heart.png")
function game:initialize()
createSoundEffects()
self.fonts = {}
self.font = love.graphics.newFont("assets/gfx/fonts/04B_03__.ttf")
love.graphics.setFont(self.font)
camera = Camera(players[controller.currentPlayer].x, players[controller.currentPlayer].y)
camera.smoother = Camera.smooth.damped(5)
camera:zoom(2)
self.team1Score = 0
self.team2Score = 0
self.teamPossesion = 0
self.team1Players = 3
self.team2Players = 3
self.state = "menu"
self.startingTimer = 3
self.goalColor = {0.5, 0.5, 0.5}
self.justStarted = true
self.mins = 2
self.secs = 30
self.endMessage = "YOU WON!"
self.endTimer = 7
end
function game:update(dt)
if self.state == "playing" then
self.justStarted = false
doPlayerAI(dt)
for i,v in pairs(players) do
if not v.dead then
v:update(dt)
v.hasAId = false
end
end
controller:update(dt)
ball:update(dt)
updateBullets(dt)
updateZombies(dt)
self.secs = self.secs - dt
if self.secs <= 0 then
self.secs = 59
self.mins = self.mins - 1
end
if self.mins < 0 then
self:endGame()
end
elseif self.state == "starting" then
self.startingTimer = self.startingTimer - dt
if not self.justStarted then
self.goalColor = {255/255, 229/255, 2/255}
end
if self.startingTimer < 0 then
self.state = "playing"
self.goalColor = {0.5, 0.5, 0.5}
end
elseif self.state == "menu" then
menu:update()
elseif self.state == "endgame" then
self.endTimer = self.endTimer - dt
if self.endTimer <= 0 then
self.state = "menu"
end
end
if self.state ~= "endgame" and self.state ~= "menu" then
if not players[controller.currentPlayer] then
controller:switchToRandom()
end
camera:lockPosition(players[controller.currentPlayer].x, players[controller.currentPlayer].y)
end
end
function game:draw()
if self.state == "starting" or self.state == "playing" or self.state == "endgame" then
        camera:attach() -- Everything that will be influenced by the camera
pitch:draw()
drawBullets()
drawZombies()
for i,v in pairs(players) do
v:draw()
end
ball:draw()
if debug then
local items, len = collisionWorld:getItems()
for i,v in pairs(items) do
love.graphics.rectangle("line", v.x, v.y, v.width, v.height)
end
end
camera:detach()
--HUD
self:drawHud()
elseif self.state == "menu" then
menu:draw()
end
end
function game:fontSize(size)
if self.fonts[size] then
love.graphics.setFont(self.fonts[size])
else
self.font = love.graphics.newFont("assets/gfx/fonts/04B_03__.ttf", size)
self.fonts[size] = self.font
love.graphics.setFont(self.font)
end
end
function game:drawHud()
if self.state == "playing" or self.state == "starting" then
love.graphics.setColor(109/255, 112/255, 117/255)
love.graphics.rectangle("fill", love.graphics.getWidth()/2-175, 0, 350, 35)
love.graphics.setColor(self.goalColor)
self:fontSize(22)
love.graphics.print("GOAL!", love.graphics.getWidth()/2-175+40, 5)
love.graphics.setColor(1,1,1)
if math.ceil(self.secs) > 9 then
love.graphics.print(self.mins..":"..math.ceil(self.secs), love.graphics.getWidth()/2+80, 5)
else
love.graphics.print(self.mins..":0"..math.ceil(self.secs), love.graphics.getWidth()/2+80, 5)
end
end
if self.state == "playing" then
self:fontSize(22)
love.graphics.print(self.team1Score.. "-"..self.team2Score, love.graphics.getWidth()/2 - self.font:getWidth(self.team1Score.. "-"..self.team2Score)/2, 5)
love.graphics.draw(heartImage, 10, 10, 0, 2 ,2)
self:fontSize(17)
love.graphics.print("x"..players[controller.currentPlayer].health, 46, 20)
elseif self.state == "starting" then
self:fontSize(22)
love.graphics.print(math.ceil(self.startingTimer), love.graphics.getWidth()/2 - self.font:getWidth(math.ceil(self.startingTimer))/2, 5)
elseif self.state == "endgame" then
self:fontSize(22)
love.graphics.setColor(109/255, 112/255, 117/255)
love.graphics.rectangle("fill", love.graphics.getWidth()/2-175, 0, 350, 35)
love.graphics.setColor(255/255, 229/255, 2/255)
love.graphics.print(game.endMessage, love.graphics.getWidth()/2 - self.font:getWidth(self.endMessage)/2, 5)
love.graphics.setColor(1,1,1)
end
end
function game:setState(state)
self.state = state
if self.state == "starting" then
self.startingTimer = 3
end
end
function game:endGame()
if self.team1Score > self.team2Score then
self.endMessage = "YOU WON!"
elseif self.team1Score == self.team2Score then
self.endMessage = "YOU DREW!"
else
self.endMessage = "YOU LOST"
end
if self.team1Players == 0 then
self.endMessage = "YOU LOST!"
elseif self.team2Players == 0 then
self.endMessage = "YOU WON!"
end
self.state = "endgame"
self.endTimer = 7
end
function game:reset()
self.team1Score = 0
self.team2Score = 0
self.teamPossesion = 0
self.team1Players = 3
self.team2Players = 3
self.startingTimer = 3
self.state = "starting"
self.goalColor = {0.5, 0.5, 0.5}
self.mins = 2
self.secs = 30
self.justStarted = true
for i, v in pairs(players) do
v.x = 100000 --Fixes "phantom" player bug
collisionWorld:remove(v)
end
players = {
player:new(math.random(100, 350), math.random(50, 100), 1, 1), player:new(math.random(100, 350), math.random(50, 100), 2, 1), player:new(math.random(100, 350), math.random(50, 100), 3, 1),
player:new(math.random(100, 350), math.random(500, 550), 4, 2), player:new(math.random(100, 350), math.random(500, 550), 5, 2) , player:new(math.random(100, 350), math.random(500, 550), 6, 2)
}
ball:reset()
local ballExists = false
local items, len = collisionWorld:getItems()
for i,v in pairs(items) do
if v.type == "ball" then
ballExists = true
end
end
if ballExists then -- This should fix a crash that happens rarely where the ball gets removed from the world
collisionWorld:update(ball, ball.x, ball.y)
else
collisionWorld:add(ball, ball.x, ball.y)
end
removeAllZombies()
removeAllBullets()
controller:reset()
end
function createSoundEffects()
buttonSound = love.audio.newSource("assets/sfx/button.wav", "static")
goalSound = love.audio.newSource("assets/sfx/goal.wav", "static")
hurtSound = love.audio.newSource("assets/sfx/hurt.wav", "static")
passSound = love.audio.newSource("assets/sfx/pass.wav", "static")
shootSound = love.audio.newSource("assets/sfx/shoot.wav", "static")
end
| 24.235075 | 193 | 0.692841 | 3.078125 |
981b2583b2dfdf3db840e393d537b4cf59adfe97
| 3,462 |
py
|
Python
|
Features/New OT Class with Selected Glyphs.py
|
juandelperal/Glyphs-Scripts
|
1f3cb71683ec044dff67a46cd895773e8271effa
|
[
"Apache-2.0"
] | null | null | null |
Features/New OT Class with Selected Glyphs.py
|
juandelperal/Glyphs-Scripts
|
1f3cb71683ec044dff67a46cd895773e8271effa
|
[
"Apache-2.0"
] | null | null | null |
Features/New OT Class with Selected Glyphs.py
|
juandelperal/Glyphs-Scripts
|
1f3cb71683ec044dff67a46cd895773e8271effa
|
[
"Apache-2.0"
] | null | null | null |
#MenuTitle: New OT Class with Selected Glyphs
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
from builtins import str
__doc__="""
Creates a new OT class containing the selected glyphs.
"""
import vanilla
class OTClassCreator( object ):
def __init__( self ):
self.w = vanilla.FloatingWindow( (400, 104), "Make OT Class from Selected Glyphs", minSize=(400, 120), maxSize=(500, 120) )
self.w.text_otclass = vanilla.TextBox((15, 12+2, 130, 14), "OT class name:", sizeStyle='small')
self.w.class_name = vanilla.EditText((105, 12-1, -90, 20), "xxxx", sizeStyle='small', callback=self.buttonCheck)
self.w.overwrite_check = vanilla.CheckBox((105, 34, -15, 20), "Overwrite existing class", sizeStyle='small', callback=self.buttonCheck, value=True)
self.w.keep_window = vanilla.CheckBox((105, 54, -15, 20), "Keep this window open", sizeStyle='small', callback=None, value=True)
self.w.class_name_check = vanilla.TextBox((15, 80, -15, 14), "Class name appears to be ok.", sizeStyle='small')
self.w.make_button = vanilla.Button((-80, 12, -15, 17), "Create", sizeStyle='small', callback=self.createClass)
self.w.setDefaultButton( self.w.make_button )
self.w.open()
self.buttonCheck( self.w.class_name )
def buttonCheck( self, sender ):
myClassName = sender.get()
existingClasses = [ c.name for c in Glyphs.font.classes ]
#print existingClasses
if myClassName in existingClasses:
if self.w.overwrite_check.get() == False:
self.w.make_button.enable( False )
self.w.class_name_check.set( "Class name already exists." )
else:
self.w.make_button.enable( True )
self.w.class_name_check.set( "Will overwrite existing class." )
elif len( myClassName ) == 0 :
self.w.make_button.enable( False )
self.w.class_name_check.set( "Class name too short." )
elif self.checkstring( myClassName ):
self.w.make_button.enable( True )
self.w.class_name_check.set( "Class name appears to be ok." )
elif myClassName[0] in "0123456789":
self.w.make_button.enable( False )
self.w.class_name_check.set( "Class name must not start with a figure." )
else:
self.w.make_button.enable( False )
self.w.class_name_check.set( "Illegal characters. Only use A-Z, a-z, figures, period, underscore." )
def checkstring(self, teststring, ok=True):
allowedchars="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890._"
if len( teststring ) > 1 :
return self.checkstring( teststring[:-1], ok ) and ( teststring[-1] in allowedchars )
else:
# first char must not be a figure
return ( teststring[-1] in allowedchars and teststring[-1] not in "1234567890" )
def createClass(self, sender):
Doc = Glyphs.currentDocument
Font = Glyphs.font
listOfGlyphNames = [ x.parent.name for x in Font.selectedLayers ]
listOfClasses = Font.classes
listOfClassNames = [ c.name for c in listOfClasses ]
myClassName = self.w.class_name.get()
myClassCode = " ".join( listOfGlyphNames )
if myClassName in listOfClassNames:
print("Changing class", myClassName, "to these glyphs:", myClassCode)
Font.classes[ myClassName ].code = myClassCode
else:
print("Creating class", myClassName, "with these glyphs:", myClassCode)
myNewClass = GSClass()
myNewClass.name = myClassName
myNewClass.code = myClassCode
Font.classes.append( myNewClass )
if not self.w.keep_window.get():
self.w.close()
OTClassCreator()
| 38.043956 | 149 | 0.712305 | 3.0625 |
d214c1f3c68c502f8f27bd9b2549957b3e6a3ffc
| 3,804 |
sql
|
SQL
|
Complete SQL Mastery - Mosh/02.Retrieving Data From a Single Table.sql
|
aman-gupta-1995/SQL-Data-Analysis-and-Data-Visualisation-Projects
|
a7aa7172dc10fe4890770dee8d8e5f3382527902
|
[
"MIT"
] | 195 |
2020-11-28T20:45:09.000Z
|
2022-03-29T17:49:07.000Z
|
Complete SQL Mastery - Mosh/02.Retrieving Data From a Single Table.sql
|
mineoran/SQL-Data-Analysis-and-Visualization-Projects
|
d5462caa7c79b4fa0c6a9d488e25b889820025d8
|
[
"MIT"
] | null | null | null |
Complete SQL Mastery - Mosh/02.Retrieving Data From a Single Table.sql
|
mineoran/SQL-Data-Analysis-and-Visualization-Projects
|
d5462caa7c79b4fa0c6a9d488e25b889820025d8
|
[
"MIT"
] | 124 |
2020-11-18T06:09:50.000Z
|
2022-03-31T19:12:30.000Z
|
/*********************************/
/* Reterive Data */
/*********************************/
USE mosh_sql_store;
SELECT * FROM customers
WHERE customer_id > 5
ORDER BY first_name;
SELECT DISTINCT state
FROM customers;
/* return the name, unit price, and new price (unit price * 1.1) for all products */
SELECT name, unit_price, (unit_price *1.1) AS new_price
FROM products;
/*********************************/
/* Operators */
/*********************************/
/*
>
<
>=
<=
!=
<>
AND
OR
NOT
IN
NOT IN
BETWEEN
LIKE
IS NULL
*/
/*get the orders placed this year*/
SELECT * FROM orders
WHERE order_date >= '2020-01-01';
SELECT *
FROM customers
WHERE birth_date > '1990-01-01' AND points > 1000;
SELECT *
FROM customers
WHERE NOT (birth_date > '1990-01-01' AND points > 1000);
/*
get items of order #6 where total price is greater than 30
*/
SELECT *
FROM order_items
WHERE order_id = 6 AND quantity * unit_price > 30;
SELECT *
FROM customers
WHERE state IN ('VA','GA','FL');
SELECT *
FROM customers
WHERE state NOT IN ('VA','GA','FL');
/* return products with quantity in stock equal to 49, 38 or 72 */
SELECT *
FROM products
WHERE quantity_in_stock IN (49,38,72);
SELECT *
FROM customers
WHERE points BETWEEN 1000 AND 3000;
/*return customers born between 1/1/1990 and 1/1/2000*/
SELECT *
FROM customers
WHERE birth_date BETWEEN '1990-01-01' AND '2000-01-01';
/*get the orders that are not shipped*/
SELECT *
FROM orders
WHERE shipped_date IS NULL;
SELECT *
FROM customers
ORDER BY last_name DESC, state ASC
LIMIT 10;
SELECT order_id, product_id, quantity, unit_price
FROM order_items
WHERE order_id = 2;
/* OFFSET 2 skips the first two records, so results start from the 3rd */
SELECT order_id, product_id, quantity, unit_price
FROM order_items
LIMIT 5 OFFSET 2;
-- page 2 of customers: 5 records per page, starting from record 6 (LIMIT offset, count)
SELECT *
FROM customers
LIMIT 5, 5;
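-- General pagination pattern (illustrative formula, not a literal statement):
-- for page N with page_size rows per page,
--   LIMIT (N - 1) * page_size, page_size
-- or equivalently: LIMIT page_size OFFSET (N - 1) * page_size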
/*get top 3 loyal customers*/
SELECT *
FROM customers
ORDER BY points DESC
LIMIT 3;
/*
% any number of characters
_ single character
*/
SELECT *
FROM customers
WHERE last_name LIKE '_____y';
SELECT *
FROM customers
WHERE last_name LIKE 'b____y';
/* get customers whose addresses contain TRAIL or AVENUE, or whose phone numbers end with 9 */
SELECT *
FROM customers
WHERE address LIKE '%TRAIL%' OR address LIKE '%AVENUE%'
OR phone LIKE '%9';
SELECT *
FROM customers
WHERE address REGEXP 'trail|avenue';
SELECT *
FROM customers
WHERE phone REGEXP '9$';
/*********************************/
/* Regular Expressions */
/*********************************/
SELECT *
FROM customers
WHERE last_name LIKE '%field%';
SELECT *
FROM customers
WHERE last_name REGEXP 'field';
SELECT *
FROM customers
WHERE first_name REGEXP 'I...';
/*
^ : beginning of the string of the field
$ : end of the string
| : either OR
[] : matches any one of the characters inside the brackets, e.g. [abc] or a range [a-h]
https://www.oreilly.com/library/view/mysql-cookbook/0596001452/ch04s08.html
*/
SELECT *
FROM customers
WHERE last_name REGEXP 'field$';
SELECT *
FROM customers
WHERE last_name REGEXP 'field|mac|rose';
SELECT *
FROM customers
WHERE last_name REGEXP '^field|mac|rose';
/*
ge
ie
me
includes in last name
*/
SELECT *
FROM customers
WHERE last_name REGEXP '[gim]e';
/*
ae
be
...
he
includes in last name
*/
SELECT *
FROM customers
WHERE last_name REGEXP '[a-h]e';
/*
Get the customers whose
- first names are ELKA or AMBUR
- last names end with EY or ON
- last name start with MY or contain SE
- last name contain B followed by R or U
*/
SELECT *
FROM customers
WHERE first_name REGEXP 'ELKA|AMBUR';
SELECT *
FROM customers
WHERE last_name REGEXP 'EY$|ON$';
SELECT *
FROM customers
WHERE last_name REGEXP '^MY|SE';
SELECT *
FROM customers
WHERE last_name REGEXP 'B[RU]'; -- characters inside [] are already alternatives, so no | is needed
| 17.693023 | 101 | 0.668507 | 3.234375 |
c525efe2abec866eae4d6e75dbfb9ffb1dd2ad89
| 787 |
ps1
|
PowerShell
|
Modules/SharePointDsc/Examples/Resources/SPSearchServiceSettings/1-ConfigureSettings.ps1
|
JamesHoC/SharePointDsc
|
2c67e59a2882dafac5e825cde5498cbbe1ffadba
|
[
"MIT"
] | null | null | null |
Modules/SharePointDsc/Examples/Resources/SPSearchServiceSettings/1-ConfigureSettings.ps1
|
JamesHoC/SharePointDsc
|
2c67e59a2882dafac5e825cde5498cbbe1ffadba
|
[
"MIT"
] | 1 |
2018-07-19T15:16:08.000Z
|
2018-07-19T15:16:08.000Z
|
Modules/SharePointDsc/Examples/Resources/SPSearchServiceSettings/1-ConfigureSettings.ps1
|
JamesHoC/SharePointDsc
|
2c67e59a2882dafac5e825cde5498cbbe1ffadba
|
[
"MIT"
] | null | null | null |
<#
.EXAMPLE
    This example configures the search service settings in the local farm
#>
Configuration Example
{
param(
[Parameter(Mandatory = $true)]
[PSCredential]
$SetupAccount,
[Parameter(Mandatory = $true)]
[PSCredential]
$SearchAccount
)
Import-DscResource -ModuleName SharePointDsc
node localhost {
SPSearchServiceSettings SearchServiceSettings
{
IsSingleInstance = "Yes"
PerformanceLevel = "Maximum"
ContactEmail = "[email protected]"
WindowsServiceAccount = $SearchAccount
PsDscRunAsCredential = $SetupAccount
}
}
}
| 26.233333 | 67 | 0.526048 | 3.046875 |
eabe128d161f95610ff52fba39146e460229eb33
| 1,979 |
rb
|
Ruby
|
spec/services/create_passcode_service_spec.rb
|
marnen/passcoder
|
a5e14da70c2f53d092cef7b55cf197d5aa6c821f
|
[
"MIT"
] | null | null | null |
spec/services/create_passcode_service_spec.rb
|
marnen/passcoder
|
a5e14da70c2f53d092cef7b55cf197d5aa6c821f
|
[
"MIT"
] | null | null | null |
spec/services/create_passcode_service_spec.rb
|
marnen/passcoder
|
a5e14da70c2f53d092cef7b55cf197d5aa6c821f
|
[
"MIT"
] | null | null | null |
require 'rails_helper'
describe CreatePasscodeService do
shared_examples_for 'a passcode with line items' do
it 'has associated line items' do
line_items = passcode.line_items
expect(line_items.size).to be == 1
expect(line_items.first.attributes.symbolize_keys).to include line_items_attributes
end
end
describe '.call' do
let(:extra_attributes) { {} }
let(:passcode_attributes) { FactoryGirl.attributes_for :passcode }
let(:passcode) { CreatePasscodeService.call ActionController::Parameters.new passcode: passcode_attributes.merge(extra_attributes) }
context 'with valid parameters' do
it 'creates a passcode with the given attributes' do
expect(passcode).to be_a_kind_of Passcode
expect(passcode.attributes.symbolize_keys).to include passcode_attributes
end
context 'with line items' do
let(:line_items_attributes) { FactoryGirl.attributes_for :line_item, item_id: FactoryGirl.create(:item).id }
let(:extra_attributes) { {line_items_attributes: [line_items_attributes]} }
context 'creates the line items along with the passcode' do
it_behaves_like 'a passcode with line items'
end
end
end
context 'with invalid passcode parameters' do
let(:line_items_attributes) { FactoryGirl.attributes_for :line_item, item_id: FactoryGirl.create(:item).id }
let(:extra_attributes) { {code: nil, line_items_attributes: [line_items_attributes]} }
context 'it retains the line items' do
it_behaves_like 'a passcode with line items'
end
it 'retains the error messages' do
expect(passcode.errors.messages).not_to be_empty
end
context 'no line items' do
let(:extra_attributes) { {} }
it 'creates a blank line item' do
expect(passcode.line_items.size).to be == 1
expect(passcode.line_items.first.item_id).to be_nil
end
end
end
end
end
| 35.339286 | 136 | 0.700354 | 3.21875 |
b00700882368eabad6cf83f93a243cbae3a56601
| 2,793 |
py
|
Python
|
tests/test_add.py
|
mcgid/morenines
|
b5825d33ae4c44e39fc0b9763bdf371e00112b64
|
[
"MIT"
] | null | null | null |
tests/test_add.py
|
mcgid/morenines
|
b5825d33ae4c44e39fc0b9763bdf371e00112b64
|
[
"MIT"
] | 21 |
2016-04-15T19:22:12.000Z
|
2016-07-08T16:22:54.000Z
|
tests/test_add.py
|
mcgid/morenines
|
b5825d33ae4c44e39fc0b9763bdf371e00112b64
|
[
"MIT"
] | null | null | null |
import os
from click.testing import CliRunner
from conftest import tmp_chdir, assert_mn_dirs_equal
from morenines import application
def test_add_no_changes(data_dir, expected_dir):
"""No difference between source and expected."""
with tmp_chdir(data_dir):
runner = CliRunner()
result = runner.invoke(application.main, ['add'])
assert result.exit_code == 0
expected_output = u"WARNING: No action taken (supply one or more PATHS to files to add to the repository)\n"
assert result.output == expected_output
assert_mn_dirs_equal(data_dir, expected_dir)
def test_add(data_dir, expected_dir):
"""Adds a single new file to an index that doesn't have a parent"""
with tmp_chdir(data_dir):
runner = CliRunner()
result = runner.invoke(application.main, ['add', '2012/new_file.txt'])
assert result.exit_code == 0
assert_mn_dirs_equal(data_dir, expected_dir)
def test_add2(data_dir, expected_dir):
"""Adds a new file to an index that has a parent"""
with tmp_chdir(data_dir):
runner = CliRunner()
result = runner.invoke(application.main, ['add', 'new_file2.txt'])
assert result.exit_code == 0
assert_mn_dirs_equal(data_dir, expected_dir)
def test_add_path_outside_repo(data_dir, expected_dir):
"""Tries to add a path outside of the repository"""
bad_abs_path = os.path.join(os.path.dirname(data_dir), "fake_file.txt")
with tmp_chdir(data_dir):
runner = CliRunner()
result = runner.invoke(application.main, ['add', bad_abs_path])
assert result.exit_code == 1
assert_mn_dirs_equal(data_dir, expected_dir)
def test_add_multiple(data_dir, expected_dir):
"""Adds a single new file to an index that doesn't have a parent"""
with tmp_chdir(data_dir):
runner = CliRunner()
result = runner.invoke(application.main, ['add', '2012/new_file.txt', 'new_file2.txt'])
assert result.exit_code == 0
assert_mn_dirs_equal(data_dir, expected_dir)
def test_add_missing_arg(data_dir, expected_dir):
"""Tries to add without specifying path to add"""
with tmp_chdir(data_dir):
runner = CliRunner()
result = runner.invoke(application.main, ['add'])
assert result.exit_code == 0
expected_output = u"WARNING: No action taken (supply one or more PATHS to files to add to the repository)\n"
assert result.output == expected_output
assert_mn_dirs_equal(data_dir, expected_dir)
def test_add_dir(data_dir, expected_dir):
"""Tries to add a directory with a file in it"""
with tmp_chdir(data_dir):
runner = CliRunner()
result = runner.invoke(application.main, ['add', '2012/new_dir'])
assert result.exit_code == 0
assert_mn_dirs_equal(data_dir, expected_dir)
| 30.032258 | 112 | 0.703545 | 3.453125 |
1ac066ac7fe2a2b36ec212672702b70fbaf86fe0
| 3,906 |
py
|
Python
|
main.py
|
Krishna-Sivakumar/movie-night
|
2a5eb3b774a87a15f2f69fd0833e80c3f8cde351
|
[
"MIT"
] | null | null | null |
main.py
|
Krishna-Sivakumar/movie-night
|
2a5eb3b774a87a15f2f69fd0833e80c3f8cde351
|
[
"MIT"
] | null | null | null |
main.py
|
Krishna-Sivakumar/movie-night
|
2a5eb3b774a87a15f2f69fd0833e80c3f8cde351
|
[
"MIT"
] | 1 |
2021-06-03T06:57:14.000Z
|
2021-06-03T06:57:14.000Z
|
from datetime import datetime
from flask import Flask, request, render_template
import flask
import flask_login
import hashlib
import redis
import toml
config = toml.loads(
open("config.toml", "r").read()
)
auth_dict = toml.loads(
open("auth.toml", "r").read()
)
admins = auth_dict['admin']
admin_dict = {}
for i in admins:
admin_dict[i["username"]] = i
app = Flask(__name__)
app.secret_key = auth_dict["secret_key"]
r = redis.Redis(
host=config["redis"]["address"],
port=config["redis"]["port"],
db=config["redis"]["mlist_index"]
)
login_manager = flask_login.LoginManager()
login_manager.init_app(app)
def update_movie_today():
global start, movie_today, message
now = datetime.now() - start
if now.days >= 1:
start = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
movie_today = r.srandmember('mlist', 1)
if len(r.smembers('mlist')) == 0:
movie_today = []
if len(r.smembers('mlist')) > 0 and len(movie_today) == 0:
movie_today = r.srandmember('mlist', 1)
if len(movie_today) > 0 and not r.sismember('mlist', movie_today[0].decode()):
movie_today = r.srandmember('mlist', 1)
message = ""
if len(movie_today) > 0:
message = f"{movie_today[0].decode()}"
else:
message = "Add something to the list :("
start = datetime.now()
movie_today = []
update_movie_today()
autocompleteAddr = f"{config['servers']['autocomplete']['address']}:{config['servers']['autocomplete']['port']}"
class User(flask_login.UserMixin):
pass
@login_manager.user_loader
def user_loader(name):
if name not in admin_dict.keys():
return
user = User()
user.id = name
return user
@app.route('/', methods=['GET', 'POST'])
def home():
update_movie_today()
if request.method == 'POST':
if 'movie' in request.form and len(request.form['movie']) > 0:
r.sadd('mlist', request.form['movie'])
update_movie_today()
return render_template(
'main.html',
addr=autocompleteAddr,
movie_today=message
)
@app.route('/modify', methods=['GET', 'POST'])
@flask_login.login_required
def modify():
if len(request.form) > 0:
for key in request.form:
r.srem('mlist', key)
update_movie_today()
mlist = [s.decode() for s in r.smembers('mlist')]
return render_template(
'modify.html',
addr=autocompleteAddr,
mlist=mlist
)
@app.route('/view', methods=['GET'])
def viewList():
mlist = [s.decode() for s in r.smembers('mlist')]
return render_template(
'view.html',
addr=autocompleteAddr,
mlist=mlist
)
@app.route('/login', methods=['GET', 'POST'])
def login():
def validate(username, password):
if username in admin_dict.keys() and hashlib.sha256(password.encode()).hexdigest() == admin_dict[username]["password"]:
return True
return False
if request.method == 'GET':
# Return a login form here
if flask_login.current_user.is_authenticated:
return flask.redirect('/')
return render_template('login.html')
elif request.method == 'POST':
if 'username' in request.form.keys() and 'password' in request.form.keys():
if validate(request.form["username"], request.form["password"]):
user = user_loader(request.form["username"])
flask_login.login_user(user, remember=False)
return flask.redirect('/')
return flask.redirect('/login')
@app.route('/logout', methods=['GET'])
@flask_login.login_required
def logout():
flask_login.logout_user()
return flask.redirect('/')
if __name__ == '__main__':
app.run(
host=config["servers"]["website"]["address"],
port=config["servers"]["website"]["port"],
debug=True
)
| 25.2 | 127 | 0.621352 | 3.046875 |
aa71264e2d3ea8dd6819a78c9b198049140598a6
| 1,136 |
rb
|
Ruby
|
api/routes/locations.rb
|
myAmericaDevSummit2015/Ascent
|
93a2b16246469fdbd6017ceeede3c46128ba988d
|
[
"Apache-2.0"
] | null | null | null |
api/routes/locations.rb
|
myAmericaDevSummit2015/Ascent
|
93a2b16246469fdbd6017ceeede3c46128ba988d
|
[
"Apache-2.0"
] | null | null | null |
api/routes/locations.rb
|
myAmericaDevSummit2015/Ascent
|
93a2b16246469fdbd6017ceeede3c46128ba988d
|
[
"Apache-2.0"
] | null | null | null |
require 'sinatra'
require 'mongoid'
require_relative '../models/location'
require_relative '../app/locations_impl'
module Sinatra
module SampleApp
module Routing
module Locations
def self.registered(app)
locations_impl = LocationsImpl.new
# Returns a list of locations associated to a
# particular user, via the userName parameter
#
get_locations = lambda do
locations_impl.get_user_locations(params['userName']).to_json(except: :_id)
end
# Associates a location with a user, as long as the following are true:
# 1) The userName exists
# 2) It has been over 24 hours since the user was last associated to the
# given location
#
# If the score for the location is not provided, it is defaulted to 1
#
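          # Illustrative request sketch (field names are assumptions, not taken from the model):
          #   POST /locations
          #   { "userName": "jdoe", "locationName": "City Hall", "score": 2 }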
post_location = lambda do
locations_impl.create_location(JSON.parse(request.body.read)).to_json
end
app.get '/locations', &get_locations
app.post '/locations', &post_location
end
end
end
end
end
| 29.894737 | 87 | 0.621479 | 3.078125 |
b751e9acf8a3c429052bbcff08f55f9dce66b607
| 2,237 |
cs
|
C#
|
Demonstration Programs/Demonstration Programs/BintoDec.cs
|
brandon-crandall/CSC440FinalPortfolio
|
e7f8d66d7b1cc0bae46467772c68f02f2852cd2d
|
[
"MIT"
] | null | null | null |
Demonstration Programs/Demonstration Programs/BintoDec.cs
|
brandon-crandall/CSC440FinalPortfolio
|
e7f8d66d7b1cc0bae46467772c68f02f2852cd2d
|
[
"MIT"
] | null | null | null |
Demonstration Programs/Demonstration Programs/BintoDec.cs
|
brandon-crandall/CSC440FinalPortfolio
|
e7f8d66d7b1cc0bae46467772c68f02f2852cd2d
|
[
"MIT"
] | null | null | null |
/// <summary>
/// SOURCE: Homework assignment in python class. (Python code at bottom)
///
/// STUDENT: Brandon Crandall
///
/// SYNOPSIS: Create a function to change binary into decimal.
/// </summary>
using System;
using System.Collections.Generic;
using System.Text;
namespace Demonstration_Programs
{
//class ProgramOutputs
//{
// static void Main()
// {
// BintoDec test = new BintoDec();
// Console.WriteLine(test.BinToDec("1100110101"));
// Console.WriteLine(test.BinToDec("1010101011"));
// Console.WriteLine();
// //Using a built in method to check answers.
// Console.WriteLine(Convert.ToInt32("1100110101", 2).ToString());
// Console.WriteLine(Convert.ToInt32("1010101011", 2).ToString());
// }
//}
class BintoDec
{
//Method to convert a binary string to a decimal
public int BinToDec(string binary)
{
//parsing through the string to covert it into a number
int num = Int32.Parse(binary);
int decnum = 0;
int temp = 0;
//need to iterate through the number
while (num > 0)
{
                //num % 10 gives the rightmost digit (a 1 or 0); multiply it by
                //2^temp, where temp tracks the digit's position and increases
                //by one each iteration as we consume digits right to left
decnum += num % 10 * (int)Math.Pow(2, temp);
num = num / 10;
temp++;
}
return decnum;
}
}
}
//MODIFICATIONS: Created myself but had to look up several difficulties related to string conversions.
//ANALYSIS: Changes a binary string into a number then iterates through the number by dividing by 10 and using the remainder.
//Faced several challenges that required me to find a different solution in C# when compared to python.
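//WORKED EXAMPLE (illustrative): BinToDec("1101")
//  1*2^0 + 0*2^1 + 1*2^2 + 1*2^3 = 1 + 0 + 4 + 8 = 13
//  (num % 10 yields the digits right to left, matching the loop above)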
/* Sample Output
821
683
821
683
*/
/*
def bin_to_dec(bin_string):
decimal = 0
i = len(bin_string) - 1
for char in bin_string:
decimal += int (char) * pow(2, i)
i -= 1
return decimal
print(bin_to_dec("1100110101"))
print(bin_to_dec("1010101011"))
*/
| 29.434211 | 125 | 0.594099 | 3.125 |
c6cf7f0a27dec6c446025467e2a6b4ac4ea015e9
| 2,016 |
py
|
Python
|
rec_to_nwb/processing/time/timestamp_converter.py
|
LorenFrankLab/rec_to_nwb
|
d0630f414662963ebbe23aedf8f3ce07628636bc
|
[
"Apache-2.0"
] | 1 |
2021-01-20T00:26:30.000Z
|
2021-01-20T00:26:30.000Z
|
rec_to_nwb/processing/time/timestamp_converter.py
|
LorenFrankLab/rec_to_nwb
|
d0630f414662963ebbe23aedf8f3ce07628636bc
|
[
"Apache-2.0"
] | 12 |
2020-11-13T01:36:32.000Z
|
2022-01-23T20:35:55.000Z
|
rec_to_nwb/processing/time/timestamp_converter.py
|
LorenFrankLab/rec_to_nwb
|
d0630f414662963ebbe23aedf8f3ce07628636bc
|
[
"Apache-2.0"
] | 3 |
2020-10-20T06:52:45.000Z
|
2021-07-06T23:00:53.000Z
|
import logging.config
import os
import numpy as np
path = os.path.dirname(os.path.abspath(__file__))
logging.config.fileConfig(
fname=os.path.join(str(path), os.pardir, os.pardir, 'logging.conf'),
disable_existing_loggers=False)
logger = logging.getLogger(__name__)
NANOSECONDS_PER_SECOND = 1E9
class TimestampConverter:
@staticmethod
def convert_timestamps(continuous_times, timestamps):
'''Matches the trodes timestamp index from the camera to the adjusted
timestamps (in unix time) from the ephys recording.
The adjusted timestamps are the ephys recording timestamps adjusted for
jitter from the arrival times of packets from the MCU.
Timestamps from the camera that do not have matching timestamps from
the ephys recording will be marked as NaN. This can happen when the
position tracking is shut off after the ephys recording is done or
started before the ephys recording starts.
Parameters
----------
continuous_times: ndarray, shape (2, n_ephys_time)
From the continuous time file
row 0: trodestime, row 1: adjusted_systime_
timestamps: ndarray, shape (n_position_time, )
trodes timestamps relative to camera’s timing (from pos_online.dat)
Returns
-------
converted_timestamps : ndarray, shape (n_position_time,)
Timestamps from the position tracking in terms of the adjusted
timestamps. Also converted to seconds.
'''
# Find the matching timestamp index (trodestime)
timestamp_ind = np.digitize(timestamps, continuous_times[0, 1:])
converted_timestamps = (continuous_times[1, timestamp_ind] /
NANOSECONDS_PER_SECOND)
# Mark timestamps not found in continuous time as NaN
not_found = timestamps != continuous_times[0, timestamp_ind]
converted_timestamps[not_found] = np.nan
return converted_timestamps
| 36 | 79 | 0.685516 | 3.25 |
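A hedged usage sketch for TimestampConverter.convert_timestamps with synthetic arrays; it assumes the rec_to_nwb package from the record's path is importable, and the numbers are illustrative only.

import numpy as np
from rec_to_nwb.processing.time.timestamp_converter import TimestampConverter

# row 0: trodestime indices, row 1: adjusted unix times in nanoseconds
continuous_times = np.array([
    [0, 1, 2, 3, 4],
    [1.0e9, 1.1e9, 1.2e9, 1.3e9, 1.4e9],
])
# camera timestamps 1 and 2 have matching trodestime entries, 10 does not
timestamps = np.array([1, 2, 10])

converted = TimestampConverter.convert_timestamps(continuous_times, timestamps)
print(converted)  # [1.1 1.2 nan] -- seconds, with the unmatched timestamp set to NaN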
131d98c1015b6a9c577e99660062e0f60b5c31ba
| 2,429 |
dart
|
Dart
|
packages/binder/test/observer_test.dart
|
letsar/binder
|
8cb571c17be1c8c91a3085034757993c18c3b298
|
[
"MIT"
] | 177 |
2020-10-26T07:46:48.000Z
|
2022-02-22T15:03:04.000Z
|
packages/binder/test/observer_test.dart
|
letsar/binder
|
8cb571c17be1c8c91a3085034757993c18c3b298
|
[
"MIT"
] | 19 |
2020-10-29T00:25:09.000Z
|
2021-08-01T23:13:21.000Z
|
packages/binder/test/observer_test.dart
|
letsar/binder
|
8cb571c17be1c8c91a3085034757993c18c3b298
|
[
"MIT"
] | 9 |
2020-10-27T19:15:06.000Z
|
2022-01-28T02:43:40.000Z
|
import 'package:binder/src/build_context_extensions.dart';
import 'package:binder/src/core.dart';
import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';
void main() {
group('DelegatingStateObserver', () {
testWidgets(
      'An observer should be called for every change with the expected parameters',
(tester) async {
late BuildContext ctx;
final logs = <String>[];
final counterRef = StateRef(0, name: 'counter');
bool onStateUpdated<T>(
StateRef<T> ref,
T oldState,
T newState,
Object? action,
) {
logs.add(
'[${ref.key.name}#$action] changed from $oldState to $newState',
);
return true;
}
await tester.pumpWidget(
BinderScope(
observers: [DelegatingStateObserver(onStateUpdated)],
child: Builder(
builder: (context) {
ctx = context;
return const SizedBox();
},
),
),
);
expect(logs, isEmpty);
ctx.write(counterRef, 2, 'a');
expect(logs, [
'[counter#a] changed from 0 to 2',
]);
ctx.write(counterRef, 5, 'b');
expect(logs, [
'[counter#a] changed from 0 to 2',
'[counter#b] changed from 2 to 5',
]);
},
);
testWidgets(
      'An observer should be called for every change that occurs below it',
(tester) async {
late BuildContext ctx;
final logs = <String>[];
final counterRef = StateRef(0, name: 'counter');
bool onStateUpdated<T>(
StateRef<T> ref,
T oldState,
T newState,
Object? action,
) {
logs.add('[${ref.key.name}] changed from $oldState to $newState');
return true;
}
await tester.pumpWidget(
BinderScope(
child: BinderScope(
observers: [DelegatingStateObserver(onStateUpdated)],
child: Builder(
builder: (context) {
ctx = context;
return const SizedBox();
},
),
),
),
);
expect(logs, isEmpty);
ctx.write(counterRef, 2);
expect(logs, ['[counter] changed from 0 to 2']);
},
);
});
}
| 26.402174 | 80 | 0.503911 | 3.046875 |
b00709cfcd2390ee9d65927e10fce5a3163fcad2
| 2,732 |
py
|
Python
|
mydb/mydb_setup.py
|
dappsunilabs/DB4SCI
|
54bdd03aaa12957e622c921b263e187740a8b2ae
|
[
"Apache-2.0"
] | 7 |
2018-12-05T19:18:20.000Z
|
2020-11-21T07:27:54.000Z
|
mydb/mydb_setup.py
|
dappsunilabs/DB4SCI
|
54bdd03aaa12957e622c921b263e187740a8b2ae
|
[
"Apache-2.0"
] | 8 |
2018-04-25T06:02:41.000Z
|
2020-09-08T21:55:56.000Z
|
mydb/mydb_setup.py
|
FredHutch/DB4SCI
|
cc950a36b6b678fe16c1c91925ec402581636fc0
|
[
"Apache-2.0"
] | 2 |
2019-11-14T02:09:09.000Z
|
2021-12-28T19:05:51.000Z
|
#!/usr/bin/python
import os
import time
import postgres_util
import container_util
import admin_db
from send_mail import send_mail
from config import Config
"""
import mydb_setup
mydb_setup.mydb_setup()
"""
def mydb_setup():
"""Create mydb_admin database if it does not exist.
DB4SCI depends on mydb_admin database.
"""
if container_util.container_exists('mydb_admin'):
print('Administrative DB is running.\nStarting DB4Sci')
return
print('Create Administrative DB')
params = setup_data()
dbtype = params['dbtype']
con_name = params['dbname']
result = postgres_util.create(params)
# wait for container to startup
print('Container Id: %s' % params['con']['Id'] )
print('Waiting for mydb_admin to start')
time.sleep(20)
badness = 0
status = False
while (not status) and (badness < 6):
badness += 1
status = postgres_util.auth_check(params['dbuser'],
params['dbuserpass'],
params['port'])
print('mydb_admin setup status: %s count: %d' % (status, badness))
time.sleep(5)
if not status:
print('mydb_admin restart error. Could not setup db')
return
print('Setup mydb_admin tables')
admin_db.init_db()
inspect = container_util.inspect_con(params['con']['Id'])
c_id = admin_db.add_container(inspect, params)
state_info = admin_db.get_container_state(con_name)
description = 'created %s by user %s' % (con_name, params['username'])
admin_db.add_container_log(c_id, con_name, 'created', description)
def setup_data():
""" create parameters for admin database
"""
dbtype = 'Postgres'
params = {'dbname': 'mydb_admin',
'dbtype': dbtype,
'dbengine': dbtype,
'port': Config.admin_port,
'dbuser': Config.accounts['admindb']['admin'],
'dbuserpass': Config.accounts['admindb']['admin_pass'],
'db_vol': "/opt/DB4SCI/",
'bak_vol': "/opt/DB4SCI/backup",
'support': 'Basic',
'owner': Config.accounts['admindb']['owner'],
'description': 'Test the Dev',
'contact': Config.accounts['admindb']['contact'],
'life': 'long',
'backup_type': 'User',
'backup_freq': 'Daily',
'backup_life': '6',
'backup_window': 'any',
'phi': 'No',
'pitr': 'n',
'maintain': 'standard',
'username': Config.accounts['admindb']['admin'],
'image': Config.info[dbtype]['images'][0][1],
}
return params
| 33.728395 | 74 | 0.572474 | 3.09375 |
8c82bf9b8a888cdc50456b434c275cce92c2cd3d
| 1,208 |
go
|
Go
|
queue.go
|
duomi520/utils
|
e4523ecaadbf0e6a6b70b52f0bcdcd8eeb88a3df
|
[
"MIT"
] | null | null | null |
queue.go
|
duomi520/utils
|
e4523ecaadbf0e6a6b70b52f0bcdcd8eeb88a3df
|
[
"MIT"
] | null | null | null |
queue.go
|
duomi520/utils
|
e4523ecaadbf0e6a6b70b52f0bcdcd8eeb88a3df
|
[
"MIT"
] | null | null | null |
package utils
import (
"runtime"
"sync/atomic"
)
//LockList is a mutex-guarded slice intended for read-heavy, infrequently written data.
//On modification the new slice atomically replaces the old one; the old slice is reclaimed by the GC.
type LockList struct {
//0-unlock 1-lock
mutex int64
slice atomic.Value
}
//NewLockList creates a new LockList
func NewLockList() *LockList {
l := LockList{}
var data []any
l.slice.Store(data)
return &l
}
//Add appends an element to the list
func (l *LockList) Add(element any) {
for {
if atomic.CompareAndSwapInt64(&l.mutex, 0, 1) {
base := l.slice.Load().([]any)
size := len(base)
data := make([]any, size+1)
copy(data[:size], base)
data[size] = element
l.slice.Store(data)
atomic.StoreInt64(&l.mutex, 0)
return
}
runtime.Gosched()
}
}
//Remove deletes every element for which judge returns true
func (l *LockList) Remove(judge func(any) bool) {
for {
if atomic.CompareAndSwapInt64(&l.mutex, 0, 1) {
base := l.slice.Load().([]any)
size := len(base)
data := make([]any, 0, size)
for i := 0; i < size; i++ {
if !judge(base[i]) {
data = append(data, base[i])
}
}
l.slice.Store(data)
atomic.StoreInt64(&l.mutex, 0)
return
}
runtime.Gosched()
}
}
//List returns the current underlying slice
func (l *LockList) List() []any {
return l.slice.Load().([]any)
}
// https://github.com/yireyun/go-queue
// https://github.com/Workiva/go-datastructures
| 17.764706 | 49 | 0.620033 | 3.125 |
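A short usage sketch for LockList; the import path is taken from the record's repository name and is an assumption.

package main

import (
	"fmt"

	"github.com/duomi520/utils" // assumed module path, from the record's repo name
)

func main() {
	l := utils.NewLockList()
	l.Add(1)
	l.Add(2)
	l.Add(3)
	// Remove every element the judge function reports as true.
	l.Remove(func(v any) bool { return v.(int) == 2 })
	fmt.Println(l.List()) // [1 3]
}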
af54bd42d587209f4bc971b57f924918e09b2dac
| 2,356 |
py
|
Python
|
pkg/extractor.py
|
sveatlo/detektilo
|
d4a2f4abb90be5238ab537e648f35a2e4dc703a5
|
[
"MIT"
] | null | null | null |
pkg/extractor.py
|
sveatlo/detektilo
|
d4a2f4abb90be5238ab537e648f35a2e4dc703a5
|
[
"MIT"
] | null | null | null |
pkg/extractor.py
|
sveatlo/detektilo
|
d4a2f4abb90be5238ab537e648f35a2e4dc703a5
|
[
"MIT"
] | null | null | null |
import cv2
import imagehash
import xml.etree.ElementTree
import sys
from PIL import Image
from pathlib import Path
hashes = {}
class Extractor():
def __init__(self, root_path, video_file_path, images_dir, skipped_frames=1000, interactive=False):
self.video_file = str(video_file_path)
event_name = list(video_file_path.parts)[
len(list(root_path.parts)) - 1:-1]
self.images_path = Path(
"{}/{}".format(images_dir, "-".join(event_name)))
self.skipped_frames = skipped_frames
self.cap = None
self.frames_cnt = 0
self.interactive = interactive
def process(self):
self.cap = cv2.VideoCapture(self.video_file)
self.frames_cnt = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT))
self.images_path.mkdir(parents=True, exist_ok=True)
i = 0
while self.cap.isOpened():
# get image
ok, frame = self.cap.read()
if not ok:
break
# skip
i += self.skipped_frames
if i > self.frames_cnt:
i = self.frames_cnt
self.cap.set(cv2.CAP_PROP_POS_FRAMES, i)
# compute a perceptual hash and skip near-duplicate frames
image = Image.fromarray(frame)
hash = str(imagehash.phash(image, hash_size=12))
if hash in hashes:
continue
hashes[hash] = True
# show image
if self.interactive:
cv2.imshow('frame', frame)
b = False
r = False
while True:
k = cv2.waitKey(0)
if k & 0xFF == ord('q'): # quit
b = True
break
elif k & 0xFF == ord('r'): # reject
r = True
break
elif k & 0xFF == ord('a'): # accept
break
if b:
break
elif r:
continue # skip to next frame
# save image
image_path = "{}/{}.jpg".format(str(self.images_path), i)
if not Path(image_path).exists():
cv2.imwrite(image_path, frame)
try:
self.images_path.rmdir()
except Exception as e:
pass
| 29.45 | 103 | 0.490662 | 3.359375 |
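A hedged usage sketch for the Extractor class above; the paths are hypothetical, and OpenCV, Pillow and imagehash must be installed for process() to run.

from pathlib import Path
from pkg.extractor import Extractor  # module path taken from the record

root = Path("/data/events")                # hypothetical dataset root
video = root / "event-2020" / "cam1.mp4"   # hypothetical video below the root
extractor = Extractor(root, video, "/data/frames", skipped_frames=500, interactive=False)
# Writes de-duplicated frames under /data/frames/, in a folder named from the
# path segments between the root and the video file.
extractor.process()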
c9357bb53846e809a9beb9054e0a0ffa1dc3fd53
| 1,371 |
ts
|
TypeScript
|
test/function.test.ts
|
nullabletypo/atomic
|
6493d1e247d4674a2cc0ec5ad39443e370c334eb
|
[
"MIT"
] | null | null | null |
test/function.test.ts
|
nullabletypo/atomic
|
6493d1e247d4674a2cc0ec5ad39443e370c334eb
|
[
"MIT"
] | 8 |
2020-03-22T02:34:36.000Z
|
2021-09-23T02:26:14.000Z
|
test/function.test.ts
|
nullabletypo/atomic
|
6493d1e247d4674a2cc0ec5ad39443e370c334eb
|
[
"MIT"
] | null | null | null |
import { after, before, compose, delayed, once } from '../src/function'
test('once', () => {
const origin = (a: number, b: number) => a + b
const mock = jest.fn(origin)
const fn = once(mock)
expect(fn(1, 1)).toBe(2)
expect(fn(1, 1)).toBe(2)
expect(mock).toBeCalledTimes(1)
})
test('before', () => {
const origin = (i: number) => i
const mock = jest.fn(origin)
const fn = before(2, mock)
expect(fn(1)).toBe(1)
expect(fn(1)).toBe(1)
expect(fn(1)).toBeUndefined()
expect(mock).toHaveBeenCalledTimes(2)
})
test('after', () => {
const origin = (n: number) => n
const mock = jest.fn(origin)
const fn = after(2, mock)
expect(fn(1)).toBeUndefined()
expect(fn(1)).toBe(1)
expect(fn(1)).toBe(1)
expect(mock).toHaveBeenCalledTimes(2)
})
test('delayed', async () => {
expect.assertions(1)
const fn = delayed(3, (i: number) => i)
expect(await fn(1)).toBe(1)
})
test('compose', () => {
const fn = compose(
(a: number, b: number) => a + b,
(n: number) => n * n,
(s: number) => String(s),
)
expect(fn(1, 2)).toBe('9')
})
test('compose.async', async () => {
expect.assertions(2)
const fn = compose.async(
(a: number, b: number) => a + b,
async (n: number) => n * n,
(s: number) => String(s),
)
const promise = fn(1, 2)
expect(promise).toBeInstanceOf(Promise)
expect(await promise).toBe('9')
})
| 23.637931 | 71 | 0.585704 | 3.328125 |
55f9a6c215b3346244cc13eeafbf040dab0ee9ef
| 1,281 |
sql
|
SQL
|
SistemaUniversidad.BackEnd.BD/Stored Procedures/SP_CursosEnMatricula_Actualizar.sql
|
estibent10/SistemaUniversidad.BackEnd
|
bf80b401bd40178383c7c033f8cc8fb4ea418ce7
|
[
"MIT"
] | null | null | null |
SistemaUniversidad.BackEnd.BD/Stored Procedures/SP_CursosEnMatricula_Actualizar.sql
|
estibent10/SistemaUniversidad.BackEnd
|
bf80b401bd40178383c7c033f8cc8fb4ea418ce7
|
[
"MIT"
] | null | null | null |
SistemaUniversidad.BackEnd.BD/Stored Procedures/SP_CursosEnMatricula_Actualizar.sql
|
estibent10/SistemaUniversidad.BackEnd
|
bf80b401bd40178383c7c033f8cc8fb4ea418ce7
|
[
"MIT"
] | 1 |
2021-12-20T07:03:59.000Z
|
2021-12-20T07:03:59.000Z
|
CREATE PROCEDURE SP_CursosEnMatricula_Actualizar
@CodigoMatricula INT,
@CodigoCurso INT,
@FechaModificacion DATE = NULL,
@ModificadoPor VARCHAR (60),
@ExisteError BIT OUTPUT,
@DetalleError VARCHAR(60) OUTPUT
AS
BEGIN TRY
	BEGIN TRANSACTION
	-- GETDATE() is not valid as a parameter default, so apply it here when no date is supplied
	SET @FechaModificacion = ISNULL(@FechaModificacion, GETDATE())
DECLARE @ExisteCursoEnMatricula BIT
SET @ExisteCursoEnMatricula = dbo.FN_CursosEnMatricula_VerificaExistenciaPorId(@CodigoMatricula,@CodigoCurso)
IF(@ExisteCursoEnMatricula = 1)
BEGIN
UPDATE CursosEnMatricula
SET
CodigoMatricula = @CodigoMatricula,
CodigoCurso = @CodigoCurso,
FechaModificacion = @FechaModificacion,
ModificadoPor = @ModificadoPor
WHERE
CodigoMatricula = @CodigoMatricula AND CodigoCurso = @CodigoCurso
SET @ExisteError = 0
END
ELSE
BEGIN
SET @ExisteError = 1
		SET @DetalleError = 'El Curso en Matricula: ' + CAST(@CodigoMatricula AS VARCHAR(10)) + ' , ' + CAST(@CodigoCurso AS VARCHAR(10)) + ', No Existe'
END
COMMIT TRANSACTION
END TRY
BEGIN CATCH
ROLLBACK TRANSACTION
DECLARE @NumeroDeError INT
EXEC @NumeroDeError = SP_ErroresBD_Insertar @ModificadoPor
SET @ExisteError = 1
	SET @DetalleError = 'Error actualizando el Curso en Matricula: ' + CAST(@CodigoMatricula AS VARCHAR(10)) + ' , ' + CAST(@CodigoCurso AS VARCHAR(10)) + '. Número de Error: ' + CAST(@NumeroDeError AS VARCHAR(10))
END CATCH
| 28.466667 | 149 | 0.71975 | 3.140625 |
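A hedged call sketch for the procedure above; the codes and user name are illustrative, and @FechaModificacion is omitted so the GETDATE() fallback applies.

DECLARE @ExisteError BIT, @DetalleError VARCHAR(60)

EXEC SP_CursosEnMatricula_Actualizar
    @CodigoMatricula = 1001,     -- illustrative matricula code
    @CodigoCurso = 12,           -- illustrative course code
    @ModificadoPor = 'admin',
    @ExisteError = @ExisteError OUTPUT,
    @DetalleError = @DetalleError OUTPUT

SELECT @ExisteError AS ExisteError, @DetalleError AS DetalleError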
40aa36175547927e0e42858621c63cf39bf37ebe
| 918 |
ts
|
TypeScript
|
src/helpers/error.ts
|
YGT-cxy/ts-axios
|
2785d934b25122908cb5a0a2cbcf67d5afa1357c
|
[
"MIT"
] | null | null | null |
src/helpers/error.ts
|
YGT-cxy/ts-axios
|
2785d934b25122908cb5a0a2cbcf67d5afa1357c
|
[
"MIT"
] | null | null | null |
src/helpers/error.ts
|
YGT-cxy/ts-axios
|
2785d934b25122908cb5a0a2cbcf67d5afa1357c
|
[
"MIT"
] | null | null | null |
import { AxiosRequestConfig, AxiosResponse } from './../types'
// Interface describing the arguments accepted by AxiosError
interface AxiosErrorArgs {
  message: string // the error message
  config: AxiosRequestConfig // the request config
  code?: string | null // status / error code
  request?: any // the underlying request instance
  response?: AxiosResponse // the response object
}
// Error class used when a request or its response fails
class AxiosError extends Error {
isAxiosError: boolean
config: AxiosRequestConfig
code?: string | number | null
request?: any
response?: AxiosResponse
constructor(args: AxiosErrorArgs) {
const { message, config, code, request, response } = args
super(message)
this.isAxiosError = true
this.config = config
this.code = code
this.request = request
this.response = response
Object.setPrototypeOf(this, AxiosError.prototype)
}
}
/**
 * Create the error object for a failed axios request
 * @param args the argument collection
*/
export function createError(args: AxiosErrorArgs): any {
return new AxiosError(args)
}
| 22.95 | 62 | 0.711329 | 3.03125 |
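A hedged usage sketch for createError; the config fields shown (url, method) are assumptions about AxiosRequestConfig in this ts-axios project.

import { createError } from './helpers/error'

const error = createError({
  message: 'Request failed with status code 404',
  config: { url: '/user/1', method: 'get' } as any, // hypothetical minimal config
  code: '404',
  request: null,
})

if (error.isAxiosError) {
  console.log(error.message, error.code) // the custom fields survive the Error subclassing
}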
a469c217cb2965333cb4a18d108a5d813b205666
| 2,421 |
php
|
PHP
|
app/Http/Controllers/API/BookController.php
|
ThuHtetDev/Rentook_02
|
7e5f09bc3ada3020a1be17102cdc9a4484f89058
|
[
"MIT"
] | null | null | null |
app/Http/Controllers/API/BookController.php
|
ThuHtetDev/Rentook_02
|
7e5f09bc3ada3020a1be17102cdc9a4484f89058
|
[
"MIT"
] | null | null | null |
app/Http/Controllers/API/BookController.php
|
ThuHtetDev/Rentook_02
|
7e5f09bc3ada3020a1be17102cdc9a4484f89058
|
[
"MIT"
] | null | null | null |
<?php
namespace App\Http\Controllers\API;
use Illuminate\Http\Request;
use App\Http\Controllers\API\BaseController as BaseController;
use App\Http\Resources\BookResource;
use App\Http\Resources\BookResourceCollection;
use App\Book;
use Validator;
class BookController extends BaseController
{
public function index()
{
$book = Book::all();
return $this->sendRequest(new BookResourceCollection($book),"All books are in view");
}
public function create()
{
//
}
public function store(Request $request)
{
$validator = Validator::make($request->all(), [
'name' => 'required',
'description' => 'required'
]);
if($validator->fails()){
return $this->errorRequest('Validation Error.', $validator->errors());
}
// $validation = $this->apiValidation($request->all(),[
// 'name' => 'required',
// 'description' => 'required'
// ]);
// if($validation instanceof Response){
// return $validation;
// }
$bookStore = Book::create($request->all());
return $this->sendRequest($bookStore,"New Book is successfully added in view");
}
public function show($id)
{
$bookDetail = Book::find($id);
if(is_null( $bookDetail)){
return $this->notFoundError();
}
return $this->sendRequest(new BookResource($bookDetail),"Detail Book is in view");
}
public function update(Request $request, $id)
{
$bookEdit = Book::find($id);
if(is_null( $bookEdit)){
return $this->notFoundError();
}
$validator = Validator::make($request->all(), [
'name' => 'required',
'description' => 'required'
]);
if($validator->fails()){
return $this->errorRequest('Validation Error.', $validator->errors());
}
$bookEdit->name = $request['name'];
$bookEdit->description = $request['description'];
$bookEdit->save();
return $this->sendRequest($bookEdit,"Book is successfully edited in view");
}
    public function destroy($id)
    {
        $bookDelete = Book::find($id);
        if(is_null($bookDelete)){
            return $this->notFoundError();
        }
        $bookDelete->delete();
        return $this->giveMsg("Selected Book is successfully deleted in view",202);
    }
public function validated(){
}
}
| 29.168675 | 93 | 0.567534 | 3.09375 |
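A hedged routing sketch for BookController; it assumes a standard Laravel routes/api.php, and the exact registration syntax depends on the Laravel version in use.

// routes/api.php
use App\Http\Controllers\API\BookController;

// Maps index/store/show/update/destroy to the controller methods above.
Route::apiResource('books', BookController::class);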
71a7eb306ae3a8fe7053737ba8c9f7115ebbb2d3
| 1,462 |
rs
|
Rust
|
src/main.rs
|
marti1125/terminal_utils
|
b685a43aa93368d27c89ed98fca9021584b7801c
|
[
"MIT"
] | null | null | null |
src/main.rs
|
marti1125/terminal_utils
|
b685a43aa93368d27c89ed98fca9021584b7801c
|
[
"MIT"
] | null | null | null |
src/main.rs
|
marti1125/terminal_utils
|
b685a43aa93368d27c89ed98fca9021584b7801c
|
[
"MIT"
] | null | null | null |
use std::env;
use std::process::Command;
fn main() {
for argument in env::args() {
if argument.len() >= 10 {
// To list any process listening to the port
let open_port = &argument[..10];
if open_port == "open_port=" {
let port = &argument[10..argument.len()];
let cmd = "-i:";
Command::new("lsof")
.args(&[cmd.to_owned() + port])
.spawn()
.expect("failed to execute process");
}
}
if argument.len() >= 15 {
// Kill process in the port
let open_port_kill = &argument[..15];
if open_port_kill == "open_port_kill=" {
let port = &argument[15..argument.len()];
let output = Command::new("lsof")
.args(&["-t", &format!("-i:{port}", port=port), "-sTCP:LISTEN"])
.output()
.expect("failed to execute process");
let pid = String::from_utf8_lossy(&output.stdout);
println!("{:?}", &pid[0..5]);
let mut lines = pid.lines();
println!("{:?}", lines.next());
Command::new("kill")
.args(&["-9", &pid[0..5]])
.spawn()
.expect("failed to execute process");
}
}
}
}
| 24.366667 | 84 | 0.413133 | 3.3125 |
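Hedged invocation examples derived from the argument parsing above; the binary name terminal_utils is assumed from the repository name.

terminal_utils open_port=8080        # list processes listening on port 8080 (lsof -i:8080)
terminal_utils open_port_kill=8080   # look up the listener's PID and kill -9 it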
42f67bb8ada877e33dda59c127a0cf639ae96f0e
| 307 |
sql
|
SQL
|
DB-with-C#/Labs-And-Homeworks/Databases Basics - MS SQL Server/06. Built-in functions - Exercise/10RankEmployeesBySalary.sql
|
veloman86/SoftUni-Software-Engineering
|
20348f2335dc4256a5fe5622569104eaa28b2131
|
[
"MIT"
] | 1 |
2020-02-05T23:22:02.000Z
|
2020-02-05T23:22:02.000Z
|
DB-with-C#/Labs-And-Homeworks/Databases Basics - MS SQL Server/06. Built-in functions - Exercise/10RankEmployeesBySalary.sql
|
veloman86/SoftUni-Software-Engineering
|
20348f2335dc4256a5fe5622569104eaa28b2131
|
[
"MIT"
] | null | null | null |
DB-with-C#/Labs-And-Homeworks/Databases Basics - MS SQL Server/06. Built-in functions - Exercise/10RankEmployeesBySalary.sql
|
veloman86/SoftUni-Software-Engineering
|
20348f2335dc4256a5fe5622569104eaa28b2131
|
[
"MIT"
] | 7 |
2019-07-09T12:53:25.000Z
|
2021-01-05T16:07:54.000Z
|
SELECT EmployeeID,
FirstName,
LastName,
Salary,
DENSE_RANK() OVER (
PARTITION BY Salary
ORDER BY EmployeeID
)
[Rank]
FROM Employees
WHERE Salary BETWEEN 10000 AND 50000
ORDER BY Salary
DESC
| 23.615385 | 39 | 0.495114 | 3.046875 |
39308b78fd66d3b216155fa729174e751b15f79d
| 3,038 |
py
|
Python
|
SnP500/ARMA.py
|
tillaczel/LSTM-GRU-RF-predicting-SP500
|
165bda67dbd613181a52e73d014996a6035762fc
|
[
"MIT"
] | null | null | null |
SnP500/ARMA.py
|
tillaczel/LSTM-GRU-RF-predicting-SP500
|
165bda67dbd613181a52e73d014996a6035762fc
|
[
"MIT"
] | null | null | null |
SnP500/ARMA.py
|
tillaczel/LSTM-GRU-RF-predicting-SP500
|
165bda67dbd613181a52e73d014996a6035762fc
|
[
"MIT"
] | null | null | null |
from pandas import read_csv
from pandas import datetime
from pandas import DataFrame
from pmdarima.arima import auto_arima
import matplotlib.pyplot as plt
import numpy as np
import time
import pandas as pd
from manipulate_data import *
def predict(coef, history):
yhat = 0.0
for i in range(1, len(coef)+1):
yhat += coef[i-1] * history[-i]
return yhat
def train_ARMA(number_of_study_periods, study_periods, frequency_index, frequencies, frequencies_number_of_samples):
ARMA_start_time = time.time()
model_results = np.ones((number_of_study_periods,2))*np.Inf
model_names = [None]*number_of_study_periods
train_size, valid_size, test_size = data_split(study_periods)
mse = np.zeros((number_of_study_periods,2))
parameters = np.zeros((number_of_study_periods,2))
predictions = np.zeros((number_of_study_periods,study_periods.shape[2]))
predictions[:] = np.nan
for period in range(number_of_study_periods):
X = study_periods[0,period]
train, test = X[:train_size+valid_size], X[train_size+valid_size:]
mean = np.mean(train)
std = np.std(train)
train_norm, test_norm = (train-mean)/std, (test-mean)/std
# fit model
model = auto_arima(train_norm, exogenous=None, start_p=0, start_q=0, max_p=5, max_q=5, max_order=10, seasonal=False,\
stationary=True, information_criterion='bic', alpha=0.05, test='kpss', stepwise=True, n_jobs=1,\
solver='nm', maxiter=1000, disp=0, suppress_warnings=True, error_action='ignore',\
return_valid_fits=False, out_of_sample_size=0, scoring='mse')
mse[period,0] = np.mean(np.square(train-(model.predict_in_sample()*std+mean)))
forecast = list()
for t in range(len(test_norm)):
yhat = model.predict(n_periods=1)[0]
model.arima_res_.model.endog = np.append(model.arima_res_.model.endog, [test_norm[t]])
forecast.append(yhat)
forecast = np.array(forecast)*std+mean
mse[period,1] = np.mean(np.square(forecast-test))
predictions[period,-len(forecast):] = forecast
parameters[period] = [int(model.order[0]), int(model.order[2])]
print(f'Period: {period}, order: {parameters[period]}, mse: {mse[period]}')
pd.DataFrame(parameters).to_csv('results/ARMA_names_frequency_'+str(frequencies[frequency_index])+'.csv',\
index=False, header=False)
pd.DataFrame(mse).to_csv('results/ARMA_mse_frequency_'+str(frequencies[frequency_index])+'.csv',\
index=False, header=False)
pd.DataFrame(predictions).to_csv('results/ARMA_predictions_frequency_'+str(frequencies[frequency_index])+'.csv',\
index=False, header=False)
print(f'ARMA training time: {np.round((time.time()-ARMA_start_time)/60,2)} minutes')
return parameters, mse, predictions
| 46.738462 | 125 | 0.64944 | 3.375 |
ff64bb59b460b0cdf0f109dac57e797cdbbace77
| 10,038 |
py
|
Python
|
physipy/quantity/dimension.py
|
mocquin/physipy
|
a44805dbf4e68544c987e07564dd4a8d50be8b4c
|
[
"MIT"
] | 5 |
2021-01-23T11:23:07.000Z
|
2022-02-28T15:38:58.000Z
|
physipy/quantity/dimension.py
|
mocquin/physipy
|
a44805dbf4e68544c987e07564dd4a8d50be8b4c
|
[
"MIT"
] | null | null | null |
physipy/quantity/dimension.py
|
mocquin/physipy
|
a44805dbf4e68544c987e07564dd4a8d50be8b4c
|
[
"MIT"
] | 2 |
2020-11-07T20:08:08.000Z
|
2021-06-09T02:58:04.000Z
|
# !/usr/bin/env python
# -*- coding: utf-8 -*-
"""allows manipulating physical Dimension objects.
PROPOSITIONS:
* method to return a latex-formated str ?
* change the str/repr style to a table-view of the dimension content ?
* should sr be just a unit with dimension rad**2 ?
* add a full-named repr ? (ex: "length/time")
* should Dimension implement add/sub operation (allowed when dims are equal) ?
* change the dimension representation from dict to array (faster) ?
* allow construction with strings (Dimension("m**2") or Dimension ("L**2")) ?
* could define a contains method to check if a dimension is not 0
* try to not rely on numpy/sympy
* should allow complex exponent ?
* move has_integer_dimension from Quantity to Dimension ?
* allow None definition with Dimension() ?
* implementation should not rely on the dimension system choosen
PLEASE NOTE :
- rad and sr are not base SI-units, but were added for convenience. They can be
deleted if not needed, but update tests in consequence.
- this module relies on:
- sympy to compute the concatenated representation of the Dimension object
- numpy to check if the dimension powers are scalars
"""
import json
import os
import sympy as sp
import numpy as np
from sympy.parsing.sympy_parser import parse_expr
import sympy.printing.latex as latex
dirname = os.path.dirname(__file__)
with open(os.path.join(dirname, "dimension.txt")) as file:
SI_UNIT_SYMBOL = json.load(file)
SI_SYMBOL_LIST = list(SI_UNIT_SYMBOL.keys())
NO_DIMENSION_STR = "no-dimension"
NULL_SI_DICT = {dim: 0 for dim in SI_SYMBOL_LIST}
def parse_str_to_dic(exp_str):
parsed = parse_expr(exp_str)
exp_dic = {str(key):value for key,value in parsed.as_powers_dict().items()}
return exp_dic
def check_pattern(exp_str, symbol_list):
exp_dic = parse_str_to_dic(exp_str)
return set(exp_dic.keys()).issubset(set(symbol_list))
class DimensionError(Exception):
"""Exception class for dimension errors."""
def __init__(self, dim_1, dim_2, binary=True):
"""Init method of DimensionError class."""
if binary:
self.message = ("Dimension error : dimensions of "
"operands are {} and {}, and are "
"differents.").format(str(dim_1), str(dim_2))
else:
self.message = ("Dimension error : dimension is {} "
"but should be {}").format(str(dim_1), str(dim_2))
def __str__(self):
"""Str method of DimensionError class."""
return self.message
class Dimension(object):
"""Allows to manipulate physical dimensions."""
# DEFAULT REPR LATEX can be used to change the way a Dimension
# object is displayed in JLab
DEFAULT_REPR_LATEX = "dim_dict" # "SI_unit"
def __init__(self, definition):
"""Allow the creation of Dimension object with 3 possibile ways."""
self.dim_dict = NULL_SI_DICT.copy()
if definition is None:
pass # dim_dict already initialized
# example : {"L":1, "T":-2}
elif (isinstance(definition, dict) and
set(list(definition.keys())).issubset(SI_SYMBOL_LIST)): #and
#all([np.isscalar(v) for v in definition.values()])):
for dim_symbol, dim_power in definition.items():
self.dim_dict[dim_symbol] = dim_power
# example : "L"
elif definition in list(self.dim_dict.keys()):
self.dim_dict[definition] = 1
# example : "L**2/T**3"
elif (isinstance(definition, str) and check_pattern(definition, SI_UNIT_SYMBOL.keys())):
definition = parse_str_to_dic(definition)
for dim_symbol, dim_power in definition.items():
if dim_power == int(dim_power):
dim_power = int(dim_power)
self.dim_dict[dim_symbol] = dim_power
# example : "m"
elif (isinstance(definition, str) and check_pattern(definition, SI_UNIT_SYMBOL.values())):
definition = parse_str_to_dic(definition)
for my_si_symbol, dim_power in definition.items():
if dim_power == int(dim_power):
dim_power = int(dim_power)
dim_symbol = [dim_symbol for dim_symbol, si_symbol in SI_UNIT_SYMBOL.items() if my_si_symbol == si_symbol][0]
self.dim_dict[dim_symbol] = dim_power
else:
raise TypeError(("Dimension can be constructed with either a "
"string among {}, either None, either a "
"dictionnary with keys included in {}, "
"either a string of sympy expression with "
"those same keys "
"but not {}.").format(SI_SYMBOL_LIST,
SI_SYMBOL_LIST,
definition))
def __str__(self):
"""Concatenate symbol-wise the content of the dim_dict attribute."""
return compute_str(self.dim_dict, NO_DIMENSION_STR)
def __format__(self, format_spec):
raw = self.__str__()
return format(raw, format_spec)
def __repr__(self):
"""Return the dim_dict into a <Dimension : ...> tag."""
return "<Dimension : " + str(self.dim_dict) + ">"
def _repr_latex_(self):
"""Latex repr hook for IPython."""
if self.DEFAULT_REPR_LATEX == "dim_dict":
expr_dim = expand_dict_to_expr(self.dim_dict)
return "$" + latex(expr_dim) + "$"
else:# self.DEFAULT_REPR_LATEX == "SI_unit":
return self.latex_SI_unit()
def __mul__(self, y):
"""Allow the multiplication of Dimension objects."""
if isinstance(y, Dimension):
new_dim_dict = {d: self.dim_dict[d] + y.dim_dict[d] for d in self.dim_dict.keys()}
return Dimension(new_dim_dict)
else:
raise TypeError(("A dimension can only be multiplied "
"by another dimension, not {}.").format(y))
__rmul__ = __mul__
def __truediv__(self, y):
"""Allow the division of Dimension objects."""
if isinstance(y, Dimension):
new_dim_dict = {d: self.dim_dict[d] - y.dim_dict[d] for d in self.dim_dict.keys()}
return Dimension(new_dim_dict)
#elif y == 1: # allowing division by one
# return self
else:
raise TypeError(("A dimension can only be divided "
"by another dimension, not {}.").format(y))
def __rtruediv__(self, x):
"""Only used to raise a TypeError."""
        if x == 1: # allowing one-division
#return self.inverse()
return self**-1
else:
raise TypeError("A Dimension can only divide 1 to be inverted.")
def __pow__(self, y):
"""Allow the elevation of Dimension objects to a real power."""
if np.isscalar(y):
new_dim_dict = {d: self.dim_dict[d] * y for d in self.dim_dict.keys()}
return Dimension(new_dim_dict)
else:
raise TypeError(("The power of a dimension must be a scalar,"
"not {}").format(type(y)))
def __eq__(self, y):
"""Dimensions are equal if their dim_dict are equal."""
return self.dim_dict == y.dim_dict
#def __ne__(self, y):
# """Return not (self == y)."""
# return not self.__eq__(y)
#def inverse(self):
# """Inverse the dimension by taking the negative of the powers."""
# inv_dict = {key: -value for key, value in self.dim_dict.items()}
# return Dimension(inv_dict)
def siunit_dict(self):
"""Return a dict where keys are SI unit
string, and value are powers."""
return {SI_UNIT_SYMBOL[key]: value for key, value in self.dim_dict.items()}
def str_SI_unit(self):
"""Compute the symbol-wise SI unit."""
str_dict = self.siunit_dict()
return compute_str(str_dict, "")
def latex_SI_unit(self):
"""Latex repr of SI unit form."""
expr_SI = expand_dict_to_expr(self.siunit_dict())
return "$" + latex(expr_SI) + "$"
@property
def dimensionality(self):
"""Return the first dimensionality with same
dimension found in DIMENSIONALITY"""
return [dimensionality for dimensionality, dimension in DIMENSIONALITY.items() if dimension == self][0]
def compute_str(dic, default_str, output_init=1):
"""Compute the product-concatenation of the
dict as key**value."""
output = expand_dict_to_expr(dic, output_init)
if output == output_init:
return default_str
else:
return str(output)
def expand_dict_to_expr(dic, output_init=1):
"""Compute the sympy expression from exponent dict,
    starting the product with output=1."""
output = output_init
for key, value in dic.items():
output *= sp.Symbol(key)**value
return output
DIMENSIONALITY = {
# Base dimension
"length": Dimension("L"),
"mass": Dimension("M"),
"time": Dimension("T"),
"electric_current": Dimension("I"),
"temperature": Dimension("theta"),
"amount_of_substance":Dimension("N"),
"luminous_intensity": Dimension("J"),
"plane_angle": Dimension("RAD"),
"solid_angle": Dimension("SR"),
#
"area": Dimension({"L":2}),
"volume": Dimension({"L":3}),
"speed": Dimension({"L":1, "T":-1}),
"acceleration": Dimension({"L":1, "T":-2}),
"force": Dimension({"M":1, "L":1, "T":-2}),
"energy": Dimension({"M":1, "L":2, "T":-2}),
"power": Dimension({"M":1, "L":2, "T":-3}),
"capacitance": Dimension({"M":-1, "L":-2, "T":4, "I":2}),
"voltage": Dimension({"M":1, "L":2, "T":-3, "I":-1}),
}
| 38.45977 | 125 | 0.595138 | 3.21875 |
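A hedged usage sketch for the Dimension class; it assumes the module is importable as physipy.quantity.dimension and that dimension.txt maps L to m and T to s, as the SI-unit helpers suggest.

from physipy.quantity.dimension import Dimension

accel = Dimension("L/T**2")      # parsed by sympy into {"L": 1, "T": -2}
force = Dimension("M") * accel   # multiplication adds the powers
print(force)                     # e.g. L*M/T**2
print(accel.str_SI_unit())       # e.g. m/s**2
print((accel**2).dim_dict["T"])  # -4: powers scale under **
print(accel.dimensionality)      # 'acceleration', looked up in DIMENSIONALITY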
b30fe42f8cca7219a213d2592238401b41e2044f
| 2,184 |
py
|
Python
|
pysal/contrib/network/weights.py
|
cubensys/pysal
|
8d50990f6e6603ba79ae1a887a20a1e3a0734e51
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
pysal/contrib/network/weights.py
|
cubensys/pysal
|
8d50990f6e6603ba79ae1a887a20a1e3a0734e51
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
pysal/contrib/network/weights.py
|
cubensys/pysal
|
8d50990f6e6603ba79ae1a887a20a1e3a0734e51
|
[
"MIT",
"BSD-3-Clause"
] | 1 |
2021-07-19T01:46:17.000Z
|
2021-07-19T01:46:17.000Z
|
"""
A library of spatial network functions.
Not to be used without permission.
Contact:
Andrew Winslow
GeoDa Center for Geospatial Analysis
Arizona State University
Tempe, AZ
[email protected]
"""
import csv
import numpy as np
from pysal import W
import unittest
import test
def dist_weights(distfile, weight_type, ids, cutoff, inverse=False):
"""
Returns a distance-based weights object using user-defined options
Parameters
----------
distfile: string, a path to distance csv file
    weight_type: string, either 'threshold' or 'knn'
ids: a numpy array of id values
cutoff: float or integer; float for 'threshold' weight type and integer for knn type
    inverse: boolean; True if inverse (distance-decay) weights are required
"""
try:
data_csv = csv.reader(open(distfile))
if csv.Sniffer().has_header(distfile):
data_csv.next()
except:
data_csv = None
if weight_type == 'threshold':
def neighbor_func(dists, threshold):
dists = filter(lambda x: x[0] <= threshold, dists)
return dists
else:
def neighbor_func(dists, k):
dists.sort()
return dists[:k]
if inverse:
def weight_func(dists, alpha=-1.0):
return list((np.array(dists)**alpha).round(decimals=6))
else:
def weight_func(dists, binary=False):
return [1]*len(dists)
dist_src = {}
for row in data_csv:
des = dist_src.setdefault(row[0], {})
if row[0] != row[1]:
des[row[1]] = float(row[2])
neighbors, weights = {}, {}
for id_val in ids:
if id_val not in dist_src:
            raise ValueError('An ID value does not exist in the distance file')
else:
dists = zip(dist_src[id_val].values(), dist_src[id_val].keys())
ngh, wgt = [], []
if len(dists) > 0:
nghs = neighbor_func(dists, cutoff)
for d, i in nghs:
ngh.append(i)
wgt.append(d)
neighbors[id_val] = ngh
weights[id_val] = weight_func(wgt)
w = W(neighbors, weights)
w.id_order = ids
return w
| 26.962963 | 88 | 0.59478 | 3.171875 |
72c8154d936593da1dea593e6a3c592564c73b83
| 2,297 |
cs
|
C#
|
src/Tests/Nest.Tests.Integration/Search/Filter/RangeFilterTests.cs
|
rlugojr/elasticsearch-net
|
3f4dbaa050fc007e4544da6c3afeee6b52b45705
|
[
"Apache-2.0"
] | 2 |
2019-05-01T01:42:54.000Z
|
2019-11-23T03:36:13.000Z
|
src/Tests/Nest.Tests.Integration/Search/Filter/RangeFilterTests.cs
|
funnelfire/elasticsearch-net
|
49940f04ef8014b01fb15e1697beae316797bb56
|
[
"Apache-2.0"
] | null | null | null |
src/Tests/Nest.Tests.Integration/Search/Filter/RangeFilterTests.cs
|
funnelfire/elasticsearch-net
|
49940f04ef8014b01fb15e1697beae316797bb56
|
[
"Apache-2.0"
] | 12 |
2016-10-09T11:52:34.000Z
|
2021-09-13T08:59:51.000Z
|
using System.Linq;
using Elasticsearch.Net;
using NUnit.Framework;
using Nest.Tests.MockData;
using Nest.Tests.MockData.Domain;
namespace Nest.Tests.Integration.Search.Filter
{
/// <summary>
	/// Integration tests of RangeFilter against Elasticsearch.
/// </summary>
[TestFixture]
public class RangeFilterTests : IntegrationTests
{
/// <summary>
/// Document used in test.
/// </summary>
private ElasticsearchProject _LookFor;
[TestFixtureSetUp]
public void Initialize()
{
_LookFor = NestTestData.Session.Single<ElasticsearchProject>().Get();
_LookFor.Name = "mmm";
var status = this.Client.Index(_LookFor, i=>i.Refresh()).ConnectionStatus;
Assert.True(status.Success, status.ResponseRaw.Utf8String());
}
/// <summary>
		/// Set of filters that should not filter out the document _LookFor.
/// </summary>
[Test]
public void TestNotFiltered()
{
var name = _LookFor.Name;
this.DoFilterTest(f => f.Range(range => range.OnField(e => e.Name).GreaterOrEquals(name).LowerOrEquals(name)), _LookFor, true);
this.DoFilterTest(f => f.Range(range => range.OnField(e => e.Name).GreaterOrEquals("aaa").LowerOrEquals("zzz")), _LookFor, true);
this.DoFilterTest(f => f.Range(range => range.OnField(e => e.Name).GreaterOrEquals(name)), _LookFor, true);
this.DoFilterTest(f => f.Range(range => range.OnField(e => e.Name).LowerOrEquals(name)), _LookFor, true);
this.DoFilterTest(f => f.Range(range => range.OnField(e => e.Id).GreaterOrEquals(1), RangeExecution.FieldData), _LookFor, true);
this.DoFilterTest(f => f.Range(range => range.OnField(e => e.Name).LowerOrEquals(name), RangeExecution.Index), _LookFor, true);
}
/// <summary>
		/// Set of filters that should filter out the document _LookFor.
/// </summary>
[Test]
public void TestFiltered()
{
var name = _LookFor.Name;
this.DoFilterTest(f => f.Range(range => range.OnField(e => e.Name).GreaterOrEquals("zzz")), _LookFor, false);
this.DoFilterTest(f => f.Range(range => range.OnField(e => e.Name).LowerOrEquals("aaa")), _LookFor, false);
this.DoFilterTest(f => f.Range(range => range.OnField(e => e.Name).GreaterOrEquals(name)), _LookFor, true);
this.DoFilterTest(f => f.Range(range => range.OnField(e => e.Name).LowerOrEquals(name)), _LookFor, true);
}
}
}
| 32.352113 | 132 | 0.691772 | 3.015625 |
3a3069240fee96d573aba309e145ba64777d0faa
| 2,347 |
lua
|
Lua
|
Themes/_fallback/BGAnimations/ScreenPlayerOptions overlay.lua
|
kangalioo/etterna
|
11630aa1c23bad46b2da993602b06f8b659a4961
|
[
"MIT"
] | 1 |
2020-11-09T21:58:28.000Z
|
2020-11-09T21:58:28.000Z
|
Themes/_fallback/BGAnimations/ScreenPlayerOptions overlay.lua
|
kangalioo/etterna
|
11630aa1c23bad46b2da993602b06f8b659a4961
|
[
"MIT"
] | null | null | null |
Themes/_fallback/BGAnimations/ScreenPlayerOptions overlay.lua
|
kangalioo/etterna
|
11630aa1c23bad46b2da993602b06f8b659a4961
|
[
"MIT"
] | null | null | null |
local t = Def.ActorFrame {}
local NSPreviewSize = 0.5
local NSPreviewX = 35
local NSPreviewY = 125
local NSPreviewXSpan = 35
local NSPreviewReceptorY = -30
local OptionRowHeight = 35
local NoteskinRow = 0
function NSkinPreviewWrapper(dir, ele)
return Def.ActorFrame {
InitCommand = function(self)
self:zoom(NSPreviewSize)
end,
LoadNSkinPreview("Get", dir, ele, PLAYER_1)
}
end
t[#t + 1] =
Def.ActorFrame {
OnCommand = function(self)
self:xy(NSPreviewX, NSPreviewY)
for i = 0, SCREENMAN:GetTopScreen():GetNumRows() - 1 do
if SCREENMAN:GetTopScreen():GetOptionRow(i) and SCREENMAN:GetTopScreen():GetOptionRow(i):GetName() == "NoteSkins" then
NoteskinRow = i
end
end
self:SetUpdateFunction(
function(self)
local row = SCREENMAN:GetTopScreen():GetCurrentRowIndex(PLAYER_1)
local pos = 0
if row > 4 then
pos =
NSPreviewY + NoteskinRow * OptionRowHeight -
(SCREENMAN:GetTopScreen():GetCurrentRowIndex(PLAYER_1) - 4) * OptionRowHeight
else
pos = NSPreviewY + NoteskinRow * OptionRowHeight
end
self:y(pos)
self:visible(NoteskinRow - row > -5 and NoteskinRow - row < 7)
end
)
end,
Def.ActorFrame {
NSkinPreviewWrapper("Down", "Tap Note")
},
Def.ActorFrame {
InitCommand = function(self)
self:y(NSPreviewReceptorY)
end,
NSkinPreviewWrapper("Down", "Receptor")
}
}
if GetScreenAspectRatio() > 1.7 then
t[#t][#(t[#t]) + 1] =
Def.ActorFrame {
Def.ActorFrame {
InitCommand = function(self)
self:x(NSPreviewXSpan * 1)
end,
NSkinPreviewWrapper("Left", "Tap Note")
},
Def.ActorFrame {
InitCommand = function(self)
self:x(NSPreviewXSpan * 1):y(NSPreviewReceptorY)
end,
NSkinPreviewWrapper("Left", "Receptor")
},
Def.ActorFrame {
InitCommand = function(self)
self:x(NSPreviewXSpan * 2)
end,
NSkinPreviewWrapper("Up", "Tap Note")
},
Def.ActorFrame {
InitCommand = function(self)
self:x(NSPreviewXSpan * 2):y(NSPreviewReceptorY)
end,
NSkinPreviewWrapper("Up", "Receptor")
},
Def.ActorFrame {
InitCommand = function(self)
self:x(NSPreviewXSpan * 3)
end,
NSkinPreviewWrapper("Right", "Tap Note")
},
Def.ActorFrame {
InitCommand = function(self)
self:x(NSPreviewXSpan * 3):y(NSPreviewReceptorY)
end,
NSkinPreviewWrapper("Right", "Receptor")
}
}
end
return t
| 24.447917 | 121 | 0.683426 | 3.15625 |