package com.github.hotm.world.gen.surfacebuilder import com.github.hotm.HotMConstants import net.minecraft.util.registry.BuiltinRegistries import net.minecraft.util.registry.Registry import net.minecraft.world.gen.surfacebuilder.ConfiguredSurfaceBuilder import net.minecraft.world.gen.surfacebuilder.SurfaceConfig /** * Contains surface configs for the Heart of the Machine biomes. */ object HotMConfiguredSurfaceBuilders { /** * Thinking Forest biome surface builder. */ lateinit var THINKING_FOREST: ConfiguredSurfaceBuilder<NectereSurfaceConfig> /** * Wasteland biome surface builder. */ lateinit var WASTELAND: ConfiguredSurfaceBuilder<NectereSurfaceConfig> fun register() { THINKING_FOREST = register("thinking_forest", HotMSurfaceBuilders.PARTIAL.withConfig(HotMSurfaceBuilders.GRASS_CONFIG)) WASTELAND = register("wasteland", HotMSurfaceBuilders.DEFAULT.withConfig(HotMSurfaceBuilders.WASTELAND_CONFIG)) } /** * Used for statically registering configured surface builders. */ private fun <SC : SurfaceConfig> register( name: String, builder: ConfiguredSurfaceBuilder<SC> ): ConfiguredSurfaceBuilder<SC> { return Registry.register(BuiltinRegistries.CONFIGURED_SURFACE_BUILDER, HotMConstants.identifier(name), builder) } }
# errorIfNotIntegerZeroOrGreater(arg): void

To avoid triggering an error, `arg` must be of type `'number'` and an integer of 0 or greater.

## Examples

```
errorIfNotIntegerZeroOrGreater('1');  // Error: "Input must be a finite number of type 'number'"
errorIfNotIntegerZeroOrGreater(0.55); // Error: "Input must be integer"
errorIfNotIntegerZeroOrGreater(-2);   // Error: "Input must be an integer not less than 0"
errorIfNotIntegerZeroOrGreater(2);    // no error.
```

## Installation

`npm i error-if-not-integer-zero-or-greater`

## Loading

```js
import { errorIfNotIntegerZeroOrGreater } from 'error-if-not-integer-zero-or-greater';
```
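## Usage sketch

A minimal, hypothetical example of using the guard inside your own function (the `getPage` helper below is illustrative and not part of this package):

```js
import { errorIfNotIntegerZeroOrGreater } from 'error-if-not-integer-zero-or-greater';

// Hypothetical pagination helper: the guard throws before any work is done
// whenever the index is not an integer of 0 or greater.
function getPage(pageIndex) {
  errorIfNotIntegerZeroOrGreater(pageIndex);
  return `items for page ${pageIndex}`;
}

getPage(2);    // ok
// getPage(-2); // would throw: "Input must be an integer not less than 0"
```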
import React, { useState } from 'react'; import { Button, Flex } from '@chakra-ui/react'; import { motion } from 'framer-motion'; import { useAppDispatch } from '../AppContext'; import TodoModal from '../TodoModal'; const MotionFlex = motion.custom(Flex); interface TodoProps { todo: { id: string; title: string; }; } function Todo({ todo }: TodoProps) { const [isModalOpen, setIsModalOpen] = useState(false); const dispatch = useAppDispatch(); return ( <MotionFlex justifyContent="space-between" initial={{ x: -200, opacity: 0 }} animate={{ x: 0, opacity: 1 }} transition={{ ease: 'easeOut', duration: 0.5 }} exit={{ x: -400, opacity: 0 }} overflow="visible" > <Button display="block" size="sm" variant="outline" mr={1} mb={2} onClick={() => setIsModalOpen(true)} textOverflow="ellipsis" paddingLeft="10px" paddingRight="10px" overflow="hidden" whiteSpace="nowrap" width={360} > {todo.title} </Button> <Button size="sm" colorScheme="red" onClick={() => dispatch({ type: 'REMOVE_TODO', payload: { id: todo?.id, }, }) } > x </Button> <TodoModal isOpen={isModalOpen} onClose={() => setIsModalOpen(false)} todo={todo} /> </MotionFlex> ); } export default Todo;
using Newtonsoft.Json; namespace PetitGeo30.Models { public class GeoCacheModel { [JsonProperty("_id")] public string Id { get; set; } [JsonProperty("_rev")] public string Rev { get; set; } [JsonProperty("geoCacheId")] public int GeoCacheId { get; set; } [JsonProperty("geoCacheHiddenTimestamp")] public long GeoCacheHiddenTimestamp { get; set; } [JsonProperty("geoCacheLatitude")] public double GeoCacheLatitude { get; set; } [JsonProperty("geoCacheLongitude")] public double GeoCacheLongitude { get; set; } } }
print('FOR/ELSE IN OLD')

variavel = ['Caio', 'Cesar', 'Pereira', 'Neves']

for valor in variavel:
    print(valor)
print()

for nome in variavel:
    # startswith = checks whether a string value starts with a specific letter
    if nome.startswith('C'):
        print(f'Starts with C: {nome}')
    else:
        print(f'Does not start with C: {nome}')
print()

for name in variavel:
    if name.startswith('N'):
        break
else:
    print(f'There is no word starting with N: {name}.')
package io.piano.android.ktlint import org.gradle.api.Plugin import org.gradle.api.Project import org.gradle.kotlin.dsl.apply import org.jlleitschuh.gradle.ktlint.KtlintExtension import org.jlleitschuh.gradle.ktlint.KtlintPlugin class KtlintConfigPlugin : Plugin<Project> { override fun apply(target: Project) = target.run { apply<KtlintPlugin>() if (this != rootProject) { extensions.configure(KtlintExtension::class.java) { android.set(true) } } } }
# Society-and-Civilization-

Implementation of an Optimization Algorithm

Project cancelled since I couldn't figure out the constraint handling =)
# Hasee-K580C-Hackintosh

Hasee laptop K580C hackintosh drivers and Clover configuration.

# Driver Changes

- Use the Clover patcher to enable sound. 2015-1-28

# Hasee laptop K580C Hardware List

CPU     : Core i7 4700MQ Haswell
Chipset : Intel HM86
GPU     : NVIDIA GT750M/Intel HD4600 Mobile
Sound   : ALC282
Ethernet: Realtek RTL8168G/8111G
Wi-Fi   : BCM4322 (replaced)

# Drivers List

#### The drivers are now maintained by ym2008 ####

### DSDT/SSDT

Generated and fixed by ym2008. The NVIDIA GPU has been disabled and shielded by Jakes.

### GPU

Rehabman's FakePCIID drivers.

### Sound

- ym2008's faked AppleHDA - Clover patch

### Ethernet

Rehabman's RTL8111 driver.

### Wi-Fi

- Jakes: Used a BCM4322 to replace the default card.

# Contributors

1. ym2008
2. Jakes
3. more...

#### Please let me know about your contributions. Contact via Gmail: [email protected].
organization := "com.typesafe.sbt" name := "sbt-gzip" description := "sbt-web plugin for gzipping assets" addSbtWeb("1.4.2")
--- title: "Decentralized Exchange" disableToc: false #no table of contents --- A Decentralized [Exchange](notes/Exchange.md) - [Decentralization](notes/Decentralization.md)
// <copyright file="FhirCodeSystem.cs" company="Microsoft Corporation"> // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. // </copyright> using System; using System.Collections.Generic; using System.Text; namespace Microsoft.Health.Fhir.SpecManager.Models { /// <summary>A fhir code system.</summary> public class FhirCodeSystem { /// <summary>The root concept.</summary> private readonly FhirConceptTreeNode _rootConcept; /// <summary>The concepts, by code.</summary> private readonly Dictionary<string, FhirConceptTreeNode> _conceptLookup; /// <summary>Initializes a new instance of the <see cref="FhirCodeSystem"/> class.</summary> /// <exception cref="ArgumentNullException">Thrown when one or more required arguments are null.</exception> /// <param name="name"> The name.</param> /// <param name="id"> The identifier.</param> /// <param name="version"> The version.</param> /// <param name="title"> The title.</param> /// <param name="url"> The URL.</param> /// <param name="standardStatus">The standard status.</param> /// <param name="description"> The description.</param> /// <param name="content"> The content.</param> /// <param name="rootConcept"> The root concept.</param> /// <param name="conceptLookup"> The concept lookup.</param> public FhirCodeSystem( string name, string id, string version, string title, string url, string standardStatus, string description, string content, FhirConceptTreeNode rootConcept, Dictionary<string, FhirConceptTreeNode> conceptLookup) { if (url == null) { throw new ArgumentNullException(nameof(url)); } Name = name; Id = id; Version = version; Title = title; URL = url; StandardStatus = standardStatus; Description = description; Content = content; _rootConcept = rootConcept; _conceptLookup = conceptLookup; } /// <summary>Gets the name.</summary> /// <value>The name.</value> public string Name { get; } /// <summary>Gets the identifier.</summary> /// <value>The identifier.</value> public string Id { get; } /// <summary>Gets the version.</summary> /// <value>The version.</value> public string Version { get; } /// <summary>Gets the title.</summary> /// <value>The title.</value> public string Title { get; } /// <summary>Gets URL of the document.</summary> /// <value>The URL.</value> public string URL { get; } /// <summary>Gets the standard status.</summary> /// <value>The standard status.</value> public string StandardStatus { get; } /// <summary>Gets the description.</summary> /// <value>The description.</value> public string Description { get; } /// <summary>Gets the content.</summary> /// <value>The content.</value> public string Content { get; } /// <summary>Gets the root concept.</summary> /// <value>The root concept.</value> public FhirConceptTreeNode RootConcept => _rootConcept; /// <summary>Gets the concepts (by code).</summary> /// <value>The concepts (by code).</value> public Dictionary<string, FhirConceptTreeNode> ConceptLookup => _conceptLookup; /// <summary>Indexer to get slices based on name.</summary> /// <exception cref="ArgumentOutOfRangeException">Thrown when one or more arguments are outside the /// required range.</exception> /// <param name="code">The code.</param> /// <returns>The indexed item.</returns> public FhirConceptTreeNode this[string code] { get { if (!_conceptLookup.ContainsKey(code)) { throw new ArgumentOutOfRangeException(nameof(code)); } return _conceptLookup[code]; } } /// <summary>Query if this system contains a concept, specified 
by code.</summary> /// <param name="code">The code.</param> /// <returns>True if this system has the concept, false if it does not.</returns> public bool ContainsConcept(string code) { return _conceptLookup.ContainsKey(code); } } }
using TCC.TeraCommon.Game.Services; namespace TCC.TeraCommon.Game.Messages.Server { public class S_LEAVE_PARTY_MEMBER : ParsedMessage { internal S_LEAVE_PARTY_MEMBER(TeraMessageReader reader) : base(reader) { var nameoffset = reader.ReadUInt16(); ServerId = reader.ReadUInt32(); PlayerId = reader.ReadUInt32(); Name = reader.ReadTeraString(); } public uint ServerId { get; } public uint PlayerId { get; } public string Name { get; } } }
<?php namespace yiiunit\data\modules\magic\controllers; class ETagController extends \yii\console\Controller { public function actionListETags() { return ''; } public function actionDelete() { return 'deleted'; } }
import cat from "./abis/cat.json"; import dai from "./abis/dai.json"; import daiJoin from "./abis/daiJoin.json"; import dsProxy from "./abis/dsProxy.json"; import dssCdpManager from "./abis/dssCdpManager.json"; import dssProxyActions from "./abis/dssProxyActions.json"; import dssProxyActionsDsr from "./abis/dssProxyActionsDsr.json"; import erc20 from "./abis/erc20.json"; import erc20Events from "./abis/erc20Events.json"; import ethJoin from "./abis/ethJoin.json"; import jug from "./abis/jug.json"; import pot from "./abis/pot.json"; import proxyActions from "./abis/proxyActions.json"; import proxyRegistry from "./abis/proxyRegistry.json"; import spotter from "./abis/spotter.json"; import vat from "./abis/vat.json"; export default { cat, dai, daiJoin, dsProxy, dssCdpManager, dssProxyActions, dssProxyActionsDsr, erc20, erc20Events, ethJoin, jug, pot, proxyActions, proxyRegistry, spotter, vat, };
# Implementation of simple Convay's life game designed mainly # for node compiler testing # # (C) 2015 Alexey Voskov # License: 2-clause BSD module LifeGame # Representation of the cell of the game grid (field) # Keeps the state of the cell (EMPTY, FILLED, BORN, DEAD) # and neighbours of the cell (as array of pointers to another Cell # object examples) class Cell EMPTY = 0 FILLED = 1 BORN = 2 DEAD = 3 attr_reader :value, :neighbours def initialize @value = EMPTY @neighbours = [] end def value=(val) @value = val.to_i end def neighbours=(val) @neighbours = val end def chr case @value when EMPTY; '.' when FILLED; 'O' when BORN; '+' when DEAD; '-' else; '?' end end def update case @value when BORN; @value = FILLED when DEAD; @value = EMPTY end end end # Convay's life game grid (field) class Grid public def initialize(height, width) width, height = width.to_i, height.to_i raise "Invalid value of width" if (width < 3 || width > 100) raise "Invalid value of height" if (height < 3 || height > 100) @width, @height = width, height @f = Array.new(@height) {|ind| Array.new(@width) {|ind| Cell.new }} # Set neighbours for each cell @@xy_shifts = [[-1, -1], [-1, 0], [-1, 1], [0, -1], [0, 1], [1, -1], [1, 0], [1, 1]] (0...@height).each do |y| (0...@width).each do |x| # Calculate neighbours coordinates xy = @@xy_shifts.map do |elem| q = [elem[0] + y, elem[1] + x] (q[0] < 0 || q[0] >= @height || q[1] < 0 || q[1] >= @width) ? nil : q end xy.compact! # And transform them to the matrix @f[y][x].neighbours = xy.map {|q| @f[q[0]][q[1]] } end end self end # Initialize game field with one glider def cfg_glider! self.clear! @f[1][2].value = Cell::FILLED @f[2][3].value = Cell::FILLED @f[3][1].value = Cell::FILLED @f[3][2].value = Cell::FILLED @f[3][3].value = Cell::FILLED end # Initialize game field with glider gun def cfg_glider_gun! self.clear! gun = [ '........................O...........', '......................O.O...........', '............OO......OO............OO', '...........O...O....OO............OO', 'OO........O.....O...OO..............', 'OO........O...O.OO....O.O...........', '..........O.....O.......O...........', '...........O...O....................', '............OO......................']; yshift, xshift = 2, 2 gun.each_index do |yi| line = gun[yi] (0..line.length).each {|xi| @f[yi+yshift][xi+xshift].value = Cell::FILLED if gun[yi][xi] == 'O'} end end # Clear game field def clear! @f.each do |line| line.each { |cell| cell.value = Cell::EMPTY } end end # Convert game field to ASCII string (best viewed when typed in # monospaced font). Suitable for autotesting def to_ascii txt = "" @f.each do |line| line.each { |field| txt += field.chr } txt += "\n" end return txt end # Make one step (turn) def make_step! # Cells birth @f.each_index do |yi| @f[yi].each_index do |xi| n = cell_neighbours_num(yi, xi) @f[yi][xi].value = Cell::BORN if (@f[yi][xi].value == Cell::EMPTY && n == 3) @f[yi][xi].value = Cell::DEAD if (@f[yi][xi].value == Cell::FILLED && !(n == 2 || n == 3)) end end # Cells update @f.each do |line| line.each {|val| val.update} end self end private def cell_neighbours_num(y, x) (@f[y][x].neighbours.select {|q| q.value == Cell::FILLED || q.value == Cell::DEAD }).length end end end
@using DietCalculatorSystem @using DietCalculatorSystem.Models @using DietCalculatorSystem.Data.Models @using DietCalculatorSystem.Models.Home @using DietCalculatorSystem.Models.Diets @using DietCalculatorSystem.Models.Foods @using DietCalculatorSystem.Services.Foods.Models @using Microsoft.AspNetCore.Identity @addTagHelper *, Microsoft.AspNetCore.Mvc.TagHelpers
require 'spec_helper_acceptance' describe 'basic swift' do context 'default parameters' do it 'should work with no errors' do pp= <<-EOS include ::openstack_integration include ::openstack_integration::repos include ::openstack_integration::rabbitmq include ::openstack_integration::mysql include ::openstack_integration::keystone package { 'curl': ensure => present } class { '::memcached': listen_ip => '127.0.0.1', } # Swift resources class { '::swift': # not sure how I want to deal with this shared secret swift_hash_suffix => 'secrete', package_ensure => latest, } class { '::swift::keystone::auth': password => 'a_big_secret', } # === Configure Storage class { '::swift::storage': storage_local_net_ip => '127.0.0.1', } # create xfs partitions on a loopback device and mounts them swift::storage::loopback { '2': require => Class['swift'], } # sets up storage nodes which is composed of a single # device that contains an endpoint for an object, account, and container swift::storage::node { '2': mnt_base_dir => '/srv/node', weight => 1, manage_ring => true, zone => '2', storage_local_net_ip => '127.0.0.1', require => Swift::Storage::Loopback[2] , } class { '::swift::ringbuilder': part_power => '18', replicas => '1', min_part_hours => 1, require => Class['swift'], } class { '::swift::proxy': proxy_local_net_ip => '127.0.0.1', pipeline => ['healthcheck', 'cache', 'tempauth', 'dlo', 'proxy-server'], account_autocreate => true, require => Class['swift::ringbuilder'], } class { '::swift::proxy::authtoken': admin_password => 'a_big_secret', } class {'::swift::objectexpirer': interval => 600, } class { [ '::swift::proxy::healthcheck', '::swift::proxy::cache', '::swift::proxy::tempauth', '::swift::proxy::dlo' ]: } EOS # Need to be run 2 times because we have an exported when creating the ring. 
apply_manifest(pp, :catch_failures => false) apply_manifest(pp, :catch_failures => true) # The third run tests idempotency apply_manifest(pp, :catch_changes => true) end describe port(8080) do it { is_expected.to be_listening.with('tcp') } end end context 'Using swiftinit service provider' do it 'should work with no errors' do swiftinit_pp= <<-EOS include ::openstack_integration include ::openstack_integration::repos include ::openstack_integration::rabbitmq include ::openstack_integration::mysql include ::openstack_integration::keystone package { 'curl': ensure => present } class { '::memcached': listen_ip => '127.0.0.1', } # Swift resources class { '::swift': # not sure how I want to deal with this shared secret swift_hash_suffix => 'secrete', package_ensure => latest, } class { '::swift::keystone::auth': password => 'a_big_secret', } # === Configure Storage class { '::swift::storage': storage_local_net_ip => '127.0.0.1', } # create xfs partitions on a loopback device and mounts them swift::storage::loopback { '2': require => Class['swift'], } # sets up storage nodes which is composed of a single # device that contains an endpoint for an object, account, and container swift::storage::node { '2': mnt_base_dir => '/srv/node', weight => 1, manage_ring => true, zone => '2', storage_local_net_ip => '127.0.0.1', require => Swift::Storage::Loopback[2] , } class { '::swift::storage::account': service_provider => 'swiftinit', } class { '::swift::storage::container': service_provider => 'swiftinit', } class { '::swift::storage::object': service_provider => 'swiftinit', } class { '::swift::ringbuilder': part_power => '18', replicas => '1', min_part_hours => 1, require => Class['swift'], } class { '::swift::proxy': proxy_local_net_ip => '127.0.0.1', pipeline => ['healthcheck', 'cache', 'tempauth', 'proxy-server'], account_autocreate => true, require => Class['swift::ringbuilder'], service_provider => 'swiftinit', } class { '::swift::proxy::authtoken': admin_password => 'a_big_secret', } class {'::swift::objectexpirer': interval => 600, service_provider => 'swiftinit', } class { ['::swift::proxy::healthcheck', '::swift::proxy::cache', '::swift::proxy::tempauth']: } EOS # Run one time to catch any errors upgrading to swiftinit service provider apply_manifest(swiftinit_pp, :catch_failures => true) # The second run tests idempotency apply_manifest(swiftinit_pp, :catch_changes => true) end describe port(8080) do it { is_expected.to be_listening.with('tcp') } end end end
# Exercise 2
puts "Write something:"
string = gets
verificar = string.ascii_only?
if verificar
  puts "Contains only ASCII characters"
else
  puts "Contains non-ASCII characters"
end
#!/usr/bin/env bash

# supervisord and attendance_management_bot logs are auto cleaned.
# See the configuration files for them.

# Remove out-of-date attendance_management_bot logs and compress the remaining logs.
attendance_management_bot_log_path=/home1/irteam/logs/attendance_management_bot/

find $attendance_management_bot_log_path -mtime +50 -name 'attendance_management_bot.log.*.xz' -delete
find $attendance_management_bot_log_path -name 'attendance_management_bot.log.*-*' -not -name 'attendance_management_bot.log.*.xz' -execdir xz -z -T4 {} \;
import { subscribe } from "@uxland/event-aggregator"; import { handleErrors, INVALID_CREDENTIALS_EVENT, INVALID_REQUEST_EVENT, } from "../../../src/handlers/handle-errors"; describe("Given a response", () => { describe("and response is ok", () => { const response: any = { ok: true, status: 200, json: () => Promise.resolve({ foo: "bar" }), }; it("should return response", async (done) => { expect(await handleErrors(response)).toEqual(response); done(); }); }); describe("and response is not ok", () => { describe("if result contains body", () => { it("should throw a new error containing data body, status and statusText", async (done) => { const response: any = { ok: false, status: 401, statusText: "Crendentials invalid", json: () => Promise.resolve({ foo: "bar" }), }; const data = await response.json(); const error = { ...new Error(), data, status: response.status, statusText: response.statusText, }; try { await handleErrors(response); } catch (err) { expect(err).toEqual(error); } finally { done(); } }); }); describe("if body deserialization fails", () => { it("should return error containing status and statusText", async (done) => { const response: any = { ok: false, status: 401, statusText: "Crendentials invalid", json: () => Promise.reject(), }; const error = { ...new Error(), status: response.status, statusText: response.statusText, }; try { await handleErrors(response); } catch (err) { expect(err).toEqual(error); } finally { done(); } }); }); describe("if status is 401", () => { it("should publish INVALID_CREDENTIALS_EVENT", async (done) => { const r: any = { ok: false, status: 401, statusText: "Crendentials invalid", }; const error = { ...new Error(), status: r.status, statusText: r.statusText, }; const subscriber = jest.fn(); subscribe(INVALID_CREDENTIALS_EVENT, (err: Error) => subscriber(err)); try { await handleErrors(r); } catch (err) { expect(subscriber).toHaveBeenCalled(); expect(subscriber).toHaveBeenCalledWith(error); } finally { done(); } }); }); describe("for any other status", () => { it("should publish INVALID_REQUEST_EVENT", async (done) => { const r: any = { ok: false, status: 400, statusText: "Dump" }; const error = { ...new Error(), status: r.status, statusText: r.statusText, }; const subscriber = jest.fn(); subscribe(INVALID_REQUEST_EVENT, (err: Error) => subscriber(err)); try { await handleErrors(r); } catch (err) { expect(subscriber).toHaveBeenCalled(); expect(subscriber).toHaveBeenCalledWith(error); } finally { done(); } }); }); }); });
import { ActionType } from './action-type'; import { FromJson } from './from-json'; /** * Rule JSON */ export interface RuleJson { id: number; prefix_name: string; from: FromJson; action: ActionType; }
#!/bin/bash # Copyright (c) 2019 P3TERX # From https://github.com/P3TERX/Actions-OpenWrt set -eo pipefail if [ -z "${OPENWRT_COMPILE_DIR}" ] || [ -z "${OPENWRT_CUR_DIR}" ] || [ -z "${OPENWRT_SOURCE_DIR}" ]; then echo "::error::'OPENWRT_COMPILE_DIR', 'OPENWRT_CUR_DIR' or 'OPENWRT_SOURCE_DIR' is empty" >&2 exit 1 fi [ "x${TEST}" != "x1" ] || exit 0 cd "${OPENWRT_CUR_DIR}" make download -j8 find dl -size -1024c -exec ls -l {} \; find dl -size -1024c -exec rm -f {} \;
package token import ( "context" "github.com/go-kratos/kratos/v2/middleware" "github.com/go-kratos/kratos/v2/transport/grpc" "github.com/go-kratos/kratos/v2/transport/http" "google.golang.org/grpc/metadata" ) func JwtWithAuthInfo(jt *Token) middleware.Middleware { return func(handler middleware.Handler) middleware.Handler { return func(ctx context.Context, req interface{}) (interface{}, error) { var token string if info, ok := http.FromServerContext(ctx); ok { token = info.Request.Header.Get("Authorization") } else if _, ok := grpc.FromServerContext(ctx); ok { if md, ok := metadata.FromIncomingContext(ctx); ok { mdtk := md.Get("Authorization") if len(mdtk) > 0 { token = md.Get("Authorization")[0] } } } if token != "" { cc, err := jt.Decode(token) if err == nil { ctx = WithLoginContext(ctx, cc) } } return handler(ctx, req) } } }
import 'dart:typed_data'; import 'bzip2/bzip2.dart'; import 'bzip2/bz2_bit_reader.dart'; import 'util/archive_exception.dart'; import 'util/byte_order.dart'; import 'util/input_stream.dart'; import 'util/output_stream.dart'; /** * Decompress bzip2 compressed data. * Derived from libbzip2 (http://www.bzip.org). */ class BZip2Decoder { List<int> decodeBytes(List<int> data, {bool verify: false}) { return decodeBuffer(new InputStream(data, byteOrder: BIG_ENDIAN), verify: verify); } List<int> decodeBuffer(InputStream _input, {bool verify: false}) { OutputStream output = new OutputStream(); Bz2BitReader br = new Bz2BitReader(_input); _groupPos = 0; _groupNo = 0; _gSel = 0; _gMinlen = 0; if (br.readByte() != BZip2.BZH_SIGNATURE[0] || br.readByte() != BZip2.BZH_SIGNATURE[1] || br.readByte() != BZip2.BZH_SIGNATURE[2]) { throw new ArchiveException('Invalid Signature'); } _blockSize100k = br.readByte() - BZip2.HDR_0; if (_blockSize100k < 0 || _blockSize100k > 9) { throw new ArchiveException('Invalid BlockSize'); } _tt = new Uint32List(_blockSize100k * 100000); int combinedCrc = 0; while (true) { int type = _readBlockType(br); if (type == BLOCK_COMPRESSED) { int storedBlockCrc = 0; storedBlockCrc = (storedBlockCrc << 8) | br.readByte(); storedBlockCrc = (storedBlockCrc << 8) | br.readByte(); storedBlockCrc = (storedBlockCrc << 8) | br.readByte(); storedBlockCrc = (storedBlockCrc << 8) | br.readByte(); int blockCrc = _readCompressed(br, output); blockCrc = BZip2.finalizeCrc(blockCrc); if (verify && blockCrc != storedBlockCrc) { throw new ArchiveException('Invalid block checksum.'); } combinedCrc = ((combinedCrc << 1) | (combinedCrc >> 31)) & 0xffffffff; combinedCrc ^= blockCrc; } else if (type == BLOCK_EOS) { int storedCrc = 0; storedCrc = (storedCrc << 8) | br.readByte(); storedCrc = (storedCrc << 8) | br.readByte(); storedCrc = (storedCrc << 8) | br.readByte(); storedCrc = (storedCrc << 8) | br.readByte(); if (verify && storedCrc != combinedCrc) { throw new ArchiveException('Invalid combined checksum: ${combinedCrc} : ${storedCrc}'); } return output.getBytes(); } } } int _readBlockType(Bz2BitReader br) { bool eos = true; bool compressed = true; // .eos_magic:48 0x177245385090 (BCD sqrt(pi)) // .compressed_magic:48 0x314159265359 (BCD (pi)) for (int i = 0; i < 6; ++i) { int b = br.readByte(); if (b != BZip2.COMPRESSED_MAGIC[i]) { compressed = false; } if (b != BZip2.EOS_MAGIC[i]) { eos = false; } if (!eos && !compressed) { throw new ArchiveException('Invalid Block Signature'); } } return (compressed) ? 
BLOCK_COMPRESSED : BLOCK_EOS; } int _readCompressed(Bz2BitReader br, OutputStream output) { int blockRandomized = br.readBits(1); int origPtr = br.readBits(8); origPtr = (origPtr << 8) | br.readBits(8); origPtr = (origPtr << 8) | br.readBits(8); // Receive the mapping table _inUse16 = new Uint8List(16); for (int i = 0; i < 16; ++i) { _inUse16[i] = br.readBits(1); } _inUse = new Uint8List(256); for (int i = 0, k = 0; i < 16; ++i, k += 16) { if (_inUse16[i] != 0) { for (int j = 0; j < 16; ++j) { _inUse[k + j] = br.readBits(1); } } } _makeMaps(); if (_numInUse == 0) { throw new ArchiveException('Data error'); } int alphaSize = _numInUse + 2; // Now the selectors int numGroups = br.readBits(3); if (numGroups < 2 || numGroups > 6) { throw new ArchiveException('Data error'); } _numSelectors = br.readBits(15); if (_numSelectors < 1) { throw new ArchiveException('Data error'); } _selectorMtf = new Uint8List(BZ_MAX_SELECTORS); _selector = new Uint8List(BZ_MAX_SELECTORS); for (int i = 0; i < _numSelectors; ++i) { int j = 0; while (true) { int b = br.readBits(1); if (b == 0) { break; } j++; if (j >= numGroups) { throw new ArchiveException('Data error'); } } _selectorMtf[i] = j; } // Undo the MTF values for the selectors. Uint8List pos = new Uint8List(BZ_N_GROUPS); for (int i = 0; i < numGroups; ++i) { pos[i] = i; } for (int i = 0; i < _numSelectors; ++i) { int v = _selectorMtf[i]; int tmp = pos[v]; while (v > 0) { pos[v] = pos[v - 1]; v--; } pos[0] = tmp; _selector[i] = tmp; } // Now the coding tables _len = new List<Uint8List>(BZ_N_GROUPS); for (int t = 0; t < numGroups; ++t) { _len[t] = new Uint8List(BZ_MAX_ALPHA_SIZE); int c = br.readBits(5); for (int i = 0; i < alphaSize; ++i) { while (true) { if (c < 1 || c > 20) { throw new ArchiveException('Data error'); } int b = br.readBits(1); if (b == 0) { break; } b = br.readBits(1); if (b == 0) { c++; } else { c--; } } _len[t][i] = c; } } // Create the Huffman decoding tables _limit = new List<Int32List>(BZ_N_GROUPS); _base = new List<Int32List>(BZ_N_GROUPS); _perm = new List<Int32List>(BZ_N_GROUPS); _minLens = new Int32List(BZ_N_GROUPS); for (int t = 0; t < numGroups; t++) { _limit[t] = new Int32List(BZ_MAX_ALPHA_SIZE); _base[t] = new Int32List(BZ_MAX_ALPHA_SIZE); _perm[t] = new Int32List(BZ_MAX_ALPHA_SIZE); int minLen = 32; int maxLen = 0; for (int i = 0; i < alphaSize; ++i) { if (_len[t][i] > maxLen) { maxLen = _len[t][i]; } if (_len[t][i] < minLen) { minLen = _len[t][i]; } } _hbCreateDecodeTables(_limit[t], _base[t], _perm[t], _len[t], minLen, maxLen, alphaSize); _minLens[t] = minLen; } // Now the MTF values int EOB = _numInUse + 1; int nblockMAX = 100000 * _blockSize100k; _unzftab = new Int32List(256); // MTF init _mtfa = new Uint8List(MTFA_SIZE); _mtfbase = new Int32List(256 ~/ MTFL_SIZE); int kk = MTFA_SIZE - 1; for (int ii = 256 ~/ MTFL_SIZE - 1; ii >= 0; ii--) { for (int jj = MTFL_SIZE - 1; jj >= 0; jj--) { _mtfa[kk] = ii * MTFL_SIZE + jj; kk--; } _mtfbase[ii] = kk + 1; } int nblock = 0; _groupPos = 0; _groupNo = -1; int nextSym = _getMtfVal(br); int uc = 0; while (true) { if (nextSym == EOB) { break; } if (nextSym == BZ_RUNA || nextSym == BZ_RUNB) { int es = -1; int N = 1; do { // Check that N doesn't get too big, so that es doesn't // go negative. The maximum value that can be // RUNA/RUNB encoded is equal to the block size (post // the initial RLE), viz, 900k, so bounding N at 2 // million should guard against overflow without // rejecting any legitimate inputs. 
if (N >= 2 * 1024 * 1024) { throw new ArchiveException('Data error'); } if (nextSym == BZ_RUNA) { es = es + (0 + 1) * N; } else if (nextSym == BZ_RUNB) { es = es + (1 + 1) * N; } N = N * 2; nextSym = _getMtfVal(br); } while (nextSym == BZ_RUNA || nextSym == BZ_RUNB); es++; uc = _seqToUnseq[_mtfa[_mtfbase[0]]]; _unzftab[uc] += es; while (es > 0) { if (nblock >= nblockMAX) { throw new ArchiveException('Data error'); } _tt[nblock] = uc; nblock++; es--; }; continue; } else { if (nblock >= nblockMAX) { throw new ArchiveException('Data error'); } // uc = MTF ( nextSym-1 ) int nn = nextSym - 1; if (nn < MTFL_SIZE) { // avoid general-case expense int pp = _mtfbase[0]; uc = _mtfa[pp + nn]; while (nn > 3) { int z = pp + nn; _mtfa[(z)] = _mtfa[(z)-1]; _mtfa[(z) - 1] = _mtfa[(z) - 2]; _mtfa[(z) - 2] = _mtfa[(z) - 3]; _mtfa[(z) - 3] = _mtfa[(z) - 4]; nn -= 4; } while (nn > 0) { _mtfa[(pp+nn)] = _mtfa[(pp + nn) - 1]; nn--; } _mtfa[pp] = uc; } else { // general case int lno = nn ~/ MTFL_SIZE; int off = nn % MTFL_SIZE; int pp = _mtfbase[lno] + off; uc = _mtfa[pp]; while (pp > _mtfbase[lno]) { _mtfa[pp] = _mtfa[pp - 1]; pp--; } _mtfbase[lno]++; while (lno > 0) { _mtfbase[lno]--; _mtfa[_mtfbase[lno]] = _mtfa[_mtfbase[lno - 1] + MTFL_SIZE - 1]; lno--; } _mtfbase[0]--; _mtfa[_mtfbase[0]] = uc; if (_mtfbase[0] == 0) { kk = MTFA_SIZE-1; for (int ii = 256 ~/ MTFL_SIZE - 1; ii >= 0; ii--) { for (int jj = MTFL_SIZE - 1; jj >= 0; jj--) { _mtfa[kk] = _mtfa[_mtfbase[ii] + jj]; kk--; } _mtfbase[ii] = kk + 1; } } } // end uc = MTF ( nextSym-1 ) _unzftab[_seqToUnseq[uc]]++; _tt[nblock] = (_seqToUnseq[uc]); nblock++; nextSym = _getMtfVal(br); continue; } } // Now we know what nblock is, we can do a better sanity // check on s->origPtr. if (origPtr < 0 || origPtr >= nblock) { throw new ArchiveException('Data error'); } // Set up cftab to facilitate generation of T^(-1) // Check: unzftab entries in range. for (int i = 0; i <= 255; i++) { if (_unzftab[i] < 0 || _unzftab[i] > nblock) { throw new ArchiveException('Data error'); } } // Actually generate cftab. _cftab = new Int32List(257); _cftab[0] = 0; for (int i = 1; i <= 256; i++) { _cftab[i] = _unzftab[i - 1]; } for (int i = 1; i <= 256; i++) { _cftab[i] += _cftab[i - 1]; } // Check: cftab entries in range. for (int i = 0; i <= 256; i++) { if (_cftab[i] < 0 || _cftab[i] > nblock) { // s->cftab[i] can legitimately be == nblock throw new ArchiveException('Data error'); } } // Check: cftab entries non-descending. for (int i = 1; i <= 256; i++) { if (_cftab[i - 1] > _cftab[i]) { throw new ArchiveException('Data error'); } } // compute the T^(-1) vector for (int i = 0; i < nblock; i++) { uc = (_tt[i] & 0xff); _tt[_cftab[uc]] |= (i << 8); _cftab[uc]++; } int blockCrc = BZip2.INITIAL_CRC; int tPos = _tt[origPtr] >> 8; int numBlockUsed = 0; int k0; int rNToGo = 0; int rTPos = 0; if (blockRandomized != 0) { rNToGo = 0; rTPos = 0; if (tPos >= 100000 * _blockSize100k) { throw new ArchiveException('Data error'); } tPos = _tt[tPos]; k0 = tPos & 0xff; tPos >>= 8; numBlockUsed++; if (rNToGo == 0) { rNToGo = BZ2_rNums[rTPos]; rTPos++; if (rTPos == 512) { rTPos = 0; } } rNToGo--; k0 ^= ((rNToGo == 1) ? 1 : 0); } else { // c_tPos is unsigned, hence test < 0 is pointless. 
if (tPos >= 100000 * _blockSize100k) { return blockCrc; } tPos = _tt[tPos]; k0 = (tPos & 0xff); tPos >>= 8; numBlockUsed++; } // UnRLE to output int c_state_out_len = 0; int c_state_out_ch = 0; int s_save_nblockPP = nblock + 1; int c_nblock_used = numBlockUsed; int c_k0 = k0; int k1; if (blockRandomized != 0) { while (true) { // try to finish existing run while (true) { if (c_state_out_len == 0) { break; } output.writeByte(c_state_out_ch); blockCrc = BZip2.updateCrc(c_state_out_ch, blockCrc); c_state_out_len--; } // can a new run be started? if (c_nblock_used == s_save_nblockPP) { return blockCrc; } // Only caused by corrupt data stream? if (c_nblock_used > s_save_nblockPP) { throw new ArchiveException('Data error.'); } c_state_out_len = 1; c_state_out_ch = k0; tPos = _tt[tPos]; k1 = tPos & 0xff; tPos >>= 8; if (rNToGo == 0) { rNToGo = BZ2_rNums[rTPos]; rTPos++; if (rTPos == 512) { rTPos = 0; } } rNToGo--; k1 ^= ((rNToGo == 1) ? 1 : 0); c_nblock_used++; if (c_nblock_used == s_save_nblockPP) { continue; } if (k1 != k0) { k0 = k1; continue; } c_state_out_len = 2; tPos = _tt[tPos]; k1 = tPos & 0xff; tPos >>= 8; if (rNToGo == 0) { rNToGo = BZ2_rNums[rTPos]; rTPos++; if (rTPos == 512) { rTPos = 0; } } k1 ^= ((rNToGo == 1) ? 1 : 0); c_nblock_used++; if (c_nblock_used == s_save_nblockPP) { continue; } if (k1 != k0) { k0 = k1; continue; } c_state_out_len = 3; tPos = _tt[tPos]; k1 = tPos & 0xff; tPos >>= 8; if (rNToGo == 0) { rNToGo = BZ2_rNums[rTPos]; rTPos++; if (rTPos == 512) { rTPos = 0; } } k1 ^= ((rNToGo == 1) ? 1 : 0); c_nblock_used++; if (c_nblock_used == s_save_nblockPP) { continue; } if (k1 != k0) { k0 = k1; continue; } tPos = _tt[tPos]; k1 = tPos & 0xff; tPos >>= 8; if (rNToGo == 0) { rNToGo = BZ2_rNums[rTPos]; rTPos++; if (rTPos == 512) { rTPos = 0; } } k1 ^= ((rNToGo == 1) ? 1 : 0); c_nblock_used++; c_state_out_len = k1 + 4; tPos = _tt[tPos]; k0 = tPos & 0xff; tPos >>= 8; if (rNToGo == 0) { rNToGo = BZ2_rNums[rTPos]; rTPos++; if (rTPos == 512) { rTPos = 0; } } k0 ^= ((rNToGo == 1) ? 1 : 0); c_nblock_used++; } } else { while (true) { // try to finish existing run if (c_state_out_len > 0) { while (true) { if (c_state_out_len == 1) { break; } output.writeByte(c_state_out_ch); blockCrc = BZip2.updateCrc(c_state_out_ch, blockCrc); c_state_out_len--; } output.writeByte(c_state_out_ch); blockCrc = BZip2.updateCrc(c_state_out_ch, blockCrc); } // Only caused by corrupt data stream? if (c_nblock_used > s_save_nblockPP) { throw new ArchiveException('Data error'); } // can a new run be started? 
if (c_nblock_used == s_save_nblockPP) { c_state_out_len = 0; return blockCrc; } c_state_out_ch = c_k0; int k1; if (tPos >= 100000 * _blockSize100k) { throw new ArchiveException('Data Error'); } tPos = _tt[tPos]; k1 = tPos & 0xff; tPos >>= 8; c_nblock_used++; if (k1 != c_k0) { c_k0 = k1; output.writeByte(c_state_out_ch); blockCrc = BZip2.updateCrc(c_state_out_ch, blockCrc); c_state_out_len = 0; continue; } if (c_nblock_used == s_save_nblockPP) { output.writeByte(c_state_out_ch); blockCrc = BZip2.updateCrc(c_state_out_ch, blockCrc); c_state_out_len = 0; continue; } c_state_out_len = 2; if (tPos >= 100000 * _blockSize100k) { throw new ArchiveException('Data Error'); } tPos = _tt[tPos]; k1 = tPos & 0xff; tPos >>= 8; c_nblock_used++; if (c_nblock_used == s_save_nblockPP) { continue; } if (k1 != c_k0) { c_k0 = k1; continue; } c_state_out_len = 3; if (tPos >= 100000 * _blockSize100k) { throw new ArchiveException('Data Error'); } tPos = _tt[tPos]; k1 = tPos & 0xff; tPos >>= 8; c_nblock_used++; if (c_nblock_used == s_save_nblockPP) { continue; } if (k1 != c_k0) { c_k0 = k1; continue; } if (tPos >= 100000 * _blockSize100k) { throw new ArchiveException('Data Error'); } tPos = _tt[tPos]; k1 = tPos & 0xff; tPos >>= 8; c_nblock_used++; c_state_out_len = k1 + 4; if (tPos >= 100000 * _blockSize100k) { throw new ArchiveException('Data Error'); } tPos = _tt[tPos]; c_k0 = tPos & 0xff; tPos >>= 8; c_nblock_used++; } } return blockCrc; // ignore: dead_code } int _getMtfVal(Bz2BitReader br) { if (_groupPos == 0) { _groupNo++; if (_groupNo >= _numSelectors) { throw new ArchiveException('Data error'); } _groupPos = BZ_G_SIZE; _gSel = _selector[_groupNo]; _gMinlen = _minLens[_gSel]; _gLimit = _limit[_gSel]; _gPerm = _perm[_gSel]; _gBase = _base[_gSel]; } _groupPos--; int zn = _gMinlen; int zvec = br.readBits(zn); while (true) { if (zn > 20) { throw new ArchiveException('Data error'); } if (zvec <= _gLimit[zn]) { break; } zn++; int zj = br.readBits(1); zvec = (zvec << 1) | zj; } if (zvec - _gBase[zn] < 0 || zvec - _gBase[zn] >= BZ_MAX_ALPHA_SIZE) { throw new ArchiveException('Data error'); } return _gPerm[zvec - _gBase[zn]]; } void _hbCreateDecodeTables(Int32List limit, Int32List base, Int32List perm, Uint8List length, int minLen, int maxLen, int alphaSize) { int pp = 0; for (int i = minLen; i <= maxLen; i++) { for (int j = 0; j < alphaSize; j++) { if (length[j] == i) { perm[pp] = j; pp++; } } } for (int i = 0; i < BZ_MAX_CODE_LEN; i++) { base[i] = 0; } for (int i = 0; i < alphaSize; i++) { base[length[i]+1]++; } for (int i = 1; i < BZ_MAX_CODE_LEN; i++) { base[i] += base[i - 1]; } for (int i = 0; i < BZ_MAX_CODE_LEN; i++) { limit[i] = 0; } int vec = 0; for (int i = minLen; i <= maxLen; i++) { vec += (base[i + 1] - base[i]); limit[i] = vec-1; vec <<= 1; } for (int i = minLen + 1; i <= maxLen; i++) { base[i] = ((limit[i - 1] + 1) << 1) - base[i]; } } void _makeMaps() { _numInUse = 0; _seqToUnseq = new Uint8List(256); for (int i = 0; i < 256; ++i) { if (_inUse[i] != 0) { _seqToUnseq[_numInUse++] = i; } } } int _blockSize100k; Uint32List _tt; Uint8List _inUse16; Uint8List _inUse; Uint8List _seqToUnseq; Uint8List _mtfa; Int32List _mtfbase; Uint8List _selectorMtf; Uint8List _selector; List<Int32List> _limit; List<Int32List> _base; List<Int32List> _perm; Int32List _minLens; Int32List _unzftab; int _numSelectors; int _groupPos = 0; int _groupNo = -1; int _gSel = 0; int _gMinlen = 0; Int32List _gLimit; Int32List _gPerm; Int32List _gBase; Int32List _cftab; List<Uint8List> _len; int _numInUse = 0; static const int 
BZ_N_GROUPS = 6; static const int BZ_G_SIZE = 50; static const int BZ_N_ITERS = 4; static const int BZ_MAX_ALPHA_SIZE = 258; static const int BZ_MAX_CODE_LEN = 23; static const int BZ_MAX_SELECTORS = (2 + (900000 ~/ BZ_G_SIZE)); static const int MTFA_SIZE = 4096; static const int MTFL_SIZE = 16; static const int BZ_RUNA = 0; static const int BZ_RUNB = 1; static const int BLOCK_COMPRESSED = 0; static const int BLOCK_EOS = 2; static const List<int> BZ2_rNums = const [ 619, 720, 127, 481, 931, 816, 813, 233, 566, 247, 985, 724, 205, 454, 863, 491, 741, 242, 949, 214, 733, 859, 335, 708, 621, 574, 73, 654, 730, 472, 419, 436, 278, 496, 867, 210, 399, 680, 480, 51, 878, 465, 811, 169, 869, 675, 611, 697, 867, 561, 862, 687, 507, 283, 482, 129, 807, 591, 733, 623, 150, 238, 59, 379, 684, 877, 625, 169, 643, 105, 170, 607, 520, 932, 727, 476, 693, 425, 174, 647, 73, 122, 335, 530, 442, 853, 695, 249, 445, 515, 909, 545, 703, 919, 874, 474, 882, 500, 594, 612, 641, 801, 220, 162, 819, 984, 589, 513, 495, 799, 161, 604, 958, 533, 221, 400, 386, 867, 600, 782, 382, 596, 414, 171, 516, 375, 682, 485, 911, 276, 98, 553, 163, 354, 666, 933, 424, 341, 533, 870, 227, 730, 475, 186, 263, 647, 537, 686, 600, 224, 469, 68, 770, 919, 190, 373, 294, 822, 808, 206, 184, 943, 795, 384, 383, 461, 404, 758, 839, 887, 715, 67, 618, 276, 204, 918, 873, 777, 604, 560, 951, 160, 578, 722, 79, 804, 96, 409, 713, 940, 652, 934, 970, 447, 318, 353, 859, 672, 112, 785, 645, 863, 803, 350, 139, 93, 354, 99, 820, 908, 609, 772, 154, 274, 580, 184, 79, 626, 630, 742, 653, 282, 762, 623, 680, 81, 927, 626, 789, 125, 411, 521, 938, 300, 821, 78, 343, 175, 128, 250, 170, 774, 972, 275, 999, 639, 495, 78, 352, 126, 857, 956, 358, 619, 580, 124, 737, 594, 701, 612, 669, 112, 134, 694, 363, 992, 809, 743, 168, 974, 944, 375, 748, 52, 600, 747, 642, 182, 862, 81, 344, 805, 988, 739, 511, 655, 814, 334, 249, 515, 897, 955, 664, 981, 649, 113, 974, 459, 893, 228, 433, 837, 553, 268, 926, 240, 102, 654, 459, 51, 686, 754, 806, 760, 493, 403, 415, 394, 687, 700, 946, 670, 656, 610, 738, 392, 760, 799, 887, 653, 978, 321, 576, 617, 626, 502, 894, 679, 243, 440, 680, 879, 194, 572, 640, 724, 926, 56, 204, 700, 707, 151, 457, 449, 797, 195, 791, 558, 945, 679, 297, 59, 87, 824, 713, 663, 412, 693, 342, 606, 134, 108, 571, 364, 631, 212, 174, 643, 304, 329, 343, 97, 430, 751, 497, 314, 983, 374, 822, 928, 140, 206, 73, 263, 980, 736, 876, 478, 430, 305, 170, 514, 364, 692, 829, 82, 855, 953, 676, 246, 369, 970, 294, 750, 807, 827, 150, 790, 288, 923, 804, 378, 215, 828, 592, 281, 565, 555, 710, 82, 896, 831, 547, 261, 524, 462, 293, 465, 502, 56, 661, 821, 976, 991, 658, 869, 905, 758, 745, 193, 768, 550, 608, 933, 378, 286, 215, 979, 792, 961, 61, 688, 793, 644, 986, 403, 106, 366, 905, 644, 372, 567, 466, 434, 645, 210, 389, 550, 919, 135, 780, 773, 635, 389, 707, 100, 626, 958, 165, 504, 920, 176, 193, 713, 857, 265, 203, 50, 668, 108, 645, 990, 626, 197, 510, 357, 358, 850, 858, 364, 936, 638]; }
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. import math import multiprocessing as mp import numpy as np class TireForces: def __init__(self, stiffness, road_friction): self.c_alpha_front, self.c_alpha_rear, self.c_x_front, self.c_x_rear = stiffness self.road_friction = road_friction @staticmethod def _calculate_tire_angles(chassis, client, action): # Currently, we use parallel steering for front steering. fl_tire_angle = -chassis.steering + np.pi * 0.5 + chassis.pose.heading fr_tire_angle = fl_tire_angle rl_tire_angle = np.pi * 0.5 + chassis.pose.heading rr_tire_angle = rl_tire_angle return [fl_tire_angle, fr_tire_angle, rl_tire_angle, rr_tire_angle] @staticmethod def _calculate_slip_angles(chassis, client, action): steering_angles_corners = [ -chassis.steering, -chassis.steering, 0, 0, ] slip_angles = np.zeros(4) relative_corner_vector = [ np.array( [ chassis.front_rear_axle_CG_distance[0], 0.5 * chassis.front_track_width, 0, ] ), np.array( [ chassis.front_rear_axle_CG_distance[0], -0.5 * chassis.front_track_width, 0, ] ), np.array( [ -chassis.front_rear_axle_CG_distance[1], 0.5 * chassis.rear_track_width, 0, ] ), np.array( [ -chassis.front_rear_axle_CG_distance[1], -0.5 * chassis.rear_track_width, 0, ] ), ] plane_speed_vector = np.array( [ chassis.longitudinal_lateral_speed[0], -chassis.longitudinal_lateral_speed[1], 0, ] ) abs_speed_corners = [ math.sqrt( client.getLinkState(chassis.bullet_id, wheel_id, 1)[6][0] ** 2 + client.getLinkState(chassis.bullet_id, wheel_id, 1)[6][1] ** 2 ) for wheel_id in [2, 4, 5, 6] ] z_yaw = chassis.velocity_vectors[1][2] # 0.1 is the threshold for speed of the center of wheel # to activate slip angle caluclation. 
for i in range(4): if abs(abs_speed_corners[i]) > 0.1: corner_speed = plane_speed_vector + np.cross( np.array([0, 0, z_yaw]), relative_corner_vector[i] ) slip_angles[i] = steering_angles_corners[i] - ( ( math.atan2( corner_speed[1], corner_speed[0], ) ) ) return slip_angles @staticmethod def _calculate_slip_ratios(chassis, client, action): slip_ratios = np.zeros(4) tires_center_speed = np.zeros(4) tire_angles = TireForces._calculate_tire_angles(chassis, client, action) wheel_index = [2, 4, 5, 6] wheel_spin = [ math.sqrt( client.getLinkState(chassis.bullet_id, wheel_id, 1)[7][0] ** 2 + client.getLinkState(chassis.bullet_id, wheel_id, 1)[7][1] ** 2 ) for wheel_id in [2, 4, 5, 6] ] for idx in range(len(wheel_index)): tires_center_speed[idx] = client.getLinkState( chassis.bullet_id, wheel_index[idx], 1 )[6][0] * math.cos(tire_angles[idx]) + client.getLinkState( chassis.bullet_id, wheel_index[idx], 1 )[ 6 ][ 1 ] * math.sin( tire_angles[idx] ) if abs(wheel_spin[idx]) >= 0.1: slip_ratios[idx] = ( chassis.wheel_radius * wheel_spin[idx] - tires_center_speed[idx] ) / (chassis.wheel_radius * wheel_spin[idx]) return slip_ratios @staticmethod def build_tire_model(stiffness, tire_model_type, road_friction): if tire_model_type == "LinearTireforce(wheel)": return LinearTireForces(stiffness, road_friction) elif tire_model_type == "LinearTireforce(contact)": return LinearTireForcesContact(stiffness, road_friction) elif tire_model_type == "NonlinearTireForces(wheel)": return NonlinearTireForces(stiffness, road_friction) elif tire_model_type == "NonlinearTireForces(contact)": return NonlinearTireForcesContact(stiffness, road_friction) else: raise Exception("Requested tire force model does not exist.") class LinearTireForces(TireForces): def _calculate_tire_forces(self, chassis, client, action): tire_angles = self._calculate_tire_angles(chassis, client, action) ( fl_slip_angle, fr_slip_angle, rl_slip_angle, rr_slip_angle, ) = self._calculate_slip_angles(chassis, client, action) # Maximum lateral force for front tires are set to 2000N and # for rear tires it is set to 6000 N, this is to ensure stability. # Note that the maximum force for rear tires can be calculate using # max_lateral_force=sqrt(mu^2*Fn^2-Fx^2), where Fn is the normal force and # Fx is the longitudinal force. # TODO: Directly get the bounds by using the normal forces from Bullet max_normal_force = [2000, 2000, 6000, 6000] min_normal_force = [-2000, -2000, -6000, -6000] lat_forces = [ self.road_friction * np.clip( self.c_alpha_front * fl_slip_angle, min_normal_force[0], max_normal_force[0], ), self.road_friction * np.clip( self.c_alpha_front * fr_slip_angle, min_normal_force[1], max_normal_force[1], ), self.road_friction * np.clip( self.c_alpha_rear * rl_slip_angle, min_normal_force[2], max_normal_force[2], ), self.road_friction * np.clip( self.c_alpha_rear * rr_slip_angle, min_normal_force[3], max_normal_force[3], ), ] # The maximum force at the center of each wheel is set to 1000N lon_forces = [self.road_friction * action[0][idx] * 1000 for idx in range(4)] # If brake is activated, then override the throttle. Max brake is set # to 100 N, # TODO: Replace the maximum brake force using the values of # maximum brake torque in chassis. 
if action[1] > 0 and chassis.longitudinal_lateral_speed[0] > 0.1: lon_forces = [action[1] * -100 for idx in range(4)] forces = [] for idx in range(4): forces.append( [ lon_forces[idx] * math.cos(tire_angles[idx]) + lat_forces[idx] * math.cos(tire_angles[idx] + 0.5 * math.pi), lon_forces[idx] * math.sin(tire_angles[idx]) + lat_forces[idx] * math.sin(tire_angles[idx] + 0.5 * math.pi), 0, ] ) return (forces, lat_forces, lon_forces) def apply_tire_forces(self, chassis, client, action): wheel_index = [2, 4, 5, 6] wheel_positions = [ np.array(client.getLinkState(chassis.bullet_id, wheel_idx)[0]) for wheel_idx in wheel_index ] # Final Bounds for forces at wheels for cases # that the rise and fall filters are used at wheels. bounds = [15e4, 15e4, 2e5, 2e5] forces, lat_forces, lon_forces = self._calculate_tire_forces( chassis, client, action ) for idx in range(len(wheel_index)): client.applyExternalForce( chassis.bullet_id, 0, np.clip(forces[idx], -bounds[idx], bounds[idx]), wheel_positions[idx], client.WORLD_FRAME, ) return (lat_forces, lon_forces) # TODO: Implement tire forces at contact points class LinearTireForcesContact(TireForces): pass # TODO: Implement nonlinear tire forces class NonlinearTireForces(TireForces): pass # TODO: Implement nonlinear tire forces at contact points class NonlinearTireForcesContact(TireForces): pass
package blobstoreBenchmark.rocksDB import java.io.File import org.rocksdb.RocksDB import org.rocksdb.Options import org.rocksdb.WriteBatch import org.rocksdb.WriteOptions import blobstoreBenchmark.core.Harness import blobstoreBenchmark.core.Key import blobstoreBenchmark.core.Pair import blobstoreBenchmark.core.Plan import blobstoreBenchmark.core.Step import blobstoreBenchmark.core.Sum object Main extends Harness { def init(plan: Plan): Unit = withDb(plan.dbDir, db => { plan.pairs .grouped(100) .foreach(writeBatch(db, plan.blobSize, _)) }) def run(plan: Plan): Long = withDb(plan.dbDir, db => { plan.steps.toStream .map(runStep(db, plan, _)) .sum }) def runStep( db: RocksDB, plan: Plan, step: Step ): Long = { val sum = step.queries.toStream .map(read(db, _)) .sum writeBatch(db, plan.blobSize, step.updates) sum } def withDb[T](dbDir: File, block: RocksDB => T): T = { val options = new Options() .setCreateIfMissing(true) .setAllowMmapReads(true) .setAllowMmapWrites(true) val db = RocksDB.open( options, new File(dbDir, "store").getPath()) val result = block(db) db.close() options.close() result } def writeBatch( db: RocksDB, blobSize: Int, pairs: List[Pair] ): Unit = { val batch = new WriteBatch() pairs.foreach(write(batch, blobSize, _)) val options = new WriteOptions() .setSync(false) .setDisableWAL(true) db.write(options, batch) batch.close() } def write( batch: WriteBatch, blobSize: Int, pair: Pair ): Unit = batch.put( pair.key.toArray, pair.blobStub.generateArray(blobSize)) def read( db: RocksDB, key: Key ): Long = { val array = db.get(key.toArray) Sum.fromArray(array) } }
var scroller = angular.module("scroller", ["ngTouch", "angular-websql"]);
package io.elves.core.context; import com.google.common.collect.ImmutableList; import io.elves.core.coder.Coder; import io.netty.handler.codec.http.HttpHeaders; import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.List; import java.util.Map; /** * this class is read only * * @author lee */ public final class RequestContext { private final byte[] body; private final HttpHeaders headers; private final String url; private final Map<String, Object> parameters; private final Coder decoder; public RequestContext(byte[] body , HttpHeaders headers , String url , Map<String, Object> parameters , Coder coder) { this.body = body; this.headers = headers; this.url = url; this.parameters = parameters; this.decoder = coder; } public Object getParam(String key) { return parameters == null ? null : parameters.get(key); } public Map<String, Object> getParameters() { return parameters; } public String getUrl() { return url; } public final byte[] getBody() { return body; } public final HttpHeaders getHeaders() { return headers; } public final String getHeader(final String name) { return headers == null ? "" : headers.get(name); } public final List<String> getHeaderAll(final String name) { return headers == null ? Collections.emptyList() : ImmutableList.copyOf(headers.getAll(name)); } public <T> T body(Class<T> clazz) { return decoder.decode(body, clazz, StandardCharsets.UTF_8); } }
using System; namespace Business.Common.Requests { public class RequestObject : IRequestObject { public RequestObject() { DateCreatedUtc = DateTime.UtcNow; } public RequestObject(DateTime dateCreatedUtc) { DateCreatedUtc = dateCreatedUtc; } public string CommandName { get; set; } public object RequestData { get; set; } public string CorrelationId { get; set; } public DateTime DateCreatedUtc { get; set; } public string Title { get; set; } public string Description { get; set; } public string ExceptionGroup { get; set; } public string HostComputerName { get; set; } public string HostUserName { get; set; } public string HostUserDomain { get; set; } public string ExecutingAssemblyFullName { get; set; } public string CallingAssemblyFullName { get; set; } public string EntryAssemblyFullName { get; set; } public string TypeName { get; set; } public string MemberName { get; set; } public string ParentName { get; set; } public string AppId { get; set; } public string ClientName { get; set; } public string ClientIp { get; set; } } }
package co.com.lafemmeapp.lafemmeapp.presentation.history import android.app.Activity import android.content.Context import android.support.v4.widget.SwipeRefreshLayout import co.com.lafemmeapp.core.domain.entities.abstracts.User import co.com.lafemmeapp.lafemmeapp.events.OnAppointmentRatedEvent import co.com.lafemmeapp.utilmodule.presentation.view_interfaces.IBaseFragmentPresenter import co.com.lafemmeapp.utilmodule.presentation.view_interfaces.IFragmentCallbacks import com.google.android.gms.common.api.GoogleApiClient import io.reactivex.observers.DisposableObserver /** * Created by oscargallon on 5/22/17. */ interface IHistoryFragmentPresenter : IBaseFragmentPresenter, IFragmentCallbacks, SwipeRefreshLayout.OnRefreshListener { fun getUserAppointmentsHistory() fun filterAppointments(onGoing: Boolean) fun toggleSOS(context: Context) fun getGoogleApiClientAndShowGPSDialog(activity: Activity) fun checkSession(observer: DisposableObserver<User>) fun checkSOS() fun initHistory() }
#!/usr/bin/env perl use strict; # mdbox-obfuscate.pl < ~/mdbox/storage/m.1 > m.obfuscated # Check with text editor that everything appears to be obfuscated. # This script isn't perfect.. # For testing that you can reproduce problem: # mkdir -p ~/mdbox-test/storage # cp m.obfuscated ~/mdbox-test/storage/m.1 # doveadm -o mail=mdbox:~/mdbox-test force-resync INBOX # /usr/local/libexec/dovecot/imap -o mail=mdbox:~/mdbox-test my $state = 0; my $hdr_name = ""; my @boundaries = (); sub obs { my $len = length($_[0]); return "x"x$len; } sub find_boundary { my $str = $_[0]; $str =~ s/--$//; foreach $b (@boundaries) { return 1 if $b eq $str; } return 0; } while (<>) { chop $_; if ($state == 0) { # file header die "Not a valid dbox" if !/^2 /; print "$_\n"; $state++; } elsif ($state == 1) { # dbox mail header die "Invalid mail header" if !/^\001\002/; print "$_\n"; @boundaries = (); $state++; } elsif ($state == 2) { # mail header my ($key, $ws, $value); my $continued = 0; if (/^([ \t])(.*)$/) { $key = $hdr_name; $ws = $1; $value = $2; $continued = 1; } elsif (/^([^:]+)(:[ \t]*)(.*)$/) { ($key, $ws, $value) = ($1, $2, $3); $hdr_name = $key; } elsif (/^$/) { print "\n"; $state++; next; } else { print obs($_)."\n"; next; } if ($key =~ /^Content-/i && $key !~ /^Content-Description/i) { if ($key =~ /^Content-Type/) { if ($value =~ /boundary="([^"]+)"/i || $value =~ /boundary=([^ \t]+)/i) { push @boundaries, $1; } } $_ =~ s/(name=")([^"]*)(")/$1.obs($2).$3/ge; $_ =~ s/(name=)([^ \t]*)/$1.obs($2)/ge; print "$_\n"; } else { print $key if (!$continued); print $ws.obs($value)."\n"; } } elsif ($state == 3) { # mail body if (/^\001\003$/) { print "$_\n"; $state++; } elsif (/^--(.*)$/ && find_boundary($1)) { if ($2 eq "") { # mime header $state = 2; } print "$_\n"; } else { print obs($_)."\n"; } } elsif ($state == 4) { # dbox metadata if (/^$/) { $state = 1; } print "$_\n"; } }
content.prop.material.xenotech = content.prop.material.base.invent({ name: 'material/xenotech', createSynth: function () { this.synth = engine.audio.synth.createAm({ carrierDetune: 0, carrierFrequency: this.rootFrequency, carrierGain: 1/2, carrierType: 'square', gain: 1, modDepth: 1/2, modFrequency: 0, modType: 'square', }).filtered({ detune: 0, frequency: this.rootFrequency, Q: 5, type: 'bandpass', }).connect(this.output) const lfo = engine.audio.synth.createLfo({ depth: 600, frequency: 1/2, type: 'triangle', }).connect(this.synth.param.detune) this.synth.chainStop(lfo) const lfoModFrequency = engine.audio.synth.createLfo({ depth: 1, frequency: 1/5, }) const scale = engine.audio.circuit.scale({ from: lfoModFrequency.output, fromMax: 1, fromMin: -1, to: this.synth.param.mod.frequency, toMax: 24, toMin: 12, }) this.synth.chainStop(scale) }, })
# wunderwaffle
A tiny, asynchronous, multi-account «miner» for VK Coin

# Instructions
1. Install Python >=3.7 and Node.js¹ ==any
2. Install Python's packages: websockets, requests, asyncio
3. Create a file «accs.txt»
4. Fill it with accounts formatted as «login:password» (note that the first line is the master account; the others are workers)
5. Run «python wunderwaffle.py»
6. Coins will be automatically mined by all workers and transferred to the master

¹ - Note that you could install Python's package dukpy instead of Node.js

# Command line arguments
Script «wunderwaffle.py» has a few command line arguments (see the example run at the end of this README):
- `-i` - disable the autobuy (idle_mode)
- `-m` - disable the autobuy for the master (idle_main_mode)
- `-n` - disable the supporting
- `-d` - send the whole coin amount
- `-v` - enable verbose mode (log everything)
- `-b name` - buy only the specified item
- `-a val` - set the autotransfer triggering amount

# Fixing some problems
- If you added a new account but it was ignored, try removing the «save.dat» file and restarting the script.
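# Example run
For reference, a hypothetical «accs.txt» and invocation (the logins, passwords and the amount below are placeholders, not real data; the flags are the ones described above):

```
accs.txt:
[email protected]:master_password
[email protected]:worker_password_1
[email protected]:worker_password_2
```

```
python wunderwaffle.py -i -v -a 500000
```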
import styles from './RawVis.module.css'; interface Props { value: unknown; } function RawVis(props: Props) { const { value } = props; const valueAsStr = JSON.stringify(value, null, 2); if (valueAsStr.length > 1000) { console.log(value); // eslint-disable-line no-console return ( <p className={styles.fallback}> The dataset is too big to be displayed and was logged to the console instead. </p> ); } return <pre className={styles.raw}>{valueAsStr}</pre>; } export default RawVis;
#!/bin/bash EXEC=./Castro2d.gnu.MPI.TRUESDC.ex CONV_TOOL=RichardsonConvergenceTest2d.gnu.ex mpiexec -n 8 ${EXEC} inputs_2d.32 amr.plot_file=bubble_32_sdc4_plt >& 32.out mpiexec -n 8 ${EXEC} inputs_2d.64 amr.plot_file=bubble_64_sdc4_plt >& 64.out mpiexec -n 8 ${EXEC} inputs_2d.128 amr.plot_file=bubble_128_sdc4_plt >& 128.out ${CONV_TOOL} coarFile=bubble_32_sdc4_plt00334 mediFile=bubble_64_sdc4_plt00667 fineFile=bubble_128_sdc4_plt01334 > sdc_converge.lo.out mpiexec -n 8 ${EXEC} inputs_2d.256 amr.plot_file=bubble_256_sdc4_plt >& 256.out ${CONV_TOOL} coarFile=bubble_64_sdc4_plt00667 mediFile=bubble_128_sdc4_plt01334 fineFile=bubble_256_sdc4_plt02667 > sdc_converge.mid.out mpiexec -n 8 ${EXEC} inputs_2d.512 amr.plot_file=bubble_512_sdc4_plt >& 512.out ${CONV_TOOL} coarFile=bubble_128_sdc4_plt01334 mediFile=bubble_256_sdc4_plt02667 fineFile=bubble_512_sdc4_plt05334 > sdc_converge.hi.out
class Instrument{

    int numberOfStrings;
    String brand;
    String color;

    void printingMessage(int numberOfStrings, String brand, String color){
        System.out.println("This guitar has: "+numberOfStrings+", has an incredible "+brand+" and it's a "+color+" model.");
    }
}
package com.milo.ms.services; import com.milo.brewery.model.BeerOrderDto; import com.milo.brewery.model.BeerOrderPagedList; import org.springframework.data.domain.Pageable; import java.util.UUID; public interface BeerOrderService { BeerOrderPagedList listOrders(UUID customerId, Pageable pageable); BeerOrderDto placeOrder(UUID customerId, BeerOrderDto beerOrderDto); BeerOrderDto getOrderById(UUID customerId, UUID orderId); void pickupOrder(UUID customerId, UUID orderId); }
# admin-upms
User authentication center and permission center

# Preview
![1](https://raw.githubusercontent.com/hb0730/admin-upms/main/doc/img/20210105142531.gif)

# Frontend repository
[upms-ui](https://github.com/hb0730/admin-upms-ui)
package brandenc.com.chatmatch import brandenc.com.chatmatch.Callbacks.TitlesCallback import org.jetbrains.anko.doAsync /** * Auxiliary class used to grab the titles from the provided urls * * @constructor Creates a PageTitleRetriever */ class PageTitleRetriever(private val titleRetriever: TitleRetriever) { /** * Retrieves the page titles for the given urls * * @param urls the list of urls to gather the titles from * @param titlesCallback the callback where the successful information will be returned */ fun getPageTitles(urls: List<String>, titlesCallback: TitlesCallback) { doAsync { val titles = urls.map { titleRetriever.getTitleForUrl(it) } titlesCallback.onSuccessfulPageTitle(titles) } } }
package bootstrap import ( . "github.com/onsi/ginkgo" . "github.com/onsi/ginkgo/extensions/table" . "github.com/onsi/gomega" "github.com/kumahq/kuma/pkg/config/api-server/catalog" kuma_cp "github.com/kumahq/kuma/pkg/config/app/kuma-cp" "github.com/kumahq/kuma/pkg/config/core" ) var _ = Describe("Auto configuration", func() { type testCase struct { cpConfig func() kuma_cp.Config expectedCatalogConfig catalog.CatalogConfig } DescribeTable("should autoconfigure catalog", func(given testCase) { // given cfg := given.cpConfig() // when err := autoconfigure(&cfg) // then Expect(err).ToNot(HaveOccurred()) // and Expect(*cfg.ApiServer.Catalog).To(Equal(given.expectedCatalogConfig)) }, Entry("with public settings for dataplane token server", testCase{ cpConfig: func() kuma_cp.Config { cfg := kuma_cp.DefaultConfig() cfg.General.AdvertisedHostname = "kuma.internal" cfg.DataplaneTokenServer.Local.Port = 1111 cfg.DataplaneTokenServer.Public.Enabled = true cfg.DataplaneTokenServer.Public.Interface = "192.168.0.1" cfg.DataplaneTokenServer.Public.Port = 2222 cfg.BootstrapServer.Port = 3333 cfg.ApiServer.Port = 1234 return cfg }, expectedCatalogConfig: catalog.CatalogConfig{ ApiServer: catalog.ApiServerConfig{ Url: "http://kuma.internal:1234", }, Bootstrap: catalog.BootstrapApiConfig{ Url: "http://kuma.internal:3333", }, DataplaneToken: catalog.DataplaneTokenApiConfig{ LocalUrl: "http://localhost:1111", PublicUrl: "https://kuma.internal:2222", }, Admin: catalog.AdminApiConfig{ LocalUrl: "http://localhost:1111", PublicUrl: "https://kuma.internal:2222", }, MonitoringAssignment: catalog.MonitoringAssignmentApiConfig{ Url: "grpc://kuma.internal:5676", }, }, }), Entry("without public port explicitly defined", testCase{ cpConfig: func() kuma_cp.Config { cfg := kuma_cp.DefaultConfig() cfg.General.AdvertisedHostname = "kuma.internal" cfg.DataplaneTokenServer.Local.Port = 1111 cfg.DataplaneTokenServer.Public.Enabled = true cfg.DataplaneTokenServer.Public.Interface = "192.168.0.1" cfg.BootstrapServer.Port = 3333 return cfg }, expectedCatalogConfig: catalog.CatalogConfig{ ApiServer: catalog.ApiServerConfig{ Url: "http://kuma.internal:5681", }, Bootstrap: catalog.BootstrapApiConfig{ Url: "http://kuma.internal:3333", }, DataplaneToken: catalog.DataplaneTokenApiConfig{ LocalUrl: "http://localhost:1111", PublicUrl: "https://kuma.internal:1111", // port is autoconfigured from the local port }, Admin: catalog.AdminApiConfig{ LocalUrl: "http://localhost:1111", PublicUrl: "https://kuma.internal:1111", // port is autoconfigured from the local port }, MonitoringAssignment: catalog.MonitoringAssignmentApiConfig{ Url: "grpc://kuma.internal:5676", }, }, }), Entry("without public settings for dataplane token server", testCase{ cpConfig: func() kuma_cp.Config { cfg := kuma_cp.DefaultConfig() cfg.General.AdvertisedHostname = "kuma.internal" cfg.DataplaneTokenServer.Local.Port = 1111 cfg.BootstrapServer.Port = 3333 return cfg }, expectedCatalogConfig: catalog.CatalogConfig{ ApiServer: catalog.ApiServerConfig{ Url: "http://kuma.internal:5681", }, Bootstrap: catalog.BootstrapApiConfig{ Url: "http://kuma.internal:3333", }, DataplaneToken: catalog.DataplaneTokenApiConfig{ LocalUrl: "http://localhost:1111", PublicUrl: "", }, Admin: catalog.AdminApiConfig{ LocalUrl: "http://localhost:1111", PublicUrl: "", }, MonitoringAssignment: catalog.MonitoringAssignmentApiConfig{ Url: "grpc://kuma.internal:5676", }, }, }), Entry("without dataplane token server", testCase{ cpConfig: func() kuma_cp.Config { cfg := kuma_cp.DefaultConfig() 
cfg.DataplaneTokenServer.Enabled = false return cfg }, expectedCatalogConfig: catalog.CatalogConfig{ ApiServer: catalog.ApiServerConfig{ Url: "http://localhost:5681", }, Bootstrap: catalog.BootstrapApiConfig{ Url: "http://localhost:5682", }, DataplaneToken: catalog.DataplaneTokenApiConfig{ LocalUrl: "", PublicUrl: "", }, Admin: catalog.AdminApiConfig{ LocalUrl: "http://localhost:5679", PublicUrl: "", }, MonitoringAssignment: catalog.MonitoringAssignmentApiConfig{ Url: "grpc://localhost:5676", }, }, }), Entry("with public settings for bootstrap and mads server", testCase{ cpConfig: func() kuma_cp.Config { cfg := kuma_cp.DefaultConfig() cfg.General.AdvertisedHostname = "kuma.internal" cfg.DataplaneTokenServer.Local.Port = 1111 cfg.DataplaneTokenServer.Public.Enabled = true cfg.DataplaneTokenServer.Public.Interface = "192.168.0.1" cfg.DataplaneTokenServer.Public.Port = 2222 cfg.BootstrapServer.Port = 3333 cfg.ApiServer.Catalog.Bootstrap.Url = "https://bootstrap.kuma.com:1234" cfg.ApiServer.Catalog.MonitoringAssignment.Url = "grpcs://mads.kuma.com:1234" return cfg }, expectedCatalogConfig: catalog.CatalogConfig{ ApiServer: catalog.ApiServerConfig{ Url: "http://kuma.internal:5681", }, Bootstrap: catalog.BootstrapApiConfig{ Url: "https://bootstrap.kuma.com:1234", }, DataplaneToken: catalog.DataplaneTokenApiConfig{ LocalUrl: "http://localhost:1111", PublicUrl: "https://kuma.internal:2222", }, Admin: catalog.AdminApiConfig{ LocalUrl: "http://localhost:1111", PublicUrl: "https://kuma.internal:2222", }, MonitoringAssignment: catalog.MonitoringAssignmentApiConfig{ Url: "grpcs://mads.kuma.com:1234", }, }, }), ) It("should autoconfigure gui config", func() { // given cfg := kuma_cp.DefaultConfig() cfg.Environment = core.KubernetesEnvironment cfg.General.AdvertisedHostname = "kuma.internal" cfg.ApiServer.Port = 1234 // when err := autoconfigure(&cfg) // then Expect(err).ToNot(HaveOccurred()) // and Expect(cfg.GuiServer.ApiServerUrl).To(Equal("")) }) It("should autoconfigure xds params", func() { // given cfg := kuma_cp.DefaultConfig() cfg.General.AdvertisedHostname = "kuma.internal" cfg.XdsServer.GrpcPort = 1234 // when err := autoconfigure(&cfg) // then Expect(err).ToNot(HaveOccurred()) // and Expect(cfg.BootstrapServer.Params.XdsHost).To(Equal("kuma.internal")) Expect(cfg.BootstrapServer.Params.XdsPort).To(Equal(uint32(1234))) }) It("should autoconfigure admin server from old dataplane token server config", func() { // given cfg := kuma_cp.DefaultConfig() cfg.DataplaneTokenServer.Local.Port = 1111 cfg.DataplaneTokenServer.Public.Enabled = true cfg.DataplaneTokenServer.Public.Interface = "192.168.0.1" cfg.DataplaneTokenServer.Public.Port = 2222 // when err := autoconfigure(&cfg) // then Expect(err).ToNot(HaveOccurred()) // and Expect(cfg.AdminServer.Public.Interface).To(Equal("192.168.0.1")) Expect(cfg.AdminServer.Public.Enabled).To(BeTrue()) Expect(cfg.AdminServer.Public.Port).To(Equal(uint32(2222))) Expect(cfg.AdminServer.Local.Port).To(Equal(uint32(1111))) }) It("should not rewrite values of admin server when old dataplane token server config was not changed", func() { // given cfg := kuma_cp.DefaultConfig() cfg.AdminServer.Local.Port = 1111 cfg.AdminServer.Public.Enabled = true cfg.AdminServer.Public.Interface = "192.168.0.1" cfg.AdminServer.Public.Port = 2222 // when err := autoconfigure(&cfg) // then Expect(err).ToNot(HaveOccurred()) // and Expect(cfg.AdminServer.Public.Interface).To(Equal("192.168.0.1")) Expect(cfg.AdminServer.Public.Enabled).To(BeTrue()) 
Expect(cfg.AdminServer.Public.Port).To(Equal(uint32(2222))) Expect(cfg.AdminServer.Local.Port).To(Equal(uint32(1111))) }) It("should autoconfigure MonitoringAssignment server", func() { // given cfg := kuma_cp.DefaultConfig() cfg.General.AdvertisedHostname = "kuma.internal" cfg.MonitoringAssignmentServer.GrpcPort = 8765 // when err := autoconfigure(&cfg) // then Expect(err).ToNot(HaveOccurred()) // and Expect(cfg.ApiServer.Catalog.MonitoringAssignment.Url).To(Equal("grpc://kuma.internal:8765")) }) })
---
title: What happens when you call new Vue
tags:
  - vue
---

:::warning Question
Why can `mounted()` read the properties defined in `data` through `this.propertyName`? If `this` can reach such a property, the property must also exist on `this` somehow. How is that achieved?
:::

1. What does `new Vue()` do?

Vue is essentially just a `function`:

```js
// instance/index.js
function Vue (options) {
  if (process.env.NODE_ENV !== 'production' &&
    !(this instanceof Vue)
  ) {
    warn('Vue is a constructor and should be called with the `new` keyword')
  }
  // calling new Vue() runs _init
  this._init(options)
}
```

2. The constructor calls `this._init(options)`; `_init(...)` is added to `Vue.prototype` by `initMixin()`.

3. `_init()`

```js
Vue.prototype._init = function (options) {
  ...
  // expose real self
  vm._self = vm
  initLifecycle(vm) // initialize the lifecycle
  initEvents(vm) // initialize events
  initRender(vm) // initialize rendering
  callHook(vm, 'beforeCreate') // run the beforeCreate hook
  initInjections(vm) // resolve injections before data/props
  initState(vm) // initState is what processes the data in `data`
  initProvide(vm) // resolve provide after data/props
  callHook(vm, 'created') // run the created hook
  ...
}
```

4. For the question raised above, the main thing to look at is `initState(vm)`, and within it `initData(vm)`:

```js
export function initState (vm: Component) {
  ...
  if (opts.data) {
    initData(vm) // initialize data
  } else {
    observe(vm._data = {}, true /* asRootData */)
  }
  ...
}
```

5. `initData(vm)` first gets hold of the data object, then uses `proxy` to delegate reads and writes:

```js
function initData (vm: Component) {
  ...
  let data = vm.$options.data // get data and also attach it to vm as _data
  data = vm._data = typeof data === 'function'
    ? getData(data, vm)
    : data || {};
  const keys = Object.keys(data) // the data object's property names, as an array
  const props = vm.$options.props
  const methods = vm.$options.methods
  let i = keys.length
  while (i--) {
    const key = keys[i]
    ...
    proxy(vm, `_data`, key) // this is the key step: a layer of proxying
  }
  ...
}
```

6. `proxy()`: writing `this.a = 123` effectively operates on `this._data.a = 123`:

```js
const sharedPropertyDefinition = {
  enumerable: true,
  configurable: true,
  get: noop,
  set: noop
}

// proxy declares a property on vm with the same name as the data property.
// Because it is created with Object.defineProperty, reads and writes can be
// intercepted, and the getter transparently forwards to the matching property on _data.
export function proxy (target: Object, sourceKey: string, key: string) {
  sharedPropertyDefinition.get = function proxyGetter () {
    return this[sourceKey][key]
  }
  sharedPropertyDefinition.set = function proxySetter (val) {
    this[sourceKey][key] = val
  }
  // target is vm: this attaches key to vm, so vm.key invokes
  // sharedPropertyDefinition.get, which in the end reads vm._data.key
  Object.defineProperty(target, key, sharedPropertyDefinition)
}
```
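To see the mechanism in isolation, here is a minimal standalone sketch in plain JavaScript (not Vue source code); the `vm` and `_data` names mirror the snippets above, and the `message` property is just a made-up example:

```js
// A bare-bones version of the proxying above: vm.message forwards to vm._data.message.
const vm = { _data: { message: 'hello' } }

function proxy (target, sourceKey, key) {
  Object.defineProperty(target, key, {
    enumerable: true,
    configurable: true,
    get () { return this[sourceKey][key] },
    set (val) { this[sourceKey][key] = val }
  })
}

// What initData() does for every key found in data
Object.keys(vm._data).forEach(key => proxy(vm, '_data', key))

console.log(vm.message)        // 'hello'   (actually reads vm._data.message)
vm.message = 'changed'
console.log(vm._data.message)  // 'changed' (writes are forwarded too)
```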
; RUN: opt < %s -constmerge -S | FileCheck %s ; CHECK: = constant i32 1, !dbg [[A:![0-9]+]], !dbg [[B:![0-9]+]] @a = internal constant i32 1, !dbg !0 @b = unnamed_addr constant i32 1, !dbg !9 define void @test1(i32** %P1, i32** %P2) { store i32* @a, i32** %P1 store i32* @b, i32** %P2 ret void } !llvm.dbg.cu = !{!2} !llvm.module.flags = !{!7, !8} ; CHECK: [[A]] = !DIGlobalVariableExpression(var: [[VA:![0-9]+]]) ; CHECK: [[VA]] = distinct !DIGlobalVariable(name: "y" ; CHECK: [[B]] = !DIGlobalVariableExpression(var: [[VB:![0-9]+]]) ; CHECK: [[VB]] = distinct !DIGlobalVariable(name: "x" !0 = !DIGlobalVariableExpression(var: !1) !1 = distinct !DIGlobalVariable(name: "x", scope: !2, file: !3, line: 1, type: !6, isLocal: false, isDefinition: true) !2 = distinct !DICompileUnit(language: DW_LANG_C_plus_plus, file: !3, producer: "clang version 5.0.0 (trunk 297227) (llvm/trunk 297234)", isOptimized: false, runtimeVersion: 0, emissionKind: FullDebug, enums: !4, globals: !5) !3 = !DIFile(filename: "1.cc", directory: "/build") !4 = !{} !5 = !{!0} !6 = !DIBasicType(name: "int", size: 32, encoding: DW_ATE_signed) !7 = !{i32 2, !"Dwarf Version", i32 4} !8 = !{i32 2, !"Debug Info Version", i32 3} !9 = !DIGlobalVariableExpression(var: !10) !10 = distinct !DIGlobalVariable(name: "y", scope: !2, file: !3, line: 1, type: !6, isLocal: false, isDefinition: true)
#!/bin/bash -e
echo "${NODE}.${duration} sleep: ${duration} sec"
/bin/sleep $duration
echo "Test input with all types of variables"
# simple property
echo "Comment with instantiated variables : ${comment}"
# Displaying data file
echo "Display local data artifact file"
cat ${data}
if [ -z "$http_artifact" ]; then
    echo "http_artifact is not set"
    #exit 1
else
    echo "Display http artifact file"
    cat ${http_artifact}
fi
if [ -z "$git_artifact" ]; then
    echo "git_artifact is not set"
    #exit 1
else
    echo "Display git artifact file"
    cat ${git_artifact}
fi
if [ -z "$maven_artifact" ]; then
    echo "maven_artifact is not set"
    #exit 1
else
    echo "Display maven artifact file"
    cat ${maven_artifact}
fi
# Resolving lib path $LIBP=(Resolve-Path .\lb\).Path # Prepping the environment $env:LIB="$($env:LIB)$LIBP;" Write-Host "LIB @ $LIBP" # Building target if ($env:BUILD_MODE -eq "release") { cargo build --release } else { cargo build }
cask :v1 => 'story-writer' do version '1.3.1' if Hardware::CPU.is_32_bit? sha256 '8e109e3f6e66dd380fcb2f1d68dda592dc12a36ca9c72b7f477057f6bff8c844' url "http://soft.xiaoshujiang.com/version/Story-writer-v#{version}/Story-writer-osx32.zip" else sha256 'cf3203587775e6260513511b4a9bc91e39d0699ccc59eff3734320aaa54c3bef' url "http://soft.xiaoshujiang.com/version/Story-writer-v#{version}/Story-writer-osx64.zip" end name 'Story Writer' homepage 'http://soft.xiaoshujiang.com' license :gratis app 'Story-writer.app' end
<?php

namespace App\Http\Controllers;

use App\Models\About;
use App\Models\Coffe;
use App\Models\Product;
use App\Models\Promise;
use Illuminate\Http\Request;

class AllController extends Controller
{
    public function home(){
        $coffe = Coffe::first();
        $promise = Promise::first();
        // dd($promise);
        return view('home', compact('coffe', 'promise'));
    }

    public function about(){
        $about = About::first();
        return view('pages.about', compact('about'));
    }

    public function product(){
        $products = Product::all();
        return view('pages.product', compact('products'));
    }

    // Not implemented yet because the database part is not done
    // public function store(){
    //     return view('pages.store');
    // }
}
import React from 'react'; import { Link } from 'react-router-dom'; import PropTypes from 'prop-types'; import Loading from '../Loading'; export default function PostsPage({ posts }) { return !posts ? ( <Loading /> ) : ( <div className="PostsPage"> <ul> {posts.map((post) => ( <li className="PostListItem" key={post._id}> <Link to={`/posts/${post._id}`}> <article> <h3> {post.title.length > 200 ? `${post.title.slice(0, 200)}...` : post.title} </h3> <p>{post.timestamp}</p> </article> </Link> </li> ))} </ul> </div> ); } PostsPage.propTypes = { posts: PropTypes.arrayOf(PropTypes.object) };
<?php namespace App\Http\Controllers\Dashboard; use Illuminate\Http\Request; use App\Http\Controllers\Controller; class LinksCreate extends Controller { public function __invoke(Request $request) { // new link $linkData = []; return view( 'links.create', compact('linkData') ); } }
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using TripPlanner.Core.Entities; namespace TripPlanner.Core.Interfaces { public interface IDestinationTripRepository { Response<DestinationTrip> Add(DestinationTrip destinationTrip); Response Edit(DestinationTrip destinationTrip); Response Remove(int destinationID, int tripID); Response<DestinationTrip> Get(int destinationID, int tripID); Response<List<DestinationTrip>> GetByTrip(int tripID); } }
package pl.touk.krush.types import org.assertj.core.api.Assertions.assertThat import org.jetbrains.exposed.sql.SchemaUtils import org.jetbrains.exposed.sql.select import org.jetbrains.exposed.sql.transactions.transaction import org.junit.jupiter.api.Test import pl.touk.krush.base.BaseDatabaseTest import java.time.Clock import java.time.Instant import java.time.LocalDate import java.time.LocalDateTime import java.time.ZoneId.systemDefault import java.time.ZonedDateTime import java.util.UUID.randomUUID class EventTest : BaseDatabaseTest() { @Test fun shouldHandleUUIDAndDateTypes() { transaction { SchemaUtils.create(EventTable) // given val clock = Clock.fixed(Instant.parse("2019-10-22T09:00:00.000Z"), systemDefault()) val createTime = ZonedDateTime.now(clock) val event = EventTable.insert(Event( eventDate = LocalDate.now(clock), processTime = LocalDateTime.now(clock), createTime = createTime, updateTime = Instant.now(clock), otherUpdateTime = ExampleInstantWrapper(Instant.now(clock)), externalId = randomUUID() )) //when val events = (EventTable) .select { EventTable.createTime greater createTime.minusDays(1) } .toEventList() //then assertThat(events).containsOnly(event) } } }
use crate::event::{EventHandler, EventName}; use crate::{AttributeValue, Patch, PatchSpecialAttribute}; use crate::{VElement, VirtualNode}; use std::cmp::min; use std::collections::HashMap; use std::mem; /// Given two VirtualNode's generate Patch's that would turn the old virtual node's /// real DOM node equivalent into the new VirtualNode's real DOM node equivalent. pub fn diff<'a>(old: &'a VirtualNode, new: &'a VirtualNode) -> Vec<Patch<'a>> { diff_recursive(&old, &new, &mut 0, &mut 0) } fn diff_recursive<'a, 'b>( old: &'a VirtualNode, new: &'a VirtualNode, old_node_idx: &'b mut u32, new_node_idx: &'b mut u32, ) -> Vec<Patch<'a>> { let mut patches = vec![]; let node_variants_different = mem::discriminant(old) != mem::discriminant(new); let mut element_tags_different = false; if let (VirtualNode::Element(old_element), VirtualNode::Element(new_element)) = (old, new) { element_tags_different = old_element.tag != new_element.tag; } let should_fully_replace_node = node_variants_different || element_tags_different; if should_fully_replace_node { if let Some(velem) = old.as_velement_ref() { if velem.events.has_events() { patches.push(Patch::RemoveAllManagedEventsWithNodeIdx(*old_node_idx)); } } let replaced_old_idx = *old_node_idx; if let VirtualNode::Element(old_element_node) = old { for child in old_element_node.children.iter() { process_deleted_old_node_child(child, old_node_idx, &mut patches); } } patches.push(Patch::Replace { old_idx: replaced_old_idx, new_idx: *new_node_idx, new_node: new, }); if let Some(velem) = old.as_velement_ref() { if velem.special_attributes.on_remove_element_key().is_some() { patches.push(Patch::SpecialAttribute( PatchSpecialAttribute::CallOnRemoveElem(*old_node_idx, old), )); } } if let VirtualNode::Element(new_element_node) = new { for child in new_element_node.children.iter() { increment_idx_for_child(child, new_node_idx); } } return patches; } match (old, new) { (VirtualNode::Text(old_text), VirtualNode::Text(new_text)) => { if old_text != new_text { patches.push(Patch::ChangeText(*old_node_idx, &new_text)); } } (VirtualNode::Element(old_element), VirtualNode::Element(new_element)) => { let mut attributes_to_add: HashMap<&str, &AttributeValue> = HashMap::new(); let mut attributes_to_remove: Vec<&str> = vec![]; let mut events_to_add = HashMap::new(); let mut events_to_remove = vec![]; find_attributes_to_add( *old_node_idx, &mut attributes_to_add, old_element, new_element, &mut patches, ); find_attributes_to_remove( &mut attributes_to_add, &mut attributes_to_remove, old_element, new_element, ); find_events_to_add(&mut events_to_add, old_element, new_element); find_events_to_remove( &mut events_to_add, &mut events_to_remove, old_element, new_element, ); if attributes_to_add.len() > 0 { patches.push(Patch::AddAttributes(*old_node_idx, attributes_to_add)); } if attributes_to_remove.len() > 0 { patches.push(Patch::RemoveAttributes(*old_node_idx, attributes_to_remove)); } if events_to_remove.len() > 0 { patches.push(Patch::RemoveEvents(*old_node_idx, events_to_remove)); } if events_to_add.len() > 0 { patches.push(Patch::AddEvents(*old_node_idx, events_to_add)); } // FIXME: Move into function match ( old_element.special_attributes.dangerous_inner_html.as_ref(), new_element.special_attributes.dangerous_inner_html.as_ref(), ) { (None, Some(_)) => { patches.push(Patch::SpecialAttribute( PatchSpecialAttribute::SetDangerousInnerHtml(*old_node_idx, new), )); } (Some(old_inner), Some(new_inner)) => { if old_inner != new_inner { patches.push(Patch::SpecialAttribute( 
PatchSpecialAttribute::SetDangerousInnerHtml(*old_node_idx, new), )); } } (Some(_), None) => { patches.push(Patch::SpecialAttribute( PatchSpecialAttribute::RemoveDangerousInnerHtml(*old_node_idx), )); } (None, None) => {} }; // FIXME: Move into function match ( old_element.special_attributes.on_create_element_key(), new_element.special_attributes.on_create_element_key(), ) { (None, Some(_)) => { patches.push(Patch::SpecialAttribute( PatchSpecialAttribute::CallOnCreateElem(*old_node_idx, new), )); } (Some(old_id), Some(new_id)) => { if new_id != old_id { patches.push(Patch::SpecialAttribute( PatchSpecialAttribute::CallOnCreateElem(*old_node_idx, new), )); } } (Some(_), None) | (None, None) => {} }; match ( old_element.special_attributes.on_remove_element_key(), new_element.special_attributes.on_remove_element_key(), ) { (Some(_), None) => { patches.push(Patch::SpecialAttribute( PatchSpecialAttribute::CallOnRemoveElem(*old_node_idx, old), )); } (Some(old_id), Some(new_id)) => { if old_id != new_id { patches.push(Patch::SpecialAttribute( PatchSpecialAttribute::CallOnRemoveElem(*old_node_idx, old), )); } } _ => {} } let old_elem_has_events = old_element.events.has_events(); let new_elem_has_events = new_element.events.has_events(); if !old_elem_has_events && new_elem_has_events { patches.push(Patch::SetEventsId { old_idx: *old_node_idx, new_idx: *new_node_idx, }); } else if old_elem_has_events && !new_elem_has_events { patches.push(Patch::RemoveEventsId(*old_node_idx)); } else if old_elem_has_events && new_elem_has_events { if old_node_idx != new_node_idx { patches.push(Patch::SetEventsId { old_idx: *old_node_idx, new_idx: *new_node_idx, }); } } generate_patches_for_children( old_node_idx, new_node_idx, old_element, new_element, &mut patches, ); } (VirtualNode::Text(_), VirtualNode::Element(_)) | (VirtualNode::Element(_), VirtualNode::Text(_)) => { unreachable!("Unequal variant discriminants should already have been handled"); } }; patches } /// Add attributes from the new element that are not already on the old one or that have changed. fn find_attributes_to_add<'a>( cur_node_idx: u32, attributes_to_add: &mut HashMap<&'a str, &'a AttributeValue>, old_element: &VElement, new_element: &'a VElement, patches: &mut Vec<Patch<'a>>, ) { for (new_attr_name, new_attr_val) in new_element.attrs.iter() { match old_element.attrs.get(new_attr_name) { Some(ref old_attr_val) => { if old_attr_val != &new_attr_val { attributes_to_add.insert(new_attr_name, new_attr_val); } else if new_attr_name == "value" { patches.push(Patch::ValueAttributeUnchanged(cur_node_idx, new_attr_val)); } } None => { attributes_to_add.insert(new_attr_name, new_attr_val); } }; } } /// Remove attributes that were on the old element that are not present on the new element. fn find_attributes_to_remove<'a>( attributes_to_add: &mut HashMap<&str, &AttributeValue>, attributes_to_remove: &mut Vec<&'a str>, old_element: &'a VElement, new_element: &VElement, ) { for (old_attr_name, old_attr_val) in old_element.attrs.iter() { if attributes_to_add.get(&old_attr_name[..]).is_some() { continue; }; match new_element.attrs.get(old_attr_name) { Some(ref new_attr_val) => { if new_attr_val != &old_attr_val { attributes_to_remove.push(old_attr_name); } } None => { attributes_to_remove.push(old_attr_name); } }; } } /// Add attributes from the new element that are not already on the old one or that have changed. 
fn find_events_to_add<'a>( events_to_add: &mut HashMap<&'a EventName, &'a EventHandler>, old_element: &VElement, new_element: &'a VElement, ) { for (new_event_name, new_event) in new_element.events.iter() { if !old_element.events.contains_key(new_event_name) { events_to_add.insert(new_event_name, new_event); } } } /// Remove non delegated that were on the old element that are not present on the new element. fn find_events_to_remove<'a>( events_to_add: &mut HashMap<&'a EventName, &'a EventHandler>, events_to_remove: &mut Vec<(&'a EventName, &'a EventHandler)>, old_element: &'a VElement, new_element: &'a VElement, ) { for (old_event_name, old_event) in old_element.events.iter() { if events_to_add.contains_key(old_event_name) { continue; }; if new_element.events.contains_key(old_event_name) { continue; } events_to_remove.push((old_event_name, old_event)); } } fn generate_patches_for_children<'a, 'b>( old_node_idx: &'b mut u32, new_node_idx: &'b mut u32, old_element: &'a VElement, new_element: &'a VElement, patches: &mut Vec<Patch<'a>>, ) { let old_child_count = old_element.children.len(); let new_child_count = new_element.children.len(); let current_old_node_idx = *old_node_idx; if new_child_count < old_child_count { patches.push(Patch::TruncateChildren( current_old_node_idx, new_child_count, )); } let min_count = min(old_child_count, new_child_count); for index in 0..min_count { *old_node_idx += 1; *new_node_idx += 1; let old_child = &old_element.children[index]; let new_child = &new_element.children[index]; patches.append(&mut diff_recursive( &old_child, &new_child, old_node_idx, new_node_idx, )) } if new_child_count < old_child_count { for child in old_element.children[min_count..].iter() { process_deleted_old_node_child(child, old_node_idx, patches); } } else if new_child_count > old_child_count { let mut append_patch = vec![]; for new_node in new_element.children[old_child_count..].iter() { *new_node_idx += 1; append_patch.push((*new_node_idx, new_node)); if let Some(elem) = new_node.as_velement_ref() { for child in elem.children.iter() { increment_idx_for_child(child, new_node_idx); } } } patches.push(Patch::AppendChildren { old_idx: current_old_node_idx, new_nodes: append_patch, }) } } /// Increment the `cur_node_idx` to account for this deleted node. /// /// Then iterate through all of its children, recursively, and increment the `cur_node_idx`. /// /// Along the way we also push patches to remove all tracked events for deleted nodes /// (if they had events). fn process_deleted_old_node_child<'a>( old_node: &'a VirtualNode, cur_node_idx: &mut u32, patches: &mut Vec<Patch<'a>>, ) { *cur_node_idx += 1; if let VirtualNode::Element(element_node) = old_node { if element_node.events.len() > 0 { patches.push(Patch::RemoveAllManagedEventsWithNodeIdx(*cur_node_idx)); } if element_node .special_attributes .on_remove_element_key() .is_some() { patches.push(Patch::SpecialAttribute( PatchSpecialAttribute::CallOnRemoveElem(*cur_node_idx, old_node), )); } for child in element_node.children.iter() { process_deleted_old_node_child(&child, cur_node_idx, patches); } } } /// Recursively increment the node idx for each child, depth first. 
fn increment_idx_for_child(new_node: &VirtualNode, new_node_idx: &mut u32) { *new_node_idx += 1; if let VirtualNode::Element(element_node) = new_node { for child in element_node.children.iter() { increment_idx_for_child(child, new_node_idx); } } } #[cfg(test)] mod diff_test_case; #[cfg(test)] mod tests { use super::*; use crate::event::EventName; use crate::{html, EventAttribFn, PatchSpecialAttribute, VText, VirtualNode}; use std::collections::HashMap; use std::rc::Rc; use virtual_node::IterableNodes; use wasm_bindgen::JsValue; use super::diff_test_case::*; #[test] fn replace_node() { DiffTestCase { old: html! { <div> </div> }, new: html! { <span> </span> }, expected: vec![Patch::Replace { old_idx: 0, new_idx: 0, new_node: &html! { <span></span> }, }], } .test(); DiffTestCase { old: html! { <div> <b></b> </div> }, new: html! { <div> <strong></strong> </div> }, expected: vec![Patch::Replace { old_idx: 1, new_idx: 1, new_node: &html! { <strong></strong> }, }], } .test(); DiffTestCase { old: html! { <div> <b>1</b> <b></b> </div> }, new: html! { <div> <i>{"1"} {"2"}</i> <br /> </div>}, expected: vec![ Patch::Replace { old_idx: 1, new_idx: 1, new_node: &html! { <i>{"1"} {"2"}</i> }, }, Patch::Replace { old_idx: 3, new_idx: 4, new_node: &html! { <br /> }, }, ], } .test(); } /// Verify that we use the proper new node idx when we replace a node. #[test] fn replace_node_proper_new_node_idx() { DiffTestCase { old: html! { <div> <div><em></em></div> <div></div> </div> }, new: html! { <div> <span></span> <strong></strong> </div> }, expected: vec![ Patch::Replace { old_idx: 1, new_idx: 1, new_node: &html! { <span></span> }, }, Patch::Replace { old_idx: 3, new_idx: 2, new_node: &html! { <strong></strong> }, }, ], } .test(); } #[test] fn add_children() { DiffTestCase { old: html! { <div> <b></b> </div> }, new: html! { <div> <b></b> <span></span> </div> }, expected: vec![Patch::AppendChildren { old_idx: 0, new_nodes: vec![(2, &html! { <span></span> })], }], } .test(); } /// Verify that we use the proper new node idx for appended children. #[test] fn proper_new_node_idx_for_added_children() { DiffTestCase { old: html! { <div> <span><em></em></span> <div> <br /> </div> </div> }, new: html! { <div> <i></i> <div> <br /> <div><br /></div> <div></div> </div> </div> }, expected: vec![ Patch::Replace { old_idx: 1, new_idx: 1, new_node: &html! { <i></i>}, }, Patch::AppendChildren { old_idx: 3, new_nodes: vec![ (4, &html! { <div><br /></div> }), (6, &html! { <div></div> }), ], }, ], } .test(); } #[test] fn remove_nodes() { DiffTestCase { old: html! { <div> <b></b> <span></span> </div> }, new: html! { <div> </div> }, expected: vec![Patch::TruncateChildren(0, 0)], } .test(); DiffTestCase { old: html! { <div> <span> <b></b> // This `i` tag will get removed <i></i> </span> // This `strong` tag will get removed <strong></strong> </div> }, new: html! { <div> <span> <b></b> </span> </div> }, expected: vec![Patch::TruncateChildren(0, 1), Patch::TruncateChildren(1, 1)], } .test(); DiffTestCase { old: html! { <div> <b> <i></i> <i></i> </b> <b></b> </div> }, new: html! { <div> <b> <i></i> </b> <i></i> </div> }, expected: vec![ Patch::TruncateChildren(1, 1), Patch::Replace { old_idx: 4, new_idx: 3, new_node: &html! { <i></i> }, }, ], } .test(); } #[test] fn add_attributes() { let mut attributes = HashMap::new(); let id = "hello".into(); attributes.insert("id", &id); DiffTestCase { old: html! { <div> </div> }, new: html! 
{ <div id="hello"> </div> }, expected: vec![Patch::AddAttributes(0, attributes.clone())], } .test(); DiffTestCase { old: html! { <div id="foobar"> </div> }, new: html! { <div id="hello"> </div> }, expected: vec![Patch::AddAttributes(0, attributes)], } .test(); } #[test] fn remove_attributes() { DiffTestCase { old: html! { <div id="hey-there"></div> }, new: html! { <div> </div> }, expected: vec![Patch::RemoveAttributes(0, vec!["id"])], } .test(); } #[test] fn change_attribute() { let mut attributes = HashMap::new(); let id = "changed".into(); attributes.insert("id", &id); DiffTestCase { old: html! { <div id="hey-there"></div> }, new: html! { <div id="changed"> </div> }, expected: vec![Patch::AddAttributes(0, attributes)], } .test(); } #[test] fn replace_text_node() { DiffTestCase { old: html! { Old }, new: html! { New }, expected: vec![Patch::ChangeText(0, &VText::new("New"))], } .test(); } /// If an input or textarea has a value attribute we always push a patch for setting the value /// attribute so that we can replace anything that might have been typed into the field. #[test] fn always_pushes_patch_for_value() { DiffTestCase { old: html! { <input value="abc" /> }, new: html! { <input value="abc" /> }, expected: vec![Patch::ValueAttributeUnchanged(0, &"abc".into())], } .test(); DiffTestCase { old: html! { <textarea value="abc" /> }, new: html! { <textarea value="abc" /> }, expected: vec![Patch::ValueAttributeUnchanged(0, &"abc".into())], } .test(); DiffTestCase { old: html! { <textarea value="abc" /> }, new: html! { <textarea value="def" /> }, expected: vec![Patch::AddAttributes( 0, vec![("value", &"def".into())].into_iter().collect(), )], } .test(); } /// Verify that we push an on create elem patch if the new node has the special attribute /// and the old node does not. #[test] fn on_create_elem() { let old = VirtualNode::element("div"); let mut new = VirtualNode::element("div"); set_on_create_elem_with_unique_id(&mut new, "150"); let mut expected = VirtualNode::element("div"); set_on_create_elem_with_unique_id(&mut expected, "150"); DiffTestCase { old, new, expected: vec![Patch::SpecialAttribute( PatchSpecialAttribute::CallOnCreateElem(0, &expected), )], } .test(); } /// Verify that if two different nodes have the same on_create_elem unique identifiers we /// do not push a CallOnCreateElem patch. #[test] fn same_on_create_elem_id() { let mut old = VirtualNode::element("div"); set_on_create_elem_with_unique_id(&mut old, "70"); let mut new = VirtualNode::element("div"); set_on_create_elem_with_unique_id(&mut new, "70"); DiffTestCase { old, new, expected: vec![], } .test(); } /// Verify that if two different nodes have different on_create_elem unique identifiers we push /// a patch to call the new on_create_elem. #[test] fn different_on_create_elem_id() { let mut old = VirtualNode::element("div"); set_on_create_elem_with_unique_id(&mut old, "50"); let mut new = VirtualNode::element("div"); set_on_create_elem_with_unique_id(&mut new, "99"); let mut expected = VirtualNode::element("div"); set_on_create_elem_with_unique_id(&mut expected, "99"); DiffTestCase { old, new, expected: vec![Patch::SpecialAttribute( PatchSpecialAttribute::CallOnCreateElem(0, &expected), )], } .test(); } /// Verify that we push an on remove elem patch if the new node has the special attribute /// and the old node does not. 
#[test] fn on_remove_elem_for_replaced_elem() { let mut old = VirtualNode::element("div"); set_on_remove_elem_with_unique_id(&mut old, "150"); let expected = { let mut old = VirtualNode::element("div"); set_on_remove_elem_with_unique_id(&mut old, "150"); old }; let new = VirtualNode::element("span"); DiffTestCase { old, new, expected: vec![ Patch::Replace { old_idx: 0, new_idx: 0, new_node: &VirtualNode::element("span"), }, Patch::SpecialAttribute(PatchSpecialAttribute::CallOnRemoveElem(0, &expected)), ], } .test(); } /// Verify that we push on remove element patches for replaced children, their replaced /// children, etc. #[test] fn on_remove_elem_for_replaced_children_recursively() { let mut grandchild = VirtualNode::element("strong"); set_on_remove_elem_with_unique_id(&mut grandchild, "key"); let mut child = VirtualNode::element("em"); set_on_remove_elem_with_unique_id(&mut child, "key"); child.as_velement_mut().unwrap().children.push(grandchild); let old = html! { <div> {child} </div> }; let expected_child = { let mut grandchild = VirtualNode::element("strong"); set_on_remove_elem_with_unique_id(&mut grandchild, "key"); let mut child = VirtualNode::element("em"); set_on_remove_elem_with_unique_id(&mut child, "key"); child.as_velement_mut().unwrap().children.push(grandchild); child }; let expected_grandchild = { let mut grandchild = VirtualNode::element("strong"); set_on_remove_elem_with_unique_id(&mut grandchild, "key"); grandchild }; let new = VirtualNode::element("span"); DiffTestCase { old, new, expected: vec![ Patch::SpecialAttribute(PatchSpecialAttribute::CallOnRemoveElem( 1, &expected_child, )), Patch::SpecialAttribute(PatchSpecialAttribute::CallOnRemoveElem( 2, &expected_grandchild, )), Patch::Replace { old_idx: 0, new_idx: 0, new_node: &VirtualNode::element("span"), }, ], } .test(); } /// Verify that we push on remove element patches for truncated children, their children, /// etc. #[test] fn on_remove_elem_for_truncated_children_recursively() { let mut grandchild = VirtualNode::element("strong"); set_on_remove_elem_with_unique_id(&mut grandchild, "key"); let mut child = VirtualNode::element("em"); set_on_remove_elem_with_unique_id(&mut child, "key"); child.as_velement_mut().unwrap().children.push(grandchild); let old = html! { <div> <span></span> // Gets truncated. {child} </div> }; let new = html! { <div> <span></span> </div> }; let expected_child = { let mut grandchild = VirtualNode::element("strong"); set_on_remove_elem_with_unique_id(&mut grandchild, "key"); let mut child = VirtualNode::element("em"); set_on_remove_elem_with_unique_id(&mut child, "key"); child.as_velement_mut().unwrap().children.push(grandchild); child }; let expected_grandchild = { let mut grandchild = VirtualNode::element("strong"); set_on_remove_elem_with_unique_id(&mut grandchild, "key"); grandchild }; DiffTestCase { old, new, expected: vec![ Patch::TruncateChildren(0, 1), Patch::SpecialAttribute(PatchSpecialAttribute::CallOnRemoveElem( 2, &expected_child, )), Patch::SpecialAttribute(PatchSpecialAttribute::CallOnRemoveElem( 3, &expected_grandchild, )), ], } .test(); } /// Verify that when patching attributes, if the old has an on remove element callback but the /// new node does not, we call the on remove element callback. /// /// But only for that element, since the element's below it might not get removed from the /// DOM. #[test] fn new_node_does_not_have_on_remove_elem() { let old_child = on_remove_node_with_on_remove_child(); let mut old = html! 
{ <div> {old_child} </div> }; set_on_remove_elem_with_unique_id(&mut old, "some-key"); let expected = { let old_child = on_remove_node_with_on_remove_child(); let mut old = html! { <div> {old_child} </div> }; set_on_remove_elem_with_unique_id(&mut old, "some-key"); old }; let new_child = on_remove_node_with_on_remove_child(); let new = html! { <div> {new_child} </div> }; DiffTestCase { old, new, expected: vec![Patch::SpecialAttribute( PatchSpecialAttribute::CallOnRemoveElem(0, &expected), )], } .test(); } /// Verify that when patching attributes, if the old and new node are of the same tag type but /// have different on remove element ID, a patch is pushed. /// /// But only for that element, since the element's below it might not get removed from the /// DOM. #[test] fn different_on_remove_elem_id() { let old_child = on_remove_node_with_on_remove_child(); let mut old = html! { <div> {old_child} </div> }; set_on_remove_elem_with_unique_id(&mut old, "start"); let expected = { let old_child = on_remove_node_with_on_remove_child(); let mut old = html! { <div> {old_child} </div> }; set_on_remove_elem_with_unique_id(&mut old, "start"); old }; let new_child = on_remove_node_with_on_remove_child(); let mut new = html! { <div> {new_child} </div> }; set_on_remove_elem_with_unique_id(&mut new, "end"); DiffTestCase { old, new, expected: vec![Patch::SpecialAttribute( PatchSpecialAttribute::CallOnRemoveElem(0, &expected), )], } .test(); } /// Verify that if the old and new node have the same on remove element ID, no patch is pushed. #[test] fn same_on_remove_elem_id() { let mut old = VirtualNode::element("div"); set_on_remove_elem_with_unique_id(&mut old, "same"); let mut new = VirtualNode::element("div"); set_on_remove_elem_with_unique_id(&mut new, "same"); DiffTestCase { old, new, expected: vec![], } .test(); } /// Verify that if the old node and new node have the same dangerous_inner_html we do not push /// an SetDangerousInnerHtml patch. #[test] fn same_dangerous_inner_html() { let mut old = VirtualNode::element("div"); set_dangerous_inner_html(&mut old, "hi"); let mut new = VirtualNode::element("div"); set_dangerous_inner_html(&mut new, "hi"); DiffTestCase { old, new, expected: vec![], } .test(); } /// Verify that if the new node has dangerous_inner_html that is different from the old node's, /// we push a patch to set the new inner html. #[test] fn different_dangerous_inner_html() { let mut old = VirtualNode::element("div"); set_dangerous_inner_html(&mut old, "old"); let mut new = VirtualNode::element("div"); set_dangerous_inner_html(&mut new, "new"); let mut expected = VirtualNode::element("div"); set_dangerous_inner_html(&mut expected, "new"); DiffTestCase { old, new, expected: vec![Patch::SpecialAttribute( PatchSpecialAttribute::SetDangerousInnerHtml(0, &expected), )], } .test(); } /// Verify that if the new node does not have dangerous_inner_html and the old node does, we /// push a patch to truncate all children along with a patch to push the new node's /// children. #[test] fn remove_dangerous_inner_html() { let mut old = VirtualNode::element("div"); set_dangerous_inner_html(&mut old, "hi"); let new = html! { <div><em></em></div> }; DiffTestCase { old, new, expected: vec![ Patch::SpecialAttribute(PatchSpecialAttribute::RemoveDangerousInnerHtml(0)), Patch::AppendChildren { old_idx: 0, new_nodes: vec![(1, &VirtualNode::element("em"))], }, ], } .test(); } /// Verify that if a node goes from no events to having at least one event, we create a patch /// to set the events ID on the dom node. 
#[test] fn set_events_id_if_events_added() { let old = VElement::new("div"); let mut new = VElement::new("div"); new.events.insert(onclick_name(), mock_event_handler()); DiffTestCase { old: VirtualNode::Element(old), new: VirtualNode::Element(new), expected: vec![ Patch::AddEvents( 0, vec![(&EventName::ONCLICK, &mock_event_handler())] .into_iter() .collect(), ), Patch::SetEventsId { old_idx: 0, new_idx: 0, }, ], } .test(); } /// Verify that we set the proper old and new node indices in the set events ID patch. #[test] fn uses_correct_new_node_idx_in_set_events_id_patch() { let old = html! { <div> <em> <area /> </em> <div></div> </div> }; let new = html! { <div> <span></span> <div onclick=||{}></div> </div> }; DiffTestCase { old, new, expected: vec![ Patch::Replace { old_idx: 1, new_idx: 1, new_node: &VirtualNode::element("span"), }, Patch::AddEvents( 3, vec![(&EventName::ONCLICK, &mock_event_handler())] .into_iter() .collect(), ), Patch::SetEventsId { old_idx: 3, new_idx: 2, }, ], } .test(); } /// Verify that if a node already had a event and we are patching it with another /// event we do not create a patch for setting the events ID. #[test] fn does_not_set_events_id_if_already_had_events() { let mut old = VElement::new("div"); old.events.insert(onclick_name(), mock_event_handler()); let mut new = VElement::new("div"); new.events.insert(onclick_name(), mock_event_handler()); DiffTestCase { old: VirtualNode::Element(old), new: VirtualNode::Element(new), expected: vec![], } .test(); } /// Verify that if an earlier node in the tree was replaced, and a later node has events, all /// nodes after it get their events ID increased based on the number of elements removed. #[test] fn resets_events_id_if_earlier_nodes_replaced() { let old = html! { <div> // This node gets replaced <span> <em> <area /> </em> </span> <strong onclick=|| {}> <div></div> <a onclick=|| {}></a> </strong> </div> }; let new = html! { <div> <div></div> <strong onclick=|| {}> <div></div> <a onclick=|| {}></a> </strong> </div> }; DiffTestCase { old, new, expected: vec![ Patch::Replace { old_idx: 1, new_idx: 1, new_node: &VirtualNode::element("div"), }, Patch::SetEventsId { old_idx: 4, new_idx: 2, }, Patch::SetEventsId { old_idx: 6, new_idx: 4, }, ], } .test(); } /// Verify that if 5 earlier node were replaced replaced by 5 different nodes, we do not /// reset the events ID for nodes that come after it since the total number of nodes has not /// changed. /// /// This test should also cover cases where the same number of earlier nodes are /// truncated / appended, since our implementation just checks whether or not the new node IDX /// is equal to the old node IDX. /// If not, then that node and every node after it needs its events ID reset /// (if they have events). #[test] fn does_not_reset_events_id_if_earlier_node_replaced_by_same_number_of_nodes() { let old = html! { <div> // This node gets replaced, but with the same number of nodes. <span> <em> <area /> </em> </span> <strong onclick=|| {}> <div></div> <a onclick=|| {}></a> </strong> </div> }; let new = html! { <div> <div> <ul> <li> </li> </ul> </div> <strong onclick=|| {}> <div></div> <a onclick=|| {}></a> </strong> </div> }; DiffTestCase { old, new, expected: vec![Patch::Replace { old_idx: 1, new_idx: 1, new_node: &html! {<div> <ul> <li> </li> </ul> </div>}, }], } .test(); } /// Verify that if somewhere earlier in the tree there were child nodes truncated /// (so the net number of earlier nodes decreased) we push a patch to set the later node's /// events ID. 
#[test] fn resets_events_if_if_earlier_nodes_truncated() { let old = html! { <div> // This node gets its children truncated. <span> <em></em> <area /> </span> <strong onclick=|| {}> <div></div> <a onclick=|| {}></a> </strong> </div> }; let new = html! { <div> <span> <em></em> </span> <strong onclick=|| {}> <div></div> <a onclick=|| {}></a> </strong> </div> }; DiffTestCase { old, new, expected: vec![ Patch::TruncateChildren(1, 1), Patch::SetEventsId { old_idx: 4, new_idx: 3, }, Patch::SetEventsId { old_idx: 6, new_idx: 5, }, ], } .test(); } /// Verify that if somewhere earlier in the tree there were child nodes appended /// (so the net number of earlier nodes increased) we push a patch to set the later node's /// events ID. #[test] fn resets_events_if_if_earlier_nodes_appended() { let old = html! { <div> // This node gets its children appended to. <span> <em></em> </span> <strong onclick=|| {}> <div></div> <a onclick=|| {}></a> </strong> </div> }; let new = html! { <div> <span> <em></em> <area /> </span> <strong onclick=|| {}> <div></div> <a onclick=|| {}></a> </strong> </div> }; DiffTestCase { old, new, expected: vec![ Patch::AppendChildren { old_idx: 1, new_nodes: vec![(3, &VirtualNode::element("area"))], }, Patch::SetEventsId { old_idx: 3, new_idx: 4, }, Patch::SetEventsId { old_idx: 5, new_idx: 6, }, ], } .test(); } /// Verify that if we previously had events but we no longer have any events we push a patch /// to remove the events ID. #[test] fn removes_events_id_if_no_more_events() { let mut old = VElement::new("div"); old.events.insert(onclick_name(), mock_event_handler()); let new = VElement::new("div"); DiffTestCase { old: VirtualNode::Element(old), new: VirtualNode::Element(new), expected: vec![ Patch::RemoveEvents( 0, vec![(&EventName::ONCLICK, &mock_event_handler())] .into_iter() .collect(), ), Patch::RemoveEventsId(0), ], } .test(); } /// Verify that if an element has added and removed multiple non-delegated events, the remove /// event listener patches come before the add event listener patches. /// This ensures that we can look up the old functions in the `EventsByNodeIdx` that we'll need /// to pass into .remove_event_listener() before the SetEventListeners patch overwrites those /// functions. #[test] fn remove_event_patches_come_before_add_event_patches() { let mut old = VElement::new("div"); old.events.insert(oninput_name(), mock_event_handler()); let mut new = VElement::new("div"); new.events.insert(onmousemove_name(), mock_event_handler()); DiffTestCase { old: VirtualNode::Element(old), new: VirtualNode::Element(new), expected: vec![ Patch::RemoveEvents(0, vec![(&oninput_name(), &mock_event_handler())]), Patch::AddEvents( 0, vec![(&onmousemove_name(), &mock_event_handler())] .into_iter() .collect(), ), ], } .test(); } /// Verify that if a node has events but the node is replaced we push a patch to remove all /// of its events from the EventsByNodeIdx. /// We ensure that this event removal patch should come before the patch to replace the node, /// so that we don't accidentally remove events that were for the node that replaced it. 
#[test] fn remove_all_tracked_events_if_replaced() { let mut old = VElement::new("div"); old.events.insert(oninput_name(), mock_event_handler()); let new = VElement::new("some-other-element"); DiffTestCase { old: VirtualNode::Element(old), new: VirtualNode::Element(new), expected: vec![ Patch::RemoveAllManagedEventsWithNodeIdx(0), Patch::Replace { old_idx: 0, new_idx: 0, new_node: &VirtualNode::Element(VElement::new("some-other-element")), }, ], } .test(); } /// Verify that if a node's ancestor (parent, grandparent, ..etc) was replaced we push a patch /// to remove all of its events from the EventsByNodeIdx. /// We ensure that this event removal patch should come before the patch to replace the node, /// so that we don't accidentally remove events that were for the node that replaced it. #[test] fn removes_tracked_events_if_ancestor_replaced() { // node idx 0 let mut old = VElement::new("div"); // node idx 1 old.children.push(VirtualNode::Element(VElement::new("a"))); // node idx 2 old.children.push(VirtualNode::text("b")); // node idx 3 let mut child_of_old = VElement::new("div"); child_of_old .events .insert(oninput_name(), mock_event_handler()); old.children.push(VirtualNode::Element(child_of_old)); let new = VElement::new("some-other-element"); DiffTestCase { old: VirtualNode::Element(old), new: VirtualNode::Element(new), expected: vec![ Patch::RemoveAllManagedEventsWithNodeIdx(3), Patch::Replace { old_idx: 0, new_idx: 0, new_node: &VirtualNode::Element(VElement::new("some-other-element")), }, ], } .test(); } /// Verify that if a child node is truncated and it had events we push a patch to remove all /// of its events from the EventsByNodeIdx #[test] fn remove_tracked_events_if_truncated() { let mut old = VElement::new("div"); let mut child_of_old = VElement::new("div"); child_of_old .events .insert(oninput_name(), mock_event_handler()); old.children.push(VirtualNode::Element(child_of_old)); let new = VElement::new("div"); DiffTestCase { old: VirtualNode::Element(old), new: VirtualNode::Element(new), expected: vec![ Patch::TruncateChildren(0, 0), Patch::RemoveAllManagedEventsWithNodeIdx(1), ], } .test(); } fn set_on_create_elem_with_unique_id(node: &mut VirtualNode, on_create_elem_id: &'static str) { node.as_velement_mut() .unwrap() .special_attributes .set_on_create_element(on_create_elem_id, |_: web_sys::Element| {}); } fn set_on_remove_elem_with_unique_id(node: &mut VirtualNode, on_remove_elem_id: &'static str) { node.as_velement_mut() .unwrap() .special_attributes .set_on_remove_element(on_remove_elem_id, |_: web_sys::Element| {}); } fn set_dangerous_inner_html(node: &mut VirtualNode, html: &str) { node.as_velement_mut() .unwrap() .special_attributes .dangerous_inner_html = Some(html.to_string()); } /// Return a node that has an on remove element function. /// /// This node has a child that also has an on remove element function. /// /// <div> /// <div></div> /// </div> fn on_remove_node_with_on_remove_child() -> VirtualNode { let mut child = VirtualNode::element("div"); set_on_remove_elem_with_unique_id(&mut child, "555"); let mut node = VirtualNode::element("div"); set_on_remove_elem_with_unique_id(&mut node, "666"); node.as_velement_mut().unwrap().children.push(child); node } fn mock_event_handler() -> EventHandler { EventHandler::UnsupportedSignature(EventAttribFn(Rc::new(Box::new(JsValue::NULL)))) } fn onclick_name() -> EventName { "onclick".into() } fn oninput_name() -> EventName { "oninput".into() } fn onmousemove_name() -> EventName { "onmousemove".into() } }
import debug from 'debug'; import { API } from './api.js'; import { AuthManager } from './authManager.js'; import { Board } from './board.js'; import { Database } from './database.js'; import { Executer } from './executer.js'; import { Monitor } from './monitor.js'; import { Server } from './server.js'; import { TaskManager } from './taskManager.js'; import { TokenManager } from './tokenManager.js'; import { UserManager } from './userManager.js'; /** @typedef {{ api: typeof API; board: typeof Board; database: typeof Database; userManager:typeof UserManager; authManager:typeof AuthManager; tokenManager:typeof TokenManager; taskManager:typeof TaskManager; executer: typeof Executer; monitor: typeof Monitor; server: typeof Server; }} DrawComponents @typedef {{[name in keyof DrawComponents]:ConstructorParameters<DrawComponents[name]>[1]}} DrawConfig */ export class Drawer { /** * @param {DrawConfig} config */ constructor(config) { debug('drawer')('config %O', config); this.api = new API(this, config.api); this.database = new Database(this, config.database); this.board = new Board(this, config.board); this.userManager = new UserManager(this, config.userManager); this.authManager = new AuthManager(this, config.authManager); this.tokenManager = new TokenManager(this, config.tokenManager); this.taskManager = new TaskManager(this, config.taskManager); this.executer = new Executer(this, config.executer); this.monitor = new Monitor(this, config.monitor); this.server = new Server(this, config.server); } }
-- | -- Module : Prime.Servant.Time -- License : BSD-style -- Maintainer : Nicolas Di Prima <[email protected]> -- Stability : stable -- Portability : Good -- {-# LANGUAGE GeneralizedNewtypeDeriving #-} module Prime.Servant.Time ( Time , Elapsed , timeCurrent , timeAdd , H.timePrint ) where import Foundation import Foundation.Numerical (Subtractive(..)) import qualified Prelude import Data.Hourglass (Elapsed, Timeable) import qualified Data.Hourglass as H import qualified Time.System as H import Data.Aeson (ToJSON(..), FromJSON(..)) import Database.Persist.Class (PersistField(..)) import Database.Persist.Types (PersistValue(..)) import Database.Persist.Sql (PersistFieldSql(..), SqlType(..)) import Control.Monad.IO.Class newtype Time = Time Elapsed deriving (Eq, Ord, Typeable, H.Time, Timeable) instance Prelude.Show Time where show = H.timePrint H.ISO8601_DateAndTime instance ToJSON Time where toJSON = toJSON . H.timePrint "EPOCH" instance FromJSON Time where parseJSON o = do a <- parseJSON o case H.timeParse "EPOCH" a of Nothing -> fail "unable to parse EPOCH time" Just t -> return $ Time $ H.timeGetElapsed t instance PersistField Time where toPersistValue (Time (H.Elapsed (H.Seconds i))) = PersistInt64 i fromPersistValue a = Time . H.Elapsed . H.Seconds <$> fromPersistValue a instance PersistFieldSql Time where sqlType _ = SqlInt64 instance Subtractive Time where type Difference Time = Elapsed (-) (Time a1) (Time a2) = a1 Prelude.- a2 timeCurrent :: MonadIO io => io Time timeCurrent = Time <$> liftIO H.timeCurrent timeAdd :: Time -> Elapsed -> Time timeAdd (Time t) e = Time $ t Prelude.+ e
from . import generator_utils from .Span2Phrase import span2phrase from .Tag2Phrase import tag2phrase from .Chunk2Phrase import chunk2phrase from .Rank2Phrase import rank2phrase
# Linux chattr Command ------------------- ### Command Introduction (命令介绍) > **chattr - change file attributes on a Linux file system** ### Command Format and Options (命令格式和选项) ``` #chattr --help Usage: chattr [-RVf] [-+=aAcCdDeijsStTu] [-v version] files... ``` ### Command Example (命令范例) ``` chattr Change attributes of files or directories. - Make a file or directory immutable to changes and deletion, even by superuser: chattr +i path/to/file_or_directory - Make a file or directory mutable: chattr -i path/to/file_or_directory - Recursively make an entire directory and contents immutable: chattr -R +i path/to/directory ```
require 'rubygems' require 'selenium-webdriver' require 'chunky_png' require 'open-uri' require 'rbconfig' require 'appium_lib' include RbConfig require_relative 'misc_methods' require_relative 'error_handling_methods'
<?php /** * @file * @brief * Mirrors content of a song by its URL in this domain to another (our) domain :) */ if(!$_GET||!$_GET['id']) { echo "Hey, you want to print some song? Go to the <a href='seznam'>seznam</a> first!"; die();//Yes, commit suicide. So soon? } $song_id = $_GET['id']; echo file_get_contents('https://dorostmladez.cz/song/'.$song_id.'/?print=print');//Mirror from ugly url to less ugly :) ?>
import Pkg Pkg.activate(".") Pkg.instantiate() using WebIO WebIO.install_jupyter_nbextension() using DataFrames using DataFramesMeta # using Gadfly # Removed for now because downgrades DataFrames version using IJulia using Weave using CSV using Revise using TableView using Printf using PrettyTables using Formatting using Statistics # ==================== GENERAL UTILITY FUNCTIONS ==================== broadwrap(f) = function (args...) broadcast(f, args...) end esc_latex(s) = replace(s, r"(#|%|&|_)" => s"\\\1") function number2latex(num; enclose = true) if ismissing(num) "--" elseif isa(num, Integer) (s -> enclose ? "\\($s\\)" : s)(sprintf1("%'d", num)) elseif isa(num, AbstractFloat) (s -> enclose ? "\\($s\\)" : s)(sprintf1("%'.2f", num)) elseif isa(num, NTuple{2, Number}) "$(number2latex(num[1]; enclosed = enclose)) ($(number2latex(num[2]; enclosed = enclose)))" else error("Unexpected type.") end end # ==================== METHODS FOR highlight_best_values! ==================== wrap_in_textbf(str) = "\\textbf{$str}" wrap_in_textit(str) = "\\textit{$str}" # See https://www.pcre.org/current/doc/html/pcre2syntax.html#SEC2 const MATH_MODE_REGEX = Regex( "^(?:\\Q\$\\E|\\Q\\(\\E)(.+)(?:\\Q\$\\E|\\Q\\)\\E)\$" ) function dirt2number( dirt, decimal_separator = '.', failure = NaN ) :: Float64 gold = filter(c -> isdigit(c) | (c == decimal_separator), dirt) number = tryparse(Float64, gold) isnothing(number) && return failure return number end function latex2number( str, failure = NaN; separator = nothing ) :: Float64 str = isnothing(separator) ? str : replace(str, separator => "") v = tryparse(Float64, str) if isnothing(v) m = match(MATH_MODE_REGEX, str) if !isnothing(m) v = tryparse(Float64, m.captures[1]) end end return isnothing(v) ? failure : v end find_index_min(a) = isempty(a) ? Int[] : Int[findmin(a)[2]] find_index_max(a) = isempty(a) ? Int[] : Int[findmax(a)[2]] find_index_allmin(a) = isempty(a) ? Int[] : findall(isequal(findmin(a)[1]), a) find_index_allmax(a) = isempty(a) ? Int[] : findall(isequal(findmax(a)[1]), a) function highlight_best_values!( df; # Set of columns to be considered. columns = 1:ncol(df), # Transforms the cell body into another object. cleaner = latex2number, # Return true if the object returned by cleaner should be ignored. ignorer = isnan, # Takes a row of clean non-ignored values and returns the # indexes of the ones that should be highlighted. chooser = find_index_min, # Takes the cell body and return the highlighted cell body. changer = wrap_in_textbf ) for row_id in 1:nrow(df) row = df[row_id, :] selected_values = [] selected_columns = empty(columns) for col_id in columns value = cleaner(row[col_id]) if !ignorer(value) push!(selected_values, value) push!(selected_columns, col_id) end end chosen = chooser(selected_values) for col_id in selected_columns[chosen] df[row_id, col_id] = changer(df[row_id, col_id]) end end return end
#pragma once #include "par_io.h" #include "xo/container/flat_map.h" #include "xo/filesystem/path.h" namespace spot { class SPOT_API static_par_set : public par_io { public: static_par_set() : par_io() {} static_par_set( const path& filename ) : par_io() { load( filename ); } virtual size_t dim() const override { return values_.size(); } virtual xo::optional< par_t > try_get( const string& name ) const override; virtual par_t add( const par_info& pi ) override; size_t load( const path& filename ); size_t merge( const path& filename, bool overwrite ); private: xo::flat_map< string, par_t > values_; }; }
package me.liuqingwen.android.projectandroidtest import android.graphics.Color import android.view.View /** * Created by Qingwen on 2018-6-9, project: ProjectAndroidTest. * * @Author: Qingwen * @DateTime: 2018-6-9 * @Package: me.liuqingwen.android.projectandroidtest in project: ProjectAndroidTest * * Notice: If you are using this class or file, check it and do some modification. */ enum class FloatingButtonType(val srcCompat: Int, val backgroundTint: Int) { ADD(R.drawable.ic_add_black_24dp, R.color.colorAccent), DONE(R.drawable.ic_check_black_24dp, R.color.blueColor), CANCEL(R.drawable.ic_clear_black_24dp, R.color.redColor), NONE(0, 0) } interface IMainInteractionListener { fun setTitle(title : String) fun configFloatingButton(buttonType: FloatingButtonType, isVisible:Boolean, onClickListener: ((View) -> Unit)?) }
package com.tq.requisition.domain.model.roleAccount; import java.util.UUID; import com.tq.requisition.domain.share.AggregateRoot; /** * 角色用户关系聚合根 * @author jjh * @time 2015-12-21 17:21 */ public class RoleAccount extends AggregateRoot{ /*private fields*/ /**账户id*/ private UUID accountId; /**角色id*/ private UUID roleId; /**是否删除*/ private boolean del; /*constructors*/ public RoleAccount(){} public RoleAccount(UUID id,UUID accountId, UUID roleId) { super(); this.accountId = accountId; this.roleId = roleId; this.id = id; this.del = false; } /*getters and setters*/ public UUID getAccountId() { return accountId; } public void setAccountId(UUID accountId) { this.accountId = accountId; } public UUID getRoleId() { return roleId; } public void setRoleId(UUID roleId) { this.roleId = roleId; } public void setId(UUID _id) { id = _id; } public UUID getId() { return id; } public void setDel(boolean isDel) { this.del = isDel; } public boolean getDel() { return this.del; } /*override toString*/ @Override public String toString() { return "RoleAccount [accountId=" + accountId + ", roleId=" + roleId + ", id=" + id + "]"; } /*public static methods*/ public static RoleAccount obtain(UUID uid,UUID rid) { if(uid == null || rid == null) { throw new NullPointerException("账户id或者角色id为null"); } return new RoleAccount(UUID.randomUUID(),uid,rid); } }
export class WatchlistItem { ticker: string; name: string; price: number; static with(ticker: string, name: string, price: number): WatchlistItem { const item = new WatchlistItem(); item.ticker = ticker; item.name = name; item.price = price; return item; } static from(item: WatchlistItem): WatchlistItem { const clone = new WatchlistItem(); Object.assign(clone, item); return clone; } private round(value: number): number { return +(value.toFixed(2)); } getChange(newPrice: number): number { return this.round(newPrice - this.price); } getChangePercent(newPrice: number): number { return this.round(((newPrice - this.price) / this.price) * 100); } }
-module(aoc_2015_22_1). -export([start/0]). start() -> In = input(), Out = do(In), io:format("~p~n", [Out]), ok. input() -> {ok, [HP]} = io:fread("", "Hit Points: ~d"), {ok, [Atk]} = io:fread("", "Damage: ~d"), {HP, Atk}. do({OHP, Atk}) -> bfs(OHP, Atk). -record(state, {spend = 0, php = 50, mana = 500, ohp, atk, shield = 0, poison = 0, recharge = 0}). bfs(OHP, Atk) -> bfs_impl(1000000, [#state{ohp = OHP, atk = Atk}], []). -define(SPELLS, [fun magic_missle/1, fun drain/1, fun shield/1, fun poison/1, fun recharge/1]). bfs_impl(Min, [], []) -> Min; bfs_impl(Min, [], L) -> bfs_impl(Min, L, []); bfs_impl(Min, [State | T], L) -> F = fun(Spell, {MinAcc, LAcc}) -> case Spell(State) of lose -> {MinAcc, LAcc}; {win, Spend} -> {min(MinAcc, Spend), LAcc}; #state{spend = Spend} = NewState when Spend < MinAcc -> {MinAcc, [NewState | LAcc]}; _ -> {MinAcc, LAcc} end end, {NewMin, NewL} = lists:foldl(F, {Min, L}, ?SPELLS), bfs_impl(NewMin, T, NewL). magic_missle(#state{mana = Mana}) when Mana < 53 -> lose; magic_missle(State) -> case apply_effects(State) of #state{ohp = HP, spend = S, mana = Mana} = State2 -> boss_turn(State2#state{ohp = HP - 4, spend = S + 53, mana = Mana - 53}); Ending -> Ending end. drain(#state{mana = Mana}) when Mana < 73 -> lose; drain(State) -> case apply_effects(State) of #state{php = HP1, ohp = HP2, spend = S, mana = Mana} = State2 -> boss_turn(State2#state{php = HP1 + 2, ohp = HP2 - 2, spend = S + 73, mana = Mana - 73}); Ending -> Ending end. shield(#state{mana = Mana}) when Mana < 113 -> lose; shield(#state{shield = S}) when S > 1 -> lose; shield(State) -> case apply_effects(State) of #state{spend = S, mana = Mana} = State2 -> boss_turn(State2#state{shield = 6, spend = S + 113, mana = Mana - 113}); Ending -> Ending end. poison(#state{mana = Mana}) when Mana < 173 -> lose; poison(#state{poison = P}) when P > 1 -> lose; poison(State) -> case apply_effects(State) of #state{spend = S, mana = Mana} = State2 -> boss_turn(State2#state{poison = 6, spend = S + 173, mana = Mana - 173}); Ending -> Ending end. recharge(#state{mana = Mana}) when Mana < 229 -> lose; recharge(#state{recharge = R}) when R > 1 -> lose; recharge(State) -> case apply_effects(State) of #state{spend = S, mana = Mana} = State2 -> boss_turn(State2#state{recharge = 5, spend = S + 229, mana = Mana - 229}); Ending -> Ending end. apply_effects(#state{shield = Shield, poison = Poison, recharge = Recharge, mana = Mana, ohp = HP, spend = Spend} = State) -> State2 = State#state{shield = max(0, Shield - 1)}, State3 = case Recharge of 0 -> State2; R -> State2#state{mana = Mana + 101, recharge = R - 1} end, case Poison of 0 -> State3; _ when HP =< 3 -> {win, Spend}; P -> State3#state{ohp = HP - 3, poison = P - 1} end. boss_turn(#state{ohp = HP, spend = Spend}) when HP =< 0 -> {win, Spend}; boss_turn(State) -> case apply_effects(State) of #state{php = HP, shield = S, atk = Atk} = State2 when S > 0 -> case HP =< max(1, Atk - 7) of true -> lose; _ -> State2#state{php = HP - max(1, Atk - 7)} end; #state{php = HP, atk = Atk} when HP =< Atk -> lose; #state{php = HP, atk = Atk} = State2 -> State2#state{php = HP - Atk}; Ending -> Ending end.
// Copyright 2014 Olivier Gillet. // // Author: Olivier Gillet ([email protected]) // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // // See http://creativecommons.org/licenses/MIT/ for more information. // // ----------------------------------------------------------------------------- // // Main processing class. #include "clouds/dsp/granular_processor.h" #include <cstring> #include "clouds/drivers/debug_pin.h" #include "stmlib/dsp/parameter_interpolator.h" #include "stmlib/utils/buffer_allocator.h" #include "clouds/resources.h" namespace clouds { using namespace std; using namespace stmlib; void GranularProcessor::Init( void* large_buffer, size_t large_buffer_size, void* small_buffer, size_t small_buffer_size) { buffer_[0] = large_buffer; buffer_[1] = small_buffer; buffer_size_[0] = large_buffer_size; buffer_size_[1] = small_buffer_size; num_channels_ = 1; low_fidelity_ = false; bypass_ = false; src_down_.Init(); src_up_.Init(); ResetFilters(); previous_playback_mode_ = PLAYBACK_MODE_LAST; reset_buffers_ = true; dry_wet_ = 0.0f; } void GranularProcessor::ResetFilters() { for (int32_t i = 0; i < 2; ++i) { fb_filter_[i].Init(); } } void GranularProcessor::ProcessGranular( FloatFrame* input, FloatFrame* output, size_t size) { parameters_.spectral.quantization = parameters_.texture; parameters_.spectral.refresh_rate = 0.01f + 0.99f * parameters_.density; float warp = parameters_.size - 0.5f; parameters_.spectral.warp = 4.0f * warp * warp * warp + 0.5f; float randomization = parameters_.density - 0.5f; randomization *= randomization * 4.2f; randomization -= 0.05f; CONSTRAIN(randomization, 0.0f, 1.0f); parameters_.spectral.phase_randomization = randomization; phase_vocoder_.Process(parameters_, input, output, size); if (num_channels_ == 1) { for (size_t i = 0; i < size; ++i) { output[i].r = output[i].l; } } } void GranularProcessor::Process( ShortFrame* input, ShortFrame* output, size_t size) { // TIC if (bypass_) { copy(&input[0], &input[size], &output[0]); return; } if (silence_ || reset_buffers_ || previous_playback_mode_ != playback_mode_) { short* output_samples = &output[0].l; fill(&output_samples[0], &output_samples[size << 1], 0); return; } // Convert input buffers to float, and mixdown for mono processing. 
for (size_t i = 0; i < size; ++i) { in_[i].l = static_cast<float>(input[i].l) / 32768.0f; in_[i].r = static_cast<float>(input[i].r) / 32768.0f; } if (num_channels_ == 1) { for (size_t i = 0; i < size; ++i) { in_[i].l = (in_[i].l + in_[i].r) * 0.5f; in_[i].r = in_[i].l; } } // Apply feedback, with high-pass filtering to prevent build-ups at very // low frequencies (causing large DC swings). ONE_POLE(freeze_lp_, parameters_.freeze ? 1.0f : 0.0f, 0.0005f) float feedback = parameters_.feedback; float cutoff = (20.0f + 100.0f * feedback * feedback) / sample_rate(); fb_filter_[0].set_f_q<FREQUENCY_FAST>(cutoff, 1.0f); fb_filter_[1].set(fb_filter_[0]); fb_filter_[0].Process<FILTER_MODE_HIGH_PASS>(&fb_[0].l, &fb_[0].l, size, 2); fb_filter_[1].Process<FILTER_MODE_HIGH_PASS>(&fb_[0].r, &fb_[0].r, size, 2); float fb_gain = feedback * (1.0f - freeze_lp_); for (size_t i = 0; i < size; ++i) { in_[i].l += fb_gain * ( SoftLimit(fb_gain * 1.4f * fb_[i].l + in_[i].l) - in_[i].l); in_[i].r += fb_gain * ( SoftLimit(fb_gain * 1.4f * fb_[i].r + in_[i].r) - in_[i].r); } ProcessGranular(in_, out_, size); // This is what is fed back. Reverb is not fed back. copy(&out_[0], &out_[size], &fb_[0]); const float post_gain = 1.2f; ParameterInterpolator dry_wet_mod(&dry_wet_, parameters_.dry_wet, size); for (size_t i = 0; i < size; ++i) { float dry_wet = dry_wet_mod.Next(); float fade_in = Interpolate(lut_xfade_in, dry_wet, 16.0f); float fade_out = Interpolate(lut_xfade_out, dry_wet, 16.0f); float l = static_cast<float>(input[i].l) / 32768.0f * fade_out; float r = static_cast<float>(input[i].r) / 32768.0f * fade_out; l += out_[i].l * post_gain * fade_in; r += out_[i].r * post_gain * fade_in; output[i].l = SoftConvert(l); output[i].r = SoftConvert(r); } } void GranularProcessor::PreparePersistentData() { persistent_state_.write_head[0] = buffer_16_[0].head(); persistent_state_.write_head[1] = buffer_16_[1].head(); persistent_state_.quality = quality(); persistent_state_.spectral = playback_mode() == PLAYBACK_MODE_SPECTRAL; } void GranularProcessor::GetPersistentData( PersistentBlock* block, size_t *num_blocks) { PersistentBlock* first_block = block; block->tag = FourCC<'s', 't', 'a', 't'>::value; block->data = &persistent_state_; block->size = sizeof(PersistentState); ++block; // Create save block holding the audio buffers. for (int32_t i = 0; i < num_channels_; ++i) { block->tag = FourCC<'b', 'u', 'f', 'f'>::value; block->data = buffer_[i]; block->size = buffer_size_[num_channels_ - 1]; ++block; } *num_blocks = block - first_block; } bool GranularProcessor::LoadPersistentData(const uint32_t* data) { // Force a silent output while the swapping of buffers takes place. silence_ = true; PersistentBlock block[4]; size_t num_blocks; GetPersistentData(block, &num_blocks); for (size_t i = 0; i < num_blocks; ++i) { // Check that the format is correct. if (block[i].tag != data[0] || block[i].size != data[1]) { silence_ = false; return false; } // All good. Load the data. 2 words have already been used for the block tag // and the block size. data += 2; memcpy(block[i].data, data, block[i].size); data += block[i].size / sizeof(uint32_t); if (i == 0) { // We now know from which mode the data was saved. 
bool currently_spectral = playback_mode_ == PLAYBACK_MODE_SPECTRAL; bool requires_spectral = persistent_state_.spectral; if (currently_spectral ^ requires_spectral) { set_playback_mode(PLAYBACK_MODE_SPECTRAL); } set_quality(persistent_state_.quality); // We can force a switch to this mode, and once everything has been // initialized for this mode, we continue with the loop to copy the // actual buffer data - with all state variables correctly initialized. Prepare(); GetPersistentData(block, &num_blocks); } } // We can finally reset the position of the write heads. buffer_16_[0].Resync(persistent_state_.write_head[0]); buffer_16_[1].Resync(persistent_state_.write_head[1]); parameters_.freeze = true; silence_ = false; return true; } void GranularProcessor::Prepare() { bool playback_mode_changed = previous_playback_mode_ != playback_mode_; bool benign_change = false; if (!reset_buffers_ && playback_mode_changed && benign_change) { ResetFilters(); previous_playback_mode_ = playback_mode_; } if ((playback_mode_changed && !benign_change) || reset_buffers_) { parameters_.freeze = false; } if (reset_buffers_ || (playback_mode_changed && !benign_change)) { void* buffer[2]; size_t buffer_size[2]; void* workspace; size_t workspace_size; if (num_channels_ == 1) { // Large buffer: 120k of sample memory. // small buffer: fully allocated to FX workspace. buffer[0] = buffer_[0]; buffer_size[0] = buffer_size_[0]; buffer[1] = NULL; buffer_size[1] = 0; workspace = buffer_[1]; workspace_size = buffer_size_[1]; } else { // Large buffer: 64k of sample memory + FX workspace. // small buffer: 64k of sample memory. buffer_size[0] = buffer_size[1] = buffer_size_[1]; buffer[0] = buffer_[0]; buffer[1] = buffer_[1]; workspace_size = buffer_size_[0] - buffer_size_[1]; workspace = static_cast<uint8_t*>(buffer[0]) + buffer_size[0]; } float sr = sample_rate(); BufferAllocator allocator(workspace, workspace_size); phase_vocoder_.Init( buffer, buffer_size, lut_sine_window_4096, 4096, num_channels_, resolution(), sr); reset_buffers_ = false; previous_playback_mode_ = playback_mode_; } phase_vocoder_.Buffer(); } } // namespace clouds
using System; // ReSharper disable UnusedAutoPropertyAccessor.Global // ReSharper disable InconsistentNaming namespace Portable.Drawing.Toolkit.Fonts { public class TtfTableOS2 { public UInt16 Version { get; set; } public TtfTableOS2(BinaryReader file, OffsetEntry table) { if (file == null) throw new ArgumentNullException(nameof(file)); file.Seek(table.Offset); // See https://docs.microsoft.com/en-gb/typography/opentype/spec/os2 // See https://github.com/fontforge/fontforge/blob/master/fontforge/ttf.h#L467 Version = file.GetUint16(); switch (Version) { case 5: ReadVersion5(file); break; case 2: case 3: case 4: ReadVersion4(file); break; default: throw new Exception("OS/2 version not supported: " + Version); } } private void ReadVersion5(BinaryReader file) { ReadVersion4(file); usLowerOpticalPointSize = file.GetUint16(); usUpperOpticalPointSize = file.GetUint16(); } private void ReadVersion4(BinaryReader file) { xAvgCharWidth = file.GetInt16(); usWeightClass = file.GetUint16(); usWidthClass = file.GetUint16(); fsType = file.GetUint16(); ySubscriptXSize = file.GetInt16(); ySubscriptYSize = file.GetInt16(); ySubscriptXOffset = file.GetInt16(); ySubscriptYOffset = file.GetInt16(); ySuperscriptXSize = file.GetInt16(); ySuperscriptYSize = file.GetInt16(); ySuperscriptXOffset = file.GetInt16(); ySuperscriptYOffset = file.GetInt16(); yStrikeoutSize = file.GetInt16(); yStrikeoutPosition = file.GetInt16(); sFamilyClass = file.GetInt16(); panose = file.GetPanose(); ulUnicodeRange1 = file.GetUint32(); ulUnicodeRange2 = file.GetUint32(); ulUnicodeRange3 = file.GetUint32(); ulUnicodeRange4 = file.GetUint32(); achVendID = file.GetTag(); fsSelection = file.GetUint16(); usFirstCharIndex = file.GetUint16(); usLastCharIndex = file.GetUint16(); sTypoAscender = file.GetInt16(); sTypoDescender = file.GetInt16(); sTypoLineGap = file.GetInt16(); usWinAscent = file.GetUint16(); usWinDescent = file.GetUint16(); ulCodePageRange1 = file.GetUint32(); ulCodePageRange2 = file.GetUint32(); sxHeight = file.GetInt16(); sCapHeight = file.GetInt16(); usDefaultChar = file.GetUint16(); usBreakChar = file.GetUint16(); usMaxContext = file.GetUint16(); } public ushort usUpperOpticalPointSize { get; set; } public ushort usLowerOpticalPointSize { get; set; } public ushort usMaxContext { get; set; } public ushort usBreakChar { get; set; } public ushort usDefaultChar { get; set; } public short sCapHeight { get; set; } public short sxHeight { get; set; } public uint ulCodePageRange2 { get; set; } public uint ulCodePageRange1 { get; set; } public ushort usWinDescent { get; set; } public ushort usWinAscent { get; set; } public short sTypoLineGap { get; set; } public short sTypoDescender { get; set; } public short sTypoAscender { get; set; } public ushort usLastCharIndex { get; set; } public ushort usFirstCharIndex { get; set; } public ushort fsSelection { get; set; } public Tag achVendID { get; set; } public uint ulUnicodeRange1 { get; set; } public uint ulUnicodeRange2 { get; set; } public uint ulUnicodeRange3 { get; set; } public uint ulUnicodeRange4 { get; set; } public PanoseClassification panose { get; set; } public short sFamilyClass { get; set; } public short yStrikeoutPosition { get; set; } public short yStrikeoutSize { get; set; } public short ySuperscriptYOffset { get; set; } public short ySuperscriptXOffset { get; set; } public short ySuperscriptYSize { get; set; } public short ySuperscriptXSize { get; set; } public short ySubscriptYOffset { get; set; } public short ySubscriptXOffset { get; set; } public short ySubscriptYSize 
{ get; set; } public short ySubscriptXSize { get; set; } public ushort fsType { get; set; } public ushort usWidthClass { get; set; } public ushort usWeightClass { get; set; } public short xAvgCharWidth { get; set; } } }
# B02 Database Reader

## (english)

**B02_db_reader_en.xml**

Source Connector reads from the MySQL **mirth_tests** database, **patients** table, populated by activity 0002.

Each record has a **processed** column that is false by default and is set to true when read by this channel.

Each unprocessed record is read in this format:

```
<result>
  <lastname>MASSIE</lastname>
  <name>JAMES</name>
  <id>4</id>
  <dob>1956-01-29</dob>
  <processed>0</processed>
  <sex>masculino</sex>
</result>
```

The JS destination just shows the record processed by the channel.

## (español)

**B02_db_reader.xml**

Reads the unprocessed records from the patients table where channel 0002 stores them, marks them as processed, and shows them in the destination.

It is the basis for building inbound and outbound trays through a database, using the channel's transformers to send the message on to another system.

The result shown in the JavaScript Writer destination is the record that was read, encoded as XML:

```
<result>
  <apellido>MASSIE</apellido>
  <nombre>JAMES</nombre>
  <id>4</id>
  <nacimiento>1956-01-29</nacimiento>
  <procesado>0</procesado>
  <sexo>masculino</sexo>
</result>
```
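The channel export itself is not reproduced here, but a Database Reader of this kind is typically configured with two statements along these lines — a minimal sketch, assuming the **patients** table and **processed** flag described above; the column list and the `${id}` variable are illustrative, not taken from the channel file:

```sql
-- Source connector query: fetch only the rows this channel has not handled yet
SELECT id, name, lastname, dob, sex
FROM patients
WHERE processed = 0;

-- Post-process update: mark each row as handled once it has been read
UPDATE patients
SET processed = 1
WHERE id = ${id};
```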
<?php if ( ! defined('BASEPATH')) exit('No direct script access allowed'); /** * Name: Travel Ink * * * Author: Bruno Palma * * Description: Configuration file of Travel Ink search system website * * Requirements: PHP5 or above * */ /* | ------------------------------------------------------------------------- | Tables. | ------------------------------------------------------------------------- | Database table names. */ $config['tables']['artist_profile'] = 'artist_profile'; $config['tables']['tattoo_style'] = 'tattoo_style'; $config['tables']['country'] = 'country'; $config['tables']['city'] = 'city'; $config['tables']['artist_profile_tattoo_style'] = 'artist_profile_tattoo_style'; $config['tables']['users_artist_profile'] = 'users_artist_profile'; /* End of file travelink.php */ /* Location: ./application/config/travelink.php */
package su.ias.utils.navigationutils; import android.content.Context; import android.content.Intent; import android.content.pm.ApplicationInfo; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.annotation.StyleRes; import android.support.design.widget.BottomSheetDialogFragment; import android.support.design.widget.TabLayout; import android.text.TextUtils; import android.util.TypedValue; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.CheckBox; import android.widget.GridView; import android.widget.TextView; import java.io.Serializable; /** * Created on 6/1/17. * Bottom Sheet dialog with choose naviagtor program */ public class ChooseNavigatorBottomDialog extends BottomSheetDialogFragment { //is empty string; private static final String BUILDER = ""; private TabLayout tb_routeType; private CheckBox ch_save; private Builder builder; private static ChooseNavigatorBottomDialog getChooser(Builder builder) { ChooseNavigatorBottomDialog navigatorDialog = new ChooseNavigatorBottomDialog(); final Bundle bundle = new Bundle(); bundle.putSerializable(BUILDER, builder); navigatorDialog.setArguments(bundle); return navigatorDialog; } @Nullable @Override public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { return inflater.inflate(R.layout.dialog_choose_navigation, container, false); } @Override public void onViewCreated(View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); builder = (Builder) getArguments().getSerializable(BUILDER); TextView tv_title = (TextView) view.findViewById(R.id.tv_title); ch_save = (CheckBox) view.findViewById(R.id.ch_save); tb_routeType = (TabLayout) view.findViewById(R.id.tab_roteType); GridView gv_program = (GridView) view.findViewById(R.id.gv_navigator); gv_program.setAdapter(new NavProgramAdapter()); gv_program.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { ApplicationInfo info = (ApplicationInfo) parent.getAdapter().getItem(position); NavigatorHelper.RouteType routeType = NavigatorHelper.RouteType.valueOf(builder.defaultRoadType); if (builder.useRoadType) { routeType = NavigatorHelper.RouteType.values()[tb_routeType.getSelectedTabPosition()]; } if (ch_save.isChecked()) { NavigatorHelper.saveCommand(getContext(), info, routeType); } Intent navIntent = NavigatorHelper.getNavigationIntent(info, builder.getToLatitude(), builder.getToLongitude(), routeType); if (NavigatorHelper.checkAndStartIntent(navIntent, getContext())) { dismiss(); } } }); if (builder.useRoadType) { for (NavigatorHelper.RouteType type : NavigatorHelper.RouteType.values()) { tb_routeType.addTab(tb_routeType.newTab().setIcon(type.getIcon())); } } else { tb_routeType.setVisibility(View.GONE); } if (!TextUtils.isEmpty(builder.getTitle())) { tv_title.setText(builder.getTitle()); } if (builder.isUseSave()) { if (!TextUtils.isEmpty(builder.getSaveTitle())) { ch_save.setText(builder.getSaveTitle()); } } else { ch_save.setChecked(false); ch_save.setVisibility(View.GONE); } } @SuppressWarnings({"unused", "WeakerAccess"}) public static class Builder extends AbstractBuilder<Builder> implements Serializable { private boolean useRoadType = true; private String defaultRoadType = NavigatorHelper.RouteType.AUTO.name(); public 
Builder(double toLatitude, double toLongitude) { super(toLatitude, toLongitude); } public Builder setUseRoadType(boolean useRoadType) { this.useRoadType = useRoadType; return this; } public Builder setDefaultRoadType(String roadType){ defaultRoadType = roadType; return this; } public ChooseNavigatorBottomDialog build() { return ChooseNavigatorBottomDialog.getChooser(this); } } }
package com.wms.android.ui.outstock import androidx.appcompat.app.AppCompatActivity import android.os.Bundle import android.view.KeyEvent import android.widget.SearchView import androidx.recyclerview.widget.LinearLayoutManager import com.wms.android.R import com.wms.android.adapter.InstockAdapter import com.wms.android.adapter.OutListAdapter import com.wms.android.base.BaseActivity import com.wms.android.logic.model.* import com.wms.android.logic.network.InstockService import com.wms.android.logic.network.OutstockService import com.wms.android.logic.network.ServiceCreator import com.wms.android.util.focus import com.wms.android.util.hideKeyBoard import com.wms.android.util.showToast import kotlinx.android.synthetic.main.activity_check_list.* import kotlinx.android.synthetic.main.activity_instock.* import kotlinx.android.synthetic.main.activity_instock.rv import kotlinx.android.synthetic.main.activity_out_list.* import kotlinx.android.synthetic.main.activity_out_list.txt_search class OutListActivity : BaseActivity() { //定义服务 private val service = ServiceCreator.create(OutstockService::class.java) //定义list private var list: ArrayList<Task> = ArrayList<Task>() private var adapter: OutListAdapter? = null override fun onCreate(savedInstanceState: Bundle?) { super.onCreate(savedInstanceState) setContentView(R.layout.activity_out_list) txt_search.isIconifiedByDefault = false txt_search.setOnQueryTextListener(object : SearchView.OnQueryTextListener { override fun onQueryTextSubmit(p0: String?): Boolean { if (p0 != null) { service.getTasks(p0) .enqueue( ServiceCreator.go( fun(res: ResultOne<Task>) { list = res.list createView() }) ) } return false } override fun onQueryTextChange(p0: String?): Boolean { return false } }) } override fun onStart() { super.onStart() service.getTasks("") .enqueue( ServiceCreator.go( fun(res: ResultOne<Task>) { list = res.list createView() }) ) } private fun createView() { val layoutManager = LinearLayoutManager(this) rv.layoutManager = layoutManager adapter = OutListAdapter(list) rv.adapter = adapter } }
import * as React from "react"; import * as Sentry from "@sentry/react"; import { PageFallback } from "../PageFallback"; const PageErrorBoundry = ({ children }) => { function handleBeforeCapture(scope) { scope.setTag("appLevel", "global"); } return ( <Sentry.ErrorBoundary fallback={<PageFallback />} beforeCapture={handleBeforeCapture} > {children} </Sentry.ErrorBoundary> ); }; export { PageErrorBoundry };
CREATE TABLE `moonshot_targeted_ids` ( `gss_acceptor` varchar(254) NOT NULL default '', `namespace` varchar(36) NOT NULL default '', `username` varchar(64) NOT NULL default '', `targeted_id` varchar(128) NOT NULL default '', `creationdate` timestamp NOT NULL default CURRENT_TIMESTAMP, PRIMARY KEY (`username`,`gss_acceptor`,`namespace`) );
require 'rails_helper' describe 'meta/formats/index' do let(:formats) { build_stubbed_list(:format, 5) } it 'shows all formats' do assign(:formats, formats) render formats.each do |format| expect(rendered).to include(format.name) end end end
package com.decathlon.ara.service.dto.execution; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; @Data @NoArgsConstructor @AllArgsConstructor public class ExecutionCriteriaDTO { String country; String exception; String feature; String handling; Long problem; String scenario; boolean scenarioDetails; String severity; String step; Long team; String type; boolean withSucceed; }
<?php namespace DJEM\Crosslinks; class Tokenizer { private $state = ['Text']; private $current = 0; private $length = 0; private $lastIndex = 0; private $text = []; private $tokens = []; private function __construct() { } private function setState($state) { $this->state[] = $state; } private function popState() { return array_pop($this->state); } private function getState() { return end($this->state); } private function isSpace() { $char = $this->text[$this->current]; return preg_match('/[[:space:]]/', $char) || $char == ''; } private function isAmp() { return $this->text[$this->current] == '&'; } private function isPunct() { return preg_match('/[[:punct:]]/', $this->text[$this->current]); } private function current($char) { return $this->text[$this->current] == $char; } private function isAhead($string) { $length = strlen($string); $text = implode('', array_slice($this->text, $this->current, $length)); return strcasecmp($string, $text) == 0; } private function getTokenText($offset = 0) { return implode('', array_slice($this->text, $this->lastIndex, $this->current - $this->lastIndex + $offset)); } private function addToken($offset = 0) { if (is_string($offset)) { $offset = strlen($offset); } if ($this->lastIndex >= $this->current + $offset || $this->current + $offset > $this->length) { return; } $token = (object) [ 'type' => $this->getState(), 'text' => $this->getTokenText($offset), ]; $this->lastIndex = $this->current = $this->current + $offset; $this->tokens[] = $token; } private function addText($text) { $this->text = str_split($text); } private function tokenize() { $this->current = 0; $this->length = count($this->text); $this->tokens = []; while ($this->current < $this->length) { call_user_func([$this, 'parse'.$this->getState()]); } $this->addToken(); return $this->tokens; } private function openTag() { if ($this->isAhead('<!--')) { $this->setState('Comment'); } elseif ($this->isAhead('<script')) { $this->setState('Script'); } elseif ($this->isAhead('<style')) { $this->setState('Style'); } else { $this->setState('Html'); } } private function parseText() { if ($this->current('<')) { $this->addToken(); $this->openTag(); } elseif ($this->isSpace()) { $this->addToken(); $this->setState('TextSpace'); } elseif ($this->isAmp()) { $this->addToken(); $this->setState('TextAmp'); } elseif ($this->isPunct()) { $this->addToken(); $this->setState('TextPunct'); } ++$this->current; } private function parseTextAmp() { if ($this->current(';')) { $this->addToken(';'); $this->popState(); return; } elseif ($this->current('<')) { $this->addToken(); $this->popState(); $this->openTag(); } ++$this->current; } private function parseTextPunct() { if ($this->current('<')) { $this->addToken(); $this->popState(); $this->openTag(); } elseif ($this->isSpace()) { $this->addToken(); $this->popState(); $this->setState('TextSpace'); } elseif ($this->isAmp()) { $this->addToken(); $this->popState(); $this->setState('TextAmp'); } elseif (! $this->isPunct()) { $this->addToken(); $this->popState(); } ++$this->current; } private function parseTextSpace() { if ($this->current('<')) { $this->addToken(); $this->popState(); $this->openTag(); } elseif ($this->isAmp()) { $this->addToken(); $this->popState(); $this->setState('TextAmp'); } elseif ($this->isPunct()) { $this->addToken(); $this->popState(); $this->setState('TextPunct'); } elseif (! 
$this->isSpace()) { $this->addToken(); $this->popState(); } ++$this->current; } private function parseHtml() { if ($this->current('"')) { $this->setState('HtmlQuote'); } elseif ($this->current('\'')) { $this->setState('HtmlSingleQuote'); } elseif ($this->current('>')) { $this->addToken('>'); $this->popState(); return; } ++$this->current; } private function parseHtmlQuote() { if ($this->current('"')) { $this->popState(); } ++$this->current; } private function parseHtmlSingleQuote() { if ($this->current('\'')) { $this->popState(); } ++$this->current; } private function parseComment() { if ($this->current('-') && $this->isAhead('-->')) { $this->addToken('-->'); $this->popState(); return; } ++$this->current; } private function parseScript() { if ($this->current('<') && $this->isAhead('</script>')) { $this->addToken('</script>'); $this->popState(); return; } ++$this->current; } private function parseStyle() { if ($this->current('<') && $this->isAhead('</style>')) { $this->addToken('</style>'); $this->popState(); return; } ++$this->current; } public static function parse($text) { $tokenizer = new self(); $tokenizer->addText($text); return $tokenizer->tokenize(); } public static function toString($tokens) { $text = ''; foreach ($tokens as $token) { $text .= $token->text; } return $text; } }
<?php

namespace App\Models\Legacy;

/**
 * Class LegacyLeadershipRole
 *
 * @property int $RoleID
 * @property string $Role
 * @property string $Description
 * @property string|null $GroupName
 * @property string $LeadershipLevel
 * @method static \Illuminate\Database\Eloquent\Builder|LegacyLeadershipRole newModelQuery()
 * @method static \Illuminate\Database\Eloquent\Builder|LegacyLeadershipRole newQuery()
 * @method static \Illuminate\Database\Eloquent\Builder|LegacyLeadershipRole query()
 * @method static \Illuminate\Database\Eloquent\Builder|LegacyLeadershipRole whereDescription($value)
 * @method static \Illuminate\Database\Eloquent\Builder|LegacyLeadershipRole whereGroupName($value)
 * @method static \Illuminate\Database\Eloquent\Builder|LegacyLeadershipRole whereLeadershipLevel($value)
 * @method static \Illuminate\Database\Eloquent\Builder|LegacyLeadershipRole whereRole($value)
 * @method static \Illuminate\Database\Eloquent\Builder|LegacyLeadershipRole whereRoleID($value)
 * @mixin \Eloquent
 */
class LegacyLeadershipRole extends LegacyModel
{
    protected $table = 'tblLeadershipRoles';

    protected $primaryKey = 'RoleID';

    public $timestamps = false;

    protected $fillable = [
        'Role', 'Description', 'GroupName', 'LeadershipLevel'
    ];

    protected $guarded = [];
}
# Changelog for cardano-config ## Unreleased changes ## cardano-config 1.0.0 - First version.
[TOC]

# Overview

The LLVM compiler infrastructure project (formerly Low Level Virtual Machine) is a collection of modular and reusable compiler and toolchain technologies used to develop compiler front ends and back ends.

LLVM is written in C++ and is designed for compile-time, link-time, run-time, and idle-time optimization of programs written in arbitrary programming languages.

The LLVM project started in 2000 at the University of Illinois at Urbana-Champaign, under the direction of Vikram Adve and Chris Lattner.

# Features

- LLVM can provide the middle layers of a complete compiler system, taking intermediate representation (IR) code from a compiler and emitting an optimized IR.
- This new optimized IR can then be converted and linked into machine-dependent assembly language code for a target platform.
- C code -> Clang/GCC's C front end -> IR -> LLVM's IR -> optimized IR -> LLVM's machine code (back end) -> machine code

# Components

## Front ends

- Clang
- Utrecht Haskell Compiler
- Glasgow Haskell Compiler

## Intermediate representation

IR is a strongly typed reduced instruction set computing (RISC) instruction set which abstracts away details of the target.

## Back ends

The LLVM machine code (MC) sub-project is LLVM's framework for translating machine instructions between textual forms and machine code.

## Linker

- The lld sub-project is an attempt to develop a built-in, platform-independent linker for LLVM.
- Another linker: GNU ld

## C++ Standard Library

libc++, LLVM's implementation of the C++ Standard Library.

## Debugger

LLDB

# References

- [LLVM][wiki]

[wiki]: https://en.wikipedia.org/wiki/LLVM
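That front end → IR → back end flow can be exercised step by step with the stock LLVM/Clang tools; a minimal sketch, where the source file name `hello.c` and the `-O2` level are placeholders:

```console
$ clang -S -emit-llvm hello.c -o hello.ll   # C front end: source -> textual LLVM IR
$ opt -O2 hello.ll -S -o hello.opt.ll       # IR-to-IR optimization passes
$ llc hello.opt.ll -o hello.s               # back end: optimized IR -> target assembly
```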
# operate-vsts

A sample that uses Node.js to operate VSTS (Visual Studio Team Services).

# Requirement

Uses [vsts-node-api](https://github.com/Microsoft/vsts-node-api).

# Usage

The following environment variables must be set.

```
URL=https://{your-account}.visualstudio.com/defaultcollection
PERSONAL_TOKEN=1234567890abcdefghijklmn
```
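For orientation only — this sketch is not taken from the sample's own source — connecting with those two environment variables looks roughly like this; the npm package name `vso-node-api` and the two entry points come from that project's README, and everything else is illustrative:

```js
// Hypothetical connection sketch using the environment variables above.
const vsts = require('vso-node-api');

const authHandler = vsts.getPersonalAccessTokenHandler(process.env.PERSONAL_TOKEN);
const connection = new vsts.WebApi(process.env.URL, authHandler);

// Concrete clients (core, build, git, ...) are then requested from `connection`;
// see the sample's own code for the calls it actually makes.
```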
function Get-MappedDrive { <# .SYNOPSIS Collected mapped drives for logged on users .PARAMETER Computername The computer you wish to collect data from .PARAMETER CimSession A CimSession, or array of Cim sessions, created using New-CimSession. The CimSession parameter allows customisation of the connection method, including the protocol and any credentials. .PARAMETER DriveName By default, all mapped drives are returned. The list may be filtered using this parameter. Names should not include ":". .EXAMPLE Get-MappedDrive Get the drives mapped by all users on the current machine. .EXAMPLE Get-MappedDrive -Computername RECEPTION-PC Get the drives mapped by all users on RECEPTION-PC .EXAMPLE Get-ADComputer -Filter * -Searchbase "OU=Sales,DC=contoso,DC=com" -Properties dnsHostName | Get-MappedDrive Get drives mapped by all users for all computers in the Sales OU in Active Directory. #> [CmdletBinding(DefaultParameterSetName = 'ComputerName')] [Alias('Get-MappedDrives')] param( [Parameter(Position = 0, ValueFromPipeline, ValueFromPipelineByPropertyName, ParameterSetName = 'ComputerName')] [Alias('PSComputerName', 'DnsHostName')] [ValidateNotNullOrEmpty()] [string[]] $ComputerName = $env:COMPUTERNAME, [Parameter(Mandatory, ParameterSetName = 'CimSession')] [CimSession[]]$CimSession, [Parameter(Position = 1, ValueFromPipelineByPropertyName)] [string[]] $DriveName ) begin { $ErrorActionPreference = 'Stop' [UInt32]$HKEY_USERS = 2147483651 } process { if ($PSCmdlet.ParameterSetName -eq 'ComputerName') { $computers = $ComputerName } else { $computers = $CimSession } $getParams = @{ ClassName = 'Win32_Process' Filter = 'Name="explorer.exe"' Property = 'Name' } foreach ($computer in $computers) { $connectionParams = @{ $PSCmdlet.ParameterSetName = $computer } try { $explorer = Get-CimInstance @getParams @connectionParams if ($explorer) { $sid = ($explorer | Invoke-CimMethod -MethodName GetOwnerSid).Sid $owner = $explorer | Invoke-CimMethod -MethodName GetOwner $invokeParams = @{ ClassName = 'StdRegProv' Namespace = 'root/default' } $driveList = Invoke-CimMethod @invokeParams @connectionParams -MethodName EnumKey -Arguments @{ hDefKey = $HKEY_USERS sSubKeyName = Join-Path $SID 'Network' } if ($PSBoundParameters.ContainsKey('DriveName')) { $driveList.sNames = $driveList.sNames | Where-Object { $_ -in $DriveName } } foreach ($drive in $driveList.sNames) { $remotePath = Invoke-CimMethod @invokeParams @connectionParams -MethodName GetStringValue -Arguments @{ hDefKey = $HKEY_USERS sSubKeyName = [System.IO.Path]::Combine($SID, 'Network', $drive) sValueName = 'RemotePath' } [PSCustomObject]@{ DriveOwner = '{0}\{1}' -f $Owner.Domain, $Owner.User DriveLetter = '{0}:\' -f $drive.ToUpper() RootPath = $remotePath.sValue } } } else { $errorRecord = [System.Management.Automation.ErrorRecord]::new( [System.UnauthorizedAccessException]::new('Unable to find a logged on user on target machine'), 'NoUserFound', [System.Management.Automation.ErrorCategory]::ObjectNotFound, $computer ) $PSCmdlet.WriteError($errorRecord) } } catch { $ErrorRecord = [System.Management.Automation.ErrorRecord]::new( [System.TimeoutException]::new('Unable to query WMI on the target machine', $_.Exception), 'CimQueryFailed', [System.Management.Automation.ErrorCategory]::ConnectionError, $computer ) $PSCmdlet.WriteError($ErrorRecord) } } } }
import { Component, ComponentMap } from '../component'; import { ExecutionContext } from '../environments'; import { AbstractVinyl } from '../../consumer/component/sources'; export interface PreviewDefinition { /** * extension preview prefix */ prefix: string; /** * path of the default template to be executed. */ renderTemplatePath?: (context: ExecutionContext) => Promise<string>; /** * get all files to require in the preview runtime. */ getModuleMap(components: Component[]): Promise<ComponentMap<AbstractVinyl[]>>; }
# tensor A fast, zero-dependency tensor library in C++. ## API ```cpp #include "tensor.hpp" using namespace type; int main() { tensor2<float> t{{0, 1}, {2, 3}, {4, 5}}; t.square().print(); } ``` ## Testing ```console $ mkdir build $ cd build $ cmake -DCOMPILER=clang -DCMAKE_BUILD_TYPE=Release -DCMAKE_TOOLCHAIN_FILE=VCPKG_TOOLCHAIN_FILE .. $ cmake --build . ``` ## References - [Tensor][tensor] ## License [MIT License][license] [tensor]: https://en.wikipedia.org/wiki/Tensor [license]: LICENSE
// // ZumoTestRunSetup.h // ZumoE2ETestApp // // Created by Carlos Figueira on 2/6/14. // Copyright (c) 2014 Microsoft. All rights reserved. // #import <Foundation/Foundation.h> @interface ZumoTestRunSetup : NSObject + (NSArray *)createTests; + (NSString *)groupDescription; @end
object Versions { val kotlinVersion = "1.3.41" val gradleVersion = "3.4.2" val appcompatVersion = "1.1.0-rc01" val junitVersion = "4.13-beta-3" val androidxTestRunnerVersion = "1.3.0-alpha01" val espressoCoreVersion = "3.3.0-alpha01" val daggerVersion = "2.23.2" val lifecycleVersion = "2.2.0-alpha02" val retrofitVersion = "2.6.0" val okhttpVersion = "4.0.1" val recyclerViewVersion = "1.1.0-beta01" val rxAndroidVersion = "2.1.1" val rxJavaVersion = "2.2.3" val glideVersion = "4.9.0" val truthVersion = "1.0" val truthExtVersion = "1.3.0-alpha01" val mockKVersion = "1.9.3" val coreTestingVersion = "2.0.0" val materialVersion = "1.2.0-alpha01" val gradleVersionPluginVersion = "0.27.0" }
#include <stdio.h>
#define INIT_VALUE -1

/* recursive function returns either 1 or 0, via the pointer parameter result,
according to whether or not all the digits of the positive integer argument num are even. */
void rAllEvenDigits2(int num, int *result);

int main()
{
    int number, result = INIT_VALUE;

    printf("Enter a number: \n");
    scanf("%d", &number);

    rAllEvenDigits2(number, &result);
    if (result == 1)
        printf("rAllEvenDigits2(): yes\n");
    else if (result == 0)
        printf("rAllEvenDigits2(): no\n");
    else
        printf("rAllEvenDigits2(): error\n");

    return 0;
}

void rAllEvenDigits2(int num, int *result)
{
    if (num == 0){
        /* All digits have been checked and none was odd. */
        *result = 1;
    }
    else if ((num % 10) % 2 == 0){
        /* Last digit is even: recurse on the remaining digits. */
        rAllEvenDigits2(num/10, result);
    }
    else {
        /* Found an odd digit. */
        *result = 0;
    }
}
import rofi_menu import asyncio from rofify.src.SpotifyAPI import spotify from rofify.src.Hotkeys import hotkeys from rofify.src.config import config class PlayPauseItem(rofi_menu.Item): def __init__(self, text=None): super().__init__(text=text) async def load(self, meta): """ Item text should be play or pause depending on if the current track is either paused or playing respectively """ self.text = "<b><u>Playing</u></b> Paused" if \ spotify.playback.meta.session['is_playing'] \ else "Playing <b><u>Paused</u></b>" await super().load(meta) async def on_select(self, meta): """ This should pause/play the current item on the active device """ await spotify.playback.play_pause() return await super().on_select(meta) class NextItem(rofi_menu.Item): def __init__(self): super().__init__(text="Next") async def on_select(self, meta): """ This should play the next track """ await spotify.playback.next() return await super().on_select(meta) class PreviousItem(rofi_menu.Item): def __init__(self): super().__init__(text="Previous") async def on_select(self, meta): """ This should play the next track """ await spotify.playback.previous() return await super().on_select(meta) class ShuffleItem(rofi_menu.Item): text_on = "Shuffle: <b><u>on</u></b> off" text_off = "Shuffle: on <b><u>off</u></b>" async def on_select(self, meta): """ Toggle the shuffle setting """ await spotify.playback.toggle_shuffle() return await super().on_select(meta) async def load(self, meta): self.text = self.text_on if \ spotify.playback.meta.session['shuffle_state'] else self.text_off await super().load(meta) class RepeatItem(rofi_menu.Item): repeat_text = { 'off':"Repeat: <b><u>off</u></b> context track", 'context':"Repeat: off <b><u>context</u></b> track", 'track':"Repeat: off context <b><u>track</u></b>", } async def on_select(self, meta): """ Cycle between the different types of repeat """ await spotify.playback.cycle_repeat() return await super().on_select(meta) async def load(self, meta): self.text = self.repeat_text[spotify.playback.meta.session['repeat_state']] await super().load(meta) class PlaybackMenu(rofi_menu.Menu): icon = None allow_user_input = True async def pre_render(self, meta): """ The playback label contains info about the current playback. """ self.prompt = await config.header_playback_label(spotify.playback) await super().pre_render(meta) async def on_user_input(self, meta): """ Check ROFI_RETV to see if one of the mapped hotkeys has been pressed """ await hotkeys.handle_user_input() return rofi_menu.Operation(rofi_menu.OP_REFRESH_MENU) async def generate_menu_items(self, meta): if not spotify.playback._playback: await spotify.playback.update_playback() items = [ rofi_menu.BackItem(), PlayPauseItem(), NextItem(), PreviousItem(), ShuffleItem(), RepeatItem(), ] return items
from __future__ import print_function, absolute_import, division

from ..libs import *
from .base import Widget
from toga.constants import *


class ProgressBar(Widget):
    def __init__(self, max=None, value=None):
        super(ProgressBar, self).__init__()

        self.max = max
        self.value = value

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        self._value = value
        self._running = self._value is not None
        if self._impl:
            self._impl.setDoubleValue_(value)

    def _startup(self):
        self._impl = NSProgressIndicator.new()
        self._impl.setStyle_(NSProgressIndicatorBarStyle)
        self._impl.setDisplayedWhenStopped_(False)

        if self.max:
            self._impl.setIndeterminate_(False)
            self._impl.setMaxValue_(self.max)
        else:
            self._impl.setIndeterminate_(True)

        if self._running:
            # startAnimation: takes a sender argument, as in start() below.
            self._impl.startAnimation_(self._impl)

        self._impl.setTranslatesAutoresizingMaskIntoConstraints_(False)

    def start(self):
        if self._impl and not self._running:
            self._impl.startAnimation_(self._impl)
            self._running = True

    def stop(self):
        if self._impl and self._running:
            self._impl.stopAnimation_(self._impl)
            self._running = False
require 'test_helper' class PostEditTest < ActionDispatch::IntegrationTest def setup # Admins can edit all posts. Moderators can edit all posts excluded # admin's posts. Users can edit only their own posts. @admin = users(:admin) @moderator = users(:moderator) @user = users(:user) @accepted_users = [@admin, @moderator, @user] # A villain(regular user) who will try to perform action forbidden to him. @villain = users(:user_4) # Topic in which @post is created. @topic = topics(:first) # Post created by @user. @post = posts(:third) # Post created by admin. @admins_post = posts(:first) # New valid value for post content. @new_content = 'New valid post content' end test 'should NOT allow moderator enter admin\'s post edit page' do log_in_as(@moderator) get edit_post_path(@admins_post) assert_access_denied_notice end test 'should allow admin, mod. and post owner enter post edit page' do @accepted_users.each do |user| log_in_as(user) get edit_post_path(@post) assert_template 'posts/edit' assert_flash_notices end end test 'should NOT allow user enter foreign post edit page' do log_in_as(@villain) get edit_post_path(@post) assert_access_denied_notice end test 'should NOT allow not logged in user enter post edit page' do get edit_post_path(@post) assert_friendly_forwarding_notice end test 'should allow admin, moderator and post owner update post' do @accepted_users.each do |user| log_in_as(user) patch post_path(@post), params: { post: { content: @new_content } } # Assert post has been updated. @post.reload assert_equal @new_content, @post.content assert_redirected_to @post.full_path follow_redirect! assert_flash_notices success: { count: 1 } end end test 'should NOT allow to update post with invalid data' do # Invalid data for post. @new_content = '' @accepted_users.each do |user| log_in_as(user) patch post_path(@post), params: { post: { content: @new_content } } # Assert post has NOT been updated. @post.reload assert_not_equal @new_content, @post.content assert_template 'posts/edit' # Check if there are form fileds with errors. assert_select 'div.field_with_errors' assert_flash_notices danger: { count: 1 } end end test 'should NOT allow moderator edit admin\'s post' do log_in_as(@moderator) patch post_path(@admins_post), params: { post: { content: @new_content } } @admins_post.reload assert_not_equal @new_content, @admins_post.content assert_access_denied_notice end test 'should NOT allow user edit foreign post' do log_in_as(@villain) # @post dosen't belong to @villain patch post_path(@post), params: { post: { content: @new_content } } # Assert post has NOT been updated. @post.reload assert_not_equal @new_content, @post.content assert_access_denied_notice end test 'should NOT allow not logged in user update topic' do patch post_path(@post), params: { post: { content: @new_content } } # Assert post has NOT been updated. @post.reload assert_not_equal @new_content, @post.content assert_access_denied_notice end end
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package net.gcolin.rest.util; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Supplier; import javax.ws.rs.core.MultivaluedMap; /** * A MultivaluedMap loaded on the first real use. * * @author Gaël COLIN * @since 1.0 */ public class LazyMultivaluedMap<K, V> implements MultivaluedMap<K, V> { private Supplier<MultivaluedMap<K, V>> delegateBuidler; private MultivaluedMap<K, V> delegate; public LazyMultivaluedMap(Supplier<MultivaluedMap<K, V>> delegateBuidler) { super(); this.delegateBuidler = delegateBuidler; } private MultivaluedMap<K, V> get() { if (delegate == null) { delegate = delegateBuidler.get(); } return delegate; } @Override public List<V> get(Object key) { return get().get(key); } @Override public int size() { return get().size(); } @Override public boolean isEmpty() { return get().isEmpty(); } @Override public boolean containsKey(Object key) { return get().containsKey(key); } @Override public boolean containsValue(Object value) { return get().containsValue(value); } @Override public List<V> put(K key, List<V> value) { return get().put(key, value); } @Override public List<V> remove(Object key) { return get().remove(key); } @Override public void putAll(Map<? extends K, ? extends List<V>> map) { get().putAll(map); } @Override public void clear() { get().clear(); } @Override public Set<K> keySet() { return get().keySet(); } @Override public Collection<List<V>> values() { return get().values(); } @Override public Set<Entry<K, List<V>>> entrySet() { return get().entrySet(); } @Override public void add(K arg0, V arg1) { get().add(arg0, arg1); } @Override public void addAll(K arg0, @SuppressWarnings("unchecked") V... arg1) { get().addAll(arg0, arg1); } @Override public void addAll(K arg0, List<V> arg1) { get().addAll(arg0, arg1); } @Override public void addFirst(K arg0, V arg1) { get().addFirst(arg0, arg1); } @Override public boolean equalsIgnoreValueOrder(MultivaluedMap<K, V> arg0) { return get().equalsIgnoreValueOrder(arg0); } @Override public V getFirst(K arg0) { return get().getFirst(arg0); } @Override public void putSingle(K arg0, V arg1) { get().putSingle(arg0, arg1); } }
export enum DB_KEY { menuItems = 'menuItems', partners = 'partners', businessCard = 'businessCard', businessCardTitle = 'businessCard/title', businessCardSubtitle = 'businessCard/subtitle', businessCardSections = 'businessCard/sections', talents = 'talents', graduates = 'graduates', }
<?php class User extends CI_Controller { public function index() { $data['main_content'] = "add_user_view"; $this->load->model('skill_model'); $this->load->model('role_model'); $this->load->view("layouts/main", $data); } public function login() { $data['main_content'] = "welcome_view"; $this->load->view("layouts/main", $data); } public function logout() { $this->session->unset_userdata('user_id'); $this->session->unset_userdata('username'); $this->session->unset_userdata('display_name'); $this->session->unset_userdata('usertype'); $this->session->unset_userdata('logged_in'); $this->session->unset_userdata('permission'); $this->session->sess_destroy(); redirect(base_url()); } public function authenticate() { $this->load->model('user_model'); $this->load->model('role_model'); $currentUser = $this->user_model->authenticate(); if ($currentUser) { $role = $this->role_model->get_role_by_id($currentUser->role_id); if ($role) { $data['main_content'] = "home_view"; $this->set_session_user($currentUser, $role); $this->load->view("layouts/main", $data); } else { $data['login_errors'] = 'Invalid user. Please contact administrator'; $data['main_content'] = "welcome_view"; $this->load->view("layouts/main", $data); } } else { $data['login_errors'] = 'Login failed. Please enter valid username and password'; $data['main_content'] = "welcome_view"; $this->load->view("layouts/main", $data); } } public function view_profile($userId) { $this->load->model('user_model'); $this->load->model('task_model'); $this->load->model('skill_model'); $uId = base64_decode(urldecode($userId)); $result = $this->task_model->getAverageUserRating($uId); foreach($result as &$value) { $data['rating'] = ceil($value['AvgRating']); } $data['userId'] = $userId; $data['main_content'] = "user_profile"; $this->load->view("layouts/main", $data); } public function update_profile() { $this->load->model('user_model'); $result = $this->user_model->authenticate(); if ($result == FALSE) { $data['login_errors'] = 'Old password is incorrect'; } $this->user_model->update_user_profile(); $data['main_content'] = "user_profile"; $this->load->view("layouts/main", $data); } // Private functions private function set_session_user($user, $role) { $userData = array( 'user_id' => $user->id, 'display_name' => $user->name, 'usertype' => $role->name, 'permission' => $role->permission, 'logged_in' => TRUE ); $this->session->set_userdata($userData); $user_page = str_replace(' ', '', strtolower($role->name)); // This will load the usertype controller //redirect(base_url() . $user_page); //redirect(base_url()); } public function addUser(){ $this->load->model('user_model'); $this->user_model->insert_user(); $data['main_content'] = "home_view"; $this->load->view("layouts/main", $data); } }
function Toolbar() {} module.exports = Toolbar; Toolbar.prototype.view = __dirname; Toolbar.prototype.init = function(){ this.model.setNull('mode', ''); };
module Pello class Config CONFIG_FILE_PATH = "#{ENV['HOME']}/.config/pello/pello.yaml".freeze attr_accessor :board_url, :developer_public_key, :list_name, :member_token, :username, :error def initialize if File.exist?(CONFIG_FILE_PATH) config = YAML.safe_load File.open(CONFIG_FILE_PATH).read auth_config = config['auth'] @developer_public_key = auth_config['developer_public_key'] @member_token = auth_config['member_token'] pello_config = config['config'] @username = pello_config['username'] @board_url = pello_config['board_url'] @list_name = pello_config['list_name'] @error = false else @error = true end rescue => e @error = true puts "Error loading config: #{e.message}" end def valid? !error end def self.write_empty_config system "mkdir -p #{File.dirname(CONFIG_FILE_PATH)}" File.open(CONFIG_FILE_PATH, 'w') do |file| file.puts 'auth:' file.puts ' developer_public_key: ""' file.puts ' member_token: ""' file.puts 'config:' file.puts ' board_url: ""' file.puts ' username: ""' file.puts ' list_name: "In progress"' end end end end
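A minimal caller sketch for the Pello::Config class above. This is hypothetical usage code, not part of the gem itself; it only relies on the methods and constants defined in the class (new, valid?, write_empty_config, CONFIG_FILE_PATH and the attribute readers).

# Hypothetical usage sketch for Pello::Config (assumes the gem is already loaded).
config = Pello::Config.new

if config.valid?
  # Config loaded successfully; use the values read from pello.yaml.
  puts "Board: #{config.board_url} (list: #{config.list_name}, user: #{config.username})"
else
  # No readable config yet: write the empty template and ask the user to fill it in.
  Pello::Config.write_empty_config
  puts "Wrote an empty config to #{Pello::Config::CONFIG_FILE_PATH}, please fill it in."
end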
package com.amazonaws.ivs.player.scrollablefeed.models data class SizeModel(val width: Int, val height: Int)
import 'package:anon/core/system/anon.dart'; import 'package:anon/view/widgets/anon_widgets.dart'; import 'package:anon/core/utils/test_helpers.dart'; import 'package:flutter_test/flutter_test.dart'; void main() { late AnonStatelessWidget testStatelessAnon; late AnonStatefulWidget testStatefullAnon; late AnonStatefulWidget testAnon; Anon? anonTest; setUpAll(() { testStatelessAnon = TestStatelessWidget(); testStatefullAnon = TestStatefulWidget(); testAnon = TestAnonState(); anonTest = Anon(); }); group("[Anon Widgets]", () { test('Check if anon exists', () async { expect(anonTest, testStatelessAnon.anon); expect(anonTest, testStatefullAnon.anon); expect(anonTest, testAnon.anon); }); }); }
/*
 * Space.cpp
 *
 *  Created on: 22 Sep 2010
 *      Author: Murray Christopherson
 */

#include "Space.h"

namespace roadrunner {

Space::Space() : taken(false), taker(NULL) {
}

Space::~Space() {
    // do nothing
}

Player * Space::getTaker(void) const {
    return taker;
}

bool Space::isTaken(void) const {
    return taken;
}

void Space::setTaker(Player * ava) {
    taker = ava;
    taken = true;
}

}
<?php

namespace Brackets\Translatable;

use Illuminate\Support\Facades\Config;

class Translatable
{
    /**
     * Attempt to get all locales.
     *
     * Supports both a plain list of locales and a keyed array of
     * locale => options in the `translatable.locales` config.
     *
     * @return \Illuminate\Support\Collection
     */
    public function getLocales()
    {
        return collect((array) Config::get('translatable.locales'))->map(function ($val, $key) {
            return is_array($val) ? $key : $val;
        });
    }
}
#!/bin/bash

DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )

export PATH=/opt/bin:/opt/etc/vault/scripts:$PATH

. $DIR/utils/functions
. $DIR/utils/env_defaults
. /etc/profile.d/vault.sh

# Re-export PATH in case the sourced profile reset it.
export PATH=/opt/bin:/opt/etc/vault/scripts:$PATH

CLUSTER_ID=$1

if [ -z "$CLUSTER_ID" ]; then
  exit1 "A unique Kubernetes cluster id is required."
fi

# Generate a kubelet bootstrap token CSV and store it in Vault, unless one already exists.
write_kubelet_bootstrap_token() {
  BOOTSTRAP_TOKEN=$(head -c 16 /dev/urandom | od -An -t x | tr -d ' ')
  TOKEN_CSV="$BOOTSTRAP_TOKEN,kubelet-bootstrap,10001,\"system:kubelet-bootstrap\""

  if ! vault read -field=value secret/$CLUSTER_ID/config/kubelet-bootstrap-token
  then
    vault write secret/$CLUSTER_ID/config/kubelet-bootstrap-token value="$TOKEN_CSV"
  fi
}

# Generate the service-account signing key and store it in Vault, unless one already exists.
write_service_account_key() {
  if ! vault read -field=key secret/$CLUSTER_ID/config/service-account-key
  then
    openssl genrsa 4096 > token-key
    vault write secret/$CLUSTER_ID/config/service-account-key key=@token-key
    rm token-key
  fi
}

write_kubelet_bootstrap_token
write_service_account_key
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public class Tools : MonoBehaviour {

    /*
        x, y: screen coordinates where the number is drawn
        number: the number to draw
        texmube: textures for the digits 0-9
    */
    public static void DrawImageNumber(int x, int y, int number, object[] texmube) {
        // Convert the number into a char array.
        char[] arrays = number.ToString().ToCharArray();

        Texture tex = (Texture)texmube[0];
        int width = tex.width;
        int height = tex.height;

        // Iterate over the digits and draw each one.
        foreach (char c in arrays) {
            int i = int.Parse(c.ToString());
            // Draw the texture for this digit, then advance by one digit width.
            GUI.DrawTexture(new Rect(x, y, width, height), (Texture)texmube[i]);
            x += width;
        }
    }
}
package commandbuilder

import (
	"fmt"
	"strings"

	"github.com/mohae/deepcopy"
)

var (
	// Default SSH options
	ConnectionSshArguments = []string{"-oBatchMode=yes -oPasswordAuthentication=no"}

	// Default Docker options
	ConnectionDockerArguments = []string{"exec", "-i"}
)

type Environment struct {
	Vars map[string]string
}

type Connection struct {
	// Type of command
	Type string

	Ssh    Argument
	Docker Argument

	// Working directory for eg. ssh
	Workdir string

	// Environment variables
	Environment Environment

	containerCache map[string]string
}

// Clone returns a deep copy of the connection with all maps initialised
func (connection *Connection) Clone() (conn *Connection) {
	conn = deepcopy.Copy(connection).(*Connection)

	if conn.Environment.Vars == nil {
		conn.Environment.Vars = map[string]string{}
	}

	if conn.containerCache == nil {
		conn.containerCache = map[string]string{}
	}

	if conn.Ssh.Options == nil {
		conn.Ssh.Options = map[string]string{}
	}

	if conn.Ssh.Environment == nil {
		conn.Ssh.Environment = map[string]string{}
	}

	if conn.Docker.Options == nil {
		conn.Docker.Options = map[string]string{}
	}

	if conn.Docker.Environment == nil {
		conn.Docker.Environment = map[string]string{}
	}

	return
}

// IsEmpty reports whether the connection has no settings set (workdir, environment, SSH, Docker)
func (connection *Connection) IsEmpty() (status bool) {
	status = false

	if connection.Workdir != "" {
		return
	}

	if !connection.Environment.IsEmpty() {
		return
	}

	if !connection.Ssh.IsEmpty() {
		return
	}

	if !connection.Docker.IsEmpty() {
		return
	}

	return true
}

// SetSsh sets the SSH configuration from a string (query, dsn, user@host..)
func (connection *Connection) SetSsh(configuration string) error {
	return connection.Ssh.Set(configuration)
}

// SetDocker sets the Docker configuration from a string (query, dsn, user@host..)
func (connection *Connection) SetDocker(configuration string) error {
	return connection.Docker.Set(configuration)
}

// String creates a human readable representation of the connection
func (connection *Connection) String() string {
	var parts []string

	connType := connection.GetType()

	parts = append(parts, fmt.Sprintf("Type:%s", connType))

	switch connType {
	case "ssh":
		parts = append(parts, fmt.Sprintf("SSH:%s", connection.SshConnectionHostnameString()))
	case "docker":
		parts = append(parts, fmt.Sprintf("Docker:%s", connection.Docker.Hostname))
	case "ssh+docker":
		parts = append(parts, fmt.Sprintf("SSH:%s", connection.SshConnectionHostnameString()))
		parts = append(parts, fmt.Sprintf("Docker:%s", connection.Docker.Hostname))
	default:
	}

	return fmt.Sprintf("Connection[%s]", strings.Join(parts, " "))
}

// IsEmpty reports whether the environment has no variables
func (env *Environment) IsEmpty() bool {
	return len(env.Vars) == 0
}

// GetMap returns all environment variables as a map
func (env *Environment) GetMap() map[string]string {
	return env.Vars
}

// SetMap replaces the environment map (absolute)
func (env *Environment) SetMap(vars map[string]string) {
	env.Vars = vars
}

// Set sets a single environment variable
func (env *Environment) Set(name string, value string) {
	env.Vars[name] = value
}

// AddMap merges an environment map (adds/overwrites)
func (env *Environment) AddMap(vars map[string]string) {
	for name, val := range vars {
		env.Vars[name] = val
	}
}

// Clear empties the environment map
func (env *Environment) Clear() {
	env.Vars = map[string]string{}
}
package Mojolicious::Plugin::SecureCORS; use warnings; use strict; use utf8; use feature ':5.10'; use Carp; use version; our $VERSION = qv('1.0.3'); # REMINDER: update Changes # REMINDER: update dependencies in Build.PL use Mojo::Base 'Mojolicious::Plugin'; use constant DEFAULT_MAX_AGE => 1800; sub register { my ($self, $app, $conf) = @_; if (!exists $conf->{max_age}) { $conf->{max_age} = DEFAULT_MAX_AGE; } my $root = $app->routes; $root->add_shortcut(under_strict_cors => sub { my ($r, @args) = @_; return $r->bridge(@args)->to(cb => \&_strict); }); $root->add_shortcut(cors => sub { my ($r, @args) = @_; return $r->route(@args) ->via('OPTIONS') ->over( headers => { 'Origin' => qr/\S/ms, 'Access-Control-Request-Method' => qr/\S/ms, }, ) ->to(cb => sub { _preflight($conf, @_) }); }); $app->hook(after_render => \&_request); return; } sub _strict { my ($c) = @_; if (!defined $c->req->headers->origin) { return 1; # Not a CORS request, pass } my $r = $c->match->endpoint; while ($r) { if ($r->to->{'cors.origin'}) { return 1; # Endpoint configured for CORS, pass } $r = $r->parent; } # Endpoint not configured for CORS, block $c->render(status => 403, text => 'CORS Forbidden'); return; } sub _preflight { my ($conf, $c) = @_; my $method = $c->req->headers->header('Access-Control-Request-Method'); my $match; # use options defined on this route, if available if ($c->match->endpoint->to->{'cors.origin'}) { $match = $c->match; my $opt_methods = $match->endpoint->to->{'cors.methods'}; if ($opt_methods) { my %good_methods = map {lc $_ => 1} split /,\s*/ms, $opt_methods; if (!$good_methods{lc $method}) { return $c->render(status => 204, data => q{}); # Endpoint not found, ignore } } } # otherwise try to find route for actual request and use it options else { $match = Mojolicious::Routes::Match->new(root => $c->app->routes); $match->match($c, { method => $method, path => $c->req->url->path, }); if (!$match->endpoint) { return $c->render(status => 204, data => q{}); # Endpoint not found, ignore } } my %opt = _get_opt($match->endpoint); if (!$opt{origin}) { return $c->render(status => 204, data => q{}); # Endpoint not configured for CORS, ignore } my $h = $c->res->headers; $h->append(Vary => 'Origin'); my $origin = $c->req->headers->origin; if (ref $opt{origin} eq 'Regexp') { if ($origin !~ /$opt{origin}/ms) { return $c->render(status => 204, data => q{}); # Bad Origin: } } else { if (!grep {$_ eq q{*} || $_ eq $origin} split q{ }, $opt{origin}) { return $c->render(status => 204, data => q{}); # Bad Origin: } } my $headers = $c->req->headers->header('Access-Control-Request-Headers'); my @want_headers = map {lc} split /,\s*/ms, $headers // q{}; if (ref $opt{headers} eq 'Regexp') { if (grep {!/$opt{headers}/ms} @want_headers) { return $c->render(status => 204, data => q{}); # Bad Access-Control-Request-Headers: } } else { my %good_headers = map {lc $_ => 1} split /,\s*/ms, $opt{headers}; if (grep {!exists $good_headers{$_}} @want_headers) { return $c->render(status => 204, data => q{}); # Bad Access-Control-Request-Headers: } } $h->header('Access-Control-Allow-Origin' => $origin); $h->header('Access-Control-Allow-Methods' => $method); if (defined $headers) { $h->header('Access-Control-Allow-Headers' => $headers); } if ($opt{credentials}) { $h->header('Access-Control-Allow-Credentials' => 'true'); } if (defined $conf->{max_age}) { $h->header('Access-Control-Max-Age' => $conf->{max_age}); } return $c->render(status => 204, data => q{}); } sub _request { my ($c, $output, $format) = @_; my %opt = 
_get_opt($c->match->endpoint);
    if (!$opt{origin}) {
        return; # Endpoint not configured for CORS, ignore
    }

    my $h = $c->res->headers;
    $h->append(Vary => 'Origin');

    my $origin = $c->req->headers->origin;
    if (!defined $origin) {
        return; # Not a CORS request, ignore
    }
    if (ref $opt{origin} eq 'Regexp') {
        if ($origin !~ /$opt{origin}/ms) {
            return; # Bad Origin:
        }
    }
    else {
        if (!grep {$_ eq q{*} || $_ eq $origin} split q{ }, $opt{origin}) {
            return; # Bad Origin:
        }
    }

    $h->header('Access-Control-Allow-Origin' => $origin);
    if ($opt{credentials}) {
        $h->header('Access-Control-Allow-Credentials' => 'true');
    }
    if ($opt{expose}) {
        $h->header('Access-Control-Expose-Headers' => $opt{expose});
    }

    return;
}

sub _get_opt {
    my ($r) = @_;
    my %opt;
    while ($r) {
        for my $name (qw( origin credentials expose headers )) {
            if (!exists $opt{$name} && exists $r->to->{"cors.$name"}) {
                $opt{$name} = $r->to->{"cors.$name"};
            }
        }
        $r = $r->parent;
    }
    return %opt;
}

1; # Magic true value required at end of module
__END__

=encoding utf8

=head1 NAME

Mojolicious::Plugin::SecureCORS - Complete control over CORS

=head1 SYNOPSIS

    # in Mojolicious app
    sub startup {
        my $app = shift;
        …

        # load and configure
        $app->plugin('SecureCORS');
        $app->plugin('SecureCORS', { max_age => undef });

        # set app-wide CORS defaults
        $app->routes->to('cors.credentials'=>1);

        # set default CORS options for nested routes
        $r = $r->under(…, {'cors.origin' => '*'}, …);

        # set CORS options for this route (at least "origin" option must be
        # defined to allow CORS, either here or in parent routes)
        $r->get(…, {'cors.origin' => '*'}, …);
        $r->route(…)->to('cors.origin' => '*');

        # allow non-simple (with preflight) CORS on this route
        $r->cors(…);

        # create bridge to protect all nested routes
        $r = $app->routes->under_strict_cors('/resource');

=head1 DESCRIPTION

L<Mojolicious::Plugin::SecureCORS> is a plugin that allows you to configure
Cross-Origin Resource Sharing for routes in a L<Mojolicious> app.

Implements this spec: L<http://www.w3.org/TR/2014/REC-cors-20140116/>.

=head2 SECURITY

Don't use the lazy C<< 'cors.origin'=>'*' >> for resources which should be
available only on an intranet or which behave differently when accessed from
an intranet - otherwise a malicious website opened in a browser running on a
workstation inside the intranet will get access to these resources.

Don't use the lazy C<< 'cors.origin'=>'*' >> for resources which should be
available only to some known websites - otherwise another malicious website
will be able to attack your site by injecting JavaScript into the victim's
browser.

Consider using C<under_strict_cors()> - it won't "save" you but it may help.

=head1 INTERFACE

=over

=item CORS options

To allow CORS on some route you should define the relevant CORS options for
that route. These options will be processed automatically using the
L<Mojolicious/"after_render"> hook and result in adding the corresponding
HTTP headers to the response.

Options should be added to the default parameters of the route or its parent
routes. Defining CORS options on a parent route allows you to set predefined
defaults for its nested routes.

=over

=item C<< 'cors.origin' => '*' >>

=item C<< 'cors.origin' => 'null' >>

=item C<< 'cors.origin' => 'http://example.com' >>

=item C<< 'cors.origin' => 'https://example.com http://example.com:8080 null' >>

=item C<< 'cors.origin' => qr/\.local\z/ms >>

=item C<< 'cors.origin' => undef >> (default)

This option is required to enable CORS support for the route.

Only matched origins will be allowed to process the returned response
(C<'*'> will match any origin).
When set to a false value no origins will match, so it effectively disables
CORS support (this may be useful if you've set this option on a parent
route).

=item C<< 'cors.credentials' => 1 >>

=item C<< 'cors.credentials' => undef >> (default)

While handling a preflight request, a true/false value will tell the browser
to send or not to send credentials (cookies, HTTP auth, SSL certificate)
with the actual request.

While handling a simple/actual request, if set to false and the browser has
sent credentials, the browser will not be allowed to process the returned
response.

=item C<< 'cors.expose' => 'X-Some' >>

=item C<< 'cors.expose' => 'X-Some, X-Other, Server' >>

=item C<< 'cors.expose' => undef >> (default)

Allows access to these headers while processing the returned response.

These headers don't need to be included in this option:

    Cache-Control
    Content-Language
    Content-Type
    Expires
    Last-Modified
    Pragma

=item C<< 'cors.headers' => 'X-Requested-With' >>

=item C<< 'cors.headers' => 'X-Requested-With, Content-Type, X-Some' >>

=item C<< 'cors.headers' => qr/\AX-|\AContent-Type\z/msi >>

=item C<< 'cors.headers' => undef >> (default)

Defines the headers the browser is allowed to send. Works only for
non-simple CORS because it requires a preflight.

=item C<< 'cors.methods' => 'POST' >>

=item C<< 'cors.methods' => 'GET, POST, PUT, DELETE' >>

This option can be used only on a C<cors()> route. It's needed in complex
cases when it's impossible to automatically detect the CORS options while
handling the preflight - see below for an example.

=back

=item $r->cors(…)

Accepts the same params as L<Mojolicious::Routes::Route/"route">.

Adds a handler for preflight (OPTIONS) CORS requests - it's required to
allow non-simple CORS requests on the given path.

To be able to respond to a preflight request this handler should know the
CORS options for the requested method/path. In most cases it will be able to
detect them automatically by searching for the route defined for the same
path and the HTTP method given in the CORS request. Example:

    $r->cors('/rpc');
    $r->get('/rpc', { 'cors.origin' => 'http://example.com' });
    $r->put('/rpc', { 'cors.origin' => qr/\.local\z/ms });

But in some cases the target route can't be detected, for example if you've
defined several routes for the same path using different conditions which
can't be checked while processing the preflight request because the browser
hasn't sent enough information yet (like the C<Content-Type:> value which
will be used in the actual request). In this case you should manually define
all relevant CORS options on the preflight route - in addition to the CORS
options defined on the target routes. Because you can't know which one of
the defined routes will be used to handle the actual request, if they use
different CORS options you should combine them in the least restrictive way
on the preflight route. Example:

    $r->cors('/rpc')->to(
        'cors.methods'     => 'GET, POST',
        'cors.origin'      => 'http://localhost http://example.com',
        'cors.credentials' => 1,
    );
    $r->any([qw(GET POST)] => '/rpc',
        headers => { 'Content-Type' => 'application/json-rpc' },
    )->to('jsonrpc#handler',
        'cors.origin' => 'http://localhost',
    );
    $r->post('/rpc',
        headers => { 'Content-Type' => 'application/soap+xml' },
    )->to('soaprpc#handler',
        'cors.origin'      => 'http://example.com',
        'cors.credentials' => 1,
    );

This route uses the "headers" condition, so you can add your own handler for
the OPTIONS method on the same path after this one, to handle non-CORS
OPTIONS requests on the same path.

=item $bridge = $r->under_strict_cors(…)

Accepts the same params as L<Mojolicious::Routes::Route/"bridge">.
Under the returned bridge, CORS requests to any route which isn't configured
for CORS (i.e. doesn't have C<'cors.origin'> in the route's default
parameters) will be rendered as "403 Forbidden".

This feature should make it harder to attack your site by injecting
JavaScript into the victim's browser on a vulnerable website.
More details:
L<https://code.google.com/p/html5security/wiki/CrossOriginRequestSecurity#Processing_rogue_COR:>.

=back

=head1 OPTIONS

L<Mojolicious::Plugin::SecureCORS> supports the following options.

=head2 max_age

    $app->plugin('SecureCORS', { max_age => undef });

Value for the C<Access-Control-Max-Age:> header sent by the preflight
OPTIONS handler. If set to C<undef> this header will not be sent.

Default is 1800 (30 minutes).

=head1 METHODS

L<Mojolicious::Plugin::SecureCORS> inherits all methods from
L<Mojolicious::Plugin> and implements the following new ones.

=head2 register

    $plugin->register(Mojolicious->new);
    $plugin->register(Mojolicious->new, { max_age => undef });

Register hooks in a L<Mojolicious> application.

=head1 SEE ALSO

L<Mojolicious>.

=head1 BUGS AND LIMITATIONS

No bugs have been reported.

=head1 SUPPORT

Please report any bugs or feature requests through the web interface at
L<http://rt.cpan.org/NoAuth/ReportBug.html?Queue=Mojolicious-Plugin-SecureCORS>.
I will be notified, and then you'll automatically be notified of progress
on your bug as I make changes.

You can also look for information at:

=over

=item * RT: CPAN's request tracker

L<http://rt.cpan.org/NoAuth/Bugs.html?Dist=Mojolicious-Plugin-SecureCORS>

=item * AnnoCPAN: Annotated CPAN documentation

L<http://annocpan.org/dist/Mojolicious-Plugin-SecureCORS>

=item * CPAN Ratings

L<http://cpanratings.perl.org/d/Mojolicious-Plugin-SecureCORS>

=item * Search CPAN

L<http://search.cpan.org/dist/Mojolicious-Plugin-SecureCORS/>

=back

=head1 AUTHOR

Alex Efros C<< <[email protected]> >>

=head1 LICENSE AND COPYRIGHT

Copyright 2014 Alex Efros <[email protected]>.

This program is distributed under the MIT (X11) License:
L<http://www.opensource.org/licenses/mit-license.php>

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.