MixCPT
Collection
Rethinking Multilingual Continual Pretraining: Data Mixing for Adapting LLMs Across Languages and Resources
<?php
namespace App\Model\Settings;
use Illuminate\Database\Eloquent\Model;
use App\Model\Tables\TableConsumer;
class SystemInvoice extends Model
{
// protected $connection = 'settings';
protected $table = 'system_invoice';
protected $primaryKey = 'invoice_id';
public $timestamps = false;
public function groupInfo()
{
return $this->belongsTo(SystemInvoiceGroup::class, 'group_id', 'group_id');
}
public function detailsInfo()
{
return $this->hasMany(SystemInvoiceDetails::class, 'invoice_id', 'invoice_id');
}
public function invoiceSupplierInfo()
{
return $this->hasOne(TableConsumer::class, 'consumer_id', 'invoice_supplier');
}
public function invoiceCustomerInfo()
{
return $this->hasOne(TableConsumer::class, 'consumer_id', 'invoice_customer');
}
} |
#![allow(non_camel_case_types)]
use crate::{
arch,
arch::{Architecture, NativeArch},
bindings::{kernel, kernel::sock_filter, signal},
kernel_abi::{common, Ptr},
};
use std::mem::{self, size_of};
#[repr(C)]
pub struct robust_list<Arch: Architecture> {
pub next: Ptr<Arch::unsigned_word, robust_list<Arch>>,
}
/// Clone and Copy are implemented manually because `#[derive]` would add an
/// `Arch: Clone` / `Arch: Copy` bound on the type parameter that is not wanted here.
impl<Arch: Architecture> Clone for robust_list<Arch> {
fn clone(&self) -> Self {
robust_list { next: self.next }
}
}
impl<Arch: Architecture> Copy for robust_list<Arch> {}
assert_eq_size!(kernel::robust_list, robust_list<NativeArch>);
assert_eq_align!(kernel::robust_list, robust_list<NativeArch>);
#[repr(C)]
pub struct robust_list_head<Arch: Architecture> {
pub list: robust_list<Arch>,
pub futex_offset: Arch::signed_long,
pub list_op_pending: Ptr<Arch::unsigned_word, robust_list<Arch>>,
}
/// Clone and Copy are implemented manually because `#[derive]` would add an
/// `Arch: Clone` / `Arch: Copy` bound on the type parameter that is not wanted here.
impl<Arch: Architecture> Clone for robust_list_head<Arch> {
fn clone(&self) -> Self {
robust_list_head {
list: self.list,
futex_offset: self.futex_offset,
list_op_pending: self.list_op_pending,
}
}
}
impl<Arch: Architecture> Copy for robust_list_head<Arch> {}
assert_eq_size!(kernel::robust_list_head, robust_list_head<NativeArch>);
assert_eq_align!(kernel::robust_list_head, robust_list_head<NativeArch>);
#[repr(C)]
#[derive(Copy, Clone, Default)]
pub struct sock_fprog<Arch: Architecture> {
pub len: u16,
pub _padding: Arch::FPROG_PAD_ARR,
pub filter: Ptr<Arch::unsigned_word, sock_filter>,
}
assert_eq_size!(kernel::sock_fprog, sock_fprog<NativeArch>);
assert_eq_align!(kernel::sock_fprog, sock_fprog<NativeArch>);
#[repr(C)]
#[derive(Copy, Clone, Default)]
pub struct kernel_sigaction<Arch: Architecture> {
pub k_sa_handler: Ptr<Arch::unsigned_word, u8>,
pub sa_flags: Arch::unsigned_long,
pub sa_restorer: Ptr<Arch::unsigned_word, u8>,
/// Defined as a plain u64, which matches x86 and x64 and keeps things simple.
/// This definition might cause problems elsewhere, e.g. on AArch64.
pub sa_mask: u64,
}
#[repr(C)]
#[derive(Copy, Clone, Default)]
pub struct mmap_args<Arch: Architecture> {
pub addr: Ptr<Arch::unsigned_word, u8>,
pub len: Arch::size_t,
pub prot: i32,
pub flags: i32,
pub fd: i32,
pub __pad: Arch::STD_PAD_ARR,
pub offset: Arch::off_t,
}
#[repr(C)]
pub union sigval_t<Arch: Architecture> {
pub sival_int: i32,
pub sival_ptr: Ptr<Arch::unsigned_word, u8>,
}
impl<Arch: Architecture> Clone for sigval_t<Arch> {
fn clone(&self) -> Self {
unsafe {
sigval_t {
sival_ptr: self.sival_ptr,
}
}
}
}
impl<Arch: Architecture> Copy for sigval_t<Arch> {}
#[repr(C)]
#[derive(Copy, Clone, Default)]
pub struct siginfo_kill {
pub si_pid_: common::pid_t,
pub si_uid_: common::uid_t,
}
#[repr(C)]
pub struct siginfo_timer<Arch: Architecture> {
pub si_tid_: i32,
pub si_overrun_: i32,
pub si_sigval_: sigval_t<Arch>,
}
impl<Arch: Architecture> Clone for siginfo_timer<Arch> {
fn clone(&self) -> Self {
siginfo_timer {
si_tid_: self.si_tid_,
si_overrun_: self.si_overrun_,
si_sigval_: self.si_sigval_,
}
}
}
impl<Arch: Architecture> Copy for siginfo_timer<Arch> {}
#[repr(C)]
pub struct siginfo_rt<Arch: Architecture> {
pub si_pid_: common::pid_t,
pub si_uid_: common::uid_t,
pub si_sigval_: sigval_t<Arch>,
}
impl<Arch: Architecture> Clone for siginfo_rt<Arch> {
fn clone(&self) -> Self {
siginfo_rt {
si_pid_: self.si_pid_,
si_uid_: self.si_uid_,
si_sigval_: self.si_sigval_,
}
}
}
impl<Arch: Architecture> Copy for siginfo_rt<Arch> {}
#[repr(C)]
#[derive(Default)]
pub struct siginfo_sigchld<Arch: Architecture> {
pub si_pid_: common::pid_t,
pub si_uid_: common::uid_t,
pub si_status_: i32,
pub si_utime_: Arch::sigchld_clock_t,
pub si_stime_: Arch::sigchld_clock_t,
}
impl<Arch: Architecture> Clone for siginfo_sigchld<Arch> {
fn clone(&self) -> Self {
siginfo_sigchld {
si_pid_: self.si_pid_,
si_uid_: self.si_uid_,
si_status_: self.si_status_,
si_utime_: self.si_utime_,
si_stime_: self.si_stime_,
}
}
}
impl<Arch: Architecture> Copy for siginfo_sigchld<Arch> {}
#[repr(C)]
#[derive(Default)]
pub struct siginfo_sigfault<Arch: Architecture> {
pub si_addr_: Ptr<Arch::unsigned_word, u8>,
pub si_addr_lsb_: Arch::signed_short,
}
impl<Arch: Architecture> Clone for siginfo_sigfault<Arch> {
fn clone(&self) -> Self {
siginfo_sigfault {
si_addr_: self.si_addr_,
si_addr_lsb_: self.si_addr_lsb_,
}
}
}
impl<Arch: Architecture> Copy for siginfo_sigfault<Arch> {}
#[repr(C)]
#[derive(Default)]
pub struct siginfo_sigpoll<Arch: Architecture> {
pub si_band_: Arch::signed_long,
pub si_fd_: i32,
}
impl<Arch: Architecture> Clone for siginfo_sigpoll<Arch> {
fn clone(&self) -> Self {
siginfo_sigpoll {
si_band_: self.si_band_,
si_fd_: self.si_fd_,
}
}
}
impl<Arch: Architecture> Copy for siginfo_sigpoll<Arch> {}
#[repr(C)]
#[derive(Default)]
pub struct siginfo_sigsys<Arch: Architecture> {
pub _call_addr: Ptr<Arch::unsigned_word, u8>,
pub _syscall: i32,
pub _arch: u32,
}
impl<Arch: Architecture> Clone for siginfo_sigsys<Arch> {
fn clone(&self) -> Self {
siginfo_sigsys {
_call_addr: self._call_addr,
_syscall: self._syscall,
_arch: self._arch,
}
}
}
impl<Arch: Architecture> Copy for siginfo_sigsys<Arch> {}
#[repr(C)]
pub union siginfo_sifields<Arch: Architecture> {
pub padding: Arch::SIGINFO_PADDING_ARR,
pub _kill: siginfo_kill,
pub _timer: siginfo_timer<Arch>,
pub _rt: siginfo_rt<Arch>,
pub _sigchld: siginfo_sigchld<Arch>,
pub _sigfault: siginfo_sigfault<Arch>,
pub _sigpoll: siginfo_sigpoll<Arch>,
pub _sigsys: siginfo_sigsys<Arch>,
}
impl<Arch: Architecture> Clone for siginfo_sifields<Arch> {
fn clone(&self) -> Self {
unsafe {
siginfo_sifields {
padding: self.padding,
}
}
}
}
impl<Arch: Architecture> Copy for siginfo_sifields<Arch> {}
#[repr(C)]
pub struct siginfo_t<Arch: Architecture> {
pub si_signo: i32,
pub si_errno: i32,
pub si_code: i32,
pub _sifields: siginfo_sifields<Arch>,
}
impl<Arch: Architecture> Clone for siginfo_t<Arch> {
fn clone(&self) -> Self {
siginfo_t {
si_signo: self.si_signo,
si_errno: self.si_errno,
si_code: self.si_code,
_sifields: self._sifields,
}
}
}
impl<Arch: Architecture> Copy for siginfo_t<Arch> {}
impl<Arch: Architecture> Default for siginfo_t<Arch> {
fn default() -> Self {
unsafe { mem::zeroed() }
}
}
assert_eq_size!(kernel::siginfo_t, siginfo_t<NativeArch>);
assert_eq_align!(kernel::siginfo_t, siginfo_t<NativeArch>);
// Not necessary as these are also generated by bindgen but just to be safe
assert_eq_size!(signal::siginfo_t, siginfo_t<NativeArch>);
assert_eq_align!(signal::siginfo_t, siginfo_t<NativeArch>);
#[repr(C)]
#[derive(Copy, Default)]
pub struct iovec<Arch: Architecture> {
pub iov_base: Ptr<Arch::unsigned_word, u8>,
pub iov_len: Arch::size_t,
}
impl<Arch: Architecture> Clone for iovec<Arch> {
fn clone(&self) -> Self {
Self {
iov_base: self.iov_base,
iov_len: self.iov_len,
}
}
}
assert_eq_size!(kernel::iovec, iovec<NativeArch>);
assert_eq_align!(kernel::iovec, iovec<NativeArch>);
#[repr(C)]
#[derive(Copy, Default)]
pub struct msghdr<Arch: Architecture> {
pub msg_name: Ptr<Arch::unsigned_word, u8>,
pub msg_namelen: common::socklen_t,
pub _padding: Arch::STD_PAD_ARR,
pub msg_iov: Ptr<Arch::unsigned_word, iovec<Arch>>,
pub msg_iovlen: Arch::size_t,
pub msg_control: Ptr<Arch::unsigned_word, u8>,
pub msg_controllen: Arch::size_t,
pub msg_flags: i32,
}
impl<Arch: Architecture> Clone for msghdr<Arch> {
fn clone(&self) -> Self {
Self {
msg_name: self.msg_name,
msg_namelen: self.msg_namelen,
_padding: self._padding,
msg_iov: self.msg_iov,
msg_iovlen: self.msg_iovlen,
msg_control: self.msg_control,
msg_controllen: self.msg_controllen,
msg_flags: self.msg_flags,
}
}
}
assert_eq_size!(kernel::msghdr, msghdr<NativeArch>);
assert_eq_align!(kernel::msghdr, msghdr<NativeArch>);
#[repr(C)]
#[derive(Copy, Default)]
pub struct mmsghdr<Arch: Architecture> {
pub msg_hdr: msghdr<Arch>,
pub msg_len: u32,
}
impl<Arch: Architecture> Clone for mmsghdr<Arch> {
fn clone(&self) -> Self {
Self {
msg_hdr: self.msg_hdr.clone(),
msg_len: self.msg_len,
}
}
}
assert_eq_size!(kernel::mmsghdr, mmsghdr<NativeArch>);
assert_eq_align!(kernel::mmsghdr, mmsghdr<NativeArch>);
#[repr(C)]
#[derive(Copy, Clone, Default)]
pub struct cmsghdr<Arch: Architecture> {
pub cmsg_len: Arch::size_t,
pub cmsg_level: i32,
pub cmsg_type: i32,
}
assert_eq_size!(kernel::cmsghdr, cmsghdr<NativeArch>);
assert_eq_align!(kernel::cmsghdr, cmsghdr<NativeArch>);
pub const fn cmsg_data_offset<Arch: Architecture>() -> usize {
cmsg_align::<Arch>(size_of::<cmsghdr<Arch>>())
}
pub const fn cmsg_align<Arch: Architecture>(len: usize) -> usize {
(len + size_of::<Arch::size_t>() - 1) & !(size_of::<Arch::size_t>() - 1)
}
pub const fn cmsg_space<Arch: Architecture>(len: usize) -> usize {
cmsg_align::<Arch>(size_of::<cmsghdr<Arch>>()) + cmsg_align::<Arch>(len)
}
pub const fn cmsg_len<Arch: Architecture>(len: usize) -> usize {
cmsg_align::<Arch>(size_of::<cmsghdr<Arch>>()) + len
}
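// Illustrative worked example (not part of the original source): on a 64-bit
// architecture where size_of::<Arch::size_t>() == 8 and cmsghdr<Arch> is 16 bytes
// (8 + 4 + 4), cmsg_align(16) == 16, so cmsg_space(4) == 16 + 8 == 24 and
// cmsg_len(4) == 16 + 4 == 20, matching the kernel's CMSG_SPACE(4) and CMSG_LEN(4).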
#[repr(C)]
#[derive(Copy, Clone, Default)]
pub struct pselect6_arg6<Arch: Architecture> {
pub ss: Ptr<Arch::unsigned_word, Arch::kernel_sigset_t>,
pub ss_len: Arch::size_t,
}
#[repr(C)]
#[derive(Copy, Clone, Default)]
pub struct select_args<Arch: Architecture> {
pub n_fds: i32,
pub __pad: Arch::STD_PAD_ARR,
pub read_fds: Ptr<Arch::unsigned_word, Arch::fd_set>,
pub write_fds: Ptr<Arch::unsigned_word, Arch::fd_set>,
pub except_fds: Ptr<Arch::unsigned_word, Arch::fd_set>,
pub timeout: Ptr<Arch::unsigned_word, Arch::timeval>,
}
#[repr(C)]
#[derive(Copy, Clone, Default)]
pub struct __user_cap_header_struct {
pub version: u32,
pub pid: i32,
}
assert_eq_size!(kernel::__user_cap_header_struct, __user_cap_header_struct);
assert_eq_align!(kernel::__user_cap_header_struct, __user_cap_header_struct);
#[repr(C)]
#[derive(Copy, Clone, Default)]
pub struct __user_cap_data_struct {
pub effective: u32,
pub permitted: u32,
pub inheritable: u32,
}
assert_eq_size!(kernel::__user_cap_data_struct, __user_cap_data_struct);
assert_eq_align!(kernel::__user_cap_data_struct, __user_cap_data_struct);
#[repr(C)]
#[derive(Copy, Clone, Default)]
pub struct xt_counters {
pub pcnt: u64,
pub bcnt: u64,
}
assert_eq_size!(kernel::xt_counters, xt_counters);
assert_eq_align!(kernel::xt_counters, xt_counters);
#[repr(C)]
#[derive(Copy, Clone, Default)]
pub struct setsockopt_args<Arch: Architecture> {
pub sockfd: Arch::signed_long,
pub level: Arch::signed_long,
pub optname: Arch::signed_long,
pub optval: Ptr<Arch::unsigned_word, u8>,
pub optlen: Arch::signed_long,
}
#[repr(C)]
#[derive(Copy, Clone, Default)]
pub struct ipt_replace<Arch: Architecture> {
pub name: [u8; 32],
pub valid_hook: u32,
pub num_entries: u32,
pub size: u32,
pub hook_entry: [u32; 5],
pub underflow: [u32; 5],
pub num_counters: u32,
pub counters: Ptr<Arch::unsigned_word, xt_counters>,
// Plus hangoff here
}
// @TODO: "The corresponding header requires -fpermissive, which we don't pass. Skip this check"
// assert_eq_size!(kernel::ipt_replace, ipt_replace<NativeArch>);
// assert_eq_align!(kernel::ipt_replace, ipt_replace<NativeArch>);
#[repr(C)]
#[derive(Copy, Clone, Default)]
pub struct __sysctl_args<Arch: Architecture> {
pub name: Ptr<Arch::unsigned_word, i32>,
pub nlen: i32,
pub __pad: Arch::STD_PAD_ARR,
pub oldval: Ptr<Arch::unsigned_word, u8>,
pub oldlenp: Ptr<Arch::unsigned_word, Arch::size_t>,
pub newval: Ptr<Arch::unsigned_word, u8>,
pub newlen: Ptr<Arch::unsigned_word, Arch::size_t>,
pub __rd_unused: [Arch::unsigned_long; 4],
}
assert_eq_size!(kernel::__sysctl_args, __sysctl_args<NativeArch>);
assert_eq_align!(kernel::__sysctl_args, __sysctl_args<NativeArch>);
#[repr(C)]
pub struct sockaddr<Arch: Architecture> {
pub sa_family: Arch::unsigned_short,
pub sa_data: [u8; 14],
}
assert_eq_size!(kernel::sockaddr, sockaddr<NativeArch>);
assert_eq_align!(kernel::sockaddr, sockaddr<NativeArch>);
impl<Arch: Architecture> Clone for sockaddr<Arch> {
fn clone(&self) -> Self {
Self {
sa_family: self.sa_family,
sa_data: self.sa_data,
}
}
}
impl<Arch: Architecture> Copy for sockaddr<Arch> {}
#[repr(C)]
pub struct ifmap<Arch: Architecture> {
pub mem_start: Arch::unsigned_long,
pub mem_end: Arch::unsigned_long,
pub base_addr: Arch::unsigned_short,
pub irq: u8,
pub dma: u8,
pub port: u8,
}
assert_eq_size!(kernel::ifmap, ifmap<NativeArch>);
assert_eq_align!(kernel::ifmap, ifmap<NativeArch>);
impl<Arch: Architecture> Clone for ifmap<Arch> {
fn clone(&self) -> Self {
Self {
mem_start: self.mem_start,
mem_end: self.mem_end,
base_addr: self.base_addr,
irq: self.irq,
dma: self.dma,
port: self.port,
}
}
}
impl<Arch: Architecture> Copy for ifmap<Arch> {}
#[repr(C)]
pub union ifs_ifsu<Arch: Architecture> {
pub raw_hdlc: Ptr<Arch::unsigned_word, u8>,
pub cisco: Ptr<Arch::unsigned_word, u8>,
pub fr: Ptr<Arch::unsigned_word, u8>,
pub fr_pvc: Ptr<Arch::unsigned_word, u8>,
pub fr_pvc_info: Ptr<Arch::unsigned_word, u8>,
pub sync: Ptr<Arch::unsigned_word, u8>,
pub tel: Ptr<Arch::unsigned_word, u8>,
}
impl<Arch: Architecture> Clone for ifs_ifsu<Arch> {
fn clone(&self) -> Self {
Self {
tel: unsafe { self.tel },
}
}
}
impl<Arch: Architecture> Copy for ifs_ifsu<Arch> {}
#[repr(C)]
pub struct if_settings<Arch: Architecture> {
pub type_: u32,
pub size: u32,
pub ifs_ifsu: ifs_ifsu<Arch>,
}
assert_eq_size!(kernel::if_settings, if_settings<NativeArch>);
assert_eq_align!(kernel::if_settings, if_settings<NativeArch>);
impl<Arch: Architecture> Clone for if_settings<Arch> {
fn clone(&self) -> Self {
Self {
type_: self.type_,
size: self.size,
ifs_ifsu: self.ifs_ifsu,
}
}
}
impl<Arch: Architecture> Copy for if_settings<Arch> {}
#[repr(C)]
pub union ifr_ifru<Arch: Architecture> {
pub ifru_addr: sockaddr<Arch>,
pub ifru_dstaddr: sockaddr<Arch>,
pub ifru_broadaddr: sockaddr<Arch>,
pub ifru_netmask: sockaddr<Arch>,
pub ifru_hwaddr: sockaddr<Arch>,
pub ifru_flags: Arch::signed_short,
pub ifru_ivalue: i32,
pub ifru_mtu: i32,
pub ifru_map: ifmap<Arch>,
pub ifru_slave: [u8; 16],
pub ifru_newname: [u8; 16],
pub ifru_data: Ptr<Arch::unsigned_word, u8>,
pub ifru_settings: if_settings<Arch>,
}
impl<Arch: Architecture> Clone for ifr_ifru<Arch> {
fn clone(&self) -> Self {
Self {
ifru_slave: unsafe { self.ifru_slave },
}
}
}
impl<Arch: Architecture> Copy for ifr_ifru<Arch> {}
#[repr(C)]
#[derive(Copy, Clone)]
pub union ifr_ifrn {
pub ifrn_name: [u8; 16],
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct ifreq<Arch: Architecture> {
pub ifr_ifrn: ifr_ifrn,
pub ifr_ifru: ifr_ifru<Arch>,
}
assert_eq_size!(kernel::ifreq, ifreq<NativeArch>);
assert_eq_align!(kernel::ifreq, ifreq<NativeArch>);
#[repr(C)]
pub union ifc_ifcu<Arch: Architecture> {
pub ifcu_buf: Ptr<Arch::unsigned_word, u8>,
pub ifcu_req: Ptr<Arch::unsigned_word, ifreq<Arch>>,
}
impl<Arch: Architecture> Clone for ifc_ifcu<Arch> {
fn clone(&self) -> Self {
Self {
ifcu_buf: unsafe { self.ifcu_buf },
}
}
}
impl<Arch: Architecture> Copy for ifc_ifcu<Arch> {}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct ifconf<Arch: Architecture> {
pub ifc_len: i32,
pub __pad: Arch::STD_PAD_ARR,
pub ifc_ifcu: ifc_ifcu<Arch>,
}
assert_eq_size!(kernel::ifconf, ifconf<NativeArch>);
assert_eq_align!(kernel::ifconf, ifconf<NativeArch>);
#[repr(C)]
#[derive(Copy, Clone, Default)]
pub struct sg_io_hdr<Arch: Architecture> {
pub interface_id: i32,
pub dxfer_direction: i32,
pub cmd_len: u8,
pub mx_sb_len: u8,
pub iovec_count: Arch::unsigned_short,
pub dxfer_len: u32,
pub dxferp: Ptr<Arch::unsigned_word, u8>,
pub cmdp: Ptr<Arch::unsigned_word, u8>,
pub sbp: Ptr<Arch::unsigned_word, u8>,
pub timeout: u32,
pub flags: u32,
pub pack_id: i32,
pub usr_ptr: Ptr<Arch::unsigned_word, u8>,
pub status: u8,
pub masked_status: u8,
pub msg_status: u8,
pub sb_len_wr: u8,
pub host_status: Arch::unsigned_short,
pub driver_status: Arch::unsigned_short,
pub resid: i32,
pub duration: u32,
pub info: u32,
}
assert_eq_size!(kernel::sg_io_hdr, sg_io_hdr<NativeArch>);
assert_eq_align!(kernel::sg_io_hdr, sg_io_hdr<NativeArch>);
#[repr(C)]
#[derive(Copy, Clone)]
pub struct iw_param {
pub value: i32,
pub fixed: u8,
pub disabled: u8,
pub flags: u16,
}
assert_eq_size!(kernel::iw_param, iw_param);
assert_eq_align!(kernel::iw_param, iw_param);
#[repr(C)]
pub struct iw_point<Arch: Architecture> {
pub pointer: Ptr<Arch::unsigned_word, u8>,
pub length: u16,
pub flags: u16,
}
assert_eq_size!(kernel::iw_point, iw_point<NativeArch>);
assert_eq_align!(kernel::iw_point, iw_point<NativeArch>);
impl<Arch: Architecture> Clone for iw_point<Arch> {
fn clone(&self) -> Self {
Self {
pointer: self.pointer,
length: self.length,
flags: self.flags,
}
}
}
impl<Arch: Architecture> Copy for iw_point<Arch> {}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct iw_freq {
pub m: i32,
pub e: i16,
pub i: u8,
pub flags: u8,
}
assert_eq_size!(kernel::iw_freq, iw_freq);
assert_eq_align!(kernel::iw_freq, iw_freq);
#[repr(C)]
#[derive(Copy, Clone)]
pub struct iw_quality {
pub qual: u8,
pub level: u8,
pub noise: u8,
pub updated: u8,
}
assert_eq_size!(kernel::iw_quality, iw_quality);
assert_eq_align!(kernel::iw_quality, iw_quality);
#[repr(C)]
pub union iwreq_data<Arch: Architecture> {
pub name: [u8; 16],
pub essid: iw_point<Arch>,
pub nwid: iw_param,
pub freq: iw_freq,
pub sens: iw_param,
pub bitrate: iw_param,
pub txpower: iw_param,
pub rts: iw_param,
pub frag: iw_param,
pub mode: u32,
pub retry: iw_param,
pub encoding: iw_point<Arch>,
pub power: iw_param,
pub qual: iw_quality,
pub ap_addr: sockaddr<Arch>,
pub addr: sockaddr<Arch>,
pub param: iw_param,
pub data: iw_point<Arch>,
}
assert_eq_size!(kernel::iwreq_data, iwreq_data<NativeArch>);
assert_eq_align!(kernel::iwreq_data, iwreq_data<NativeArch>);
impl<Arch: Architecture> Clone for iwreq_data<Arch> {
fn clone(&self) -> Self {
Self {
name: unsafe { self.name },
}
}
}
impl<Arch: Architecture> Copy for iwreq_data<Arch> {}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct iwreq<Arch: Architecture> {
pub ifr_ifrn: ifr_ifrn,
pub u: iwreq_data<Arch>,
}
assert_eq_size!(kernel::iwreq, iwreq<NativeArch>);
assert_eq_align!(kernel::iwreq, iwreq<NativeArch>);
#[repr(C)]
#[derive(Copy, Clone)]
pub struct linux_dirent<Arch: Architecture> {
pub d_ino: Arch::ino_t,
pub d_off: Arch::off_t,
pub d_reclen: u16,
/// Variable length
pub d_name: [u8; 1],
// Other stuff like d_type and pad
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct linux_dirent64 {
pub d_ino: arch::ino64_t,
pub d_off: arch::off64_t,
pub d_reclen: u16,
pub d_type: u8,
/// Variable length
pub d_name: [u8; 1],
}
#[repr(C)]
pub struct connect_args<Arch: Architecture> {
pub sockfd: Arch::signed_long,
pub addr: Ptr<Arch::unsigned_word, u8>,
pub addrlen: common::socklen_t,
}
#[repr(C)]
pub struct getsockopt_args<Arch: Architecture> {
pub sockfd: i32,
pub level: i32,
pub optname: i32,
pub __pad: Arch::STD_PAD_ARR,
pub optval: Ptr<Arch::unsigned_word, u8>,
pub optlen: Ptr<Arch::unsigned_word, common::socklen_t>,
}
#[repr(C)]
pub struct socketpair_args<Arch: Architecture> {
pub domain: i32,
pub type_: i32,
pub protocol: i32,
pub __pad: Arch::STD_PAD_ARR,
pub sv: Ptr<Arch::unsigned_word, i32>, // int sv[2]
}
#[repr(C)]
pub struct getsockname_args<Arch: Architecture> {
pub sockfd: i32,
pub __pad: Arch::STD_PAD_ARR,
pub addr: Ptr<Arch::unsigned_word, sockaddr<Arch>>,
pub addrlen: Ptr<Arch::unsigned_word, common::socklen_t>,
}
#[repr(C)]
pub struct recv_args<Arch: Architecture> {
pub sockfd: i32,
pub __pad: Arch::STD_PAD_ARR,
pub buf: Ptr<Arch::unsigned_word, u8>,
pub len: Arch::size_t,
pub flags: i32,
}
#[repr(C)]
pub struct recvfrom_args<Arch: Architecture> {
pub sockfd: Arch::signed_long,
pub buf: Ptr<Arch::unsigned_word, u8>,
pub len: Arch::size_t,
pub flags: Arch::signed_long,
pub src_addr: Ptr<Arch::unsigned_word, sockaddr<Arch>>,
pub addrlen: Ptr<Arch::unsigned_word, common::socklen_t>,
}
#[repr(C)]
pub struct accept_args<Arch: Architecture> {
pub sockfd: i32,
pub __pad: Arch::STD_PAD_ARR,
pub addr: Ptr<Arch::unsigned_word, sockaddr<Arch>>,
pub addrlen: Ptr<Arch::unsigned_word, common::socklen_t>,
}
#[repr(C)]
pub struct accept4_args<Arch: Architecture> {
pub sockfd: i32,
pub __pad: Arch::STD_PAD_ARR,
pub addr: Ptr<Arch::unsigned_word, sockaddr<Arch>>,
pub addrlen: Ptr<Arch::unsigned_word, common::socklen_t>,
pub flags: Arch::signed_long,
}
#[repr(C)]
pub struct sendmsg_args<Arch: Architecture> {
pub fd: i32,
pub __pad: Arch::STD_PAD_ARR,
pub msg: Ptr<Arch::unsigned_word, msghdr<Arch>>,
pub flags: i32,
}
#[repr(C)]
pub struct sendmmsg_args<Arch: Architecture> {
pub sockfd: i32,
pub __pad: Arch::STD_PAD_ARR,
pub msgvec: Ptr<Arch::unsigned_word, mmsghdr<Arch>>,
pub vlen: u32,
pub flags: u32,
}
#[repr(C)]
pub struct recvmsg_args<Arch: Architecture> {
pub fd: i32,
pub __pad: Arch::STD_PAD_ARR,
pub msg: Ptr<Arch::unsigned_word, msghdr<Arch>>,
pub flags: i32,
}
#[repr(C)]
pub struct recvmmsg_args<Arch: Architecture> {
pub sockfd: i32,
pub __pad: Arch::STD_PAD_ARR,
pub msgvec: Ptr<Arch::unsigned_word, mmsghdr<Arch>>,
pub vlen: u32,
pub flags: u32,
pub timeout: Ptr<Arch::unsigned_word, Arch::timespec>,
}
/// Some ipc calls require 7 params, so two of them are stashed into
/// one of these structs and a pointer to this is passed instead.
pub struct ipc_kludge_args<Arch: Architecture> {
pub msgbuf: Ptr<Arch::unsigned_word, u8>,
pub msgtype: Arch::signed_long,
}
#[repr(C)]
pub struct usbdevfs_ioctl<Arch: Architecture> {
pub ifno: i32,
pub ioctl_code: i32,
pub data: Ptr<Arch::unsigned_word, u8>,
}
assert_eq_size!(kernel::usbdevfs_ioctl, usbdevfs_ioctl<NativeArch>);
assert_eq_align!(kernel::usbdevfs_ioctl, usbdevfs_ioctl<NativeArch>);
#[repr(C)]
#[allow(non_snake_case)]
pub struct usbdevfs_ctrltransfer<Arch: Architecture> {
pub bRequestType: u8,
pub bRequest: u8,
pub wValue: u16,
pub wIndex: u16,
pub wLength: u16,
pub timeout: u32,
pub data: Ptr<Arch::unsigned_word, u8>,
}
assert_eq_size!(
kernel::usbdevfs_ctrltransfer,
usbdevfs_ctrltransfer<NativeArch>
);
assert_eq_align!(
kernel::usbdevfs_ctrltransfer,
usbdevfs_ctrltransfer<NativeArch>
);
#[repr(C)]
pub struct v4l2_timecode {
pub type_: u32,
pub flags: u32,
pub frames: u8,
pub seconds: u8,
pub minutes: u8,
pub hours: u8,
pub userbits: [u8; 4],
}
assert_eq_size!(kernel::v4l2_timecode, v4l2_timecode);
assert_eq_align!(kernel::v4l2_timecode, v4l2_timecode);
#[repr(C)]
pub union v4l2_m<Arch: Architecture> {
pub offset: u32,
pub userptr: Arch::unsigned_long,
pub planes: Ptr<Arch::unsigned_word, u8>,
pub fd: i32,
}
#[repr(C)]
pub struct v4l2_buffer<Arch: Architecture> {
pub index: u32,
pub type_: u32,
pub bytesused: u32,
pub flags: u32,
pub field: u32,
pub __pad: Arch::STD_PAD_ARR,
pub timestamp: Arch::timeval,
pub timecode: v4l2_timecode,
pub sequence: u32,
pub memory: u32,
pub m: v4l2_m<Arch>,
pub length: u32,
pub reserved2: u32,
pub reserved: u32,
}
assert_eq_size!(kernel::v4l2_buffer, v4l2_buffer<NativeArch>);
assert_eq_align!(kernel::v4l2_buffer, v4l2_buffer<NativeArch>);
#[repr(C)]
pub struct usbdevfs_urb<Arch: Architecture> {
pub type_: u8,
pub endpoint: u8,
pub status: i32,
pub flags: u32,
pub buffer: Ptr<Arch::unsigned_word, u8>,
pub buffer_length: i32,
pub actual_length: i32,
pub start_frame: i32,
pub usbdevfs_urb_u: usbdevfs_urb_u,
pub error_count: i32,
pub signr: u32,
pub usercontext: Ptr<Arch::unsigned_word, u8>,
pub iso_frame_desc: [usbdevfs_iso_packet_desc; 0],
}
assert_eq_size!(kernel::usbdevfs_urb, usbdevfs_urb<NativeArch>);
assert_eq_align!(kernel::usbdevfs_urb, usbdevfs_urb<NativeArch>);
#[repr(C)]
pub union usbdevfs_urb_u {
pub number_of_packets: i32,
pub stream_id: u32,
}
#[repr(C)]
#[derive(Clone)]
pub struct usbdevfs_iso_packet_desc {
pub length: u32,
pub actual_length: u32,
pub status: u32,
}
assert_eq_size!(kernel::usbdevfs_iso_packet_desc, usbdevfs_iso_packet_desc);
assert_eq_align!(kernel::usbdevfs_iso_packet_desc, usbdevfs_iso_packet_desc);
#[repr(C)]
#[derive(Copy, Clone, Default)]
pub struct bpf_attr_u1 {
pub map_type: u32,
pub key_size: u32,
pub value_size: u32,
pub max_entries: u32,
pub map_flags: u32,
pub inner_map_fd: u32,
pub numa_node: u32,
pub map_name: [u8; 16],
pub map_ifindex: u32,
pub btf_fd: u32,
pub btf_key_type_id: u32,
pub btf_value_type_id: u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr_u2_u1 {
pub value: common::ptr64<u8>,
pub next_key: common::ptr64<u8>,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr_u2 {
pub map_fd: u32,
pub key: common::ptr64<u8>,
pub bpf_attr_u2_u1: bpf_attr_u2_u1,
pub flags: u64,
}
#[repr(C, align(8))]
#[derive(Copy, Clone, Default)]
pub struct aligned_u64 {
pub __val: u64,
}
#[repr(C)]
#[derive(Copy, Clone, Default)]
pub struct bpf_attr_u3 {
pub prog_type: u32,
pub insn_cnt: u32,
pub insns: common::ptr64<u8>,
pub license: common::ptr64<u8>,
pub log_level: u32,
pub log_size: u32,
pub log_buf: common::ptr64<char>,
pub kern_version: u32,
pub prog_flags: u32,
pub prog_name: [u8; 16],
pub prog_ifindex: u32,
pub expected_attach_type: u32,
pub prog_btf_fd: u32,
pub func_info_rec_size: u32,
pub func_info: aligned_u64,
pub func_info_cnt: u32,
pub line_info_rec_size: u32,
pub line_info: aligned_u64,
pub line_info_cnt: u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr {
pub bpf_attr_u1: bpf_attr_u1,
pub bpf_attr_u2: bpf_attr_u2,
pub bpf_attr_u3: bpf_attr_u3,
}
|
{-# LANGUAGE DataKinds, DefaultSignatures, DeriveGeneric, FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses, PolyKinds, TypeFamilies, TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module Main where
import Data.Type.Natural ((:*), (:+), Nat (..), One)
import Data.Type.Ordinal
import GHC.Generics
class (Enum a, Enum b) => Iso a b where
toIso :: a -> b
toIso = toEnum . fromEnum
fromIso :: b -> a
fromIso = toEnum . fromEnum
data Xpto = Abc | Def | Ghi
deriving (Read, Show, Eq, Ord, Enum, Generic)
type family SizeG (a :: k) :: Nat
type instance SizeG (M1 D y a) = SizeG a
type instance SizeG (M1 C y a) = SizeG a
type instance SizeG (M1 S y a) = Z
type instance SizeG V1 = Z
type instance SizeG U1 = One
type instance SizeG (a :+: b) = SizeG a :+ SizeG b
type instance SizeG (a :*: b) = SizeG a :* SizeG b
type Size a = SizeG (Rep a)
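-- For example, Rep Xpto is a sum of three nullary constructors, so Size Xpto
-- reduces to the type-level natural 3; the Iso Xpto (Ordinal b) instance below
-- then maps Abc, Def and Ghi onto the three ordinals via fromEnum/toEnum.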
data TTTT = T | TT | TTT
deriving (Read, Show, Eq, Ord, Enum)
instance (b ~ Size Xpto) => Iso Xpto (Ordinal b)
-- instance Iso Xpto TTTT
|
package kr.feliz.tutorial_collection.lemonfox.widget.net
import kr.feliz.tutorial_collection.BuildConfig
import okhttp3.OkHttpClient
import retrofit2.Retrofit
import retrofit2.converter.gson.GsonConverterFactory
import retrofit2.converter.scalars.ScalarsConverterFactory
object RetrofitClient {
val chart: Retrofit
init {
chart = Retrofit.Builder()
.baseUrl(BuildConfig.CHART_API_SERVER_BASE_URL)
.addConverterFactory(ScalarsConverterFactory.create())
.addConverterFactory(GsonConverterFactory.create())
.client(OkHttpClient())
.build()
}
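// Example usage (ChartApi is a hypothetical Retrofit service interface, not defined in this file):
// val api = RetrofitClient.chart.create(ChartApi::class.java)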
} |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Main where
import Control.Monad
import Control.Monad.Primitive
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BSC
import Data.Maybe
import Data.Time.Clock
import Data.Time.Format
import Options.Applicative
import Pipes
import qualified Pipes.ByteString as P (toHandle)
import Pipes.Csv
import qualified Pipes.Prelude as P hiding (toHandle)
import System.IO
import System.Locale
import System.Random.MWC
import System.Random.MWC.Distributions
import Conversion
import Parsing
import Types
import Util
-- | We roughly model the balloon system stepwise
-- TimeStamps -> Some constant time step
-- Location -> Basic brownian-like motion with a tendency
-- Temperature -> Minor fluctuations each step
-- Observatory -> Each step send to observatories within a set range
-- Data Quality -> Randomly make an observatory log invalid data
-- We spread the values using a normal distribution
-- All values must be non-negative bar the location drift values and time
-- steps. Negative time steps can be used to simulate out-of-order data
data GenSettings = GenSettings
{ genStartTime :: TimeStamp -- Initial TimeStamp for generated data
, genTimeStep :: Double -- Average timestep in minutes
, genLocStepX :: Double -- Average x-coord distance to travel per timestep
, genLocStepY :: Double -- Average y-coord distance to travel per timestep
, genTempStep :: Double -- Variance in temperature for each timestep
, genBalloonRange :: Double -- Range of balloon broadcast
, genFailRate :: Double -- Failure rate at which to generate invalid data
, genSystemSize :: Int -- Size of the system
, genNumLines :: Int -- Number of lines to generate
, genFile :: FilePath -- File to write output to
}
parseGenSettings :: Parser GenSettings
parseGenSettings = GenSettings
<$> option auto
( long "start-time"
<> short 's'
<> value defaultStartTime
<> metavar "START_TIME"
<> help ("Time to start generating data from in format: " <> tsFormat))
<*> option auto
( long "time-step"
<> short 't'
<> value defaultTimeStep
<> metavar "TIME_STEP"
<> help "Average time step in minutes")
<*> option auto
( long "x-drift"
<> short 'x'
<> value defaultLocXStep
<> metavar "X_DRIFT"
<> help "Average x-coord drift per time step in metres")
<*> option auto
( long "y-drift"
<> short 'y'
<> value defaultLocYStep
<> metavar "Y_DRIFT"
<> help "Average y-coord drift per time step in metres")
<*> (nonNegative "temp-variance" <$> option auto
( long "temp-variance"
<> short 'p'
<> value defaultTempStep
<> metavar "TEMP_VARIANCE"
<> help "Variance in temperature in kelvin"))
<*> pure defaultBalloonRange
<*> (nonNegative "fail-rate" <$> option auto
( long "fail-rate"
<> short 'r'
<> value defaultFailRate
<> metavar "FAIL_RATE"
<> help "Rate at which observatories produce rubbish output [0,1)"))
<*> pure defaultSystemSize
<*> option auto
( long "num-lines"
<> short 'n'
<> value defaultNumLines
<> metavar "NUM_LINES"
<> help "Number of lines to output")
<*> strOption
( long "output-file"
<> short 'f'
<> value defaultOutputFile
<> metavar "OUTPUT_FILE"
<> help "File to output generated data to")
nonNegative :: String -> Double -> Double
nonNegative name x = if x >= 0 then x else error (name ++ " must be non-negative")
defaultStartTime :: TimeStamp
defaultTimeStep, defaultLocXStep, defaultLocYStep,
defaultTempStep, defaultBalloonRange, defaultFailRate :: Double
defaultSystemSize, defaultNumLines :: Int
defaultOutputFile :: String
defaultStartTime = TimeStamp $ fromJust $ parseTime defaultTimeLocale tsFormat "2014-06-08T10:30"
defaultTimeStep = 15
defaultLocXStep = 1200
defaultLocYStep = -1800
defaultTempStep = 0.1
defaultBalloonRange = 30000
defaultFailRate = 0.08
defaultSystemSize = 100000
defaultNumLines = 100000
defaultOutputFile = "gen-weather-sample.csv"
data ObservatoryOutput = Valid LogLine
| Invalid ByteString
instance ToRecord ObservatoryOutput where
toRecord (Valid ll) = toRecord ll
toRecord (Invalid x) = toRecord [x]
-- We use Metres and Kelvin for the System internally
data System = System
{ systemTime :: TimeStamp
, balloonLoc :: Location
, balloonRange :: Double
, systemTemp :: Temperature
, systemObss :: [(Observatory, Location)]
, systemSize :: Int
}
type Mutator x = Gen (PrimState IO) -> x -> IO x
data Mutators = Mutators
{ mutTime :: Mutator TimeStamp
, mutLoc :: Mutator Location
, mutTemp :: Mutator Temperature
, mutLine :: Mutator ObservatoryOutput
}
observatoryLocs :: [(Observatory, Location)]
observatoryLocs = [ (Observatory "AU", Location 10000 10000)
, (Observatory "FR", Location 80000 40000)
, (Observatory "US", Location 30000 50000)
, (Observatory "NZ", Location 10000 30000)
]
initialise :: GenSettings -> (Mutators, System)
initialise GenSettings{..} =
let mutTime g (TimeStamp x) = do
v <- normal genTimeStep (genTimeStep / 4) g
return $ TimeStamp $ addUTCTime (fromIntegral $ floor $ v * 60) x
mutLoc g (Location x y) = do
dx <- normal genLocStepX (abs genLocStepX / 4) g
dy <- normal genLocStepY (abs genLocStepY / 4) g
let x' = genSystemSize + x + round (dx :: Double)
let y' = genSystemSize + y + round dy
return $ Location (x' `mod` genSystemSize)
(y' `mod` genSystemSize)
mutTemp g x = do
dx <- normal 0 genTempStep g
return $ x + round dx
mutLine g x = do
c <- uniform g
return $ if c < genFailRate then Invalid "th1s1s1nv4l1d" else x
systemTime = genStartTime
balloonLoc = Location (genSystemSize `div` 2) (genSystemSize `div` 2)
balloonRange = genBalloonRange
systemTemp = 300
systemObss = observatoryLocs
systemSize = genSystemSize
in (Mutators{..}, System{..})
stepSystem :: GenIO -> Mutators -> System -> IO System
stepSystem g Mutators{..} System{..} = do
newTime <- mutTime g systemTime
newLoc <- mutLoc g balloonLoc
newTemp <- mutTemp g systemTemp
return $ System newTime newLoc balloonRange newTemp systemObss systemSize
runSystem :: GenIO -> Mutators -> System -> Producer System IO ()
runSystem g m s = do
yield s
s' <- liftIO $ stepSystem g m s
runSystem g m s'
outputSystem :: GenIO -> Mutators -> Pipe System ByteString IO ()
outputSystem g Mutators{..} = forever $ do
System{..} <- await
let systemBounds = Location systemSize systemSize
let inBounds x = distanceSquared (Just systemBounds) balloonLoc x < (balloonRange * balloonRange)
let inRange = filter (inBounds . snd) systemObss
let obsLine = LogLine systemTime balloonLoc systemTemp
let rawLines = map (Valid . convertMetreKelvinToObservatory . obsLine . fst) inRange
logLines <- liftIO $ mapM (mutLine g) rawLines
-- We filter because Data.Csv is fickle when dealing with commas, regardless of the delimiter
-- A custom encoder might be faster, but current speed seems more than adequate
each logLines >-> encodeWith weatherEncodeOptions >-> P.map (BSC.filter (/= '"'))
main :: IO ()
main = do
settings <- execParser (info parseGenSettings fullDesc)
g <- createSystemRandom
let (m, s) = initialise settings
withFile (genFile settings) WriteMode $ \h ->
runEffect $ runSystem g m s >-> outputSystem g m >-> P.take (genNumLines settings) >-> P.toHandle h
|
use embedded_ccs811::{prelude::*, FirmwareMode as FwMode};
use embedded_hal_mock::{
i2c::Transaction as I2cTrans,
pin::{Mock as PinMock, State as PinState, Transaction as PinTrans},
};
mod common;
use crate::common::{destroy, new, BitFlags as BF, Register, DEV_ADDR};
#[test]
fn can_create_and_destroy() {
let nwake = PinMock::new(&[]);
let sensor = new(&[], nwake);
destroy(sensor);
}
macro_rules! get_test {
($name:ident, $method:ident, $reg:ident, $value:expr, $expected:expr) => {
#[test]
fn $name() {
let nwake =
PinMock::new(&[PinTrans::set(PinState::Low), PinTrans::set(PinState::High)]);
let transactions = [
I2cTrans::write_read(DEV_ADDR, vec![Register::$reg], $value),
I2cTrans::write_read(DEV_ADDR, vec![Register::STATUS], vec![0]),
];
let mut sensor = new(&transactions, nwake);
assert_eq!($expected, sensor.$method().unwrap());
destroy(sensor);
}
};
}
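// For example, get_test!(can_get_hw_id, hardware_id, HW_ID, vec![0x81], 0x81) below
// expands to a #[test] fn can_get_hw_id that wakes the sensor, expects an HW_ID
// register read returning 0x81 followed by a STATUS read, and asserts that
// hardware_id() returns 0x81.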
get_test!(can_get_hw_id, hardware_id, HW_ID, vec![0x81], 0x81);
get_test!(
can_get_hw_version,
hardware_version,
HW_VERSION,
vec![0x12],
(1, 2)
);
get_test!(
can_get_fw_boot_version,
firmware_bootloader_version,
FW_BOOT_VERSION,
vec![0x12, 0x34],
(1, 2, 0x34)
);
get_test!(
can_get_fw_app_version,
firmware_application_version,
FW_APP_VERSION,
vec![0x12, 0x34],
(1, 2, 0x34)
);
read_status_test!(can_get_invalid_app, has_valid_app, false, 0);
read_status_test!(can_get_valid_app, has_valid_app, true, BF::APP_VALID);
read_status_test!(fw_mode_boot, firmware_mode, FwMode::Boot, 0);
read_status_test!(fw_mode_app, firmware_mode, FwMode::Application, BF::FW_MODE);
|
package baishuai.github.io.smsforward.forward
import baishuai.github.io.smsforward.forward.feige.FeigeApi
import baishuai.github.io.smsforward.forward.slack.SlackApi
import dagger.Subcomponent
import javax.inject.Singleton
/**
* Created by bai on 17-5-1.
*/
@Singleton
@Subcomponent(modules = arrayOf(ForwardModule::class))
interface ForwardComponent {
fun feigeApi(): FeigeApi
fun slackApi(): SlackApi
} |
import User from '../../src/models/Users'
import UserService from '../../src/services/user'
import * as dbHelper from '../db-helper'
const nonExistingUserId = '8ef5ad63b53b57dd876d6908'
async function createUser() {
const user = new User({
username: 'TravisKudix',
firstname: 'Travis',
lastname: 'Kudix',
email: '[email protected]',
password: 'Asd1',
})
return await UserService.create(user)
}
describe('user service', () => {
beforeEach(async () => {
await dbHelper.connect()
})
afterEach(async () => {
await dbHelper.clearDatabase()
})
afterAll(async () => {
await dbHelper.closeDatabase()
})
it('should create a new user', async () => {
expect.assertions(7)
const user = await createUser()
expect(user).toHaveProperty('_id')
expect(user).toHaveProperty('username')
expect(user).toHaveProperty('firstname')
expect(user).toHaveProperty('lastname')
expect(user).toHaveProperty('password')
expect(user).toHaveProperty('email')
const wrongUser = new User({
username: 'TravisKudix',
})
return UserService.create(wrongUser).catch((e) =>
expect(e.message).toMatch(
'User validation failed: email: Path `email` is required.'
)
)
})
it('should get a user with id', async () => {
expect.assertions(3)
const user = await createUser()
const found = await UserService.findById(user._id)
expect(found.username).toEqual(user.username)
expect(found._id).toEqual(user._id)
return await UserService.findById(nonExistingUserId).catch((e) =>
expect(e.message).toMatch('ValidationError')
)
})
it('should update user credentials', async () => {
expect.assertions(6)
const user = await createUser()
const update = {
username: 'TravisWolf',
firstname: 'Wolf',
lastname: 'Hart',
email: '[email protected]',
}
const updated = await UserService.update(user._id, update)
expect(updated).toHaveProperty('_id', user._id)
expect(updated.username).toEqual('TravisWolf')
expect(updated.firstname).toEqual('Wolf')
expect(updated.lastname).toEqual('Hart')
expect(updated.email).toEqual('[email protected]')
return UserService.update(nonExistingUserId, update).catch((e) =>
expect(e.message).toMatch(`User ${nonExistingUserId} not found`)
)
})
it('should find user by email', async () => {
const user = await createUser()
const foundUser = await UserService.findByEmail(user.email)
expect(foundUser?._id).toEqual(user._id)
})
it('should find or create google user', async () => {
const userInfo = {
username: 'TravisWolf',
firstname: 'Wolf',
lastname: 'Hart',
email: '[email protected]',
googleId: nonExistingUserId
}
const user = await UserService.findOrCreateUser(userInfo)
expect(user.email).toMatch(userInfo.email)
})
})
|
package com.seanshubin.kotlin.tryme.domain.parser
interface Tree<T> {
val name: String
fun values(): List<T>
fun toLines(depth: Int = 0): List<String>
fun indent(s: String, depth: Int) = " ".repeat(depth) + s
}
|
import { Component, OnInit } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import { AnnouncementService } from '@sunbird/core';
import { ResourceService, ToasterService, RouterNavigationService, ServerResponse } from '@sunbird/shared';
import * as _ from 'lodash';
import { IAnnouncementDetails } from '@sunbird/announcement';
import { IImpressionEventInput } from '@sunbird/telemetry';
/**
* The details popup component checks for the announcement details object
* present in announcement service. If object is undefined it calls API with
* the announcement id and gets the details.
*/
@Component({
selector: 'app-details-popup',
templateUrl: './details-popup.component.html',
styleUrls: ['./details-popup.component.css']
})
export class DetailsPopupComponent implements OnInit {
/**
* telemetryImpression
*/
telemetryImpression: IImpressionEventInput;
/**
* Contains unique announcement id
*/
announcementId: string;
/**
* Contains announcement details returned from API or object called from
* announcement service
*/
announcementDetails: IAnnouncementDetails;
/**
* This variable helps to show and hide the page loader.
* It is true by default so that the loader is displayed
* when the page is first visited, before any data is
* shown
*/
showLoader = true;
/**
* To make get announcement by id
*/
private announcementService: AnnouncementService;
/**
* To send activatedRoute.snapshot to routerNavigationService
*/
public activatedRoute: ActivatedRoute;
/**
* To call resource service which helps to use language constant
*/
public resourceService: ResourceService;
/**
* To show toaster(error, success etc) after any API calls
*/
private toasterService: ToasterService;
/**
* To navigate back to parent component
*/
public routerNavigationService: RouterNavigationService;
/**
* Constructor to create injected service(s) object
*
* Default method of DetailsPopupComponent class
*
* @param {AnnouncementService} announcementService Reference of AnnouncementService
* @param {ActivatedRoute} activatedRoute Reference of ActivatedRoute
* @param {ResourceService} resourceService Reference of ResourceService
* @param {ToasterService} toasterService Reference of ToasterService
* @param {RouterNavigationService} routerNavigationService Reference of routerNavigationService
*/
constructor(announcementService: AnnouncementService,
activatedRoute: ActivatedRoute,
resourceService: ResourceService,
toasterService: ToasterService,
routerNavigationService: RouterNavigationService) {
this.announcementService = announcementService;
this.activatedRoute = activatedRoute;
this.resourceService = resourceService;
this.toasterService = toasterService;
this.routerNavigationService = routerNavigationService;
}
/**
* This method checks whether the announcement service already holds details
* for the given announcement id. If not, it calls the get-announcement-by-id
* API with that announcement id and fetches the details of the
* announcement
*
* @param {string} announcementId announcement id
*/
getDetails(announcementId: string): void {
if (this.announcementService.announcementDetailsObject === undefined ||
this.announcementService.announcementDetailsObject.id !== announcementId) {
const option = { announcementId: this.announcementId };
this.announcementService.getAnnouncementById(option).subscribe(
(apiResponse: ServerResponse) => {
this.announcementDetails = apiResponse.result;
if (apiResponse.result.announcement) {
this.announcementDetails = apiResponse.result.announcement;
}
this.showLoader = false;
},
err => {
this.toasterService.error(this.resourceService.messages.emsg.m0005);
this.showLoader = false;
this.routerNavigationService.navigateToParentUrl(this.activatedRoute.snapshot);
}
);
} else {
this.showLoader = false;
this.announcementDetails = this.announcementService.announcementDetailsObject;
}
}
/**
* This method calls the getDetails method to show details
* of a particular announcement
*/
ngOnInit() {
this.activatedRoute.params.subscribe(params => {
this.announcementId = params.announcementId;
});
this.getDetails(this.announcementId);
this.telemetryImpression = {
context: {
env: this.activatedRoute.snapshot.data.telemetry.env
},
object: {
id: this.announcementId,
type: this.activatedRoute.snapshot.data.telemetry.object.type,
ver: this.activatedRoute.snapshot.data.telemetry.object.ver
},
edata: {
type: this.activatedRoute.snapshot.data.telemetry.type,
pageid: this.activatedRoute.snapshot.data.telemetry.pageid,
uri: '/announcement/outbox/' + this.announcementId,
}
};
}
}
|
/*
Navicat MySQL Data Transfer
Source Server : qqbaby
Source Server Version : 50553
Source Host : localhost:3306
Source Database : qqbaby_db
Target Server Type : MYSQL
Target Server Version : 50553
File Encoding : 65001
Date: 2017-09-08 20:54:42
*/
SET FOREIGN_KEY_CHECKS=0;
-- ----------------------------
-- Table structure for `tb_babay`
-- ----------------------------
DROP TABLE IF EXISTS `tb_babay`;
CREATE TABLE `tb_babay` (
`id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
`title` varchar(255) NOT NULL COMMENT '客片标题',
`subTitle` varchar(255) DEFAULT NULL COMMENT '副标题【简介】',
`srcImg` varchar(255) NOT NULL COMMENT '缩略图',
`status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示 2置顶 3精华 4热门】',
`sort` int(11) unsigned DEFAULT '0' COMMENT '自定义排序',
`total` bigint(20) unsigned DEFAULT '0' COMMENT '点击量统计',
`authorId` int(11) unsigned NOT NULL COMMENT '作者Id',
`uploadTime` datetime NOT NULL COMMENT '上传时间',
`parent` int(10) unsigned NOT NULL COMMENT '所属栏目【1婴儿 2宝宝 3儿童 4亲子 5活动】',
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=4 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for `tb_babaypic`
-- ----------------------------
DROP TABLE IF EXISTS `tb_babaypic`;
CREATE TABLE `tb_babaypic` (
`pic_id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
`pic_URL` text NOT NULL COMMENT '详情图url数组',
`pic_status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示】',
`linkURL` varchar(255) NOT NULL DEFAULT '1' COMMENT '图片链接',
`case_id` int(11) unsigned NOT NULL COMMENT '客片对照id',
PRIMARY KEY (`pic_id`)
) ENGINE=MyISAM AUTO_INCREMENT=91 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of tb_babaypic
-- ----------------------------
-- ----------------------------
-- Table structure for `tb_banner`
-- ----------------------------
DROP TABLE IF EXISTS `tb_banner`;
CREATE TABLE `tb_banner` (
`id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
`sort` int(11) unsigned DEFAULT '0' COMMENT '自定义排序',
`status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示】',
`parentId` int(11) unsigned NOT NULL COMMENT '所属广告位【1顶部 2最新 3婴儿 4宝宝 5儿童 6亲子 7 团队 8场馆】',
`linkURL` varchar(255) NOT NULL DEFAULT '1' COMMENT '图片链接',
`case_id` int(11) unsigned NOT NULL COMMENT '客片对照id',
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of tb_banner
-- ----------------------------
-- ----------------------------
-- Table structure for `tb_children`
-- ----------------------------
DROP TABLE IF EXISTS `tb_children`;
CREATE TABLE `tb_children` (
`id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
`title` varchar(255) NOT NULL COMMENT '客片标题',
`subTitle` varchar(255) DEFAULT NULL COMMENT '副标题【简介】',
`srcImg` varchar(255) NOT NULL COMMENT '缩略图',
`status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示 2置顶 3精华 4热门】',
`sort` int(11) unsigned DEFAULT '0' COMMENT '自定义排序',
`total` bigint(20) unsigned DEFAULT '0' COMMENT '点击量统计',
`authorId` int(11) unsigned NOT NULL COMMENT '作者Id',
`uploadTime` datetime NOT NULL COMMENT '上传时间',
`parent` int(10) unsigned NOT NULL COMMENT '所属栏目【1婴儿 2宝宝 3儿童 4亲子 5活动】',
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of tb_children
-- ----------------------------
-- ----------------------------
-- Table structure for `tb_childrenpic`
-- ----------------------------
DROP TABLE IF EXISTS `tb_childrenpic`;
CREATE TABLE `tb_childrenpic` (
`pic_id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
`pic_url` text NOT NULL COMMENT '详情图url数组',
`pic_status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示】',
`linkURL` varchar(255) NOT NULL DEFAULT '1' COMMENT '图片链接',
`case_id` int(11) unsigned NOT NULL COMMENT '客片对照id',
PRIMARY KEY (`pic_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of tb_childrenpic
-- ----------------------------
-- ----------------------------
-- Table structure for `tb_family`
-- ----------------------------
DROP TABLE IF EXISTS `tb_family`;
CREATE TABLE `tb_family` (
`id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
`title` varchar(255) NOT NULL COMMENT '客片标题',
`subTitle` varchar(255) DEFAULT NULL COMMENT '副标题【简介】',
`srcImg` varchar(255) NOT NULL COMMENT '缩略图',
`status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示 2置顶 3精华 4热门】',
`sort` int(11) unsigned DEFAULT '0' COMMENT '自定义排序',
`total` bigint(20) unsigned DEFAULT '0' COMMENT '点击量统计',
`authorId` int(11) unsigned NOT NULL COMMENT '作者Id',
`uploadTime` datetime NOT NULL COMMENT '上传时间',
`parent` int(10) unsigned NOT NULL COMMENT '所属栏目【1婴儿 2宝宝 3儿童 4亲子 5活动】',
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=30 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for `tb_familypic`
-- ----------------------------
DROP TABLE IF EXISTS `tb_familypic`;
CREATE TABLE `tb_familypic` (
`pic_id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
`pic_url` varchar(255) NOT NULL COMMENT '详情图url数组',
`pic_status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示】',
`linkURL` varchar(255) NOT NULL DEFAULT '1' COMMENT '图片链接',
`case_id` int(11) unsigned NOT NULL COMMENT '客片对照id',
`pic_tiem` datetime DEFAULT NULL COMMENT '上传时间',
PRIMARY KEY (`pic_id`)
) ENGINE=MyISAM AUTO_INCREMENT=671 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of tb_familypic
-- ----------------------------
-- ----------------------------
-- Table structure for `tb_infant`
-- ----------------------------
DROP TABLE IF EXISTS `tb_infant`;
CREATE TABLE `tb_infant` (
`id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
`title` varchar(255) NOT NULL COMMENT '客片标题',
`subTitle` varchar(255) DEFAULT NULL COMMENT '副标题【简介】',
`srcImg` varchar(255) NOT NULL COMMENT '缩略图',
`status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示 2置顶 3精华 4热门】',
`sort` int(11) unsigned DEFAULT '0' COMMENT '自定义排序',
`total` bigint(20) unsigned DEFAULT '0' COMMENT '点击量统计',
`authorId` int(11) unsigned NOT NULL COMMENT '作者Id',
`uploadTime` datetime NOT NULL COMMENT '上传时间',
`parent` int(10) unsigned NOT NULL COMMENT '所属栏目【1婴儿 2宝宝 3儿童 4亲子 5活动】',
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=4 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for `tb_infantpic`
-- ----------------------------
DROP TABLE IF EXISTS `tb_infantpic`;
CREATE TABLE `tb_infantpic` (
`pic_id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
`pic_URL` text NOT NULL COMMENT '详情图url数组',
`pic_status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示】',
`linkURL` varchar(255) NOT NULL DEFAULT '1' COMMENT '图片链接',
`case_id` int(11) unsigned NOT NULL COMMENT '客片对照id',
PRIMARY KEY (`pic_id`)
) ENGINE=MyISAM AUTO_INCREMENT=34 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for `tb_new`
-- ----------------------------
DROP TABLE IF EXISTS `tb_new`;
CREATE TABLE `tb_new` (
`id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
`title` varchar(255) NOT NULL COMMENT '客片标题',
`subTitle` varchar(255) DEFAULT NULL COMMENT '副标题【简介】',
`srcImg` varchar(255) NOT NULL COMMENT '缩略图',
`status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示 2置顶 3精华 4热门】',
`sort` int(11) unsigned DEFAULT '0' COMMENT '自定义排序',
`total` bigint(20) unsigned DEFAULT '0' COMMENT '点击量统计',
`authorId` int(11) unsigned NOT NULL COMMENT '作者Id',
`uploadTime` datetime NOT NULL COMMENT '上传时间',
`parent` int(10) unsigned NOT NULL COMMENT '所属栏目【1婴儿 2宝宝 3儿童 4亲子 5活动】',
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of tb_new
-- ----------------------------
-- ----------------------------
-- Table structure for `tb_newpic`
-- ----------------------------
DROP TABLE IF EXISTS `tb_newpic`;
CREATE TABLE `tb_newpic` (
`pic_id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
`pic_URL` text NOT NULL COMMENT '详情图url数组',
`pic_status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示】',
`linkURL` varchar(255) NOT NULL DEFAULT '1' COMMENT '图片链接',
`case_id` int(11) unsigned NOT NULL COMMENT '客片对照id',
PRIMARY KEY (`pic_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of tb_newpic
-- ----------------------------
-- ----------------------------
-- Table structure for `tb_team`
-- ----------------------------
DROP TABLE IF EXISTS `tb_team`;
CREATE TABLE `tb_team` (
`id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
`title` varchar(255) NOT NULL COMMENT '客片标题',
`subTitle` varchar(255) DEFAULT NULL COMMENT '副标题【简介】',
`srcImg` varchar(255) NOT NULL COMMENT '缩略图',
`status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示 2置顶 3精华 4热门】',
`sort` int(11) unsigned DEFAULT '0' COMMENT '自定义排序',
`total` bigint(20) unsigned DEFAULT '0' COMMENT '点击量统计',
`authorId` int(11) unsigned NOT NULL COMMENT '作者Id',
`uploadTime` datetime NOT NULL COMMENT '上传时间',
`parent` int(10) unsigned NOT NULL COMMENT '所属栏目【1婴儿 2宝宝 3儿童 4亲子 5活动】',
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of tb_team
-- ----------------------------
-- ----------------------------
-- Table structure for `tb_teampic`
-- ----------------------------
DROP TABLE IF EXISTS `tb_teampic`;
CREATE TABLE `tb_teampic` (
`pic_id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
`pic_URL` text NOT NULL COMMENT '详情图url数组',
`pic_status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示】',
`linkURL` varchar(255) NOT NULL DEFAULT '1' COMMENT '图片链接',
`case_id` int(11) unsigned NOT NULL COMMENT '客片对照id',
PRIMARY KEY (`pic_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of tb_teampic
-- ----------------------------
-- ----------------------------
-- Table structure for `tb_user`
-- ----------------------------
DROP TABLE IF EXISTS `tb_user`;
CREATE TABLE `tb_user` (
`user_id` int(10) NOT NULL AUTO_INCREMENT COMMENT 'id',
`user_name` varchar(255) NOT NULL COMMENT '账户',
`user_nickname` varchar(255) NOT NULL COMMENT '昵称',
`user_passwod` varchar(255) NOT NULL COMMENT '密码',
`user_group` int(10) unsigned NOT NULL DEFAULT '1' COMMENT '1普通管理员 2超级管理员',
PRIMARY KEY (`user_id`)
) ENGINE=MyISAM AUTO_INCREMENT=2 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of tb_user
-- ----------------------------
INSERT INTO `tb_user` VALUES ('1', 'admin', 'big黑钦', 'mq5555188', '1');
-- ----------------------------
-- Table structure for `tb_venue`
-- ----------------------------
DROP TABLE IF EXISTS `tb_venue`;
CREATE TABLE `tb_venue` (
`id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
`title` varchar(255) NOT NULL COMMENT '客片标题',
`subTitle` varchar(255) DEFAULT NULL COMMENT '副标题【简介】',
`srcImg` varchar(255) NOT NULL COMMENT '缩略图',
`status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示 2置顶 3精华 4热门】',
`sort` int(11) unsigned DEFAULT '0' COMMENT '自定义排序',
`total` bigint(20) unsigned DEFAULT '0' COMMENT '点击量统计',
`authorId` int(11) unsigned NOT NULL COMMENT '作者Id',
`uploadTime` datetime NOT NULL COMMENT '上传时间',
`parent` int(10) unsigned NOT NULL COMMENT '所属栏目【1婴儿 2宝宝 3儿童 4亲子 5活动】',
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of tb_venue
-- ----------------------------
-- ----------------------------
-- Table structure for `tb_venuepic`
-- ----------------------------
DROP TABLE IF EXISTS `tb_venuepic`;
CREATE TABLE `tb_venuepic` (
`pic_id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
`pic_URL` text NOT NULL COMMENT '详情图url数组',
`pic_status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示】',
`linkURL` varchar(255) NOT NULL DEFAULT '1' COMMENT '图片链接',
`case_id` int(11) unsigned NOT NULL COMMENT '客片对照id',
PRIMARY KEY (`pic_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of tb_venuepic
-- ----------------------------
|
import * as React from 'react';
import * as renderer from 'react-test-renderer';
import HomeScreen from './HomeScreen';
it('renders without crashing', () => {
const rendered = renderer.create(<HomeScreen />).toJSON();
expect(rendered).toBeTruthy();
});
|
<!--- 请清晰详细地描述你遇到的问题,描述问题时请给出芯片/BSP/工具链,RT-Thread版本,复现步骤及现象或者有条理地描述你的问题。在问题得到解决后,请及时关闭issue。欢迎到论坛提问:https://club.rt-thread.org/ -->
<!--- Please describe your problem clearly and in detail. When describing the problem, please use numbers or bullet points to describe your problem coherently. After the problem is resolved, please close the issue in time. Welcome to the forum to ask questions: https://club.rt-thread.io/ -->
|
require "rubygems"
require "sinatra"
require "twitter"
require "hashie"
require "haml"
require "coffee-script"
require "yaml"
class ProTweets < Sinatra::Application
configure do
@config = Hashie::Mash.new(YAML.load_file(File.join(File.dirname(__FILE__), 'config', 'config.yml')))
Twitter.configure do |conf|
conf.consumer_key = @config.twitter.consumer_key
conf.consumer_secret = @config.twitter.consumer_secret
conf.oauth_token = @config.twitter.oauth_token
conf.oauth_token_secret = @config.twitter.oauth_token_secret
end
# Setup DataMapper and Require Models
DataMapper::Logger.new($stdout, :info)
DataMapper.setup(:default, {
adapter: 'sqlite',
database: @config.database
})
Dir.glob(File.join(settings.root, 'models', '*.rb')).each do |model|
require model
end
# Commit models. Migrations get run from the worker script.
DataMapper.finalize
end
helpers do
# Get bigger user icons w/o using API requests
def bigger_image(url)
url.gsub!(/normal/, 'bigger')
end
end
get "/" do
tpp = 200
@page = params[:page].to_i if params[:page]
cache_control 1800 # Cache locally for 30 minutes
# Don't hit the database as hard if nothing's changed
@most_recent = Tweet.first(:order => [:created_at.desc])
last_modified @most_recent.created_at
if @page
@tweets = Tweet.all(:order => [:created_at.desc], :limit => tpp, :offset => (tpp*params[:page].to_i))
else
@tweets = Tweet.all(:order => [:created_at.desc], :limit => tpp)
end
haml :index
end
[ "/tweets/:id.:format", "/tweets/:id" ].each do |path|
get path do
@tweet = Tweet.first(:tweet_id => params[:id])
@format = params[:format] || "html"
cache_control 86400
last_modified @tweet.created_at
if @format == "json"
content_type 'application/json'
@tweet.to_json
else
haml :tweet
end
end
end
get "/ping" do
"PONG"
end
get "/rate_limit" do
rls = Twitter.rate_limit_status
content_type 'application/json'
rls.attrs.to_json
end
get "/stylesheets/:sheet.css" do
lastmod = File.mtime(File.join(settings.views, "stylesheets", "#{params[:sheet]}.scss"))
cache_control 86400 # Cache locally 24 hours
last_modified lastmod
content_type 'text/css'
scss "stylesheets/#{params[:sheet]}".to_sym
end
get '/coffeescripts/:script.js' do
lastmod = File.mtime(File.join(settings.views, "coffeescripts", "#{params[:script]}.coffee"))
cache_control 86400
last_modified lastmod
content_type 'text/javascript'
coffee "coffeescripts/#{params[:script]}".to_sym
end
private
def cache_control(seconds)
headers 'Cache-Control' => "public,must-revalidate,max-age=#{seconds}"
end
end
# vim: set ft=ruby ts=2 sw=2 expandtab :
|
import { Mutation } from './mutation'
import { CommandClass, OptionClass } from '../interfaces'
import { metadata, handlers } from '../constants/metadata'
export interface CommandFunctionMap {
[key: string]: CommandClass
}
export interface OptionFunctionMap {
[key: string]: OptionClass
}
export type ContainerParams = Array<CommandClass | OptionClass>
export interface ContainerData {
[handlers.COMMAND]: CommandClass[]
[handlers.OPTION]: OptionClass[]
[handlers.NOT_FOUND]: CommandClass[]
[handlers.MAJOR]: CommandClass[]
}
export class Container {
mutation: Mutation
datas: ContainerData = {
[handlers.COMMAND]: [],
[handlers.OPTION]: [],
[handlers.NOT_FOUND]: [],
[handlers.MAJOR]: [],
}
constructor(
private params: ContainerParams,
) {
this.mutation = new Mutation()
this.init()
this.insert()
}
getCommands(): CommandClass[] {
return this.datas[handlers.COMMAND]
.map(fn => Reflect.getMetadata(metadata.COMMAND_IDENTIFIER, fn))
}
private init(): void {
this.params.forEach(handler => {
const type = Reflect.getMetadata(metadata.HANDLER_IDENTIFIER, handler)
const dataColumn = this.datas[type]
dataColumn && dataColumn.push(handler)
})
}
private insert(): void {
this.mutation.devour({
commands: this.datas[handlers.COMMAND],
options: this.datas[handlers.OPTION],
notFounds: this.datas[handlers.NOT_FOUND],
majors: this.datas[handlers.MAJOR],
})
}
}
|
# Copyright 2018 Battelle Energy Alliance, LLC
#===============================================================================
#
# FILE: Parse_LM_data.pm
#===============================================================================
use strict;
use warnings;
package Parse_LM_Data;
use Data::Dumper;
use Time::Local;
sub new {
my ($class, %args) = @_;
return bless \%args, $class;
}
sub print_app_data {
my ($self, $arg_href) = @_;
my $lm_href = $arg_href->{data};
my $app_name = $arg_href->{name};
my $out = "";
my $app_href = $lm_href->{app}{$app_name};
print "Could not find the app called --$app_name--" and return if not $app_href;
$out .= print_header("Totals for $app_name");
#printf("%-23s $app_href->{attr}{total_lics}\n", "Total Licenses:");
#printf("%-23s $app_href->{attr}{used_lics}\n", "Used Licenses:");
$out .= sprintf("%-23s $app_href->{attr}{total_lics}\n", "Total Licenses:");
$out .= sprintf("%-23s $app_href->{attr}{used_lics}\n", "Used Licenses:");
my $percent = sprintf("%.1f", $app_href->{attr}{used_lics} / $app_href->{attr}{total_lics} * 100 );
#printf("%-23s $percent\n", "Percent Used:");
$out .= sprintf("%-23s $percent\n", "Percent Used:");
if (keys %{$app_href->{users}}) {
$out .= print_header("User Licenses");
foreach my $user (sort keys %{$app_href->{users}} ) {
#printf("%-12s has %-3s licenses\n", $user, $app_href->{users}{$user}{attr}{count});
$out .= sprintf("%-12s has %-3s licenses\n", $user, $app_href->{users}{$user}{attr}{count});
}
}
if (keys %{$app_href->{reserved}}) {
$out .= print_header("Reserved Licenses");
foreach my $res (sort keys %{$app_href->{reserved}} ) {
#printf("%-15s has reserved %-3s licenses\n", $res, $app_href->{reserved}{$res}{attr}{count});
$out .= sprintf("%-15s has reserved %-3s licenses\n", $res, $app_href->{reserved}{$res}{attr}{count});
}
}
$out .= "\n\n";
return $out;
}
sub get_app_lic_usage {
my ($self, $arg_href) = @_;
my $lm_href = $arg_href->{data};
my $app_name = $arg_href->{name};
my $out = "";
my $app_href = $lm_href->{app}{$app_name};
print "Could not find the app called --$app_name--" and return if not $app_href;
my $percent = sprintf("%.1f", $app_href->{attr}{used_lics} / $app_href->{attr}{total_lics} * 100 );
return $percent, $app_href->{attr}{used_lics}, $app_href->{attr}{total_lics}
}
sub print_header {
my $title = shift;
my $header = "";
my $title_len = length($title);
my $max = 40;
my $offset = $max - ($title_len + 2);
#print "\n";
#print "##########################################\n";
#printf "# $title%${offset}s\n", "#";
#print "##########################################\n";
$header .= "\n";
$header .= "##########################################\n";
$header .= sprintf "# $title%${offset}s\n", "#";
$header .= "##########################################\n";
return $header;
}
sub get_days_to_expire {
my ($self, $arg_href) = @_;
my $lm_href = $arg_href->{data};
my $app_name = $arg_href->{name};
my $expire_date = $lm_href->{app}{$app_name}{attr}{expires};
my $days_left = _get_days_to_expire($expire_date);
return $days_left;
}
sub _get_days_to_expire {
my $expire_date = shift;
#print "ED $expire_date\n";
# Get year month and day of expire
my ($then_year, $then_month, $then_day) = split /\-/, $expire_date;
# set hour to midnight
my $then_seconds = 0;
my $then_minutes = 0;
my $then_hours = 0;
# get locale time in epoch seconds
my $time = time;
# convert then time to epoch seconds
my $time_then = timelocal($then_seconds, $then_minutes, $then_hours, $then_day,$then_month-1,$then_year);
# Get just the whole number of days until lics expire
my $days_difference = int(($time_then - $time) / 86400);
return $days_difference;
}
sub check_servers_status {
my ($self, $arg_href) = @_;
my $lm_href = $arg_href->{data};
my $product_name = $arg_href->{product};
my $all_up = 1;
my $found_servers = 0;
my $data;
foreach my $server (sort keys %{$lm_href->{product}{$product_name}{server}} ) {
$found_servers = 1;
if ($lm_href->{product}{$product_name}{server}{$server}{attr}{status} =~ /up/i) {
$all_up &= 1;
$data .= " $product_name on $server: licenses UP ";
}
else {
$all_up &= 0;
$data .= " $product_name on $server: licenses DOWN ";
}
}
if ($all_up and $found_servers) {
return 0, $data;
}
else {
return 1, $data;
}
}
sub check_for_servers {
my ($self, $arg_href) = @_;
my $lm_href = $arg_href->{data};
my $product_name = $arg_href->{product};
my $server_cnt = $arg_href->{server_cnt};
my @servers = sort keys %{$lm_href->{product}{$product_name}{server}};
my $found_servers = 0;
my $down_cnt = 0;
my $up_cnt = 0;
my $data;
foreach my $server ( @servers ) {
if ($server) {
$found_servers++;
my $status = $lm_href->{product}{$product_name}{server}{$server}{attr}{status};
if ($status =~ /up/i) {
$data .= " $product_name on $server: licenses UP ";
$up_cnt++;
}
else {
$data .= " $product_name on $server: licenses DOWN ";
$down_cnt++;
}
}
}
if ($server_cnt == 3) {
if ($up_cnt <= 1) {
$data .= " Servers Down ";
return 2, $data;
}
elsif ($up_cnt == 2) {
$data .= " One Server Down ";
return 1, $data;
}
else {
return 0, $data;
}
}
elsif ($server_cnt == 1) {
if ($up_cnt != 1) {
$data .= " Server Down ";
return 2, $data;
}
else {
return 0, $data;
}
}
return 3, $data;
}
1;
|
(function () {
Fight.update = function (fight) {
var time = fight.time,
ship = fight.ship,
zone = fight.zone,
bullets = fight.bullets;
var i, l;
Ship.update(ship, {
zone: zone,
time: time,
bullets: bullets,
leftJoystick: fight.inputs.leftJoystick,
rightJoystick: fight.inputs.rightJoystick
});
l = bullets.length;
for (i = 0; i < l; i++) {
Bullet.update(bullets[i], { zone: zone });
}
fight.bullets = Destroyer.filter(fight.bullets);
};
})(); |
#!/bin/sh
VERSION=0.0.1
IMAGE=ynishi/htmlserver
docker build -t ${IMAGE}:${VERSION} . --no-cache
docker tag ${IMAGE}:${VERSION} ${IMAGE}:latest
|
import torch
img3D = torch.rand(size=[1, 1, 128, 128, 128], dtype=torch.float32).cuda()
img2D = torch.rand(size=[1, 1, 128, 128], dtype=torch.float32).cuda()
############# unet #######################
from segmentation_models import Unet
model = Unet(dimension=3, channel_in=1, backbone_name='vgg19', basefilter=64, classes=2, pretrained=False).cuda()
# model = Unet(dimension=3, channel_in=1, backbone_name='resnet50', basefilter=32, classes=2, pretrained=False).cuda()
p = model(img3D)
print(p.shape)
model = Unet(dimension=2, channel_in=1, backbone_name='vgg19', basefilter=64, classes=2, pretrained=False).cuda()
p = model(img2D)
print(p.shape)
############## HighResolutionNet ###################
from segmentation_models import HighResolutionNet
model = HighResolutionNet(dimension=3, channel_in=1, classes=2, configureType='HRNET18').cuda()
p = model(img3D)
print(p.shape) |
## call() and apply()
### Introduction
Both of these methods are methods of function objects, so they must be invoked through a function object.
When a function invokes call() or apply(), the function is **executed** immediately.
- Both can be used to change what the function's this points to.
- The first argument is the object that this should point to (when the function runs, this refers to that object); the remaining arguments are passed as the function's actual arguments.
### Explicitly binding this
The vast majority of functions provided by JS, as well as all the functions we create ourselves, can use the call and apply methods.
Their first argument is an object. Because you can directly specify the object that this is bound to, we call this explicit binding.
Example 1:
```javascript
function foo() {
console.log(this.a);
}
var obj = {
a: 2
};
// point this at obj
foo.apply(obj); // prints: 2
```
### Passing the first argument
1. When thisObj is omitted, or is null or undefined, this inside the function points to the window object (in non-strict mode).
2. When another function's name is passed, this inside the function points to that **function's reference**.
3. When the value passed is a number, boolean, or string, this points to the wrapper object of that primitive type: Number, Boolean, or String.
4. When an object is passed, this inside the function points to that object (see the sketch below).
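A minimal sketch of these cases (assuming non-strict mode in a browser; the function and object names here are made up for illustration):
```javascript
function showThis() {
  console.log(this);
}
function otherFn() {}
var obj = { label: 'plain object' };

showThis.call(null);      // window — null/undefined fall back to the global object in non-strict mode
showThis.call(undefined); // window
showThis.call(otherFn);   // otherFn — this points to the passed function's reference
showThis.call(42);        // Number {42} — primitives are boxed into their wrapper objects
showThis.call('hi');      // String {'hi'}
showThis.call(true);      // Boolean {true}
showThis.call(obj);       // { label: 'plain object' }
```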
### The difference between call() and apply()
Both call() and apply() pass the actual arguments one by one after the target object, but apply() requires the arguments to be wrapped into a single **array** (even if there is only one argument, it still has to be placed in the array).
Take the following code, for example:
```javascript
var person1 = {
name: "Xiao Wang",
gender: "male",
age: 24,
say: function (school, grade) {
alert(this.name + ", " + this.gender + ", " + this.age + " years old, in " + grade + " at " + school);
}
}
var person2 = {
name: "Xiao Hong",
gender: "female",
age: 18
}
```
If the arguments are passed via call, it looks like this:
```javascript
person1.say.call(person2, "Experimental Primary School", "6th grade");
```
If the arguments are passed via apply, it looks like this:
```javascript
person1.say.apply(person2, ["Experimental Primary School", "6th grade"]);
```
See the difference? The arguments after call correspond one-to-one with the parameters of the say method, while with apply the arguments have to be wrapped in an array whose elements correspond one-to-one with say's parameters. That is the biggest difference between the two.
### What call() and apply() are used for
- Changing what this points to
- Implementing inheritance: Father.call(this) (see the sketch below)
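A rough sketch of the constructor-borrowing inheritance mentioned above (Father and Son are illustrative names, not from any library):
```javascript
function Father(name) {
  this.name = name;
  this.sayName = function () {
    console.log(this.name);
  };
}

function Son(name, age) {
  // Borrow Father's constructor so every Son instance gets its properties
  Father.call(this, name);
  this.age = age;
}

var son = new Son('Tom', 12);
son.sayName();        // "Tom"
console.log(son.age); // 12
```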
## bind()
- All of them can change what this points to
- call()/apply() **invoke the function immediately**
- bind() returns the function instead, so you still need to append `()` afterwards to invoke it.
bind() passes arguments the same way call() does.
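A small sketch contrasting the two behaviors (the names counter and increment are made up for illustration):
```javascript
var counter = { count: 0 };

function increment(step) {
  this.count += step;
  return this.count;
}

// call() executes immediately
increment.call(counter, 1); // 1

// bind() only returns a new function; it runs when you invoke it with ()
var incCounter = increment.bind(counter, 2);
incCounter(); // 3
incCounter(); // 5
```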
Reference links:
- <https://www.jianshu.com/p/56a9c2d11adc>
- <https://github.com/lin-xin/blog/issues/7>
- <https://segmentfault.com/a/1190000007402815>
- [Ways to change what this points to in JS](http://www.xiaoxiaohan.com/js/38.html)
|
INSERT INTO filter (id, enabled, evaluation_strategy, name) VALUES (0, FALSE, 'ALL_MATCH', 'default');
INSERT INTO predicate (id) VALUES (0);
INSERT INTO numerical_predicate (condition, fixed_operand, id) VALUES ('EQUAL', 0, 0);
INSERT INTO filter_predicates (filter_id, predicates_id) VALUES (0, 0);
|
@extends('admin.master.master')
@section('title')
{{ $type->pt_name }} - Admin
@endsection
@section('my-posts')
{{-- breadcrumb --}}
{{-- @include('../comps.blog_breadcrumb') --}}
<!-- Image Showcases -->
@if ($posts->count()>0)
@include('admin.comps.posts_list')
@else
<div class="alert space">There is no post yet.</div>
@endif
@endsection |
CREATE TABLE list (id VARCHAR(2) NOT NULL, value VARCHAR(64) NOT NULL, PRIMARY KEY(id));
INSERT INTO "list" ("id", "value") VALUES ('af', 'afrikaans');
INSERT INTO "list" ("id", "value") VALUES ('af_NA', 'afrikaans (Namíbia)');
INSERT INTO "list" ("id", "value") VALUES ('af_ZA', 'afrikaans (República de Sud-àfrica)');
INSERT INTO "list" ("id", "value") VALUES ('ak', 'àkan');
INSERT INTO "list" ("id", "value") VALUES ('ak_GH', 'àkan (Ghana)');
INSERT INTO "list" ("id", "value") VALUES ('sq', 'albanès');
INSERT INTO "list" ("id", "value") VALUES ('sq_AL', 'albanès (Albània)');
INSERT INTO "list" ("id", "value") VALUES ('sq_XK', 'albanès (Kosovo)');
INSERT INTO "list" ("id", "value") VALUES ('sq_MK', 'albanès (Macedònia)');
INSERT INTO "list" ("id", "value") VALUES ('de', 'alemany');
INSERT INTO "list" ("id", "value") VALUES ('de_DE', 'alemany (Alemanya)');
INSERT INTO "list" ("id", "value") VALUES ('de_AT', 'alemany (Àustria)');
INSERT INTO "list" ("id", "value") VALUES ('de_BE', 'alemany (Bèlgica)');
INSERT INTO "list" ("id", "value") VALUES ('de_LI', 'alemany (Liechtenstein)');
INSERT INTO "list" ("id", "value") VALUES ('de_LU', 'alemany (Luxemburg)');
INSERT INTO "list" ("id", "value") VALUES ('de_CH', 'alemany (Suïssa)');
INSERT INTO "list" ("id", "value") VALUES ('am', 'amhàric');
INSERT INTO "list" ("id", "value") VALUES ('am_ET', 'amhàric (Etiòpia)');
INSERT INTO "list" ("id", "value") VALUES ('en', 'anglès');
INSERT INTO "list" ("id", "value") VALUES ('en_AI', 'anglès (Anguilla)');
INSERT INTO "list" ("id", "value") VALUES ('en_AG', 'anglès (Antigua i Barbuda)');
INSERT INTO "list" ("id", "value") VALUES ('en_AU', 'anglès (Austràlia)');
INSERT INTO "list" ("id", "value") VALUES ('en_BS', 'anglès (Bahames)');
INSERT INTO "list" ("id", "value") VALUES ('en_BB', 'anglès (Barbados)');
INSERT INTO "list" ("id", "value") VALUES ('en_BE', 'anglès (Bèlgica)');
INSERT INTO "list" ("id", "value") VALUES ('en_BZ', 'anglès (Belize)');
INSERT INTO "list" ("id", "value") VALUES ('en_BM', 'anglès (Bermudes)');
INSERT INTO "list" ("id", "value") VALUES ('en_BW', 'anglès (Botswana)');
INSERT INTO "list" ("id", "value") VALUES ('en_CM', 'anglès (Camerun)');
INSERT INTO "list" ("id", "value") VALUES ('en_CA', 'anglès (Canadà)');
INSERT INTO "list" ("id", "value") VALUES ('en_DG', 'anglès (Diego Garcia)');
INSERT INTO "list" ("id", "value") VALUES ('en_DM', 'anglès (Dominica)');
INSERT INTO "list" ("id", "value") VALUES ('en_ER', 'anglès (Eritrea)');
INSERT INTO "list" ("id", "value") VALUES ('en_US', 'anglès (Estats Units)');
INSERT INTO "list" ("id", "value") VALUES ('en_FJ', 'anglès (Fiji)');
INSERT INTO "list" ("id", "value") VALUES ('en_PH', 'anglès (Filipines)');
INSERT INTO "list" ("id", "value") VALUES ('en_GM', 'anglès (Gàmbia)');
INSERT INTO "list" ("id", "value") VALUES ('en_GH', 'anglès (Ghana)');
INSERT INTO "list" ("id", "value") VALUES ('en_GI', 'anglès (Gibraltar)');
INSERT INTO "list" ("id", "value") VALUES ('en_GD', 'anglès (Grenada)');
INSERT INTO "list" ("id", "value") VALUES ('en_GU', 'anglès (Guam)');
INSERT INTO "list" ("id", "value") VALUES ('en_GG', 'anglès (Guernsey)');
INSERT INTO "list" ("id", "value") VALUES ('en_GY', 'anglès (Guyana)');
INSERT INTO "list" ("id", "value") VALUES ('en_HK', 'anglès (Hong Kong (RAE Xina))');
INSERT INTO "list" ("id", "value") VALUES ('en_CX', 'anglès (illa Christmas)');
INSERT INTO "list" ("id", "value") VALUES ('en_IM', 'anglès (illa de Man)');
INSERT INTO "list" ("id", "value") VALUES ('en_KY', 'anglès (Illes Caiman)');
INSERT INTO "list" ("id", "value") VALUES ('en_CC', 'anglès (illes Cocos)');
INSERT INTO "list" ("id", "value") VALUES ('en_CK', 'anglès (illes Cook)');
INSERT INTO "list" ("id", "value") VALUES ('en_FK', 'anglès (Illes Malvines)');
INSERT INTO "list" ("id", "value") VALUES ('en_MP', 'anglès (illes Mariannes del Nord)');
INSERT INTO "list" ("id", "value") VALUES ('en_MH', 'anglès (illes Marshall)');
INSERT INTO "list" ("id", "value") VALUES ('en_UM', 'anglès (illes Perifèriques Menors dels EUA)');
INSERT INTO "list" ("id", "value") VALUES ('en_PN', 'anglès (illes Pitcairn)');
INSERT INTO "list" ("id", "value") VALUES ('en_SB', 'anglès (illes Salomó)');
INSERT INTO "list" ("id", "value") VALUES ('en_TC', 'anglès (Illes Turks i Caicos)');
INSERT INTO "list" ("id", "value") VALUES ('en_VG', 'anglès (Illes Verges Britàniques)');
INSERT INTO "list" ("id", "value") VALUES ('en_VI', 'anglès (Illes Verges Nord-americanes)');
INSERT INTO "list" ("id", "value") VALUES ('en_IN', 'anglès (Índia)');
INSERT INTO "list" ("id", "value") VALUES ('en_IE', 'anglès (Irlanda)');
INSERT INTO "list" ("id", "value") VALUES ('en_JM', 'anglès (Jamaica)');
INSERT INTO "list" ("id", "value") VALUES ('en_JE', 'anglès (Jersey)');
INSERT INTO "list" ("id", "value") VALUES ('en_KE', 'anglès (Kenya)');
INSERT INTO "list" ("id", "value") VALUES ('en_KI', 'anglès (Kiribati)');
INSERT INTO "list" ("id", "value") VALUES ('en_LS', 'anglès (Lesotho)');
INSERT INTO "list" ("id", "value") VALUES ('en_LR', 'anglès (Libèria)');
INSERT INTO "list" ("id", "value") VALUES ('en_MO', 'anglès (Macau (RAE Xina))');
INSERT INTO "list" ("id", "value") VALUES ('en_MG', 'anglès (Madagascar)');
INSERT INTO "list" ("id", "value") VALUES ('en_MY', 'anglès (Malàisia)');
INSERT INTO "list" ("id", "value") VALUES ('en_MW', 'anglès (Malawi)');
INSERT INTO "list" ("id", "value") VALUES ('en_MT', 'anglès (Malta)');
INSERT INTO "list" ("id", "value") VALUES ('en_MU', 'anglès (Maurici)');
INSERT INTO "list" ("id", "value") VALUES ('en_FM', 'anglès (Micronèsia)');
INSERT INTO "list" ("id", "value") VALUES ('en_MS', 'anglès (Montserrat)');
INSERT INTO "list" ("id", "value") VALUES ('en_NA', 'anglès (Namíbia)');
INSERT INTO "list" ("id", "value") VALUES ('en_NR', 'anglès (Nauru)');
INSERT INTO "list" ("id", "value") VALUES ('en_NG', 'anglès (Nigèria)');
INSERT INTO "list" ("id", "value") VALUES ('en_NU', 'anglès (Niue)');
INSERT INTO "list" ("id", "value") VALUES ('en_NF', 'anglès (Norfolk)');
INSERT INTO "list" ("id", "value") VALUES ('en_NZ', 'anglès (Nova Zelanda)');
INSERT INTO "list" ("id", "value") VALUES ('en_PK', 'anglès (Pakistan)');
INSERT INTO "list" ("id", "value") VALUES ('en_PW', 'anglès (Palau)');
INSERT INTO "list" ("id", "value") VALUES ('en_PG', 'anglès (Papua Nova Guinea)');
INSERT INTO "list" ("id", "value") VALUES ('en_PR', 'anglès (Puerto Rico)');
INSERT INTO "list" ("id", "value") VALUES ('en_GB', 'anglès (Regne Unit)');
INSERT INTO "list" ("id", "value") VALUES ('en_ZA', 'anglès (República de Sud-àfrica)');
INSERT INTO "list" ("id", "value") VALUES ('en_RW', 'anglès (Ruanda)');
INSERT INTO "list" ("id", "value") VALUES ('en_KN', 'anglès (Saint Christopher i Nevis)');
INSERT INTO "list" ("id", "value") VALUES ('en_SH', 'anglès (Saint Helena)');
INSERT INTO "list" ("id", "value") VALUES ('en_LC', 'anglès (Saint Lucia)');
INSERT INTO "list" ("id", "value") VALUES ('en_VC', 'anglès (Saint Vincent i les Grenadines)');
INSERT INTO "list" ("id", "value") VALUES ('en_AS', 'anglès (Samoa Nord-americana)');
INSERT INTO "list" ("id", "value") VALUES ('en_WS', 'anglès (Samoa)');
INSERT INTO "list" ("id", "value") VALUES ('en_SC', 'anglès (Seychelles)');
INSERT INTO "list" ("id", "value") VALUES ('en_SL', 'anglès (Sierra Leone)');
INSERT INTO "list" ("id", "value") VALUES ('en_SG', 'anglès (Singapur)');
INSERT INTO "list" ("id", "value") VALUES ('en_SX', 'anglès (Sint Maarten)');
INSERT INTO "list" ("id", "value") VALUES ('en_SS', 'anglès (Sudan del Sud)');
INSERT INTO "list" ("id", "value") VALUES ('en_SD', 'anglès (Sudan)');
INSERT INTO "list" ("id", "value") VALUES ('en_SZ', 'anglès (Swazilàndia)');
INSERT INTO "list" ("id", "value") VALUES ('en_TZ', 'anglès (Tanzània)');
INSERT INTO "list" ("id", "value") VALUES ('en_IO', 'anglès (Territori Britànic de l’Oceà Índic)');
INSERT INTO "list" ("id", "value") VALUES ('en_TK', 'anglès (Tokelau)');
INSERT INTO "list" ("id", "value") VALUES ('en_TO', 'anglès (Tonga)');
INSERT INTO "list" ("id", "value") VALUES ('en_TT', 'anglès (Trinitat i Tobago)');
INSERT INTO "list" ("id", "value") VALUES ('en_TV', 'anglès (Tuvalu)');
INSERT INTO "list" ("id", "value") VALUES ('en_UG', 'anglès (Uganda)');
INSERT INTO "list" ("id", "value") VALUES ('en_VU', 'anglès (Vanuatu)');
INSERT INTO "list" ("id", "value") VALUES ('en_ZM', 'anglès (Zàmbia)');
INSERT INTO "list" ("id", "value") VALUES ('en_ZW', 'anglès (Zimbàbue)');
INSERT INTO "list" ("id", "value") VALUES ('ar', 'àrab');
INSERT INTO "list" ("id", "value") VALUES ('ar_DZ', 'àrab (Algèria)');
INSERT INTO "list" ("id", "value") VALUES ('ar_SA', 'àrab (Aràbia Saudita)');
INSERT INTO "list" ("id", "value") VALUES ('ar_BH', 'àrab (Bahrain)');
INSERT INTO "list" ("id", "value") VALUES ('ar_KM', 'àrab (Comores)');
INSERT INTO "list" ("id", "value") VALUES ('ar_DJ', 'àrab (Djibouti)');
INSERT INTO "list" ("id", "value") VALUES ('ar_EG', 'àrab (Egipte)');
INSERT INTO "list" ("id", "value") VALUES ('ar_AE', 'àrab (Emirats Àrabs Units)');
INSERT INTO "list" ("id", "value") VALUES ('ar_ER', 'àrab (Eritrea)');
INSERT INTO "list" ("id", "value") VALUES ('ar_YE', 'àrab (Iemen)');
INSERT INTO "list" ("id", "value") VALUES ('ar_IQ', 'àrab (Iraq)');
INSERT INTO "list" ("id", "value") VALUES ('ar_IL', 'àrab (Israel)');
INSERT INTO "list" ("id", "value") VALUES ('ar_JO', 'àrab (Jordània)');
INSERT INTO "list" ("id", "value") VALUES ('ar_KW', 'àrab (Kuwait)');
INSERT INTO "list" ("id", "value") VALUES ('ar_LB', 'àrab (Líban)');
INSERT INTO "list" ("id", "value") VALUES ('ar_LY', 'àrab (Líbia)');
INSERT INTO "list" ("id", "value") VALUES ('ar_MA', 'àrab (Marroc)');
INSERT INTO "list" ("id", "value") VALUES ('ar_MR', 'àrab (Mauritània)');
INSERT INTO "list" ("id", "value") VALUES ('ar_OM', 'àrab (Oman)');
INSERT INTO "list" ("id", "value") VALUES ('ar_PS', 'àrab (Palestina)');
INSERT INTO "list" ("id", "value") VALUES ('ar_QA', 'àrab (Qatar)');
INSERT INTO "list" ("id", "value") VALUES ('ar_EH', 'àrab (Sàhara Occidental)');
INSERT INTO "list" ("id", "value") VALUES ('ar_SY', 'àrab (Síria)');
INSERT INTO "list" ("id", "value") VALUES ('ar_SO', 'àrab (Somàlia)');
INSERT INTO "list" ("id", "value") VALUES ('ar_SS', 'àrab (Sudan del Sud)');
INSERT INTO "list" ("id", "value") VALUES ('ar_SD', 'àrab (Sudan)');
INSERT INTO "list" ("id", "value") VALUES ('ar_TN', 'àrab (Tunísia)');
INSERT INTO "list" ("id", "value") VALUES ('ar_TD', 'àrab (Txad)');
INSERT INTO "list" ("id", "value") VALUES ('hy', 'armeni');
INSERT INTO "list" ("id", "value") VALUES ('hy_AM', 'armeni (Armènia)');
INSERT INTO "list" ("id", "value") VALUES ('as', 'assamès');
INSERT INTO "list" ("id", "value") VALUES ('as_IN', 'assamès (Índia)');
INSERT INTO "list" ("id", "value") VALUES ('az', 'azerbaidjanès');
INSERT INTO "list" ("id", "value") VALUES ('az_AZ', 'azerbaidjanès (Azerbaidjan)');
INSERT INTO "list" ("id", "value") VALUES ('az_Cyrl_AZ', 'azerbaidjanès (ciríl·lic, Azerbaidjan)');
INSERT INTO "list" ("id", "value") VALUES ('az_Cyrl', 'azerbaidjanès (ciríl·lic)');
INSERT INTO "list" ("id", "value") VALUES ('az_Latn_AZ', 'azerbaidjanès (llatí, Azerbaidjan)');
INSERT INTO "list" ("id", "value") VALUES ('az_Latn', 'azerbaidjanès (llatí)');
INSERT INTO "list" ("id", "value") VALUES ('bm', 'bambara');
INSERT INTO "list" ("id", "value") VALUES ('bm_Latn_ML', 'bambara (llatí, Mali)');
INSERT INTO "list" ("id", "value") VALUES ('bm_Latn', 'bambara (llatí)');
INSERT INTO "list" ("id", "value") VALUES ('eu', 'basc');
INSERT INTO "list" ("id", "value") VALUES ('eu_ES', 'basc (Espanya)');
INSERT INTO "list" ("id", "value") VALUES ('bn', 'bengalí');
INSERT INTO "list" ("id", "value") VALUES ('bn_BD', 'bengalí (Bangla Desh)');
INSERT INTO "list" ("id", "value") VALUES ('bn_IN', 'bengalí (Índia)');
INSERT INTO "list" ("id", "value") VALUES ('be', 'bielorús');
INSERT INTO "list" ("id", "value") VALUES ('be_BY', 'bielorús (Bielorússia)');
INSERT INTO "list" ("id", "value") VALUES ('my', 'birmà');
INSERT INTO "list" ("id", "value") VALUES ('my_MM', 'birmà (Myanmar (Birmània))');
INSERT INTO "list" ("id", "value") VALUES ('bs', 'bosnià');
INSERT INTO "list" ("id", "value") VALUES ('bs_BA', 'bosnià (Bòsnia i Hercegovina)');
INSERT INTO "list" ("id", "value") VALUES ('bs_Cyrl_BA', 'bosnià (ciríl·lic, Bòsnia i Hercegovina)');
INSERT INTO "list" ("id", "value") VALUES ('bs_Cyrl', 'bosnià (ciríl·lic)');
INSERT INTO "list" ("id", "value") VALUES ('bs_Latn_BA', 'bosnià (llatí, Bòsnia i Hercegovina)');
INSERT INTO "list" ("id", "value") VALUES ('bs_Latn', 'bosnià (llatí)');
INSERT INTO "list" ("id", "value") VALUES ('br', 'bretó');
INSERT INTO "list" ("id", "value") VALUES ('br_FR', 'bretó (França)');
INSERT INTO "list" ("id", "value") VALUES ('bg', 'búlgar');
INSERT INTO "list" ("id", "value") VALUES ('bg_BG', 'búlgar (Bulgària)');
INSERT INTO "list" ("id", "value") VALUES ('ks', 'caixmiri');
INSERT INTO "list" ("id", "value") VALUES ('ks_Arab_IN', 'caixmiri (àrab, Índia)');
INSERT INTO "list" ("id", "value") VALUES ('ks_Arab', 'caixmiri (àrab)');
INSERT INTO "list" ("id", "value") VALUES ('ks_IN', 'caixmiri (Índia)');
INSERT INTO "list" ("id", "value") VALUES ('ca', 'català');
INSERT INTO "list" ("id", "value") VALUES ('ca_AD', 'català (Andorra)');
INSERT INTO "list" ("id", "value") VALUES ('ca_ES', 'català (Espanya)');
INSERT INTO "list" ("id", "value") VALUES ('ca_FR', 'català (França)');
INSERT INTO "list" ("id", "value") VALUES ('ca_IT', 'català (Itàlia)');
INSERT INTO "list" ("id", "value") VALUES ('ko', 'coreà');
INSERT INTO "list" ("id", "value") VALUES ('ko_KP', 'coreà (Corea del Nord)');
INSERT INTO "list" ("id", "value") VALUES ('ko_KR', 'coreà (Corea del Sud)');
INSERT INTO "list" ("id", "value") VALUES ('kw', 'còrnic');
INSERT INTO "list" ("id", "value") VALUES ('kw_GB', 'còrnic (Regne Unit)');
INSERT INTO "list" ("id", "value") VALUES ('hr', 'croat');
INSERT INTO "list" ("id", "value") VALUES ('hr_BA', 'croat (Bòsnia i Hercegovina)');
INSERT INTO "list" ("id", "value") VALUES ('hr_HR', 'croat (Croàcia)');
INSERT INTO "list" ("id", "value") VALUES ('da', 'danès');
INSERT INTO "list" ("id", "value") VALUES ('da_DK', 'danès (Dinamarca)');
INSERT INTO "list" ("id", "value") VALUES ('da_GL', 'danès (Grenlàndia)');
INSERT INTO "list" ("id", "value") VALUES ('dz', 'dzongka');
INSERT INTO "list" ("id", "value") VALUES ('dz_BT', 'dzongka (Bhutan)');
INSERT INTO "list" ("id", "value") VALUES ('sk', 'eslovac');
INSERT INTO "list" ("id", "value") VALUES ('sk_SK', 'eslovac (Eslovàquia)');
INSERT INTO "list" ("id", "value") VALUES ('sl', 'eslovè');
INSERT INTO "list" ("id", "value") VALUES ('sl_SI', 'eslovè (Eslovènia)');
INSERT INTO "list" ("id", "value") VALUES ('es', 'espanyol');
INSERT INTO "list" ("id", "value") VALUES ('es_AR', 'espanyol (Argentina)');
INSERT INTO "list" ("id", "value") VALUES ('es_BO', 'espanyol (Bolívia)');
INSERT INTO "list" ("id", "value") VALUES ('es_EA', 'espanyol (Ceuta i Melilla)');
INSERT INTO "list" ("id", "value") VALUES ('es_CO', 'espanyol (Colòmbia)');
INSERT INTO "list" ("id", "value") VALUES ('es_CR', 'espanyol (Costa Rica)');
INSERT INTO "list" ("id", "value") VALUES ('es_CU', 'espanyol (Cuba)');
INSERT INTO "list" ("id", "value") VALUES ('es_SV', 'espanyol (El Salvador)');
INSERT INTO "list" ("id", "value") VALUES ('es_EC', 'espanyol (Equador)');
INSERT INTO "list" ("id", "value") VALUES ('es_ES', 'espanyol (Espanya)');
INSERT INTO "list" ("id", "value") VALUES ('es_US', 'espanyol (Estats Units)');
INSERT INTO "list" ("id", "value") VALUES ('es_PH', 'espanyol (Filipines)');
INSERT INTO "list" ("id", "value") VALUES ('es_GT', 'espanyol (Guatemala)');
INSERT INTO "list" ("id", "value") VALUES ('es_GQ', 'espanyol (Guinea Equatorial)');
INSERT INTO "list" ("id", "value") VALUES ('es_HN', 'espanyol (Hondures)');
INSERT INTO "list" ("id", "value") VALUES ('es_IC', 'espanyol (illes Canàries)');
INSERT INTO "list" ("id", "value") VALUES ('es_MX', 'espanyol (Mèxic)');
INSERT INTO "list" ("id", "value") VALUES ('es_NI', 'espanyol (Nicaragua)');
INSERT INTO "list" ("id", "value") VALUES ('es_PA', 'espanyol (Panamà)');
INSERT INTO "list" ("id", "value") VALUES ('es_PY', 'espanyol (Paraguai)');
INSERT INTO "list" ("id", "value") VALUES ('es_PE', 'espanyol (Perú)');
INSERT INTO "list" ("id", "value") VALUES ('es_PR', 'espanyol (Puerto Rico)');
INSERT INTO "list" ("id", "value") VALUES ('es_DO', 'espanyol (República Dominicana)');
INSERT INTO "list" ("id", "value") VALUES ('es_UY', 'espanyol (Uruguai)');
INSERT INTO "list" ("id", "value") VALUES ('es_VE', 'espanyol (Veneçuela)');
INSERT INTO "list" ("id", "value") VALUES ('es_CL', 'espanyol (Xile)');
INSERT INTO "list" ("id", "value") VALUES ('eo', 'esperanto');
INSERT INTO "list" ("id", "value") VALUES ('et', 'estonià');
INSERT INTO "list" ("id", "value") VALUES ('et_EE', 'estonià (Estònia)');
INSERT INTO "list" ("id", "value") VALUES ('ee', 'ewe');
INSERT INTO "list" ("id", "value") VALUES ('ee_GH', 'ewe (Ghana)');
INSERT INTO "list" ("id", "value") VALUES ('ee_TG', 'ewe (Togo)');
INSERT INTO "list" ("id", "value") VALUES ('fo', 'feroès');
INSERT INTO "list" ("id", "value") VALUES ('fo_FO', 'feroès (illes Fèroe)');
INSERT INTO "list" ("id", "value") VALUES ('fi', 'finès');
INSERT INTO "list" ("id", "value") VALUES ('fi_FI', 'finès (Finlàndia)');
INSERT INTO "list" ("id", "value") VALUES ('fr', 'francès');
INSERT INTO "list" ("id", "value") VALUES ('fr_DZ', 'francès (Algèria)');
INSERT INTO "list" ("id", "value") VALUES ('fr_BE', 'francès (Bèlgica)');
INSERT INTO "list" ("id", "value") VALUES ('fr_BJ', 'francès (Benín)');
INSERT INTO "list" ("id", "value") VALUES ('fr_BF', 'francès (Burkina Faso)');
INSERT INTO "list" ("id", "value") VALUES ('fr_BI', 'francès (Burundi)');
INSERT INTO "list" ("id", "value") VALUES ('fr_CM', 'francès (Camerun)');
INSERT INTO "list" ("id", "value") VALUES ('fr_CA', 'francès (Canadà)');
INSERT INTO "list" ("id", "value") VALUES ('fr_KM', 'francès (Comores)');
INSERT INTO "list" ("id", "value") VALUES ('fr_CG', 'francès (Congo - Brazzaville)');
INSERT INTO "list" ("id", "value") VALUES ('fr_CD', 'francès (Congo - Kinshasa)');
INSERT INTO "list" ("id", "value") VALUES ('fr_CI', 'francès (Costa d’Ivori)');
INSERT INTO "list" ("id", "value") VALUES ('fr_DJ', 'francès (Djibouti)');
INSERT INTO "list" ("id", "value") VALUES ('fr_FR', 'francès (França)');
INSERT INTO "list" ("id", "value") VALUES ('fr_GA', 'francès (Gabon)');
INSERT INTO "list" ("id", "value") VALUES ('fr_GP', 'francès (Guadeloupe)');
INSERT INTO "list" ("id", "value") VALUES ('fr_GF', 'francès (Guaiana Francesa)');
INSERT INTO "list" ("id", "value") VALUES ('fr_GQ', 'francès (Guinea Equatorial)');
INSERT INTO "list" ("id", "value") VALUES ('fr_GN', 'francès (Guinea)');
INSERT INTO "list" ("id", "value") VALUES ('fr_HT', 'francès (Haití)');
INSERT INTO "list" ("id", "value") VALUES ('fr_RE', 'francès (Illa de la Reunió)');
INSERT INTO "list" ("id", "value") VALUES ('fr_LU', 'francès (Luxemburg)');
INSERT INTO "list" ("id", "value") VALUES ('fr_MG', 'francès (Madagascar)');
INSERT INTO "list" ("id", "value") VALUES ('fr_ML', 'francès (Mali)');
INSERT INTO "list" ("id", "value") VALUES ('fr_MA', 'francès (Marroc)');
INSERT INTO "list" ("id", "value") VALUES ('fr_MQ', 'francès (Martinica)');
INSERT INTO "list" ("id", "value") VALUES ('fr_MU', 'francès (Maurici)');
INSERT INTO "list" ("id", "value") VALUES ('fr_MR', 'francès (Mauritània)');
INSERT INTO "list" ("id", "value") VALUES ('fr_YT', 'francès (Mayotte)');
INSERT INTO "list" ("id", "value") VALUES ('fr_MC', 'francès (Mònaco)');
INSERT INTO "list" ("id", "value") VALUES ('fr_NE', 'francès (Níger)');
INSERT INTO "list" ("id", "value") VALUES ('fr_NC', 'francès (Nova Caledònia)');
INSERT INTO "list" ("id", "value") VALUES ('fr_PF', 'francès (Polinèsia Francesa)');
INSERT INTO "list" ("id", "value") VALUES ('fr_CF', 'francès (República Centreafricana)');
INSERT INTO "list" ("id", "value") VALUES ('fr_RW', 'francès (Ruanda)');
INSERT INTO "list" ("id", "value") VALUES ('fr_BL', 'francès (Saint Barthélemy)');
INSERT INTO "list" ("id", "value") VALUES ('fr_MF', 'francès (Saint Martin)');
INSERT INTO "list" ("id", "value") VALUES ('fr_PM', 'francès (Saint-Pierre-et-Miquelon)');
INSERT INTO "list" ("id", "value") VALUES ('fr_SN', 'francès (Senegal)');
INSERT INTO "list" ("id", "value") VALUES ('fr_SC', 'francès (Seychelles)');
INSERT INTO "list" ("id", "value") VALUES ('fr_SY', 'francès (Síria)');
INSERT INTO "list" ("id", "value") VALUES ('fr_CH', 'francès (Suïssa)');
INSERT INTO "list" ("id", "value") VALUES ('fr_TG', 'francès (Togo)');
INSERT INTO "list" ("id", "value") VALUES ('fr_TN', 'francès (Tunísia)');
INSERT INTO "list" ("id", "value") VALUES ('fr_TD', 'francès (Txad)');
INSERT INTO "list" ("id", "value") VALUES ('fr_VU', 'francès (Vanuatu)');
INSERT INTO "list" ("id", "value") VALUES ('fr_WF', 'francès (Wallis i Futuna)');
INSERT INTO "list" ("id", "value") VALUES ('fy', 'frisó oriental');
INSERT INTO "list" ("id", "value") VALUES ('fy_NL', 'frisó oriental (Països Baixos)');
INSERT INTO "list" ("id", "value") VALUES ('ff', 'ful');
INSERT INTO "list" ("id", "value") VALUES ('ff_CM', 'ful (Camerun)');
INSERT INTO "list" ("id", "value") VALUES ('ff_GN', 'ful (Guinea)');
INSERT INTO "list" ("id", "value") VALUES ('ff_MR', 'ful (Mauritània)');
INSERT INTO "list" ("id", "value") VALUES ('ff_SN', 'ful (Senegal)');
INSERT INTO "list" ("id", "value") VALUES ('gd', 'gaèlic escocès');
INSERT INTO "list" ("id", "value") VALUES ('gd_GB', 'gaèlic escocès (Regne Unit)');
INSERT INTO "list" ("id", "value") VALUES ('gl', 'gallec');
INSERT INTO "list" ("id", "value") VALUES ('gl_ES', 'gallec (Espanya)');
INSERT INTO "list" ("id", "value") VALUES ('cy', 'gal·lès');
INSERT INTO "list" ("id", "value") VALUES ('cy_GB', 'gal·lès (Regne Unit)');
INSERT INTO "list" ("id", "value") VALUES ('lg', 'ganda');
INSERT INTO "list" ("id", "value") VALUES ('lg_UG', 'ganda (Uganda)');
INSERT INTO "list" ("id", "value") VALUES ('ka', 'georgià');
INSERT INTO "list" ("id", "value") VALUES ('ka_GE', 'georgià (Geòrgia)');
INSERT INTO "list" ("id", "value") VALUES ('el', 'grec');
INSERT INTO "list" ("id", "value") VALUES ('el_GR', 'grec (Grècia)');
INSERT INTO "list" ("id", "value") VALUES ('el_CY', 'grec (Xipre)');
INSERT INTO "list" ("id", "value") VALUES ('kl', 'grenlandès');
INSERT INTO "list" ("id", "value") VALUES ('kl_GL', 'grenlandès (Grenlàndia)');
INSERT INTO "list" ("id", "value") VALUES ('gu', 'gujarati');
INSERT INTO "list" ("id", "value") VALUES ('gu_IN', 'gujarati (Índia)');
INSERT INTO "list" ("id", "value") VALUES ('ha', 'haussa');
INSERT INTO "list" ("id", "value") VALUES ('ha_GH', 'haussa (Ghana)');
INSERT INTO "list" ("id", "value") VALUES ('ha_Latn_GH', 'haussa (llatí, Ghana)');
INSERT INTO "list" ("id", "value") VALUES ('ha_Latn_NE', 'haussa (llatí, Níger)');
INSERT INTO "list" ("id", "value") VALUES ('ha_Latn_NG', 'haussa (llatí, Nigèria)');
INSERT INTO "list" ("id", "value") VALUES ('ha_Latn', 'haussa (llatí)');
INSERT INTO "list" ("id", "value") VALUES ('ha_NE', 'haussa (Níger)');
INSERT INTO "list" ("id", "value") VALUES ('ha_NG', 'haussa (Nigèria)');
INSERT INTO "list" ("id", "value") VALUES ('he', 'hebreu');
INSERT INTO "list" ("id", "value") VALUES ('he_IL', 'hebreu (Israel)');
INSERT INTO "list" ("id", "value") VALUES ('hi', 'hindi');
INSERT INTO "list" ("id", "value") VALUES ('hi_IN', 'hindi (Índia)');
INSERT INTO "list" ("id", "value") VALUES ('hu', 'hongarès');
INSERT INTO "list" ("id", "value") VALUES ('hu_HU', 'hongarès (Hongria)');
INSERT INTO "list" ("id", "value") VALUES ('ig', 'igbo');
INSERT INTO "list" ("id", "value") VALUES ('ig_NG', 'igbo (Nigèria)');
INSERT INTO "list" ("id", "value") VALUES ('id', 'indonesi');
INSERT INTO "list" ("id", "value") VALUES ('id_ID', 'indonesi (Indonèsia)');
INSERT INTO "list" ("id", "value") VALUES ('yo', 'ioruba');
INSERT INTO "list" ("id", "value") VALUES ('yo_BJ', 'ioruba (Benín)');
INSERT INTO "list" ("id", "value") VALUES ('yo_NG', 'ioruba (Nigèria)');
INSERT INTO "list" ("id", "value") VALUES ('ga', 'irlandès');
INSERT INTO "list" ("id", "value") VALUES ('ga_IE', 'irlandès (Irlanda)');
INSERT INTO "list" ("id", "value") VALUES ('is', 'islandès');
INSERT INTO "list" ("id", "value") VALUES ('is_IS', 'islandès (Islàndia)');
INSERT INTO "list" ("id", "value") VALUES ('it', 'italià');
INSERT INTO "list" ("id", "value") VALUES ('it_IT', 'italià (Itàlia)');
INSERT INTO "list" ("id", "value") VALUES ('it_SM', 'italià (San Marino)');
INSERT INTO "list" ("id", "value") VALUES ('it_CH', 'italià (Suïssa)');
INSERT INTO "list" ("id", "value") VALUES ('ja', 'japonès');
INSERT INTO "list" ("id", "value") VALUES ('ja_JP', 'japonès (Japó)');
INSERT INTO "list" ("id", "value") VALUES ('yi', 'jiddisch');
INSERT INTO "list" ("id", "value") VALUES ('kn', 'kannada');
INSERT INTO "list" ("id", "value") VALUES ('kn_IN', 'kannada (Índia)');
INSERT INTO "list" ("id", "value") VALUES ('kk', 'kazakh');
INSERT INTO "list" ("id", "value") VALUES ('kk_Cyrl_KZ', 'kazakh (ciríl·lic, Kazakhstan)');
INSERT INTO "list" ("id", "value") VALUES ('kk_Cyrl', 'kazakh (ciríl·lic)');
INSERT INTO "list" ("id", "value") VALUES ('kk_KZ', 'kazakh (Kazakhstan)');
INSERT INTO "list" ("id", "value") VALUES ('km', 'khmer');
INSERT INTO "list" ("id", "value") VALUES ('km_KH', 'khmer (Cambodja)');
INSERT INTO "list" ("id", "value") VALUES ('ki', 'kikuiu');
INSERT INTO "list" ("id", "value") VALUES ('ki_KE', 'kikuiu (Kenya)');
INSERT INTO "list" ("id", "value") VALUES ('ky', 'kirguís');
INSERT INTO "list" ("id", "value") VALUES ('ky_Cyrl_KG', 'kirguís (ciríl·lic, Kirguizistan)');
INSERT INTO "list" ("id", "value") VALUES ('ky_Cyrl', 'kirguís (ciríl·lic)');
INSERT INTO "list" ("id", "value") VALUES ('ky_KG', 'kirguís (Kirguizistan)');
INSERT INTO "list" ("id", "value") VALUES ('lo', 'laosià');
INSERT INTO "list" ("id", "value") VALUES ('lo_LA', 'laosià (Laos)');
INSERT INTO "list" ("id", "value") VALUES ('lv', 'letó');
INSERT INTO "list" ("id", "value") VALUES ('lv_LV', 'letó (Letònia)');
INSERT INTO "list" ("id", "value") VALUES ('ln', 'lingala');
INSERT INTO "list" ("id", "value") VALUES ('ln_AO', 'lingala (Angola)');
INSERT INTO "list" ("id", "value") VALUES ('ln_CG', 'lingala (Congo - Brazzaville)');
INSERT INTO "list" ("id", "value") VALUES ('ln_CD', 'lingala (Congo - Kinshasa)');
INSERT INTO "list" ("id", "value") VALUES ('ln_CF', 'lingala (República Centreafricana)');
INSERT INTO "list" ("id", "value") VALUES ('lt', 'lituà');
INSERT INTO "list" ("id", "value") VALUES ('lt_LT', 'lituà (Lituània)');
INSERT INTO "list" ("id", "value") VALUES ('lu', 'luba katanga');
INSERT INTO "list" ("id", "value") VALUES ('lu_CD', 'luba katanga (Congo - Kinshasa)');
INSERT INTO "list" ("id", "value") VALUES ('lb', 'luxemburguès');
INSERT INTO "list" ("id", "value") VALUES ('lb_LU', 'luxemburguès (Luxemburg)');
INSERT INTO "list" ("id", "value") VALUES ('mk', 'macedoni');
INSERT INTO "list" ("id", "value") VALUES ('mk_MK', 'macedoni (Macedònia)');
INSERT INTO "list" ("id", "value") VALUES ('ms', 'malai');
INSERT INTO "list" ("id", "value") VALUES ('ms_BN', 'malai (Brunei)');
INSERT INTO "list" ("id", "value") VALUES ('ms_Latn_BN', 'malai (llatí, Brunei)');
INSERT INTO "list" ("id", "value") VALUES ('ms_Latn_MY', 'malai (llatí, Malàisia)');
INSERT INTO "list" ("id", "value") VALUES ('ms_Latn_SG', 'malai (llatí, Singapur)');
INSERT INTO "list" ("id", "value") VALUES ('ms_Latn', 'malai (llatí)');
INSERT INTO "list" ("id", "value") VALUES ('ms_MY', 'malai (Malàisia)');
INSERT INTO "list" ("id", "value") VALUES ('ms_SG', 'malai (Singapur)');
INSERT INTO "list" ("id", "value") VALUES ('ml', 'malaiàlam');
INSERT INTO "list" ("id", "value") VALUES ('ml_IN', 'malaiàlam (Índia)');
INSERT INTO "list" ("id", "value") VALUES ('mg', 'malgaix');
INSERT INTO "list" ("id", "value") VALUES ('mg_MG', 'malgaix (Madagascar)');
INSERT INTO "list" ("id", "value") VALUES ('mt', 'maltès');
INSERT INTO "list" ("id", "value") VALUES ('mt_MT', 'maltès (Malta)');
INSERT INTO "list" ("id", "value") VALUES ('gv', 'manx');
INSERT INTO "list" ("id", "value") VALUES ('gv_IM', 'manx (illa de Man)');
INSERT INTO "list" ("id", "value") VALUES ('mr', 'marathi');
INSERT INTO "list" ("id", "value") VALUES ('mr_IN', 'marathi (Índia)');
INSERT INTO "list" ("id", "value") VALUES ('mn', 'mongol');
INSERT INTO "list" ("id", "value") VALUES ('mn_Cyrl_MN', 'mongol (ciríl·lic, Mongòlia)');
INSERT INTO "list" ("id", "value") VALUES ('mn_Cyrl', 'mongol (ciríl·lic)');
INSERT INTO "list" ("id", "value") VALUES ('mn_MN', 'mongol (Mongòlia)');
INSERT INTO "list" ("id", "value") VALUES ('nd', 'ndebele septentrional');
INSERT INTO "list" ("id", "value") VALUES ('nd_ZW', 'ndebele septentrional (Zimbàbue)');
INSERT INTO "list" ("id", "value") VALUES ('nl', 'neerlandès');
INSERT INTO "list" ("id", "value") VALUES ('nl_AW', 'neerlandès (Aruba)');
INSERT INTO "list" ("id", "value") VALUES ('nl_BE', 'neerlandès (Bèlgica)');
INSERT INTO "list" ("id", "value") VALUES ('nl_BQ', 'neerlandès (Carib Neerlandès)');
INSERT INTO "list" ("id", "value") VALUES ('nl_CW', 'neerlandès (Curaçao)');
INSERT INTO "list" ("id", "value") VALUES ('nl_NL', 'neerlandès (Països Baixos)');
INSERT INTO "list" ("id", "value") VALUES ('nl_SX', 'neerlandès (Sint Maarten)');
INSERT INTO "list" ("id", "value") VALUES ('nl_SR', 'neerlandès (Surinam)');
INSERT INTO "list" ("id", "value") VALUES ('ne', 'nepalès');
INSERT INTO "list" ("id", "value") VALUES ('ne_IN', 'nepalès (Índia)');
INSERT INTO "list" ("id", "value") VALUES ('ne_NP', 'nepalès (Nepal)');
INSERT INTO "list" ("id", "value") VALUES ('no', 'noruec');
INSERT INTO "list" ("id", "value") VALUES ('no_NO', 'noruec (Noruega)');
INSERT INTO "list" ("id", "value") VALUES ('nb', 'noruec bokmål');
INSERT INTO "list" ("id", "value") VALUES ('nb_NO', 'noruec bokmål (Noruega)');
INSERT INTO "list" ("id", "value") VALUES ('nb_SJ', 'noruec bokmål (Svalbard i Jan Mayen)');
INSERT INTO "list" ("id", "value") VALUES ('nn', 'noruec nynorsk');
INSERT INTO "list" ("id", "value") VALUES ('nn_NO', 'noruec nynorsk (Noruega)');
INSERT INTO "list" ("id", "value") VALUES ('or', 'oriya');
INSERT INTO "list" ("id", "value") VALUES ('or_IN', 'oriya (Índia)');
INSERT INTO "list" ("id", "value") VALUES ('om', 'oromo');
INSERT INTO "list" ("id", "value") VALUES ('om_ET', 'oromo (Etiòpia)');
INSERT INTO "list" ("id", "value") VALUES ('om_KE', 'oromo (Kenya)');
INSERT INTO "list" ("id", "value") VALUES ('os', 'osset');
INSERT INTO "list" ("id", "value") VALUES ('os_GE', 'osset (Geòrgia)');
INSERT INTO "list" ("id", "value") VALUES ('os_RU', 'osset (Rússia)');
INSERT INTO "list" ("id", "value") VALUES ('ps', 'paixtu');
INSERT INTO "list" ("id", "value") VALUES ('ps_AF', 'paixtu (Afganistan)');
INSERT INTO "list" ("id", "value") VALUES ('pa', 'panjabi');
INSERT INTO "list" ("id", "value") VALUES ('pa_Arab_PK', 'panjabi (àrab, Pakistan)');
INSERT INTO "list" ("id", "value") VALUES ('pa_Arab', 'panjabi (àrab)');
INSERT INTO "list" ("id", "value") VALUES ('pa_Guru_IN', 'panjabi (gurmukhi, Índia)');
INSERT INTO "list" ("id", "value") VALUES ('pa_Guru', 'panjabi (gurmukhi)');
INSERT INTO "list" ("id", "value") VALUES ('pa_IN', 'panjabi (Índia)');
INSERT INTO "list" ("id", "value") VALUES ('pa_PK', 'panjabi (Pakistan)');
INSERT INTO "list" ("id", "value") VALUES ('fa', 'persa');
INSERT INTO "list" ("id", "value") VALUES ('fa_AF', 'persa (Afganistan)');
INSERT INTO "list" ("id", "value") VALUES ('fa_IR', 'persa (Iran)');
INSERT INTO "list" ("id", "value") VALUES ('pl', 'polonès');
INSERT INTO "list" ("id", "value") VALUES ('pl_PL', 'polonès (Polònia)');
INSERT INTO "list" ("id", "value") VALUES ('pt', 'portuguès');
INSERT INTO "list" ("id", "value") VALUES ('pt_AO', 'portuguès (Angola)');
INSERT INTO "list" ("id", "value") VALUES ('pt_BR', 'portuguès (Brasil)');
INSERT INTO "list" ("id", "value") VALUES ('pt_CV', 'portuguès (Cap Verd)');
INSERT INTO "list" ("id", "value") VALUES ('pt_GW', 'portuguès (Guinea Bissau)');
INSERT INTO "list" ("id", "value") VALUES ('pt_MO', 'portuguès (Macau (RAE Xina))');
INSERT INTO "list" ("id", "value") VALUES ('pt_MZ', 'portuguès (Moçambic)');
INSERT INTO "list" ("id", "value") VALUES ('pt_PT', 'portuguès (Portugal)');
INSERT INTO "list" ("id", "value") VALUES ('pt_ST', 'portuguès (São Tomé i Príncipe)');
INSERT INTO "list" ("id", "value") VALUES ('pt_TL', 'portuguès (Timor Oriental)');
INSERT INTO "list" ("id", "value") VALUES ('qu', 'quítxua');
INSERT INTO "list" ("id", "value") VALUES ('qu_BO', 'quítxua (Bolívia)');
INSERT INTO "list" ("id", "value") VALUES ('qu_EC', 'quítxua (Equador)');
INSERT INTO "list" ("id", "value") VALUES ('qu_PE', 'quítxua (Perú)');
INSERT INTO "list" ("id", "value") VALUES ('rm', 'retoromànic');
INSERT INTO "list" ("id", "value") VALUES ('rm_CH', 'retoromànic (Suïssa)');
INSERT INTO "list" ("id", "value") VALUES ('ro', 'romanès');
INSERT INTO "list" ("id", "value") VALUES ('ro_MD', 'romanès (Moldàvia)');
INSERT INTO "list" ("id", "value") VALUES ('ro_RO', 'romanès (Romania)');
INSERT INTO "list" ("id", "value") VALUES ('rw', 'ruandès');
INSERT INTO "list" ("id", "value") VALUES ('rw_RW', 'ruandès (Ruanda)');
INSERT INTO "list" ("id", "value") VALUES ('rn', 'rundi');
INSERT INTO "list" ("id", "value") VALUES ('rn_BI', 'rundi (Burundi)');
INSERT INTO "list" ("id", "value") VALUES ('ru', 'rus');
INSERT INTO "list" ("id", "value") VALUES ('ru_BY', 'rus (Bielorússia)');
INSERT INTO "list" ("id", "value") VALUES ('ru_KZ', 'rus (Kazakhstan)');
INSERT INTO "list" ("id", "value") VALUES ('ru_KG', 'rus (Kirguizistan)');
INSERT INTO "list" ("id", "value") VALUES ('ru_MD', 'rus (Moldàvia)');
INSERT INTO "list" ("id", "value") VALUES ('ru_RU', 'rus (Rússia)');
INSERT INTO "list" ("id", "value") VALUES ('ru_UA', 'rus (Ucraïna)');
INSERT INTO "list" ("id", "value") VALUES ('se', 'sami septentrional');
INSERT INTO "list" ("id", "value") VALUES ('se_FI', 'sami septentrional (Finlàndia)');
INSERT INTO "list" ("id", "value") VALUES ('se_NO', 'sami septentrional (Noruega)');
INSERT INTO "list" ("id", "value") VALUES ('se_SE', 'sami septentrional (Suècia)');
INSERT INTO "list" ("id", "value") VALUES ('sg', 'sango');
INSERT INTO "list" ("id", "value") VALUES ('sg_CF', 'sango (República Centreafricana)');
INSERT INTO "list" ("id", "value") VALUES ('sr', 'serbi');
INSERT INTO "list" ("id", "value") VALUES ('sr_BA', 'serbi (Bòsnia i Hercegovina)');
INSERT INTO "list" ("id", "value") VALUES ('sr_Cyrl_BA', 'serbi (ciríl·lic, Bòsnia i Hercegovina)');
INSERT INTO "list" ("id", "value") VALUES ('sr_Cyrl_XK', 'serbi (ciríl·lic, Kosovo)');
INSERT INTO "list" ("id", "value") VALUES ('sr_Cyrl_ME', 'serbi (ciríl·lic, Montenegro)');
INSERT INTO "list" ("id", "value") VALUES ('sr_Cyrl_RS', 'serbi (ciríl·lic, Sèrbia)');
INSERT INTO "list" ("id", "value") VALUES ('sr_Cyrl', 'serbi (ciríl·lic)');
INSERT INTO "list" ("id", "value") VALUES ('sr_XK', 'serbi (Kosovo)');
INSERT INTO "list" ("id", "value") VALUES ('sr_Latn_BA', 'serbi (llatí, Bòsnia i Hercegovina)');
INSERT INTO "list" ("id", "value") VALUES ('sr_Latn_XK', 'serbi (llatí, Kosovo)');
INSERT INTO "list" ("id", "value") VALUES ('sr_Latn_ME', 'serbi (llatí, Montenegro)');
INSERT INTO "list" ("id", "value") VALUES ('sr_Latn_RS', 'serbi (llatí, Sèrbia)');
INSERT INTO "list" ("id", "value") VALUES ('sr_Latn', 'serbi (llatí)');
INSERT INTO "list" ("id", "value") VALUES ('sr_ME', 'serbi (Montenegro)');
INSERT INTO "list" ("id", "value") VALUES ('sr_RS', 'serbi (Sèrbia)');
INSERT INTO "list" ("id", "value") VALUES ('sh', 'serbocroat');
INSERT INTO "list" ("id", "value") VALUES ('sh_BA', 'serbocroat (Bòsnia i Hercegovina)');
INSERT INTO "list" ("id", "value") VALUES ('sn', 'shona');
INSERT INTO "list" ("id", "value") VALUES ('sn_ZW', 'shona (Zimbàbue)');
INSERT INTO "list" ("id", "value") VALUES ('si', 'singalès');
INSERT INTO "list" ("id", "value") VALUES ('si_LK', 'singalès (Sri Lanka)');
INSERT INTO "list" ("id", "value") VALUES ('so', 'somali');
INSERT INTO "list" ("id", "value") VALUES ('so_DJ', 'somali (Djibouti)');
INSERT INTO "list" ("id", "value") VALUES ('so_ET', 'somali (Etiòpia)');
INSERT INTO "list" ("id", "value") VALUES ('so_KE', 'somali (Kenya)');
INSERT INTO "list" ("id", "value") VALUES ('so_SO', 'somali (Somàlia)');
INSERT INTO "list" ("id", "value") VALUES ('sw', 'suahili');
INSERT INTO "list" ("id", "value") VALUES ('sw_KE', 'suahili (Kenya)');
INSERT INTO "list" ("id", "value") VALUES ('sw_TZ', 'suahili (Tanzània)');
INSERT INTO "list" ("id", "value") VALUES ('sw_UG', 'suahili (Uganda)');
INSERT INTO "list" ("id", "value") VALUES ('sv', 'suec');
INSERT INTO "list" ("id", "value") VALUES ('sv_FI', 'suec (Finlàndia)');
INSERT INTO "list" ("id", "value") VALUES ('sv_AX', 'suec (illes Åland)');
INSERT INTO "list" ("id", "value") VALUES ('sv_SE', 'suec (Suècia)');
INSERT INTO "list" ("id", "value") VALUES ('tl', 'tagàlog');
INSERT INTO "list" ("id", "value") VALUES ('tl_PH', 'tagàlog (Filipines)');
INSERT INTO "list" ("id", "value") VALUES ('th', 'tailandès');
INSERT INTO "list" ("id", "value") VALUES ('th_TH', 'tailandès (Tailàndia)');
INSERT INTO "list" ("id", "value") VALUES ('ta', 'tàmil');
INSERT INTO "list" ("id", "value") VALUES ('ta_IN', 'tàmil (Índia)');
INSERT INTO "list" ("id", "value") VALUES ('ta_MY', 'tàmil (Malàisia)');
INSERT INTO "list" ("id", "value") VALUES ('ta_SG', 'tàmil (Singapur)');
INSERT INTO "list" ("id", "value") VALUES ('ta_LK', 'tàmil (Sri Lanka)');
INSERT INTO "list" ("id", "value") VALUES ('te', 'telugu');
INSERT INTO "list" ("id", "value") VALUES ('te_IN', 'telugu (Índia)');
INSERT INTO "list" ("id", "value") VALUES ('bo', 'tibetà');
INSERT INTO "list" ("id", "value") VALUES ('bo_IN', 'tibetà (Índia)');
INSERT INTO "list" ("id", "value") VALUES ('bo_CN', 'tibetà (Xina)');
INSERT INTO "list" ("id", "value") VALUES ('ti', 'tigrinya');
INSERT INTO "list" ("id", "value") VALUES ('ti_ER', 'tigrinya (Eritrea)');
INSERT INTO "list" ("id", "value") VALUES ('ti_ET', 'tigrinya (Etiòpia)');
INSERT INTO "list" ("id", "value") VALUES ('to', 'tongalès');
INSERT INTO "list" ("id", "value") VALUES ('to_TO', 'tongalès (Tonga)');
INSERT INTO "list" ("id", "value") VALUES ('tr', 'turc');
INSERT INTO "list" ("id", "value") VALUES ('tr_TR', 'turc (Turquia)');
INSERT INTO "list" ("id", "value") VALUES ('tr_CY', 'turc (Xipre)');
INSERT INTO "list" ("id", "value") VALUES ('cs', 'txec');
INSERT INTO "list" ("id", "value") VALUES ('cs_CZ', 'txec (República Txeca)');
INSERT INTO "list" ("id", "value") VALUES ('uk', 'ucraïnès');
INSERT INTO "list" ("id", "value") VALUES ('uk_UA', 'ucraïnès (Ucraïna)');
INSERT INTO "list" ("id", "value") VALUES ('ug', 'uigur');
INSERT INTO "list" ("id", "value") VALUES ('ug_Arab_CN', 'uigur (àrab, Xina)');
INSERT INTO "list" ("id", "value") VALUES ('ug_Arab', 'uigur (àrab)');
INSERT INTO "list" ("id", "value") VALUES ('ug_CN', 'uigur (Xina)');
INSERT INTO "list" ("id", "value") VALUES ('ur', 'urdú');
INSERT INTO "list" ("id", "value") VALUES ('ur_IN', 'urdú (Índia)');
INSERT INTO "list" ("id", "value") VALUES ('ur_PK', 'urdú (Pakistan)');
INSERT INTO "list" ("id", "value") VALUES ('uz', 'uzbek');
INSERT INTO "list" ("id", "value") VALUES ('uz_AF', 'uzbek (Afganistan)');
INSERT INTO "list" ("id", "value") VALUES ('uz_Arab_AF', 'uzbek (àrab, Afganistan)');
INSERT INTO "list" ("id", "value") VALUES ('uz_Arab', 'uzbek (àrab)');
INSERT INTO "list" ("id", "value") VALUES ('uz_Cyrl_UZ', 'uzbek (ciríl·lic, Uzbekistan)');
INSERT INTO "list" ("id", "value") VALUES ('uz_Cyrl', 'uzbek (ciríl·lic)');
INSERT INTO "list" ("id", "value") VALUES ('uz_Latn_UZ', 'uzbek (llatí, Uzbekistan)');
INSERT INTO "list" ("id", "value") VALUES ('uz_Latn', 'uzbek (llatí)');
INSERT INTO "list" ("id", "value") VALUES ('uz_UZ', 'uzbek (Uzbekistan)');
INSERT INTO "list" ("id", "value") VALUES ('vi', 'vietnamita');
INSERT INTO "list" ("id", "value") VALUES ('vi_VN', 'vietnamita (Vietnam)');
INSERT INTO "list" ("id", "value") VALUES ('zh', 'xinès');
INSERT INTO "list" ("id", "value") VALUES ('zh_HK', 'xinès (Hong Kong (RAE Xina))');
INSERT INTO "list" ("id", "value") VALUES ('zh_MO', 'xinès (Macau (RAE Xina))');
INSERT INTO "list" ("id", "value") VALUES ('zh_Hans_HK', 'xinès (simplificat, Hong Kong (RAE Xina))');
INSERT INTO "list" ("id", "value") VALUES ('zh_Hans_MO', 'xinès (simplificat, Macau (RAE Xina))');
INSERT INTO "list" ("id", "value") VALUES ('zh_Hans_SG', 'xinès (simplificat, Singapur)');
INSERT INTO "list" ("id", "value") VALUES ('zh_Hans_CN', 'xinès (simplificat, Xina)');
INSERT INTO "list" ("id", "value") VALUES ('zh_Hans', 'xinès (simplificat)');
INSERT INTO "list" ("id", "value") VALUES ('zh_SG', 'xinès (Singapur)');
INSERT INTO "list" ("id", "value") VALUES ('zh_TW', 'xinès (Taiwan)');
INSERT INTO "list" ("id", "value") VALUES ('zh_Hant_HK', 'xinès (tradicional, Hong Kong (RAE Xina))');
INSERT INTO "list" ("id", "value") VALUES ('zh_Hant_MO', 'xinès (tradicional, Macau (RAE Xina))');
INSERT INTO "list" ("id", "value") VALUES ('zh_Hant_TW', 'xinès (tradicional, Taiwan)');
INSERT INTO "list" ("id", "value") VALUES ('zh_Hant', 'xinès (tradicional)');
INSERT INTO "list" ("id", "value") VALUES ('zh_CN', 'xinès (Xina)');
INSERT INTO "list" ("id", "value") VALUES ('ii', 'yi sichuan');
INSERT INTO "list" ("id", "value") VALUES ('ii_CN', 'yi sichuan (Xina)');
INSERT INTO "list" ("id", "value") VALUES ('zu', 'zulu');
INSERT INTO "list" ("id", "value") VALUES ('zu_ZA', 'zulu (República de Sud-àfrica)');
|
package com.jaoafa.MyMaid3.Task;
import com.jaoafa.MyMaid3.Lib.MyMaidLibrary;
import org.bukkit.Bukkit;
import org.bukkit.entity.Player;
import org.bukkit.potion.PotionEffectType;
import org.bukkit.scheduler.BukkitRunnable;
public class Task_DisableInvisible extends BukkitRunnable {
@Override
public void run() {
for (Player player : Bukkit.getOnlinePlayers()) {
if (MyMaidLibrary.isAMR(player)) {
continue;
}
if (player.hasPotionEffect(PotionEffectType.INVISIBILITY)) {
player.removePotionEffect(PotionEffectType.INVISIBILITY);
}
}
}
}
|
package cn.hi321.browser.ui.activities;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import cn.hi321.browser2.R;
import com.umeng.analytics.MobclickAgent;
import com.umeng.analytics.ReportPolicy;
/**
* Splash screen
*
* @author yanggf
*
*/
public class SplashActivity extends Activity {
private Handler mHandler = new Handler();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
MobclickAgent.onError(this); // Umeng error reporting
// Use the Umeng online configuration feature
MobclickAgent.updateOnlineConfig(this);
// Send analytics reports on every launch
MobclickAgent
.setDefaultReportPolicy(this, ReportPolicy.BATCH_AT_LAUNCH);
// Umeng update check
setContentView(R.layout.start_activity);
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
enterHome();
}
}, 1000 * 3);
}
@Override
protected void onResume() {
super.onResume();
MobclickAgent.onResume(this);
}
@Override
protected void onPause() {
super.onPause();
MobclickAgent.onPause(this);
}
private void enterHome() {
Intent intent = new Intent(this, HomeActivity.class);
startActivity(intent);
this.finish();
}
}
|
#!/usr/bin/env bash
psearch -o "$1" | gsed -r 's/^([^ ]+) .*/\1/' | fgrep -v "`~/Scripts/listInstalledPorts.sh`"
|
const {MessageEmbed} = require('discord.js');
const {Shop, Category} = require('../../misc/dbObjects');
const {Minor} = require('../../misc/tools');
const {Op} = require('sequelize');
/*
This command doesn't remove the item from the actual shop but instead hides it from visibility.
This is done so that if items are from events they can still be seen in the inventory of the people who obtained them.
*/
module.exports = {
name: 'removeshop',
aliases: ['rshop'],
category: 'ownershop',
description: 'Remove an item from the shop.\n',
usage: '[command | alias] [id]',
examples: ['h!rshop 10'],
ownerOnly: true,
run: async (bot, message, args) => {
let shop = {
embed: new MessageEmbed().setTitle('Remove shop')
.setTimestamp(),
idReg: new RegExp('^\\d+$'),
};
if (shop.idReg.test(args[0]))
shop.id = shop.idReg.exec(args[0])[0];
else {
shop.embed.setColor(bot.embedColors.embeds.error)
.setDescription('Please provide a valid id!');
return message.channel.send(shop.embed);
}
shop.categoryDb = await Category.findOne({
where: {
name: 'hidden'
}
});
shop.item = await Shop.findOne({
where: {
id: shop.id
}
});
if (shop.item === null) {
shop.embed.setColor(bot.embedColors.embeds.error)
.setDescription('Please provide a valid id!');
return message.channel.send(shop.embed);
}
shop.item.category = shop.categoryDb.id;
await shop.item.save();
shop.embed.setColor(bot.embedColors.embeds.normal)
.setDescription(`Item **${shop.item.name}** has been removed from the shop display.`);
await message.channel.send(shop.embed)
}
} |
# expand-string
[![npm Version][npm-image]][npm-url]
[![npm Downloads][downloads-image]][downloads-url]
[![Test Status][travis-image]][travis-url]
[![Test Coverage][coveralls-image]][coveralls-url]
[![MIT Licensed][license-image]][license-url]
Range-based string expansion.
```js
const expandString = require('expand-string');
let expanded = expandString('a-f9-0_-');
// => 'abcdef9876543210_-'
const generateRange = expandString.generateRange;
let range = generateRange('s', 'z');
// => 'stuvwxyz'
```
## Installation
```bash
npm install expand-string
```
## Features
- Expand arbitrary character ranges.
- Specify ranges using any ellipsis (default is `'-'`).
- Full Unicode support (see the sketch below).
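Unicode ranges expand the same way as ASCII ranges; here is a minimal sketch, assuming ranges are generated in code-point order:
```js
expandString('α-ε');
// => 'αβγδε'
```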
## API
```js
const expandString = require('expand-string');
const generateRange = expandString.generateRange;
```
### expandString(str, options)
Expands all ranges found in a string.
#### str
*string* (default = `''`)
The string to expand. If `str` is *undefined* or *null* an empty result is
returned (`''` or `[]`, depending on `options.returnArray`).
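For example, the empty-result behaviour described above:
```js
expandString();
// => ''
expandString(null, {returnArray: true});
// => []
```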
#### options
*object*, *string* (default = `{}`)
`expandString` accepts these properties in the options object:
*Note: If `options` is a string it's treated as the ellipsis.*
##### ellipsis
*string* (default = `'-'`)
The ellipsis used to indicate a range.
```js
expandString('ac-f9-5_-');
// => 'acdef98765_-'
expandString('z..u', {ellipsis: '..'});
// => 'zyxwvu'
expandString('z..u', '..'); // shortcut
// => 'zyxwvu'
```
##### returnArray
*boolean* (default = `false`)
If `false` the return value is a *string*. If `true` the return value is an
*Array* with one Unicode character per element.
```js
expandString('a-f');
// => 'abcdef'
expandString('a-f', {returnArray: true});
// => ['a', 'b', 'c', 'd', 'e', 'f']
```
### generateRange(begin, end, options)
Generates a range from `begin` to `end`.
#### begin
*string* (single character)
The begin of the range (inclusive).
#### end
*string* (single character)
The end of the range (inclusive).
#### options
*object* (default = `{}`)
`generateRange` accepts these properties in the options object:
##### returnArray
*boolean* (default = `false`)
If `false` the return value is a *string*. If `true` the return value is an
*Array* with one Unicode character per element.
```js
generateRange('a', 'f');
// => 'abcdef'
generateRange('a', 'f', {returnArray: true});
// => ['a', 'b', 'c', 'd', 'e', 'f']
```
## Tests
To run the test suite, install dependencies, then run `npm test`:
```bash
npm install
npm test
```
Coverage reports are generated by running `npm run coverage`.
Linting is done with `npm run lint`.
[npm-image]: https://img.shields.io/npm/v/expand-string.svg
[npm-url]: https://npmjs.org/package/expand-string
[downloads-image]: https://img.shields.io/npm/dm/expand-string.svg
[downloads-url]: https://npmjs.org/package/expand-string
[travis-image]: https://img.shields.io/travis/maxtruxa/expand-string/master.svg
[travis-url]: https://travis-ci.org/maxtruxa/expand-string
[coveralls-image]: https://img.shields.io/coveralls/maxtruxa/expand-string/master.svg
[coveralls-url]: https://coveralls.io/r/maxtruxa/expand-string?branch=master
[license-image]: https://img.shields.io/badge/license-MIT-blue.svg
[license-url]: https://raw.githubusercontent.com/maxtruxa/expand-string/master/LICENSE
|
package api
import (
"github.com/gorilla/mux"
uuid "github.com/satori/go.uuid"
"github.com/stackpath/backend-developer-tests/rest-service/pkg/models"
"net/http"
"strings"
)
// getPersonById router handler function to get Person by ID
func (app *Application) getPersonById(w http.ResponseWriter, r *http.Request) {
params := mux.Vars(r)
uuidStr := params["id"]
id, err := uuid.FromString(uuidStr)
if err != nil {
app.logger.Print(uuidStr, err)
app.errorJSON(w, http.StatusBadRequest, err)
return
}
app.logger.Println("id is", id)
person, err := models.FindPersonByID(id)
if err != nil {
app.errorJSON(w, http.StatusNotFound, err)
return
}
err = app.writeJSON(w, http.StatusOK, person, "people")
if err != nil {
app.logger.Println(err)
}
}
func (app *Application) getAllPeople(w http.ResponseWriter, r *http.Request) {
people := models.AllPeople()
err := app.writeJSON(w, http.StatusOK, people, "people")
if err != nil {
app.logger.Println(err)
}
}
func (app *Application) getPersonByFullName(w http.ResponseWriter, r *http.Request) {
	// Get returns "" when the parameter is absent, avoiding an index-out-of-range panic
	fName := r.URL.Query().Get("first_name")
	lName := r.URL.Query().Get("last_name")
	app.logger.Println(fName, lName)
	person := models.FindPeopleByName(fName, lName)
err := app.writeJSON(w, http.StatusOK, person, "people")
if err != nil {
app.logger.Println(err)
}
}
func (app *Application) getPersonByPhone(w http.ResponseWriter, r *http.Request) {
phone := r.URL.Query()["phone_number"]
app.logger.Println("phone", phone)
people := models.FindPeopleByPhoneNumber(strings.Join(phone, ""))
err := app.writeJSON(w, http.StatusOK, people, "people")
if err != nil {
app.logger.Println(err)
}
}
|
module Stompede
class Session
attr_accessor :connected, :disconnected, :server_heart_beats, :client_heart_beats
def initialize(connector, options = {})
@connector = connector
@subscriptions = {}
@mutex = Mutex.new
@server_heart_beats = options[:server_heart_beats] || [0, 0]
@client_heart_beats = options[:client_heart_beats] || [0, 0]
end
def message_all(*args)
@connector.message_all(*args)
end
def subscriptions
@mutex.synchronize { @subscriptions.values }
end
def write(value)
@connector.write(self, value.to_str)
end
def wait_for_ack(message, timeout, &block)
@connector.wait_for_ack(message, timeout, &block)
rescue Celluloid::AbortError => e
raise e.cause
end
def error(exception, headers = {})
exception = exception.cause if exception.is_a?(Celluloid::AbortError)
unless exception.is_a?(Disconnected)
safe_write(ErrorFrame.new(exception, headers))
close
end
end
def safe_write(value)
write(value)
rescue Disconnected
end
def close
@connector.close(self)
end
def subscribe(frame)
subscription = Subscription.new(self, frame)
@mutex.synchronize do
if @subscriptions[subscription.id]
raise ClientError, "subscription with id #{subscription.id.inspect} already exists"
end
@subscriptions[subscription.id] = subscription
end
subscription
end
def unsubscribe(frame)
subscription = Subscription.new(self, frame)
@mutex.synchronize do
unless @subscriptions[subscription.id]
raise ClientError, "subscription with id #{subscription.id.inspect} does not exist"
end
@subscriptions.delete(subscription.id)
end
end
def inspect
"#<Stompede::Session #{object_id}>"
end
def outgoing_heart_beats
if server_heart_beats[0].zero? or client_heart_beats[1].zero?
0
else
[server_heart_beats[0], client_heart_beats[1]].max
end
end
def incoming_heart_beats
if server_heart_beats[1].zero? or client_heart_beats[0].zero?
0
else
[server_heart_beats[1], client_heart_beats[0]].max
end
end
end
end
|
import {InitialState, NavigationContainerRef, NavigationContainer} from '@react-navigation/native';
import AsyncStorage from '@react-native-community/async-storage';
import * as React from 'react';
import {InteractionManager} from 'react-native';
interface DevPersistedNavigationContainerProps extends React.ComponentProps<typeof NavigationContainer> {
persistKey: string;
}
function DevPersistedNavigationContainerImpl(
{persistKey, onStateChange, ...others}: DevPersistedNavigationContainerProps,
forwardedRef: React.Ref<NavigationContainerRef>,
) {
const [isReady, setIsReady] = React.useState(false);
const [initialState, setInitialState] = React.useState<InitialState | undefined>();
const persistInteractionRef = React.useRef<{cancel: () => void} | null>(null);
const onStateChangeInternal = React.useCallback(
state => {
const persistState = async () => {
persistInteractionRef.current = null;
try {
await AsyncStorage.setItem(persistKey, JSON.stringify(state));
} catch (ex) {
console.warn(`Failed to persist state. ${ex.message}`);
}
};
if (persistInteractionRef.current !== null) {
persistInteractionRef.current.cancel();
}
if (state != null) {
persistInteractionRef.current = InteractionManager.runAfterInteractions(persistState);
}
if (onStateChange != null) {
onStateChange(state);
}
},
[onStateChange, persistKey],
);
React.useEffect(() => {
    const loadPersistedState = async () => {
try {
const jsonString = await AsyncStorage.getItem(persistKey);
if (jsonString != null) {
setInitialState(JSON.parse(jsonString));
}
setIsReady(true);
} catch (ex) {
console.warn(`Failed to load state. ${ex.message}`);
setIsReady(true);
}
};
    loadPersistedState();
}, [persistKey]);
if (!isReady) {
return null;
}
return (
<NavigationContainer
{...others}
key={persistKey}
ref={forwardedRef}
initialState={initialState}
onStateChange={onStateChangeInternal}
/>
);
}
const DevPersistedNavigationContainer = __DEV__
? React.forwardRef(DevPersistedNavigationContainerImpl)
: NavigationContainer;
export default DevPersistedNavigationContainer;
|
require 'test_helper'
class Blog::Api::V1::CategoriesControllerTest < ActionController::TestCase
def setup
@controller = Blog::Api::V1::CategoriesController.new
@routes = Blog::Engine.routes
end
# GET #index
test 'GET #index returns all the categories' do
result = json_parsed('index', 10, 'category')
assert_equal 10, result.length
end
# GET #show
test 'GET #show returns data of an single category' do
category = create(:category)
result = json_parsed('show', nil, nil, category)
assert_not_nil result
end
test 'GET #show returns 404 if category is not found' do
result = get :show, id: 999, format: :json
assert_response :not_found
end
# POST #create
test 'POST #create returns a successful json string with the new category' do
attributes = attributes_for(:category, name: 'About', description: 'abc')
result = json_parsed('create', nil, 'category', attributes)
assert_equal 'About', result['name']
assert_equal 'abc', result['description']
assert_equal 'about', result['slug']
end
test 'POST #create returns an error if name is not submitted' do
attributes = attributes_for(:category, name: nil)
result = json_parsed('create', nil, 'category', attributes)
assert_response :unprocessable_entity
assert_includes result['name'], "can't be blank"
end
# PUT #update
test 'PUT #update returns a successful json string with the updated category' do
category = create(:category, name: 'About', description: 'abc')
attributes = attributes_for(:category, name: 'Contact', description: 'cba')
result = json_parsed('update', nil, 'category', category, attributes)
assert_equal 'Contact', result['name']
assert_equal 'cba', result['description']
end
test 'PUT #update returns an error if name is null' do
category = create(:category, name: 'About', description: 'abc')
attributes = attributes_for(:category, name: nil, description: 'cba')
result = json_parsed('update', nil, 'category', category, attributes)
assert_includes result['name'], "can't be blank"
end
# DELETE #destroy
test 'DELETE #destroy removes a category and returns nothing' do
category = create(:category)
delete :destroy, id: category, format: :json
assert_response :no_content
end
end
|
require 'spec_helper'
describe Hedwig::Api::Attractions do
let(:attractions) { described_class }
describe ".by_location", vcr: { cassette_name: 'location-attractions' } do
let(:id) { 150807 }
let(:options) { { lang: 'en_US' } }
let(:resource) { "location/#{id}/attractions" }
subject { attractions.by_location(id, options) }
it "creates a Hedwig::Request for 'location/:id/attractions'" do
expect(Hedwig::Request).to receive(:new).with(resource, options).and_call_original
subject
end
it "returns a Hedwig::Models::Collection" do
expect(subject).to be_a Hedwig::Models::Collection
end
context "when multiple ids are passed in", vcr: { cassette_name: 'multiget-attractions' } do
let(:id) { [2226812,233835,150807] }
let(:resource) { "location/#{id.join(',')}/attractions" }
it "creates a multi-get request" do
expect(Hedwig::Request).to receive(:new).with(resource, options).and_call_original
subject
end
end
end
describe ".by_coordinates", vcr: { cassette_name: 'map-attractions' } do
let(:latitude) { 42.33141 }
let(:longitude) { -71.099396 }
let(:options) { { lang: 'en_US' } }
let(:resource) { "map/#{latitude},#{longitude}/attractions" }
subject { attractions.by_coordinates(latitude, longitude, options) }
it "creates a Hedwig::Request for 'map/:latitude,:longitude/attractions'" do
expect(Hedwig::Request).to receive(:new).with(resource, options).and_call_original
subject
end
it "returns a Hedwig::Models::Collection" do
expect(subject).to be_a Hedwig::Models::Collection
end
end
end
|
%%%
%%% Copyright (c) 2015-2021 Klarna Bank AB (publ)
%%%
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
%% @private
-module(brod_producer_buffer_SUITE).
%% Test framework
-export([ init_per_suite/1
, end_per_suite/1
, init_per_testcase/2
, end_per_testcase/2
, all/0
, suite/0
]).
%% Test cases
-export([ t_no_ack/1
, t_random_latency_ack/1
, t_nack/1
, t_send_fun_error/1
]).
-include_lib("proper/include/proper.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("brod_int.hrl").
%% producer state
-record(state, { buffered = []
, acked = []
, delay_ref = ?undef :: ?undef | {timer:tref() | reference()}
, buf
}).
%%%_* ct callbacks =============================================================
suite() -> [{timetrap, {seconds, 60}}].
init_per_suite(Config) -> Config.
end_per_suite(_Config) -> ok.
init_per_testcase(_Case, Config) -> Config.
end_per_testcase(_Case, Config) -> Config.
all() -> [F || {F, _A} <- module_info(exports),
case atom_to_list(F) of
"t_" ++ _ -> true;
_ -> false
end].
%%%_* Test functions ===========================================================
t_no_ack(Config) when is_list(Config) ->
Opts = [{numtests, 1000}, {to_file, user}],
?assert(proper:quickcheck(prop_no_ack_run(), Opts)).
t_random_latency_ack(Config) when is_list(Config) ->
Opts = [{numtests, 500}, {to_file, user}],
?assert(proper:quickcheck(prop_random_latency_ack_run(), Opts)).
t_nack(Config) when is_list(Config) ->
SendFun =
fun(Conn, Batch, _Vsn) ->
Ref = make_ref(),
NumList = lists:map(fun(#{key := Bin, value := Bin}) ->
list_to_integer(binary_to_list(Bin))
end, Batch),
Conn ! {produce, Ref, NumList},
{ok, Ref}
end,
Buf0 = brod_producer_buffer:new(_BufferLimit = 2,
_OnWireLimit = 2,
_MaxBatchSize = 20, %% 2 messages
_MaxRetry = 1,
_MaxLingerTime = 0,
_MaxLingerCount = 0,
SendFun),
AddFun =
fun(BufIn, Num) ->
BufCb = make_buf_cb(Num),
Bin = list_to_binary(integer_to_list(Num)),
Batch = [#{key => Bin, value => Bin}],
brod_producer_buffer:add(BufIn, BufCb, Batch)
end,
MaybeSend =
fun(BufIn) ->
{ok, Buf} = brod_producer_buffer:maybe_send(BufIn, self(), 0),
Buf
end,
AckFun =
fun(BufIn, Ref) ->
brod_producer_buffer:ack(BufIn, Ref)
end,
NackFun =
fun(BufIn, Ref) ->
brod_producer_buffer:nack(BufIn, Ref, test)
end,
ReceiveFun =
fun(Line, ExpectedNums) ->
receive
{produce, RefX, NumList} ->
case ExpectedNums =:= NumList of
true ->
ok;
false ->
ct:fail("~p\nexp=~p\ngot=~p\n", [Line, ExpectedNums, NumList])
end,
RefX
after 1000 ->
erlang:error({Line, "timed out receiving produce message"})
end
end,
Buf1 = AddFun(Buf0, 0),
Buf2 = AddFun(Buf1, 1),
Buf3 = AddFun(AddFun(Buf2, 2), 3),
Buf4 = MaybeSend(Buf3),
Ref1 = ReceiveFun(?LINE, [0, 1]), %% max batch size
_Ref = ReceiveFun(?LINE, [2, 3]), %% max onwire is 2
Buf5 = NackFun(Buf4, Ref1), %% re-queue all
Buf6 = MaybeSend(Buf5), %% as if a scheduled retry
Ref3 = ReceiveFun(?LINE, [0, 1]), %% receive a max batch
Ref4 = ReceiveFun(?LINE, [2, 3]), %% another max batch (max onwire is 2)
Buf7 = AckFun(Buf6, Ref3),
Buf8 = AckFun(Buf7, Ref4),
?assert(brod_producer_buffer:is_empty(Buf8)).
t_send_fun_error(Config) when is_list(Config) ->
SendFun =
fun(_SockPid, _Batch, _Vsn) ->
{error, "the reason"}
end,
Buf0 = brod_producer_buffer:new(_BufferLimit = 1,
_OnWireLimit = 1,
_MaxBatchSize = 10000,
_MaxRetry = 1,
_MaxLingerTime = 0,
_MaxLingerCount = 0,
SendFun),
AddFun =
fun(BufIn, Num) ->
BufCb = make_buf_cb(Num),
Bin = list_to_binary(integer_to_list(Num)),
Batch = [#{key => Bin, value => Bin}],
brod_producer_buffer:add(BufIn, BufCb, Batch)
end,
MaybeSend =
fun(BufIn) ->
{retry, BufOut} = brod_producer_buffer:maybe_send(BufIn, self(), 0),
BufOut
end,
Buf1 = AddFun(AddFun(Buf0, 0), 1),
Buf2 = MaybeSend(Buf1),
?assertException(exit, {reached_max_retries, "the reason"},
MaybeSend(Buf2)).
%%%_* Help functions ===========================================================
-define(MAX_DELAY, 4).
prop_buffer_limit() -> proper_types:pos_integer().
prop_onwire_limit() -> proper_types:pos_integer().
prop_msgset_bytes() -> proper_types:pos_integer().
prop_linger_time() -> proper_types:integer(0, 10).
prop_linger_count() -> proper_types:integer(0, 100).
prop_value_list() -> proper_types:list(proper_types:binary()).
%% latency in milliseconds for fake kafka to process a key-value pair
prop_latency_ms() -> proper_types:range(0, ?MAX_DELAY).
%% pre-generate the latency together with the binary value.
prop_value_with_processing_latency_list() ->
proper_types:list({prop_latency_ms(), proper_types:binary()}).
prop_no_ack_run() ->
SendFun = fun(_SockPid, _Batch, _Vsn) -> ok end,
?FORALL(
{BufferLimit, OnWireLimit, MsgSetBytes, ValueList},
{prop_buffer_limit(), prop_onwire_limit(),
prop_msgset_bytes(), prop_value_list()},
begin
Buf = brod_producer_buffer:new(BufferLimit, OnWireLimit,
MsgSetBytes, _MaxRetries = 0,
_MaxLingerTime = 0, _MaxLingerCount = 0,
SendFun),
KeyList = lists:seq(1, length(ValueList)),
KvList = lists:zip(KeyList, ValueList),
no_ack_produce(Buf, KvList)
end).
prop_random_latency_ack_run() ->
SendFun0 =
fun(FakeKafka, Batch, _Vsn) ->
%% use reference as correlation to simplify test
Ref = make_ref(),
%% send the message to fake kafka
%% the pre-generated latency values are in KvList
%% fake kafka should receive the KvList, sleep a while
%% and reply ack
FakeKafka ! {produce, self(), Ref, Batch},
{ok, Ref}
end,
?FORALL(
{BufferLimit, OnWireLimit, MsgSetBytes,
MaxLingerTime, MaxLingerCount, ValueList},
{prop_buffer_limit(), prop_onwire_limit(),
prop_msgset_bytes(), prop_linger_time(), prop_linger_count(),
prop_value_with_processing_latency_list()},
begin
KeyList = lists:seq(1, length(ValueList)),
KvList = lists:zip(KeyList, ValueList),
Batch = lists:map(fun({K, {Delay, V}}) ->
#{key => integer_to_binary(K),
value => V,
delay => Delay}
end, KvList),
FakeKafka = spawn_fake_kafka(),
SendFun = fun(_SockPid, BatchX, Vsn) ->
SendFun0(FakeKafka, BatchX, Vsn)
end,
Buf = brod_producer_buffer:new(BufferLimit, OnWireLimit,
MsgSetBytes, _MaxRetries = 0,
MaxLingerTime, MaxLingerCount, SendFun),
random_latency_ack_produce(FakeKafka, Buf, Batch)
end).
no_ack_produce(Buf, []) ->
brod_producer_buffer:is_empty(Buf) orelse
erlang:error({buffer_not_empty, Buf});
no_ack_produce(Buf, [{Key, Value} | Rest]) ->
BufCb = make_buf_cb(Key),
BinKey = list_to_binary(integer_to_list(Key)),
Batch = [#{key => BinKey, value => Value}],
Buf1 = brod_producer_buffer:add(Buf, BufCb, Batch),
FakeSockPid = self(),
{ok, NewBuf} = brod_producer_buffer:maybe_send(Buf1, FakeSockPid, 0),
%% in case of no ack required, expect 'buffered' immediately
receive
{?buffered, Key} -> ok
after 100 -> erlang:error({timeout, brod_produce_req_buffered, Key})
end,
%% in case of no ack required, expect 'acked' immediately
receive
{?acked, Key} -> ok
after 100 -> erlang:error({timeout, brod_produce_req_acked, Key})
end,
no_ack_produce(NewBuf, Rest).
random_latency_ack_produce(FakeKafka, Buf, Batch) ->
State0 = #state{buf = Buf, buffered = [], acked = []},
#state{buffered = Buffered,
acked = Acked} = produce_loop(FakeKafka, Batch, State0),
N = length(Batch),
ok = assert_reply_sequence(Buffered, N),
ok = assert_reply_sequence(Acked, N),
ok = stop_fake_kafka(FakeKafka),
true.
produce_loop(FakeKafka, [], #state{buf = Buf} = State) ->
case brod_producer_buffer:is_empty(Buf) of
true ->
State;
false ->
NewState = collect_replies(State, ?MAX_DELAY),
produce_loop(FakeKafka, [], NewState)
end;
produce_loop(FakeKafka, [#{key := Key} = Msg | Rest], State0) ->
#state{buf = Buf0} = State0,
BufCb = make_buf_cb(binary_to_integer(Key)),
Buf1 = brod_producer_buffer:add(Buf0, BufCb, [Msg]),
State1 = State0#state{buf = Buf1},
State2 = maybe_send(State1),
State = collect_replies(State2, _Delay = 0),
produce_loop(FakeKafka, Rest, State).
collect_replies(#state{ buffered = Buffered
, acked = Acked
, buf = Buf0
, delay_ref = DelayRef
} = State0, Timeout) ->
receive
{delayed_send, Ref} when is_tuple(DelayRef) andalso
Ref =:= element(2, DelayRef) ->
State = maybe_send(State0#state{delay_ref = ?undef}),
collect_replies(State, Timeout);
{delayed_send, _} ->
%% stale message
collect_replies(State0, Timeout);
{?buffered, Key} ->
State = State0#state{buffered = [Key | Buffered]},
collect_replies(State, Timeout);
{ack_from_kafka, Ref} ->
Buf1 = brod_producer_buffer:ack(Buf0, Ref),
State1 = State0#state{buf = Buf1},
State = maybe_send(State1),
collect_replies(State, Timeout);
{?acked, Key} ->
State = State0#state{acked = [Key | Acked]},
collect_replies(State, Timeout);
Msg ->
erlang:error({unexpected, Msg})
after Timeout ->
State0
end.
maybe_send(#state{buf = Buf0, delay_ref = DelayRef} = State) ->
SendTo = self(),
_ = cancel_delay_send_timer(DelayRef),
case brod_producer_buffer:maybe_send(Buf0, SendTo, 0) of
{ok, Buf} ->
State#state{buf = Buf};
{{delay, Timeout}, Buf} ->
NewDelayRef = start_delay_send_timer(Timeout),
State#state{buf = Buf, delay_ref = NewDelayRef}
end.
%% Start delay send timer.
start_delay_send_timer(Timeout) ->
MsgRef = make_ref(),
TRef = erlang:send_after(Timeout, self(), {delayed_send, MsgRef}),
{TRef, MsgRef}.
%% Ensure delay send timer is canceled.
%% But not flushing the possibly already sent (stale) message
%% Stale message should be discarded in handle_info
cancel_delay_send_timer(?undef) -> ok;
cancel_delay_send_timer({Tref, _Msg}) -> _ = erlang:cancel_timer(Tref).
%% reply collection was accumulated in reversed order.
assert_reply_sequence([], 0) -> ok;
assert_reply_sequence([N | Rest], N) ->
assert_reply_sequence(Rest, N-1).
spawn_fake_kafka() ->
erlang:spawn_link(fun() -> fake_kafka_loop() end).
stop_fake_kafka(FakeKafka) when is_pid(FakeKafka) ->
MRef = monitor(process, FakeKafka),
FakeKafka ! stop,
receive
{'DOWN', MRef, process, FakeKafka, _} ->
ok
after 1000 ->
exit(FakeKafka, kill),
erlang:error(timeout)
end.
fake_kafka_loop() ->
receive
{produce, FromPid, Ref, Batch} ->
ok = fake_kafka_process_msgs(Batch),
FromPid ! {ack_from_kafka, Ref},
fake_kafka_loop();
stop ->
exit(normal);
Msg ->
exit({fake_kafka, unexpected, Msg})
end.
fake_kafka_process_msgs([]) -> ok;
fake_kafka_process_msgs([#{delay := DelayMs} | Rest]) ->
timer:sleep(DelayMs),
fake_kafka_process_msgs(Rest).
make_buf_cb(Ref) ->
Pid = self(),
fun(?buffered) ->
erlang:send(Pid, {?buffered, Ref});
({?acked, _BaseOffset}) ->
erlang:send(Pid, {?acked, Ref})
end.
%%%_* Emacs ====================================================================
%%% Local Variables:
%%% allout-layout: t
%%% erlang-indent-level: 2
%%% End:
|
<?php
namespace App\Http\Controllers\admin;
use App\Http\Controllers\Controller;
use App\Models\User;
use App\Models\Wilayah;
use Illuminate\Http\Request;
use Yajra\DataTables\Facades\DataTables;
class PelangganAdminController extends Controller
{
function main()
{
$data['title'] = 'Data Pelanggan — ' . config('app.name');
return view('admin.pelanggan-main', $data);
}
function datatable(Request $request)
{
return DataTables::of(User::where('type', '!=', 'admin')->get())
->addColumn('alamat', function($user) {
$wilayah = new Wilayah();
$alamat = ($user->alamat ? $user->alamat . ', ' : '');
$kelurahan = $wilayah->getKelurahan($user->kecamatan, $user->kelurahan)['nama'];
$kecamatan = $wilayah->getKecamatan($user->kabupaten, $user->kecamatan)['nama'];
$kabupaten = $wilayah->getKabupaten($user->provinsi, $user->kabupaten)['nama'];
$provinsi = $wilayah->getProvinsi($user->provinsi)['nama'];
return $alamat . $kelurahan . ', ' . $kecamatan . ', ' . $kabupaten . ', ' . $provinsi;
})->toJSON();
}
function add()
{
$data['title'] = 'Tambah Pelanggan — ' . config('app.name');
return view('admin.pelanggan-add', $data);
}
function addProcess(Request $request)
{}
}
|
import { computed, reactive } from 'vue';
import { useQuery } from 'vue-query';
import { QueryObserverOptions } from 'react-query/core';
import useTokens from '@/composables/useTokens';
import { useStore } from 'vuex';
import { pick } from 'lodash';
import QUERY_KEYS from '@/constants/queryKeys';
import BalancerContracts from '@/services/balancer/contracts/service';
import BalancerSubgraph from '@/services/balancer/subgraph/service';
import { DecoratedPool, FullPool } from '@/services/balancer/subgraph/types';
import { POOLS } from '@/constants/pools';
export default function usePoolQuery(
id: string,
options: QueryObserverOptions<FullPool> = {}
) {
// COMPOSABLES
const store = useStore();
const { tokens: allTokens } = useTokens();
// SERVICES
const balancerSubgraph = new BalancerSubgraph();
const balancerContracts = new BalancerContracts();
// DATA
const queryKey = QUERY_KEYS.Pools.Current(id);
// COMPUTED
const appLoading = computed(() => store.state.app.loading);
const prices = computed(() => store.state.market.prices);
const isQueryEnabled = computed(() => !appLoading.value);
function tokensInjected(pool: DecoratedPool): boolean {
if (!allTokens.value) return false;
const allAddresses = Object.keys(allTokens.value);
return [...pool.tokenAddresses, pool.address].every(address =>
allAddresses.includes(address)
);
}
// METHODS
const queryFn = async () => {
const [pool] = await balancerSubgraph.pools.getDecorated(
'24h',
prices.value,
{
where: {
id: id.toLowerCase(),
          totalShares_gt: -1 // Avoid filtering out low-liquidity pools
}
}
);
if (pool.poolType === 'Stable' && !POOLS.Stable.AllowList.includes(id)) {
throw new Error('Pool not allowed');
}
if (!tokensInjected(pool)) {
await store.dispatch('registry/injectTokens', [
...pool.tokenAddresses,
pool.address
]);
}
const tokens = pick(allTokens.value, pool.tokenAddresses);
const onchainData = await balancerContracts.vault.getPoolData(
id,
pool.poolType,
tokens
);
return { ...pool, onchain: onchainData };
};
const queryOptions = reactive({
enabled: isQueryEnabled,
...options
});
return useQuery<FullPool>(queryKey, queryFn, queryOptions);
}
|
use std::fs::{remove_file, File, OpenOptions};
use std::io::prelude::*;
use std::ops::Deref;
use std::os::unix::fs::OpenOptionsExt;
use std::path::{Component, Path};
use std::process::Command;
use std::sync::Arc;
use chrono::{DateTime, Local};
use chrono_tz::Tz;
use eui48::MacAddress;
use serde::{Deserialize, Serialize};
use tonic::{Request, Response, Status};
use validator::Validate;
use super::super::super::super::{
auth::services::Session,
crypto::Aes,
jwt::Jwt,
ntp::Response as NtpResponse,
orm::sqlite::{Connection as Db, Pool as DbPool},
sys::network::{
ip4 as get_ip4, is_on, mac as get_mac,
systemd::{Dhcp, Ip, Static, Wifi, Wpa},
},
GrpcResult, Result,
};
use super::super::{
models::settings::Dao as SettingDao,
v1::{
network_profile, os_server::Os, status_response, DnsRequest, LinesResponse, LogsRequest,
NetworkProfile, NtpProfile, PingRequest, RestoreRequest, StatusResponse, VpnProfile,
},
};
use super::user::CurrentUser;
pub struct Service {
pub db: DbPool,
pub jwt: Arc<Jwt>,
pub aes: Arc<Aes>,
}
#[tonic::async_trait]
impl Os for Service {
async fn logs(&self, req: Request<LogsRequest>) -> GrpcResult<LinesResponse> {
current_pi_user!(self, &req);
let req = req.into_inner();
let output = try_grpc!(
Command::new("journalctl")
.arg("-u")
.arg(&req.name)
.arg("-b")
.output(),
Status::invalid_argument
)?;
if !output.status.success() {
return Err(Status::internal(format!(
"{:#?} {}",
output.status,
try_grpc!(String::from_utf8(output.stderr))?
)));
}
let out = try_grpc!(String::from_utf8(output.stdout))?;
let lines: Vec<&str> = out.split('\n').collect();
Ok(Response::new(LinesResponse {
messages: lines.iter().map(|x| x.to_string()).collect(),
}))
}
async fn status(&self, req: Request<()>) -> GrpcResult<StatusResponse> {
current_pi_user!(self, &req);
let si = try_grpc!(nix::sys::sysinfo::sysinfo())?;
let un = nix::sys::utsname::uname();
let load = si.load_average();
Ok(Response::new(StatusResponse {
uptime: Some(si.uptime().into()),
uname: Some(status_response::Uname {
sys: un.sysname().to_string(),
node: un.nodename().to_string(),
machine: un.machine().to_string(),
release: un.release().to_string(),
version: un.version().to_string(),
}),
process: si.process_count() as u32,
load: Some(status_response::Load {
one: load.0,
five: load.1,
fifteen: load.2,
}),
swap: Some(status_response::Range {
total: si.swap_total(),
free: si.swap_free(),
}),
ram: Some(status_response::Range {
total: si.ram_total(),
                free: si.ram_unused(),
}),
versions: Vec::new(),
}))
}
async fn reboot(&self, req: Request<()>) -> GrpcResult<()> {
current_pi_user!(self, &req);
try_grpc!(super::super::super::super::sys::reboot())?;
Ok(Response::new(()))
}
async fn reset(&self, req: Request<()>) -> GrpcResult<()> {
current_pi_user!(self, &req);
// TODO
Ok(Response::new(()))
}
async fn dump(&self, req: Request<()>) -> GrpcResult<()> {
current_pi_user!(self, &req);
// TODO
Ok(Response::new(()))
}
async fn restore(&self, req: Request<RestoreRequest>) -> GrpcResult<()> {
current_pi_user!(self, &req);
// TODO
Ok(Response::new(()))
}
async fn ping(&self, req: Request<PingRequest>) -> GrpcResult<LinesResponse> {
current_pi_user!(self, &req);
let req = req.into_inner();
let form = Ping { host: req.host };
let out = try_grpc!(form.execute())?;
let lines: Vec<&str> = out.split('\n').collect();
Ok(Response::new(LinesResponse {
messages: lines.iter().map(|x| x.to_string()).collect(),
}))
}
async fn dns(&self, req: Request<DnsRequest>) -> GrpcResult<LinesResponse> {
current_pi_user!(self, &req);
let req = req.into_inner();
let form = Dns {
server: req.server.clone(),
host: req.host,
};
let out = try_grpc!(form.execute())?;
let lines: Vec<&str> = out.split('\n').collect();
Ok(Response::new(LinesResponse {
messages: lines.iter().map(|x| x.to_string()).collect(),
}))
}
async fn get_network(&self, req: Request<()>) -> GrpcResult<NetworkProfile> {
current_pi_user!(self, &req);
let db = try_grpc!(self.db.get())?;
let db = db.deref();
let aes = self.aes.deref();
let form: Network = SettingDao::get(db, aes, Network::KEY).unwrap_or_default();
Ok(Response::new(NetworkProfile {
eth: Some(network_profile::Eth {
name: form.eth.name.clone(),
ip: Some(match form.eth.ip {
Ip::Dhcp => network_profile::eth::Ip::Dhcp(true),
Ip::Static {
address,
netmask,
gateway,
dns1,
dns2,
} => network_profile::eth::Ip::Static(network_profile::Static {
address,
netmask,
gateway,
dns1,
dns2,
}),
}),
}),
wlan: Some(network_profile::Wlan {
name: form.wlan.name.clone(),
wifi: form.wlan.wifi.map(|wifi| match wifi {
Wifi::Open { ssid } => {
network_profile::wlan::Wifi::Open(network_profile::Open { ssid })
}
Wifi::Psk { ssid, password } => {
network_profile::wlan::Wifi::Psk(network_profile::Psk { ssid, password })
}
Wifi::Eap {
ssid,
identity,
password,
} => network_profile::wlan::Wifi::Eap(network_profile::Eap {
ssid,
identity,
password,
}),
}),
ip: Some(match form.wlan.ip {
Ip::Dhcp => network_profile::wlan::Ip::Dhcp(true),
Ip::Static {
address,
netmask,
gateway,
dns1,
dns2,
} => network_profile::wlan::Ip::Static(network_profile::Static {
address,
netmask,
gateway,
dns1,
dns2,
}),
}),
}),
}))
}
async fn set_network(&self, req: Request<NetworkProfile>) -> GrpcResult<()> {
current_pi_user!(self, &req);
// TODO
Ok(Response::new(()))
}
async fn get_ntp(&self, req: Request<()>) -> GrpcResult<NtpProfile> {
current_pi_user!(self, &req);
let db = try_grpc!(self.db.get())?;
let db = db.deref();
let aes = self.aes.deref();
let it: Ntp = SettingDao::get(db, aes, Ntp::KEY).unwrap_or_default();
Ok(Response::new(NtpProfile {
enable: it.enable,
timezone: it.timezone.name().to_string(),
servers: it.servers,
heartbeat: it.heartbeat as u64,
}))
}
async fn set_ntp(&self, req: Request<NtpProfile>) -> GrpcResult<()> {
current_pi_user!(self, &req);
let db = try_grpc!(self.db.get())?;
let db = db.deref();
let req = req.into_inner();
let form = Ntp {
enable: req.enable,
timezone: req.timezone.parse().map_err(Status::invalid_argument)?,
servers: req.servers,
heartbeat: req.heartbeat as usize,
};
try_grpc!(form.save(db, &self.aes))?;
Ok(Response::new(()))
}
async fn get_vpn(&self, req: Request<()>) -> GrpcResult<VpnProfile> {
current_pi_user!(self, &req);
let db = try_grpc!(self.db.get())?;
let db = db.deref();
let aes = self.aes.deref();
let it: Vpn = SettingDao::get(db, aes, Vpn::KEY).unwrap_or_default();
Ok(Response::new(VpnProfile {
enable: it.enable,
body: it.body,
}))
}
async fn set_vpn(&self, req: Request<VpnProfile>) -> GrpcResult<()> {
current_pi_user!(self, &req);
let db = try_grpc!(self.db.get())?;
let db = db.deref();
let req = req.into_inner();
let form = Vpn {
enable: req.enable,
body: req.body,
};
let aes = self.aes.deref();
try_grpc!(form.save(db, aes))?;
Ok(Response::new(()))
}
}
#[derive(Validate)]
pub struct Ping {
#[validate(length(min = 1))]
pub host: String,
}
impl Ping {
pub fn execute(&self) -> Result<String> {
self.validate()?;
let out = Command::new("ping")
.arg("-W")
.arg("2")
.arg("-c")
.arg("4")
.arg(&self.host)
.output()?;
debug!("{:?}", out);
Ok(String::from_utf8(out.stdout)?)
}
}
#[derive(Validate)]
pub struct Dns {
#[validate(length(min = 1))]
pub host: String,
#[validate(length(min = 1))]
pub server: Option<String>,
}
impl Dns {
pub fn execute(&self) -> Result<String> {
self.validate()?;
let out = match self.server {
Some(ref it) => Command::new("dig")
.arg(&format!("@{}", it))
.arg(&self.host)
.output(),
None => Command::new("dig").arg(&self.host).output(),
}?;
debug!("{:?}", out);
Ok(String::from_utf8(out.stdout)?)
}
}
#[derive(Serialize, Deserialize, Validate, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Ntp {
pub timezone: Tz,
#[validate(length(min = 1))]
pub servers: Vec<String>,
#[validate(range(min = 5))]
pub heartbeat: usize,
pub enable: bool,
}
impl Default for Ntp {
fn default() -> Self {
Self {
timezone: Tz::UTC,
servers: vec!["0.us.pool.ntp.org".to_string()],
heartbeat: 60 * 60 * 24,
enable: false,
}
}
}
impl Ntp {
pub fn timesyncd(&self) -> String {
format!(
r#"
[Time]
NTP={servers}
FallbackNTP=0.pool.ntp.org 1.pool.ntp.org 2.pool.ntp.org 3.pool.ntp.org
"#,
servers = self.servers.join(" ")
)
}
pub fn crontab(&self) -> String {
format!(
r#"
#!/bin/bash
for i in {servers}
do
ntpdate $i && break
done
"#,
servers = self.servers.join(" ")
)
}
pub fn test(&self) -> Result<Vec<DateTime<Local>>> {
let mut items = Vec::new();
for it in self.servers.iter() {
let now: DateTime<Local> = NtpResponse::fetch(it, None)?.into();
items.push(now);
}
Ok(items)
}
pub fn ping(&self) -> Option<DateTime<Local>> {
for it in self.servers.iter() {
if let Ok(it) = NtpResponse::fetch(it, None) {
return Some(it.into());
}
}
None
}
pub const KEY: &'static str = "ntp.client";
pub fn save(&self, db: &Db, aes: &Aes) -> Result<()> {
self.validate()?;
self.test()?;
debug!("save ntp server {:?}", self);
let file = Path::new(&Component::RootDir)
.join("etc")
.join("systemd")
.join("timesyncd.conf");
if self.enable {
let mut fd = File::create(&file)?;
write!(&mut fd, "{}", self.timesyncd())?;
} else if file.exists() {
remove_file(&file)?;
}
SettingDao::set(db, aes, Self::KEY, self, true)?;
Ok(())
}
}
#[derive(Serialize, Deserialize, Validate, Default)]
#[serde(rename_all = "camelCase")]
pub struct Vpn {
pub enable: bool,
#[validate(length(min = 1))]
pub body: String,
}
impl Vpn {
pub const KEY: &'static str = "openvpn.client";
pub fn save(&self, db: &Db, aes: &Aes) -> Result<()> {
self.validate()?;
let file = Path::new(&Component::RootDir)
.join("etc")
.join("openvpn")
.join("client.conf");
if self.enable {
info!("generate file {}", file.display());
let mut fd = OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.mode(0o600)
.open(file)?;
fd.write_all(self.body.as_bytes())?;
} else if file.exists() {
info!("delete file {}", file.display());
remove_file(file)?;
}
SettingDao::set(db, aes, Self::KEY, self, true)?;
Ok(())
}
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Wlan {
pub name: String,
pub wifi: Option<Wifi>,
pub ip: Ip,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Eth {
pub name: String,
pub ip: Ip,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Network {
pub eth: Eth,
pub wlan: Wlan,
}
impl Default for Network {
fn default() -> Self {
Self {
eth: Eth {
name: Self::ETH.to_string(),
ip: Ip::default(),
},
wlan: Wlan {
name: Self::WLAN.to_string(),
ip: Ip::default(),
wifi: None,
},
}
}
}
impl Network {
pub const KEY: &'static str = "systemd.network";
pub const ETH: &'static str = "eth0";
pub const WLAN: &'static str = "wlan0";
#[cfg(debug_assertions)]
pub fn mac(&self) -> Result<MacAddress> {
get_mac("wlp3s0")
}
#[cfg(not(debug_assertions))]
pub fn mac(&self) -> Result<MacAddress> {
get_mac(Self::ETH)
}
pub fn is_on(&self) -> bool {
(is_on(&self.eth.name) && get_ip4(&self.eth.name).is_some())
|| (is_on(&self.wlan.name) && get_ip4(&self.wlan.name).is_some())
}
pub fn save(&self, vendor: &str, db: &Db, aes: &Aes) -> Result<()> {
debug!("save network interfaces {:?}", self);
{
let metric = 50;
match self.eth.ip {
Ip::Static {
ref address,
ref netmask,
ref gateway,
ref dns1,
ref dns2,
} => Static::new(
&self.eth.name,
metric,
address,
netmask,
gateway,
dns1,
dns2.as_deref(),
)?
.save(vendor)?,
Ip::Dhcp => Dhcp {
name: self.eth.name.clone(),
metric,
options: vec![Dhcp::WWW],
}
.save(vendor)?,
};
}
{
let metric = 200;
match self.wlan.wifi {
Some(ref it) => {
it.save(&self.wlan.name)?;
Dhcp {
name: self.wlan.name.clone(),
metric,
options: vec![Dhcp::WWW],
}
.save(vendor)?;
Wpa.save(&self.wlan.name)?;
}
None => {
Wifi::remove(&self.wlan.name)?;
}
}
}
SettingDao::set(db, aes, Self::KEY, self, true)?;
Ok(())
}
}
|
require 'spec_helper'
require 'pathname'
require 'active_support/core_ext/object/blank'
describe UniqueHtmlExtractonator do
it 'has a version number' do
expect(UniqueHtmlExtractonator::VERSION).not_to be nil
end
let(:root_path) { Pathname.new File.realpath('.', File.dirname(__FILE__)) }
def fixture_read(file)
File.open(root_path.join("fixtures/#{file}")).read
end
describe 'html extraction' do
let(:reference_html) { fixture_read 'common1.html' }
let(:html) { fixture_read self.class.metadata[:description] }
let(:extractor) { UniqueHtmlExtractonator::Extractor.new(reference_html: reference_html, html: html) }
subject { extractor.extract }
context 'common1.html' do
let(:reference_html) { fixture_read 'common3.html' }
it 'should be parsed' do is_expected.to eq(fixture_read 'common1.extracted.html') end
end
context 'common2.html' do
it 'should be parsed' do is_expected.to eq(fixture_read 'common2.extracted.html') end
end
context 'common3.html' do
it 'should be parsed' do is_expected.to eq(fixture_read 'common3.extracted.html') end
end
end
end
|
import React from "react";
import { Button } from "reactstrap";
import "./post-status-filter.css";
const PostStatusFilter = () => {
return (
<div className="btn-group">
<Button color="info">Все</Button>
{/* <button type="button" className="btn btn-info">
Все
</button> */}
<button type="button" className="btn btn-outline-secondary">
Понравилось
</button>
</div>
);
};
export default PostStatusFilter;
|
/* Style Changer */
jQuery(document).ready(function(){
/* Style Changer Autohide */
jQuery('.chBut').parent().delay(1000).animate({left:'-180px'}, 500, function(){
jQuery(this).find('.chBut').next('.chBody').css({display:'none'});
jQuery(this).find('.chBut').addClass('closed');
});
/* Style Changer Toggle */
jQuery('.chBut').click(function(){
if (jQuery(this).hasClass('closed')){
jQuery(this).next('.chBody').css({display:'block'}).parent().animate({left:0}, 500, function(){
jQuery(this).find('.chBut').removeClass('closed');
});
} else {
jQuery(this).parent().animate({left:'-180px'}, 500, function(){
jQuery(this).find('.chBut').next('.chBody').css({display:'none'});
jQuery(this).find('.chBut').addClass('closed');
});
}
return false;
});
/* Window Resize Function */
jQuery(window).resize(function(){
if (jQuery(window).height() < 750){
jQuery('#stlChanger').css({position:'absolute'});
} else {
jQuery('#stlChanger').css({position:'fixed'});
}
});
});
|
package com.onegravity.bloc.posts_compose
import androidx.compose.foundation.lazy.LazyColumn
import androidx.compose.foundation.lazy.items
import androidx.compose.material.Divider
import androidx.compose.runtime.Composable
import androidx.compose.ui.Modifier
import com.onegravity.bloc.sample.posts.domain.repositories.Post
@Composable
internal fun Posts(
posts: List<Post>,
selectedPost: Int?,
modifier: Modifier = Modifier,
onClicked: (post: Post) -> Unit
) {
LazyColumn(modifier = modifier) {
items(posts) { post ->
PostItem(post, post.id == selectedPost, onClicked)
Divider()
}
}
}
|
package common
import (
"github.com/mitchellh/packer/template/interpolate"
)
type PrlctlConfig struct {
Prlctl [][]string `mapstructure:"prlctl"`
}
func (c *PrlctlConfig) Prepare(ctx *interpolate.Context) []error {
if c.Prlctl == nil {
c.Prlctl = make([][]string, 0)
}
return nil
}
|
#!/usr/bin/env bash
export ENV_STATE=test
# pytest -vv
pytest --cov --cov-fail-under=80 -vv --cov-report html
|
import React from 'react';
import {Route, Redirect} from '../lib/react-router-dom';
function Protected(props: any) {
const {component: RouteComponent, path} = props
return (
        // todo: logic: if the user is logged in, render the Component; if not, redirect to the login page instead
<div>
<Route path={path} render={
(routeProps: any) => {
return localStorage.getItem('login') ? <RouteComponent {...routeProps} /> :
<Redirect to={{pathname: '/login', state: {from: routeProps.location.pathname}}} />
}
} />
</div>
)
}
export default Protected |
using BanBrick.TypeScript.CodeGenerator.Convertors;
using BanBrick.TypeScript.CodeGenerator.Enums;
using BanBrick.TypeScript.CodeGenerator.Extensions;
using BanBrick.TypeScript.CodeGenerator.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace BanBrick.TypeScript.CodeGenerator.Resolvers
{
internal class ConfigResolver
{
public IEnumerable<TypeDefinition> Resolve(IEnumerable<TypeDefinition> definitions)
{
var unprocessedDefinitions = definitions.Where(x => x.IsFirstLevel)
.Select(x => { x.ProcessConfig = ConfigConvertor.GetProcessConfig(x.Type); return x; })
.OrderBy(x => x.ProcessConfig.OutputType)
.ToList();
var processedDictionary = definitions.ToDictionary(x => x.Type, x => x);
while (unprocessedDefinitions.Any())
{
var processingDefinition = unprocessedDefinitions.First();
unprocessedDefinitions.RemoveAt(0);
processingDefinition.ProcessConfig =
processingDefinition.ProcessConfig ?? ConfigConvertor.GetProcessConfig(processingDefinition.Type);
processedDictionary[processingDefinition.Type] = processingDefinition;
var innerTypes = new List<Type>();
if (processingDefinition.ProcessingCategory == ProcessingCategory.Collection)
{
innerTypes.Add(processingDefinition.Type.GetCollectionType());
}
if (processingDefinition.ProcessingCategory == ProcessingCategory.Dictionary)
{
var dicTypes = processingDefinition.Type.GetDictionaryTypes();
innerTypes.Add(dicTypes.key);
innerTypes.Add(dicTypes.value);
}
if (processingDefinition.ProcessingCategory == ProcessingCategory.Generic)
{
innerTypes.AddRange(processingDefinition.Type.GetGenericArguments());
}
if (processingDefinition.ProcessingCategory == ProcessingCategory.Object)
{
innerTypes.AddRange(processingDefinition.Properties.Select(x => x.Type));
}
foreach (var innerType in innerTypes)
{
var propertyDefinition = processedDictionary[innerType];
// ignore property definition that's the same as processing definition
if (propertyDefinition == processingDefinition)
continue;
if (propertyDefinition.ProcessConfig == null)
{
if (processingDefinition.ProcessConfig?.Inherit ?? false) {
propertyDefinition.ProcessConfig = new ProcessConfig() {
OutputType = processingDefinition.ProcessConfig.OutputType,
Inherit = true
};
}
if (processingDefinition.ProcessConfig?.OutputType == OutputType.Const) {
propertyDefinition.ProcessConfig = new ProcessConfig() {
OutputType = OutputType.None,
Inherit = true
};
}
}
unprocessedDefinitions.Add(propertyDefinition);
}
}
var processedDefinitions = processedDictionary.Select(x => x.Value);
            // remove config from inherited non-heritable types
processedDefinitions.Where(x => !x.IsInheritable() && (x.ProcessConfig?.Inherit ?? false)).ToList()
.ForEach(x =>
x.ProcessConfig = null
);
processedDefinitions.Where(x => x.ProcessConfig == null).ToList().ForEach(x =>
{
x.ProcessConfig = ConfigConvertor.GetProcessConfig(x.ProcessingCategory);
});
processedDefinitions.Where(x => x.ProcessConfig.OutputType == OutputType.Default).ToList().ForEach(x =>
{
x.ProcessConfig.OutputType = ConfigConvertor.Parse(x.ProcessingCategory);
});
return processedDefinitions;
}
}
}
|
import {LanguageId, NewLanguageInput, EditLanguageInput} from '../../graphql';
import FieldSet from '../field-set';
import {DescriptionMut} from '../description';
import {SearchIndexMut} from '../search-index';
import {DefinitionMut} from '../definition';
import {LemmaMut} from '../lemma';
import {PartOfSpeechMut} from '../part-of-speech';
import {TagMut} from '../tag';
import {Language} from './model';
import {LanguageRow} from './types';
import {validateName} from './validators';
import {MutContext} from '../types';
const LanguageMut = {
insert(context: MutContext, data: NewLanguageInput): Promise<LanguageRow> {
const {name, description} = data;
const validName = validateName(context.db, null, name);
return MutContext.transact(context, context => {
const {db, events, logger} = context;
const desc = DescriptionMut.insert(db, description || []);
const now = Date.now();
const {insertId: languageId} = db.exec<LanguageId>`
insert into languages (name, description_id, time_created, time_updated)
values (${validName}, ${desc.id}, ${now}, ${now})
`;
SearchIndexMut.insertLanguage(db, languageId, validName);
events.emit({type: 'language', action: 'create', id: languageId});
logger.verbose(`Created language: ${languageId}`);
return Language.byIdRequired(db, languageId);
});
},
async update(
context: MutContext,
id: LanguageId,
data: EditLanguageInput
): Promise<LanguageRow> {
const {db} = context;
const {name, description} = data;
const language = await Language.byIdRequired(db, id);
const newFields = new FieldSet<LanguageRow>();
if (name != null) {
newFields.set('name', validateName(db, language.id, name));
}
if (newFields.hasValues || description) {
await MutContext.transact(context, context => {
const {db, events, logger} = context;
newFields.set('time_updated', Date.now());
db.exec`
update languages
set ${newFields}
where id = ${language.id}
`;
const newName = newFields.get('name');
if (newName != null) {
SearchIndexMut.updateLanguage(db, language.id, newName);
}
if (description) {
DescriptionMut.update(db, language.description_id, description);
}
events.emit({type: 'language', action: 'update', id: language.id});
logger.verbose(`Updated language: ${language.id}`);
db.clearCache(Language.byIdKey, language.id);
});
}
return Language.byIdRequired(db, id);
},
async delete(context: MutContext, id: LanguageId): Promise<boolean> {
const {db} = context;
const language = await Language.byId(db, id);
if (!language) {
return false;
}
await MutContext.transact(context, context => {
const {db, events, logger} = context;
logger.debug(`Begin deletion of language: ${language.id}`);
// Definitions reference lemmas, parts of speech, inflection tables,
// inflected forms... We must delete them before anything else.
DefinitionMut.deleteAllInLanguage(db, language.id);
logger.debug('Deleted all definitions');
LemmaMut.deleteAllInLanguage(db, language.id);
logger.debug('Deleted all lemmas');
PartOfSpeechMut.deleteAllInLanguage(db, language.id);
logger.debug('Deleted all parts of speech');
logger.debug(`Deleting language row: ${language.id}`);
db.exec`
delete from languages
where id = ${language.id}
`;
DescriptionMut.delete(db, language.description_id);
logger.debug('Deleted description');
SearchIndexMut.deleteLanguage(db, language.id);
events.emit({type: 'language', action: 'delete', id: language.id});
logger.debug(`Deleting orphaned tags`);
TagMut.deleteOrphaned(context);
logger.debug(`Language deleted: ${language.id}`);
});
return true;
},
} as const;
export {LanguageMut};
|
from flask import Flask, jsonify
app = Flask(__name__)
@app.route('/', methods=['GET'])
def hello_rest():
return jsonify({
"greeting": "Hello REST World"
})
@app.route('/add/<a>/<b>', methods=['GET'])
def add(a, b):
return jsonify({
"a": a,
"b": b,
"addition": int(a) + int(b),
})
@app.route('/mul/<a>/<b>', methods=['GET'])
def prod(a, b):
return jsonify({
"a": a,
"b": b,
"product": float(a) * float(b),
})
@app.route('/pow/<a>/<b>', methods=['GET'])
def powered(a, b):
return jsonify({
"a": a,
"b": b,
"power": float(a) ** float(b)
})
@app.route('/div/<a>/<b>', methods=['GET'])
def divide(a, b):
return jsonify({
"a": a,
"b": b,
"quotient": float(a) // float(b),
"remainder": float(a) % float(b)
})
if __name__ == '__main__':
app.run()
|