text
stringlengths
27
775k
<?php

namespace App\Model\Settings;

use Illuminate\Database\Eloquent\Model;
use App\Model\Tables\TableConsumer;

/**
 * Eloquent model for the `system_invoice` table.
 *
 * The table uses `invoice_id` as its primary key and has no
 * created_at / updated_at columns, so timestamps are disabled.
 */
class SystemInvoice extends Model
{
    // protected $connection = 'settings';

    protected $table = 'system_invoice';

    protected $primaryKey = 'invoice_id';

    // No created_at / updated_at columns on this table.
    public $timestamps = false;

    /**
     * Invoice group this invoice belongs to (system_invoice.group_id -> system_invoice_group.group_id).
     */
    public function groupInfo()
    {
        return $this->belongsTo(SystemInvoiceGroup::class, 'group_id', 'group_id');
    }

    /**
     * Line items of this invoice (system_invoice_details.invoice_id -> system_invoice.invoice_id).
     */
    public function detailsInfo()
    {
        return $this->hasMany(SystemInvoiceDetails::class, 'invoice_id', 'invoice_id');
    }

    /**
     * Consumer record acting as the supplier on this invoice.
     *
     * NOTE(review): declared as hasOne(related, foreignKey: consumer_id, localKey: invoice_supplier);
     * since the key lives on the consumers table this looks like it may have been
     * intended as belongsTo — confirm against callers before changing.
     */
    public function invoiceSupplierInfo()
    {
        return $this->hasOne(TableConsumer::class, 'consumer_id', 'invoice_supplier');
    }

    /**
     * Consumer record acting as the customer on this invoice.
     *
     * NOTE(review): same hasOne-vs-belongsTo question as invoiceSupplierInfo() — confirm.
     */
    public function invoiceCustomerInfo()
    {
        return $this->hasOne(TableConsumer::class, 'consumer_id', 'invoice_customer');
    }
}
#![allow(non_camel_case_types)] use crate::{ arch, arch::{Architecture, NativeArch}, bindings::{kernel, kernel::sock_filter, signal}, kernel_abi::{common, Ptr}, }; use std::mem::{self, size_of}; #[repr(C)] pub struct robust_list<Arch: Architecture> { pub next: Ptr<Arch::unsigned_word, robust_list<Arch>>, } /// Had to manually derive Copy and Clone /// Would not work otherwise impl<Arch: Architecture> Clone for robust_list<Arch> { fn clone(&self) -> Self { robust_list { next: self.next } } } impl<Arch: Architecture> Copy for robust_list<Arch> {} assert_eq_size!(kernel::robust_list, robust_list<NativeArch>); assert_eq_align!(kernel::robust_list, robust_list<NativeArch>); #[repr(C)] pub struct robust_list_head<Arch: Architecture> { pub list: robust_list<Arch>, pub futex_offset: Arch::signed_long, pub list_op_pending: Ptr<Arch::unsigned_word, robust_list<Arch>>, } /// Had to manually derive Copy and Clone /// Would not work otherwise impl<Arch: Architecture> Clone for robust_list_head<Arch> { fn clone(&self) -> Self { robust_list_head { list: self.list, futex_offset: self.futex_offset, list_op_pending: self.list_op_pending, } } } impl<Arch: Architecture> Copy for robust_list_head<Arch> {} assert_eq_size!(kernel::robust_list_head, robust_list_head<NativeArch>); assert_eq_align!(kernel::robust_list_head, robust_list_head<NativeArch>); #[repr(C)] #[derive(Copy, Clone, Default)] pub struct sock_fprog<Arch: Architecture> { pub len: u16, pub _padding: Arch::FPROG_PAD_ARR, pub filter: Ptr<Arch::unsigned_word, sock_filter>, } assert_eq_size!(kernel::sock_fprog, sock_fprog<NativeArch>); assert_eq_align!(kernel::sock_fprog, sock_fprog<NativeArch>); #[repr(C)] #[derive(Copy, Clone, Default)] pub struct kernel_sigaction<Arch: Architecture> { pub k_sa_handler: Ptr<Arch::unsigned_word, u8>, pub sa_flags: Arch::unsigned_long, pub sa_restorer: Ptr<Arch::unsigned_word, u8>, /// This is what it is for x86 and x64 to make things simple /// Might this definition cause problems elsewhere 
e.g. for AArch64? pub sa_mask: u64, } #[repr(C)] #[derive(Copy, Clone, Default)] pub struct mmap_args<Arch: Architecture> { pub addr: Ptr<Arch::unsigned_word, u8>, pub len: Arch::size_t, pub prot: i32, pub flags: i32, pub fd: i32, pub __pad: Arch::STD_PAD_ARR, pub offset: Arch::off_t, } #[repr(C)] pub union sigval_t<Arch: Architecture> { pub sival_int: i32, pub sival_ptr: Ptr<Arch::unsigned_word, u8>, } impl<Arch: Architecture> Clone for sigval_t<Arch> { fn clone(&self) -> Self { unsafe { sigval_t { sival_ptr: self.sival_ptr, } } } } impl<Arch: Architecture> Copy for sigval_t<Arch> {} #[repr(C)] #[derive(Copy, Clone, Default)] pub struct siginfo_kill { pub si_pid_: common::pid_t, pub si_uid_: common::uid_t, } #[repr(C)] pub struct siginfo_timer<Arch: Architecture> { pub si_tid_: i32, pub si_overrun_: i32, pub si_sigval_: sigval_t<Arch>, } impl<Arch: Architecture> Clone for siginfo_timer<Arch> { fn clone(&self) -> Self { siginfo_timer { si_tid_: self.si_tid_, si_overrun_: self.si_overrun_, si_sigval_: self.si_sigval_, } } } impl<Arch: Architecture> Copy for siginfo_timer<Arch> {} #[repr(C)] pub struct siginfo_rt<Arch: Architecture> { pub si_pid_: common::pid_t, pub si_uid_: common::uid_t, pub si_sigval_: sigval_t<Arch>, } impl<Arch: Architecture> Clone for siginfo_rt<Arch> { fn clone(&self) -> Self { siginfo_rt { si_pid_: self.si_pid_, si_uid_: self.si_uid_, si_sigval_: self.si_sigval_, } } } impl<Arch: Architecture> Copy for siginfo_rt<Arch> {} #[repr(C)] #[derive(Default)] pub struct siginfo_sigchld<Arch: Architecture> { pub si_pid_: common::pid_t, pub si_uid_: common::uid_t, pub si_status_: i32, pub si_utime_: Arch::sigchld_clock_t, pub si_stime_: Arch::sigchld_clock_t, } impl<Arch: Architecture> Clone for siginfo_sigchld<Arch> { fn clone(&self) -> Self { siginfo_sigchld { si_pid_: self.si_pid_, si_uid_: self.si_uid_, si_status_: self.si_status_, si_utime_: self.si_utime_, si_stime_: self.si_stime_, } } } impl<Arch: Architecture> Copy for siginfo_sigchld<Arch> {} 
#[repr(C)] #[derive(Default)] pub struct siginfo_sigfault<Arch: Architecture> { pub si_addr_: Ptr<Arch::unsigned_word, u8>, pub si_addr_lsb_: Arch::signed_short, } impl<Arch: Architecture> Clone for siginfo_sigfault<Arch> { fn clone(&self) -> Self { siginfo_sigfault { si_addr_: self.si_addr_, si_addr_lsb_: self.si_addr_lsb_, } } } impl<Arch: Architecture> Copy for siginfo_sigfault<Arch> {} #[repr(C)] #[derive(Default)] pub struct siginfo_sigpoll<Arch: Architecture> { pub si_band_: Arch::signed_long, pub si_fd_: i32, } impl<Arch: Architecture> Clone for siginfo_sigpoll<Arch> { fn clone(&self) -> Self { siginfo_sigpoll { si_band_: self.si_band_, si_fd_: self.si_fd_, } } } impl<Arch: Architecture> Copy for siginfo_sigpoll<Arch> {} #[repr(C)] #[derive(Default)] pub struct siginfo_sigsys<Arch: Architecture> { pub _call_addr: Ptr<Arch::unsigned_word, u8>, pub _syscall: i32, pub _arch: u32, } impl<Arch: Architecture> Clone for siginfo_sigsys<Arch> { fn clone(&self) -> Self { siginfo_sigsys { _call_addr: self._call_addr, _syscall: self._syscall, _arch: self._arch, } } } impl<Arch: Architecture> Copy for siginfo_sigsys<Arch> {} #[repr(C)] pub union siginfo_sifields<Arch: Architecture> { pub padding: Arch::SIGINFO_PADDING_ARR, pub _kill: siginfo_kill, pub _timer: siginfo_timer<Arch>, pub _rt: siginfo_rt<Arch>, pub _sigchld: siginfo_sigchld<Arch>, pub _sigfault: siginfo_sigfault<Arch>, pub _sigpoll: siginfo_sigpoll<Arch>, pub _sigsys: siginfo_sigsys<Arch>, } impl<Arch: Architecture> Clone for siginfo_sifields<Arch> { fn clone(&self) -> Self { unsafe { siginfo_sifields { padding: self.padding, } } } } impl<Arch: Architecture> Copy for siginfo_sifields<Arch> {} #[repr(C)] pub struct siginfo_t<Arch: Architecture> { pub si_signo: i32, pub si_errno: i32, pub si_code: i32, pub _sifields: siginfo_sifields<Arch>, } impl<Arch: Architecture> Clone for siginfo_t<Arch> { fn clone(&self) -> Self { siginfo_t { si_signo: self.si_signo, si_errno: self.si_errno, si_code: self.si_code, 
_sifields: self._sifields, } } } impl<Arch: Architecture> Copy for siginfo_t<Arch> {} impl<Arch: Architecture> Default for siginfo_t<Arch> { fn default() -> Self { unsafe { mem::zeroed() } } } assert_eq_size!(kernel::siginfo_t, siginfo_t<NativeArch>); assert_eq_align!(kernel::siginfo_t, siginfo_t<NativeArch>); // Not necessary as these are also generated by bindgen but just to be safe assert_eq_size!(signal::siginfo_t, siginfo_t<NativeArch>); assert_eq_align!(signal::siginfo_t, siginfo_t<NativeArch>); #[repr(C)] #[derive(Copy, Default)] pub struct iovec<Arch: Architecture> { pub iov_base: Ptr<Arch::unsigned_word, u8>, pub iov_len: Arch::size_t, } impl<Arch: Architecture> Clone for iovec<Arch> { fn clone(&self) -> Self { Self { iov_base: self.iov_base, iov_len: self.iov_len, } } } assert_eq_size!(kernel::iovec, iovec<NativeArch>); assert_eq_align!(kernel::iovec, iovec<NativeArch>); #[repr(C)] #[derive(Copy, Default)] pub struct msghdr<Arch: Architecture> { pub msg_name: Ptr<Arch::unsigned_word, u8>, pub msg_namelen: common::socklen_t, pub _padding: Arch::STD_PAD_ARR, pub msg_iov: Ptr<Arch::unsigned_word, iovec<Arch>>, pub msg_iovlen: Arch::size_t, pub msg_control: Ptr<Arch::unsigned_word, u8>, pub msg_controllen: Arch::size_t, pub msg_flags: i32, } impl<Arch: Architecture> Clone for msghdr<Arch> { fn clone(&self) -> Self { Self { msg_name: self.msg_name, msg_namelen: self.msg_namelen, _padding: self._padding, msg_iov: self.msg_iov, msg_iovlen: self.msg_iovlen, msg_control: self.msg_control, msg_controllen: self.msg_controllen, msg_flags: self.msg_flags, } } } assert_eq_size!(kernel::msghdr, msghdr<NativeArch>); assert_eq_align!(kernel::msghdr, msghdr<NativeArch>); #[repr(C)] #[derive(Copy, Default)] pub struct mmsghdr<Arch: Architecture> { pub msg_hdr: msghdr<Arch>, pub msg_len: u32, } impl<Arch: Architecture> Clone for mmsghdr<Arch> { fn clone(&self) -> Self { Self { msg_hdr: self.msg_hdr.clone(), msg_len: self.msg_len, } } } assert_eq_size!(kernel::mmsghdr, 
mmsghdr<NativeArch>); assert_eq_align!(kernel::mmsghdr, mmsghdr<NativeArch>); #[repr(C)] #[derive(Copy, Clone, Default)] pub struct cmsghdr<Arch: Architecture> { pub cmsg_len: Arch::size_t, pub cmsg_level: i32, pub cmsg_type: i32, } assert_eq_size!(kernel::cmsghdr, cmsghdr<NativeArch>); assert_eq_align!(kernel::cmsghdr, cmsghdr<NativeArch>); pub const fn cmsg_data_offset<Arch: Architecture>() -> usize { cmsg_align::<Arch>(size_of::<cmsghdr<Arch>>()) } pub const fn cmsg_align<Arch: Architecture>(len: usize) -> usize { (len + size_of::<Arch::size_t>() - 1) & !(size_of::<Arch::size_t>() - 1) } pub const fn cmsg_space<Arch: Architecture>(len: usize) -> usize { cmsg_align::<Arch>(size_of::<cmsghdr<Arch>>()) + cmsg_align::<Arch>(len) } pub const fn cmsg_len<Arch: Architecture>(len: usize) -> usize { cmsg_align::<Arch>(size_of::<cmsghdr<Arch>>()) + len } #[repr(C)] #[derive(Copy, Clone, Default)] pub struct pselect6_arg6<Arch: Architecture> { pub ss: Ptr<Arch::unsigned_word, Arch::kernel_sigset_t>, pub ss_len: Arch::size_t, } #[repr(C)] #[derive(Copy, Clone, Default)] pub struct select_args<Arch: Architecture> { pub n_fds: i32, pub __pad: Arch::STD_PAD_ARR, pub read_fds: Ptr<Arch::unsigned_word, Arch::fd_set>, pub write_fds: Ptr<Arch::unsigned_word, Arch::fd_set>, pub except_fds: Ptr<Arch::unsigned_word, Arch::fd_set>, pub timeout: Ptr<Arch::unsigned_word, Arch::timeval>, } #[repr(C)] #[derive(Copy, Clone, Default)] pub struct __user_cap_header_struct { pub version: u32, pub pid: i32, } assert_eq_size!(kernel::__user_cap_header_struct, __user_cap_header_struct); assert_eq_align!(kernel::__user_cap_header_struct, __user_cap_header_struct); #[repr(C)] #[derive(Copy, Clone, Default)] pub struct __user_cap_data_struct { pub effective: u32, pub permitted: u32, pub inheritable: u32, } assert_eq_size!(kernel::__user_cap_data_struct, __user_cap_data_struct); assert_eq_align!(kernel::__user_cap_data_struct, __user_cap_data_struct); #[repr(C)] #[derive(Copy, Clone, Default)] pub 
struct xt_counters { pub pcnt: u64, pub bcnt: u64, } assert_eq_size!(kernel::xt_counters, xt_counters); assert_eq_align!(kernel::xt_counters, xt_counters); #[repr(C)] #[derive(Copy, Clone, Default)] pub struct setsockopt_args<Arch: Architecture> { pub sockfd: Arch::signed_long, pub level: Arch::signed_long, pub optname: Arch::signed_long, pub optval: Ptr<Arch::unsigned_word, u8>, pub optlen: Arch::signed_long, } #[repr(C)] #[derive(Copy, Clone, Default)] pub struct ipt_replace<Arch: Architecture> { pub name: [u8; 32], pub valid_hook: u32, pub num_entries: u32, pub size: u32, pub hook_entry: [u32; 5], pub underflow: [u32; 5], pub num_counters: u32, pub counters: Ptr<Arch::unsigned_word, xt_counters>, // Plus hangoff here } // @TODO: "The corresponding header requires -fpermissive, which we don't pass. Skip this check" // assert_eq_size!(kernel::ipt_replace, ipt_replace<NativeArch>); // assert_eq_align!(kernel::ipt_replace, ipt_replace<NativeArch>); #[repr(C)] #[derive(Copy, Clone, Default)] pub struct __sysctl_args<Arch: Architecture> { pub name: Ptr<Arch::unsigned_word, i32>, pub nlen: i32, pub __pad: Arch::STD_PAD_ARR, pub oldval: Ptr<Arch::unsigned_word, u8>, pub oldlenp: Ptr<Arch::unsigned_word, Arch::size_t>, pub newval: Ptr<Arch::unsigned_word, u8>, pub newlen: Ptr<Arch::unsigned_word, Arch::size_t>, pub __rd_unused: [Arch::unsigned_long; 4], } assert_eq_size!(kernel::__sysctl_args, __sysctl_args<NativeArch>); assert_eq_align!(kernel::__sysctl_args, __sysctl_args<NativeArch>); #[repr(C)] pub struct sockaddr<Arch: Architecture> { pub sa_family: Arch::unsigned_short, pub sa_data: [u8; 14], } assert_eq_size!(kernel::sockaddr, sockaddr<NativeArch>); assert_eq_align!(kernel::sockaddr, sockaddr<NativeArch>); impl<Arch: Architecture> Clone for sockaddr<Arch> { fn clone(&self) -> Self { Self { sa_family: self.sa_family, sa_data: self.sa_data, } } } impl<Arch: Architecture> Copy for sockaddr<Arch> {} #[repr(C)] pub struct ifmap<Arch: Architecture> { pub mem_start: 
Arch::unsigned_long,
    pub mem_end: Arch::unsigned_long,
    pub base_addr: Arch::unsigned_short,
    pub irq: u8,
    pub dma: u8,
    pub port: u8,
}

assert_eq_size!(kernel::ifmap, ifmap<NativeArch>);
assert_eq_align!(kernel::ifmap, ifmap<NativeArch>);

/// Had to manually derive Copy and Clone
/// Would not work otherwise
impl<Arch: Architecture> Clone for ifmap<Arch> {
    fn clone(&self) -> Self {
        Self {
            mem_start: self.mem_start,
            mem_end: self.mem_end,
            base_addr: self.base_addr,
            irq: self.irq,
            dma: self.dma,
            // Fixed: previously copied `self.dma` into `port`, which corrupted
            // the `port` field of every clone.
            port: self.port,
        }
    }
}

impl<Arch: Architecture> Copy for ifmap<Arch> {}

#[repr(C)]
pub union ifs_ifsu<Arch: Architecture> {
    pub raw_hdlc: Ptr<Arch::unsigned_word, u8>,
    pub cisco: Ptr<Arch::unsigned_word, u8>,
    pub fr: Ptr<Arch::unsigned_word, u8>,
    pub fr_pvc: Ptr<Arch::unsigned_word, u8>,
    pub fr_pvc_info: Ptr<Arch::unsigned_word, u8>,
    pub sync: Ptr<Arch::unsigned_word, u8>,
    pub tel: Ptr<Arch::unsigned_word, u8>,
}

impl<Arch: Architecture> Clone for ifs_ifsu<Arch> {
    fn clone(&self) -> Self {
        // Every variant of this union is the same pointer type, so copying
        // any single variant (`tel`) reproduces the whole union.
        Self {
            tel: unsafe { self.tel },
        }
    }
}

impl<Arch: Architecture> Copy for ifs_ifsu<Arch> {}

#[repr(C)]
pub struct if_settings<Arch: Architecture> {
    pub type_: u32,
    pub size: u32,
    pub ifs_ifsu: ifs_ifsu<Arch>,
}

assert_eq_size!(kernel::if_settings, if_settings<NativeArch>);
assert_eq_align!(kernel::if_settings, if_settings<NativeArch>);

impl<Arch: Architecture> Clone for if_settings<Arch> {
    fn clone(&self) -> Self {
        Self {
            type_: self.type_,
            size: self.size,
            ifs_ifsu: self.ifs_ifsu,
        }
    }
}

impl<Arch: Architecture> Copy for if_settings<Arch> {}

#[repr(C)]
pub union ifr_ifru<Arch: Architecture> {
    pub ifru_addr: sockaddr<Arch>,
    pub ifru_dstaddr: sockaddr<Arch>,
    pub ifru_broadaddr: sockaddr<Arch>,
    pub ifru_netmask: sockaddr<Arch>,
    pub ifru_hwaddr: sockaddr<Arch>,
    pub ifru_flags: Arch::signed_short,
    pub ifru_ivalue: i32,
    pub ifru_mtu: i32,
    pub ifru_map: ifmap<Arch>,
    pub ifru_slave: [u8; 16],
    pub ifru_newname: [u8; 16],
    pub ifru_data: Ptr<Arch::unsigned_word, u8>,
    pub ifru_settings: if_settings<Arch>,
}

impl<Arch: Architecture> Clone for ifr_ifru<Arch> { fn clone(&self)
-> Self { Self { ifru_slave: unsafe { self.ifru_slave }, } } } impl<Arch: Architecture> Copy for ifr_ifru<Arch> {} #[repr(C)] #[derive(Copy, Clone)] pub union ifr_ifrn { pub ifrn_name: [u8; 16], } #[repr(C)] #[derive(Copy, Clone)] pub struct ifreq<Arch: Architecture> { pub ifr_ifrn: ifr_ifrn, pub ifr_ifru: ifr_ifru<Arch>, } assert_eq_size!(kernel::ifreq, ifreq<NativeArch>); assert_eq_align!(kernel::ifreq, ifreq<NativeArch>); #[repr(C)] pub union ifc_ifcu<Arch: Architecture> { pub ifcu_buf: Ptr<Arch::unsigned_word, u8>, pub ifcu_req: Ptr<Arch::unsigned_word, ifreq<Arch>>, } impl<Arch: Architecture> Clone for ifc_ifcu<Arch> { fn clone(&self) -> Self { Self { ifcu_buf: unsafe { self.ifcu_buf }, } } } impl<Arch: Architecture> Copy for ifc_ifcu<Arch> {} #[repr(C)] #[derive(Copy, Clone)] pub struct ifconf<Arch: Architecture> { pub ifc_len: i32, pub __pad: Arch::STD_PAD_ARR, pub ifc_ifcu: ifc_ifcu<Arch>, } assert_eq_size!(kernel::ifconf, ifconf<NativeArch>); assert_eq_align!(kernel::ifconf, ifconf<NativeArch>); #[repr(C)] #[derive(Copy, Clone, Default)] pub struct sg_io_hdr<Arch: Architecture> { pub interface_id: i32, pub dxfer_direction: i32, pub cmd_len: u8, pub mx_sb_len: u8, pub iovec_count: Arch::unsigned_short, pub dxfer_len: u32, pub dxferp: Ptr<Arch::unsigned_word, u8>, pub cmdp: Ptr<Arch::unsigned_word, u8>, pub sbp: Ptr<Arch::unsigned_word, u8>, pub timeout: u32, pub flags: u32, pub pack_id: i32, pub usr_ptr: Ptr<Arch::unsigned_word, u8>, pub status: u8, pub masked_status: u8, pub msg_status: u8, pub sb_len_wr: u8, pub host_status: Arch::unsigned_short, pub driver_status: Arch::unsigned_short, pub resid: i32, pub duration: u32, pub info: u32, } assert_eq_size!(kernel::sg_io_hdr, sg_io_hdr<NativeArch>); assert_eq_align!(kernel::sg_io_hdr, sg_io_hdr<NativeArch>); #[repr(C)] #[derive(Copy, Clone)] pub struct iw_param { pub value: i32, pub fixed: u8, pub disabled: u8, pub flags: u16, } assert_eq_size!(kernel::iw_param, iw_param); assert_eq_align!(kernel::iw_param, 
iw_param); #[repr(C)] pub struct iw_point<Arch: Architecture> { pub pointer: Ptr<Arch::unsigned_word, u8>, pub length: u16, pub flags: u16, } assert_eq_size!(kernel::iw_point, iw_point<NativeArch>); assert_eq_align!(kernel::iw_point, iw_point<NativeArch>); impl<Arch: Architecture> Clone for iw_point<Arch> { fn clone(&self) -> Self { Self { pointer: self.pointer, length: self.length, flags: self.flags, } } } impl<Arch: Architecture> Copy for iw_point<Arch> {} #[repr(C)] #[derive(Copy, Clone)] pub struct iw_freq { pub m: i32, pub e: i16, pub i: u8, pub flags: u8, } assert_eq_size!(kernel::iw_freq, iw_freq); assert_eq_align!(kernel::iw_freq, iw_freq); #[repr(C)] #[derive(Copy, Clone)] pub struct iw_quality { pub qual: u8, pub level: u8, pub noise: u8, pub updated: u8, } assert_eq_size!(kernel::iw_quality, iw_quality); assert_eq_align!(kernel::iw_quality, iw_quality); #[repr(C)] pub union iwreq_data<Arch: Architecture> { pub name: [u8; 16], pub essid: iw_point<Arch>, pub nwid: iw_param, pub freq: iw_freq, pub sens: iw_param, pub bitrate: iw_param, pub txpower: iw_param, pub rts: iw_param, pub frag: iw_param, pub mode: u32, pub retry: iw_param, pub encoding: iw_point<Arch>, pub power: iw_param, pub qual: iw_quality, pub ap_addr: sockaddr<Arch>, pub addr: sockaddr<Arch>, pub param: iw_param, pub data: iw_point<Arch>, } assert_eq_size!(kernel::iwreq_data, iwreq_data<NativeArch>); assert_eq_align!(kernel::iwreq_data, iwreq_data<NativeArch>); impl<Arch: Architecture> Clone for iwreq_data<Arch> { fn clone(&self) -> Self { Self { name: unsafe { self.name }, } } } impl<Arch: Architecture> Copy for iwreq_data<Arch> {} #[repr(C)] #[derive(Copy, Clone)] pub struct iwreq<Arch: Architecture> { pub ifr_ifrn: ifr_ifrn, pub u: iwreq_data<Arch>, } assert_eq_size!(kernel::iwreq, iwreq<NativeArch>); assert_eq_align!(kernel::iwreq, iwreq<NativeArch>); #[repr(C)] #[derive(Copy, Clone)] pub struct linux_dirent<Arch: Architecture> { pub d_ino: Arch::ino_t, pub d_off: Arch::off_t, pub 
d_reclen: u16, /// Variable length pub d_name: [u8; 1], // Other stuff like d_type and pad } #[repr(C)] #[derive(Copy, Clone)] pub struct linux_dirent64 { pub d_ino: arch::ino64_t, pub d_off: arch::off64_t, pub d_reclen: u16, pub d_type: u8, /// Variable length pub d_name: [u8; 1], } #[repr(C)] pub struct connect_args<Arch: Architecture> { pub sockfd: Arch::signed_long, pub addr: Ptr<Arch::unsigned_word, u8>, pub addrlen: common::socklen_t, } #[repr(C)] pub struct getsockopt_args<Arch: Architecture> { pub sockfd: i32, pub level: i32, pub optname: i32, pub __pad: Arch::STD_PAD_ARR, pub optval: Ptr<Arch::unsigned_word, u8>, pub optlen: Ptr<Arch::unsigned_word, common::socklen_t>, } #[repr(C)] pub struct socketpair_args<Arch: Architecture> { pub domain: i32, pub type_: i32, pub protocol: i32, pub __pad: Arch::STD_PAD_ARR, pub sv: Ptr<Arch::unsigned_word, i32>, // int sv[2] } #[repr(C)] pub struct getsockname_args<Arch: Architecture> { pub sockfd: i32, pub __pad: Arch::STD_PAD_ARR, pub addr: Ptr<Arch::unsigned_word, sockaddr<Arch>>, pub addrlen: Ptr<Arch::unsigned_word, common::socklen_t>, } #[repr(C)] pub struct recv_args<Arch: Architecture> { pub sockfd: i32, pub __pad: Arch::STD_PAD_ARR, pub buf: Ptr<Arch::unsigned_word, u8>, pub len: Arch::size_t, pub flags: i32, } #[repr(C)] pub struct recvfrom_args<Arch: Architecture> { pub sockfd: Arch::signed_long, pub buf: Ptr<Arch::unsigned_word, u8>, pub len: Arch::size_t, pub flags: Arch::signed_long, pub src_addr: Ptr<Arch::unsigned_word, sockaddr<Arch>>, pub addrlen: Ptr<Arch::unsigned_word, common::socklen_t>, } #[repr(C)] pub struct accept_args<Arch: Architecture> { pub sockfd: i32, pub __pad: Arch::STD_PAD_ARR, pub addr: Ptr<Arch::unsigned_word, sockaddr<Arch>>, pub addrlen: Ptr<Arch::unsigned_word, common::socklen_t>, } #[repr(C)] pub struct accept4_args<Arch: Architecture> { pub sockfd: i32, pub __pad: Arch::STD_PAD_ARR, pub addr: Ptr<Arch::unsigned_word, sockaddr<Arch>>, pub addrlen: Ptr<Arch::unsigned_word, 
common::socklen_t>, pub flags: Arch::signed_long, } #[repr(C)] pub struct sendmsg_args<Arch: Architecture> { pub fd: i32, pub __pad: Arch::STD_PAD_ARR, pub msg: Ptr<Arch::unsigned_word, msghdr<Arch>>, pub flags: i32, } #[repr(C)] pub struct sendmmsg_args<Arch: Architecture> { pub sockfd: i32, pub __pad: Arch::STD_PAD_ARR, pub msgvec: Ptr<Arch::unsigned_word, mmsghdr<Arch>>, pub vlen: u32, pub flags: u32, } #[repr(C)] pub struct recvmsg_args<Arch: Architecture> { pub fd: i32, pub __pad: Arch::STD_PAD_ARR, pub msg: Ptr<Arch::unsigned_word, msghdr<Arch>>, pub flags: i32, } #[repr(C)] pub struct recvmmsg_args<Arch: Architecture> { pub sockfd: i32, pub __pad: Arch::STD_PAD_ARR, pub msgvec: Ptr<Arch::unsigned_word, mmsghdr<Arch>>, pub vlen: u32, pub flags: u32, pub timeout: Ptr<Arch::unsigned_word, Arch::timespec>, } /// Some ipc calls require 7 params, so two of them are stashed into /// one of these structs and a pointer to this is passed instead. pub struct ipc_kludge_args<Arch: Architecture> { pub msgbuf: Ptr<Arch::unsigned_word, u8>, pub msgtype: Arch::signed_long, } #[repr(C)] pub struct usbdevfs_ioctl<Arch: Architecture> { pub ifno: i32, pub ioctl_code: i32, pub data: Ptr<Arch::unsigned_word, u8>, } assert_eq_size!(kernel::usbdevfs_ioctl, usbdevfs_ioctl<NativeArch>); assert_eq_align!(kernel::usbdevfs_ioctl, usbdevfs_ioctl<NativeArch>); #[repr(C)] #[allow(non_snake_case)] pub struct usbdevfs_ctrltransfer<Arch: Architecture> { pub bRequestType: u8, pub bRequest: u8, pub wValue: u16, pub wIndex: u16, pub wLength: u16, pub timeout: u32, pub data: Ptr<Arch::unsigned_word, u8>, } assert_eq_size!( kernel::usbdevfs_ctrltransfer, usbdevfs_ctrltransfer<NativeArch> ); assert_eq_align!( kernel::usbdevfs_ctrltransfer, usbdevfs_ctrltransfer<NativeArch> ); #[repr(C)] pub struct v4l2_timecode { pub type_: u32, pub flags: u32, pub frames: u8, pub seconds: u8, pub minutes: u8, pub hours: u8, pub userbits: [u8; 4], } assert_eq_size!(kernel::v4l2_timecode, v4l2_timecode); 
assert_eq_align!(kernel::v4l2_timecode, v4l2_timecode); #[repr(C)] pub union v4l2_m<Arch: Architecture> { pub offset: u32, pub userptr: Arch::unsigned_long, pub planes: Ptr<Arch::unsigned_word, u8>, pub fd: i32, } #[repr(C)] pub struct v4l2_buffer<Arch: Architecture> { pub index: u32, pub type_: u32, pub bytesused: u32, pub flags: u32, pub field: u32, pub __pad: Arch::STD_PAD_ARR, pub timestamp: Arch::timeval, pub timecode: v4l2_timecode, pub sequence: u32, pub memory: u32, pub m: v4l2_m<Arch>, pub length: u32, pub reserved2: u32, pub reserved: u32, } assert_eq_size!(kernel::v4l2_buffer, v4l2_buffer<NativeArch>); assert_eq_align!(kernel::v4l2_buffer, v4l2_buffer<NativeArch>); #[repr(C)] pub struct usbdevfs_urb<Arch: Architecture> { pub type_: u8, pub endpoint: u8, pub status: i32, pub flags: u32, pub buffer: Ptr<Arch::unsigned_word, u8>, pub buffer_length: i32, pub actual_length: i32, pub start_frame: i32, pub usbdevfs_urb_u: usbdevfs_urb_u, pub error_count: i32, pub signr: u32, pub usercontext: Ptr<Arch::unsigned_word, u8>, pub iso_frame_desc: [usbdevfs_iso_packet_desc; 0], } assert_eq_size!(kernel::usbdevfs_urb, usbdevfs_urb<NativeArch>); assert_eq_align!(kernel::usbdevfs_urb, usbdevfs_urb<NativeArch>); #[repr(C)] pub union usbdevfs_urb_u { pub number_of_packets: i32, pub stream_id: u32, } #[repr(C)] #[derive(Clone)] pub struct usbdevfs_iso_packet_desc { pub length: u32, pub actual_length: u32, pub status: u32, } assert_eq_size!(kernel::usbdevfs_iso_packet_desc, usbdevfs_iso_packet_desc); assert_eq_align!(kernel::usbdevfs_iso_packet_desc, usbdevfs_iso_packet_desc); #[repr(C)] #[derive(Copy, Clone, Default)] pub struct bpf_attr_u1 { pub map_type: u32, pub key_size: u32, pub value_size: u32, pub max_entries: u32, pub map_flags: u32, pub inner_map_fd: u32, pub numa_node: u32, pub map_name: [u8; 16], pub map_ifindex: u32, pub btf_fd: u32, pub btf_key_type_id: u32, pub btf_value_type_id: u32, } #[repr(C)] #[derive(Copy, Clone)] pub union bpf_attr_u2_u1 { pub value: 
common::ptr64<u8>, pub next_key: common::ptr64<u8>, } #[repr(C)] #[derive(Copy, Clone)] pub struct bpf_attr_u2 { pub map_fd: u32, pub key: common::ptr64<u8>, pub bpf_attr_u2_u1: bpf_attr_u2_u1, pub flags: u64, } #[repr(C, align(8))] #[derive(Copy, Clone, Default)] pub struct aligned_u64 { pub __val: u64, } #[repr(C)] #[derive(Copy, Clone, Default)] pub struct bpf_attr_u3 { pub prog_type: u32, pub insn_cnt: u32, pub insns: common::ptr64<u8>, pub license: common::ptr64<u8>, pub log_level: u32, pub log_size: u32, pub log_buf: common::ptr64<char>, pub kern_version: u32, pub prog_flags: u32, pub prog_name: [u8; 16], pub prog_ifindex: u32, pub expected_attach_type: u32, pub prog_btf_fd: u32, pub func_info_rec_size: u32, pub func_info: aligned_u64, pub func_info_cnt: u32, pub line_info_rec_size: u32, pub line_info: aligned_u64, pub line_info_cnt: u32, } #[repr(C)] #[derive(Copy, Clone)] pub union bpf_attr { pub bpf_attr_u1: bpf_attr_u1, pub bpf_attr_u2: bpf_attr_u2, pub bpf_attr_u3: bpf_attr_u3, }
{-# LANGUAGE DataKinds, DefaultSignatures, DeriveGeneric, FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses, PolyKinds, TypeFamilies, TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module Main where

import Data.Type.Natural ((:*), (:+), Nat (..), One)
import Data.Type.Ordinal
import GHC.Generics

-- | An isomorphism between two Enum types, defaulting both directions
-- to a round-trip through the Int enumeration index.
class (Enum a, Enum b) => Iso a b where
  toIso :: a -> b
  toIso = toEnum . fromEnum
  fromIso :: b -> a
  fromIso = toEnum . fromEnum

data Xpto = Abc | Def | Ghi deriving (Read, Show, Eq, Ord, Enum, Generic)

-- | Type-level size of a generic representation, built by structural
-- recursion over GHC.Generics:
--   * datatype/constructor metadata (D, C) is transparent;
--   * a selector (record field, M1 S) contributes Z -- NOTE(review):
--     this makes any constructor with fields count as zero, so Size is
--     only meaningful for nullary-constructor (enum-like) types; confirm
--     that is the intent;
--   * V1 (empty type) is Z, U1 (nullary constructor) is One;
--   * sums add, products multiply.
type family SizeG (a :: k) :: Nat
type instance SizeG (M1 D y a) = SizeG a
type instance SizeG (M1 C y a) = SizeG a
type instance SizeG (M1 S y a) = Z
type instance SizeG V1 = Z
type instance SizeG U1 = One
type instance SizeG (a :+: b) = SizeG a :+ SizeG b
type instance SizeG (a :*: b) = SizeG a :* SizeG b

-- | Size of a type = size of its generic representation.
type Size a = SizeG (Rep a)

data TTTT = T | TT | TTT deriving (Read, Show, Eq, Ord, Enum)

-- Xpto is isomorphic to the ordinals below its (type-computed) size.
instance (b ~ Size Xpto) => Iso Xpto (Ordinal b)
-- instance Iso Xpto TTTT
package kr.feliz.tutorial_collection.lemonfox.widget.net

import kr.feliz.tutorial_collection.BuildConfig
import okhttp3.OkHttpClient
import retrofit2.Retrofit
import retrofit2.converter.gson.GsonConverterFactory
import retrofit2.converter.scalars.ScalarsConverterFactory

/**
 * Application-wide holder for the Retrofit instance used by the chart API.
 * Built lazily on first access of the object, exactly once.
 */
object RetrofitClient {
    /**
     * Retrofit client pointed at the chart API server.
     * Converter order matters: scalars are tried before Gson, as before.
     */
    val chart: Retrofit = Retrofit.Builder()
        .baseUrl(BuildConfig.CHART_API_SERVER_BASE_URL)
        .addConverterFactory(ScalarsConverterFactory.create())
        .addConverterFactory(GsonConverterFactory.create())
        .client(OkHttpClient())
        .build()
}
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} module Main where import Control.Monad import Control.Monad.Primitive import Data.ByteString (ByteString) import qualified Data.ByteString.Char8 as BSC import Data.Maybe import Data.Time.Clock import Data.Time.Format import Options.Applicative import Pipes import qualified Pipes.ByteString as P (toHandle) import Pipes.Csv import qualified Pipes.Prelude as P hiding (toHandle) import System.IO import System.Locale import System.Random.MWC import System.Random.MWC.Distributions import Conversion import Parsing import Types import Util -- | We roughly model the balloon system stepwise -- TimeStamps -> Some constant time step -- Location -> Basic brownian-like motion with a tendency -- Temperature -> Minor fluctuations each step -- Observatory -> Each step send to observatories within a set range -- Data Quality -> Randomly have it such that an observatory logs poorly -- We spread the values using a normal distribution -- All values must be non-negative bar the location drift values and time -- steps. 
Negative time steps can be used to simulate out-of-order data data GenSettings = GenSettings { genStartTime :: TimeStamp -- Initial TimeStamp for generated data , genTimeStep :: Double -- Average timestep in minutes , genLocStepX :: Double -- Average x-coord distance to travel per timestep , genLocStepY :: Double -- Average y-coord distance to travel per timestep , genTempStep :: Double -- Variance in temperature for each timestep , genBalloonRange :: Double -- Range of balloon broadcast , genFailRate :: Double -- Failure rate at which to generate invalid data , genSystemSize :: Int -- Size of the system , genNumLines :: Int -- Number of lines to generate , genFile :: FilePath -- File to write output to } parseGenSettings :: Parser GenSettings parseGenSettings = GenSettings <$> option auto ( long "start-time" <> short 's' <> value defaultStartTime <> metavar "START_TIME" <> help ("Time to start generating data from in format: " <> tsFormat)) <*> option auto ( long "time-step" <> short 't' <> value defaultTimeStep <> metavar "TIME_STEP" <> help "Average time step in minutes") <*> option auto ( long "x-drift" <> short 'x' <> value defaultLocXStep <> metavar "X_DRIFT" <> help "Average x-coord drift per time step in metres") <*> option auto ( long "y-drift" <> short 'y' <> value defaultLocYStep <> metavar "Y_DRIFT" <> help "Average y-coord drift per time step in metres") <*> (nonNegative "temp-variance" <$> option auto ( long "temp-variance" <> short 'p' <> value defaultTempStep <> metavar "TEMP_VARIANCE" <> help "Variance in temperature in kelvin")) <*> pure defaultBalloonRange <*> (nonNegative "fail-rate" <$> option auto ( long "fail-rate" <> short 'r' <> value defaultFailRate <> metavar "FAIL_RATE" <> help "Rate at which observatories produce rubbish output [0,1)")) <*> pure defaultSystemSize <*> option auto ( long "num-lines" <> short 'n' <> value defaultNumLines <> metavar "NUM_LINES" <> help "Number of lines to output") <*> strOption ( long "output-file" <> short 
'f' <> value defaultOutputFile <> metavar "OUTPUT_FILE" <> help "File to output generated data to") nonNegative :: String -> Double -> Double nonNegative name x = if x >= 0 then x else error (name ++ " must be non-negative") defaultStartTime :: TimeStamp defaultTimeStep, defaultLocXStep, defaultLocYStep, defaultTempStep, defaultBalloonRange, defaultFailRate :: Double defaultSystemSize, defaultNumLines :: Int defaultOutputFile :: String defaultStartTime = TimeStamp $ fromJust $ parseTime defaultTimeLocale tsFormat "2014-06-08T10:30" defaultTimeStep = 15 defaultLocXStep = 1200 defaultLocYStep = -1800 defaultTempStep = 0.1 defaultBalloonRange = 30000 defaultFailRate = 0.08 defaultSystemSize = 100000 defaultNumLines = 100000 defaultOutputFile = "gen-weather-sample.csv" data ObservatoryOutput = Valid LogLine | Invalid ByteString instance ToRecord ObservatoryOutput where toRecord (Valid ll) = toRecord ll toRecord (Invalid x) = toRecord [x] -- We use Metres and Kelvin for the System internally data System = System { systemTime :: TimeStamp , balloonLoc :: Location , balloonRange :: Double , systemTemp :: Temperature , systemObss :: [(Observatory, Location)] , systemSize :: Int } type Mutator x = Gen (PrimState IO) -> x -> IO x data Mutators = Mutators { mutTime :: Mutator TimeStamp , mutLoc :: Mutator Location , mutTemp :: Mutator Temperature , mutLine :: Mutator ObservatoryOutput } observatoryLocs :: [(Observatory, Location)] observatoryLocs = [ (Observatory "AU", Location 10000 10000) , (Observatory "FR", Location 80000 40000) , (Observatory "US", Location 30000 50000) , (Observatory "NZ", Location 10000 30000) ] initialise :: GenSettings -> (Mutators, System) initialise GenSettings{..} = let mutTime g (TimeStamp x) = do v <- normal genTimeStep (genTimeStep / 4) g return $ TimeStamp $ addUTCTime (fromIntegral $ floor $ v * 60) x mutLoc g (Location x y) = do dx <- normal genLocStepX (abs genLocStepX / 4) g dy <- normal genLocStepY (abs genLocStepY / 4) g let x' = 
genSystemSize + x + round (dx :: Double) let y' = genSystemSize + y + round dy return $ Location (x' `mod` genSystemSize) (y' `mod` genSystemSize) mutTemp g x = do dx <- normal 0 genTempStep g return $ x + round dx mutLine g x = do c <- uniform g return $ if c < genFailRate then Invalid "th1s1s1nv4l1d" else x systemTime = genStartTime balloonLoc = Location (genSystemSize `div` 2) (genSystemSize `div` 2) balloonRange = genBalloonRange systemTemp = 300 systemObss = observatoryLocs systemSize = genSystemSize in (Mutators{..}, System{..}) stepSystem :: GenIO -> Mutators -> System -> IO System stepSystem g Mutators{..} System{..} = do newTime <- mutTime g systemTime newLoc <- mutLoc g balloonLoc newTemp <- mutTemp g systemTemp return $ System newTime newLoc balloonRange newTemp systemObss systemSize runSystem :: GenIO -> Mutators -> System -> Producer System IO () runSystem g m s = do yield s s' <- liftIO $ stepSystem g m s runSystem g m s' outputSystem :: GenIO -> Mutators -> Pipe System ByteString IO () outputSystem g Mutators{..} = forever $ do System{..} <- await let systemBounds = Location systemSize systemSize let inBounds x = distanceSquared (Just systemBounds) balloonLoc x < (balloonRange * balloonRange) let inRange = filter (inBounds . snd) systemObss let obsLine = LogLine systemTime balloonLoc systemTemp let rawLines = map (Valid . convertMetreKelvinToObservatory . obsLine . fst) inRange logLines <- liftIO $ mapM (mutLine g) rawLines -- We filter because Data.Csv is fickle when dealing with commas regardless of delimeter -- A custom encoder might be faster, but current speed seems more than adequate each logLines >-> encodeWith weatherEncodeOptions >-> P.map (BSC.filter (/= '"')) main :: IO () main = do settings <- execParser (info parseGenSettings fullDesc) g <- createSystemRandom let (m, s) = initialise settings withFile (genFile settings) WriteMode $ \h -> runEffect $ runSystem g m s >-> outputSystem g m >-> P.take (genNumLines settings) >-> P.toHandle h
//! Integration tests for the CCS811 driver's identification/status reads,
//! using mocked I2C and nWAKE-pin transactions.

use embedded_ccs811::{prelude::*, FirmwareMode as FwMode};
use embedded_hal_mock::{
    i2c::Transaction as I2cTrans,
    pin::{Mock as PinMock, State as PinState, Transaction as PinTrans},
};

mod common;
use crate::common::{destroy, new, BitFlags as BF, Register, DEV_ADDR};

#[test]
fn can_create_and_destroy() {
    // No pin or I2C traffic expected: construction/destruction alone must not
    // touch the bus.
    let nwake = PinMock::new(&[]);
    let sensor = new(&[], nwake);
    destroy(sensor);
}

/// Generates a test asserting that `$method` reads register `$reg`, returning
/// `$expected` when the device answers `$value`.
/// Each read wakes the device (nWAKE low) and releases it afterwards (high),
/// and is followed by a STATUS poll answered with 0.
macro_rules! get_test {
    ($name:ident, $method:ident, $reg:ident, $value:expr, $expected:expr) => {
        #[test]
        fn $name() {
            let nwake = PinMock::new(&[PinTrans::set(PinState::Low), PinTrans::set(PinState::High)]);
            let transactions = [
                I2cTrans::write_read(DEV_ADDR, vec![Register::$reg], $value),
                I2cTrans::write_read(DEV_ADDR, vec![Register::STATUS], vec![0]),
            ];
            let mut sensor = new(&transactions, nwake);
            assert_eq!($expected, sensor.$method().unwrap());
            destroy(sensor);
        }
    };
}

get_test!(can_get_hw_id, hardware_id, HW_ID, vec![0x81], 0x81);
// Version registers pack major/minor into one nibble pair; trivial/patch in
// the following byte where present.
get_test!(
    can_get_hw_version,
    hardware_version,
    HW_VERSION,
    vec![0x12],
    (1, 2)
);
get_test!(
    can_get_fw_boot_version,
    firmware_bootloader_version,
    FW_BOOT_VERSION,
    vec![0x12, 0x34],
    (1, 2, 0x34)
);
get_test!(
    can_get_fw_app_version,
    firmware_application_version,
    FW_APP_VERSION,
    vec![0x12, 0x34],
    (1, 2, 0x34)
);

// NOTE(review): `read_status_test!` is not defined in this file — presumably
// exported by the `common` module; verify it generates #[test] fns that read
// the STATUS register with the given bit pattern.
read_status_test!(can_get_invalid_app, has_valid_app, false, 0);
read_status_test!(can_get_valid_app, has_valid_app, true, BF::APP_VALID);
read_status_test!(fw_mode_boot, firmware_mode, FwMode::Boot, 0);
read_status_test!(fw_mode_app, firmware_mode, FwMode::Application, BF::FW_MODE);
package baishuai.github.io.smsforward.forward

import baishuai.github.io.smsforward.forward.feige.FeigeApi
import baishuai.github.io.smsforward.forward.slack.SlackApi
import dagger.Subcomponent
import javax.inject.Singleton

/**
 * Created by bai on 17-5-1.
 *
 * Dagger subcomponent exposing the SMS-forwarding API clients built from
 * [ForwardModule].
 */
@Singleton
@Subcomponent(modules = arrayOf(ForwardModule::class))
interface ForwardComponent {
    /** Client for forwarding via the Feige service. */
    fun feigeApi(): FeigeApi

    /** Client for forwarding via Slack. */
    fun slackApi(): SlackApi
}
// Jest integration tests for UserService, running against an in-memory /
// helper-managed test database (see db-helper).
import User from '../../src/models/Users'
import UserService from '../../src/services/user'
import * as dbHelper from '../db-helper'

// A syntactically valid ObjectId that is never inserted, used to exercise
// not-found / error paths.
const nonExistingUserId = '8ef5ad63b53b57dd876d6908'

// Shared fixture: persists one well-formed user and returns the saved doc.
async function createUser() {
  const user = new User({
    username: 'TravisKudix',
    firstname: 'Travis',
    lastname: 'Kudix',
    email: '[email protected]',
    password: 'Asd1',
  })
  return await UserService.create(user)
}

describe('user service', () => {
  // Fresh connection per test; cleared between tests, closed at the end.
  beforeEach(async () => {
    await dbHelper.connect()
  })

  afterEach(async () => {
    await dbHelper.clearDatabase()
  })

  afterAll(async () => {
    await dbHelper.closeDatabase()
  })

  it('should create a new user', async () => {
    // expect.assertions guards the rejection branch below: if create() of the
    // invalid user unexpectedly resolves, the assertion count fails the test.
    expect.assertions(7)
    const user = await createUser()
    expect(user).toHaveProperty('_id')
    expect(user).toHaveProperty('username')
    expect(user).toHaveProperty('firstname')
    expect(user).toHaveProperty('lastname')
    expect(user).toHaveProperty('password')
    expect(user).toHaveProperty('email')
    const wrongUser = new User({
      username: 'TravisKudix',
    })
    return UserService.create(wrongUser).catch((e) =>
      expect(e.message).toMatch(
        'User validation failed: email: Path `email` is required.'
      )
    )
  })

  it('should get a user with id', async () => {
    expect.assertions(3)
    const user = await createUser()
    const found = await UserService.findById(user._id)
    expect(found.username).toEqual(user.username)
    expect(found._id).toEqual(user._id)
    // Unknown id must reject rather than resolve to null.
    return await UserService.findById(nonExistingUserId).catch((e) =>
      expect(e.message).toMatch('ValidationError')
    )
  })

  it('should update user credentials', async () => {
    expect.assertions(6)
    const user = await createUser()
    const update = {
      username: 'TravisWolf',
      firstname: 'Wolf',
      lastname: 'Hart',
      email: '[email protected]',
    }
    const updated = await UserService.update(user._id, update)
    expect(updated).toHaveProperty('_id', user._id)
    expect(updated.username).toEqual('TravisWolf')
    expect(updated.firstname).toEqual('Wolf')
    expect(updated.lastname).toEqual('Hart')
    expect(updated.email).toEqual('[email protected]')
    return UserService.update(nonExistingUserId, update).catch((e) =>
      expect(e.message).toMatch(`User ${nonExistingUserId} not found`)
    )
  })

  it('should find user by email', async () => {
    const user = await createUser()
    const foundUser = await UserService.findByEmail(user.email)
    expect(foundUser?._id).toEqual(user._id)
  })

  it('should find or create google user', async () => {
    // findOrCreateUser is expected to create the account when the googleId is
    // unknown — TODO confirm the "find existing" branch is covered elsewhere.
    const userInfo = {
      username: 'TravisWolf',
      firstname: 'Wolf',
      lastname: 'Hart',
      email: '[email protected]',
      googleId: nonExistingUserId
    }
    const user = await UserService.findOrCreateUser(userInfo)
    expect(user.email).toMatch(userInfo.email)
  })
})
package com.seanshubin.kotlin.tryme.domain.parser

/**
 * A named tree of values that can render itself as indented text lines.
 */
interface Tree<T> {
    /** Label identifying this node. */
    val name: String

    /** Every value held in this tree, in order. */
    fun values(): List<T>

    /** Render the tree as text lines, indented [depth] levels deep. */
    fun toLines(depth: Int = 0): List<String>

    /** Prefix [s] with the indent unit repeated [depth] times. */
    fun indent(s: String, depth: Int): String = buildString {
        repeat(depth) { append(" ") }
        append(s)
    }
}
import { Component, OnInit } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import { AnnouncementService } from '@sunbird/core';
import { ResourceService, ToasterService, RouterNavigationService, ServerResponse } from '@sunbird/shared';
import * as _ from 'lodash';
import { IAnnouncementDetails } from '@sunbird/announcement';
import { IImpressionEventInput } from '@sunbird/telemetry';
/**
 * The details popup component checks for the announcement details object
 * present in announcement service. If object is undefined it calls API with
 * the announcement id and gets the details.
 */
@Component({
  selector: 'app-details-popup',
  templateUrl: './details-popup.component.html',
  styleUrls: ['./details-popup.component.css']
})
export class DetailsPopupComponent implements OnInit {
  /**
   * telemetryImpression
   */
  telemetryImpression: IImpressionEventInput;
  /**
   * Contains unique announcement id
   */
  announcementId: string;
  /**
   * Contains announcement details returned from API or object called from
   * announcement service
   */
  announcementDetails: IAnnouncementDetails;
  /**
   * This variable helps to show and hide page loader.
   * It is kept true by default as at first when we come
   * to a page the loader should be displayed before showing
   * any data
   */
  showLoader = true;
  /**
   * To make get announcement by id
   */
  private announcementService: AnnouncementService;
  /**
   * To send activatedRoute.snapshot to routerNavigationService
   */
  public activatedRoute: ActivatedRoute;
  /**
   * To call resource service which helps to use language constant
   */
  public resourceService: ResourceService;
  /**
   * To show toaster(error, success etc) after any API calls
   */
  private toasterService: ToasterService;
  /**
   * To navigate back to parent component
   */
  public routerNavigationService: RouterNavigationService;
  /**
   * Constructor to create injected service(s) object
   *
   * Default method of DetailsPopupComponent class
   *
   * @param {AnnouncementService} announcementService Reference of AnnouncementService
   * @param {ActivatedRoute} activatedRoute Reference of ActivatedRoute
   * @param {ResourceService} resourceService Reference of ResourceService
   * @param {ToasterService} toasterService Reference of ToasterService
   * @param {RouterNavigationService} routerNavigationService Reference of routerNavigationService
   */
  constructor(announcementService: AnnouncementService,
    activatedRoute: ActivatedRoute,
    resourceService: ResourceService,
    toasterService: ToasterService,
    routerNavigationService: RouterNavigationService) {
    this.announcementService = announcementService;
    this.activatedRoute = activatedRoute;
    this.resourceService = resourceService;
    this.toasterService = toasterService;
    this.routerNavigationService = routerNavigationService;
  }

  /**
   * This method checks in announcement service whether announcement details exist
   * for the given announcement id or not. If not then it calls the
   * get announcement by id API with a particular announcement
   * id and gets the details of the announcement
   *
   * On API failure it shows an error toast and navigates back to the parent URL.
   *
   * @param {string} announcementId announcement id
   */
  getDetails(announcementId: string): void {
    if (this.announcementService.announcementDetailsObject === undefined ||
      this.announcementService.announcementDetailsObject.id !== announcementId) {
      // Fix: use the announcementId parameter instead of this.announcementId so
      // the method is correct regardless of when the instance field is assigned.
      const option = { announcementId: announcementId };
      this.announcementService.getAnnouncementById(option).subscribe(
        (apiResponse: ServerResponse) => {
          // Some API versions nest the payload under result.announcement;
          // fall back to the raw result otherwise (single assignment instead
          // of assign-then-overwrite).
          this.announcementDetails = apiResponse.result.announcement ?
            apiResponse.result.announcement : apiResponse.result;
          this.showLoader = false;
        },
        err => {
          this.toasterService.error(this.resourceService.messages.emsg.m0005);
          this.showLoader = false;
          this.routerNavigationService.navigateToParentUrl(this.activatedRoute.snapshot);
        }
      );
    } else {
      // Details already cached on the service for this id: reuse them.
      this.showLoader = false;
      this.announcementDetails = this.announcementService.announcementDetailsObject;
    }
  }

  /**
   * This method calls the getDetails method to show details
   * of a particular announcement, and prepares the telemetry
   * impression event for this page.
   */
  ngOnInit() {
    this.activatedRoute.params.subscribe(params => {
      this.announcementId = params.announcementId;
    });
    this.getDetails(this.announcementId);
    this.telemetryImpression = {
      context: {
        env: this.activatedRoute.snapshot.data.telemetry.env
      },
      object: {
        id: this.announcementId,
        type: this.activatedRoute.snapshot.data.telemetry.object.type,
        ver: this.activatedRoute.snapshot.data.telemetry.object.ver
      },
      edata: {
        type: this.activatedRoute.snapshot.data.telemetry.type,
        pageid: this.activatedRoute.snapshot.data.telemetry.pageid,
        uri: '/announcement/outbox/' + this.announcementId,
      }
    };
  }
}
/*
Navicat MySQL Data Transfer

Source Server         : qqbaby
Source Server Version : 50553
Source Host           : localhost:3306
Source Database       : qqbaby_db

Target Server Type    : MYSQL
Target Server Version : 50553
File Encoding         : 65001

Date: 2017-09-08 20:54:42
*/

-- NOTE(review): this dump contains several misspelled identifiers that are now
-- part of the schema and cannot be renamed here without breaking callers:
-- `tb_babay` (baby), `pic_tiem` (pic_time), `user_passwod` (user_password).
-- Casing of the detail-image column also varies between tables
-- (`pic_URL` vs `pic_url`).

SET FOREIGN_KEY_CHECKS=0;

-- ----------------------------
-- Table structure for `tb_babay`
-- ----------------------------
DROP TABLE IF EXISTS `tb_babay`;
CREATE TABLE `tb_babay` (
  `id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
  `title` varchar(255) NOT NULL COMMENT '客片标题',
  `subTitle` varchar(255) DEFAULT NULL COMMENT '副标题【简介】',
  `srcImg` varchar(255) NOT NULL COMMENT '缩略图',
  `status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示 2置顶 3精华 4热门】',
  `sort` int(11) unsigned DEFAULT '0' COMMENT '自定义排序',
  `total` bigint(20) unsigned DEFAULT '0' COMMENT '点击量统计',
  `authorId` int(11) unsigned NOT NULL COMMENT '作者Id',
  `uploadTime` datetime NOT NULL COMMENT '上传时间',
  `parent` int(10) unsigned NOT NULL COMMENT '所属栏目【1婴儿 2宝宝 3儿童 4亲子 5活动】',
  PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=4 DEFAULT CHARSET=utf8;

-- ----------------------------
-- Table structure for `tb_babaypic`
-- ----------------------------
DROP TABLE IF EXISTS `tb_babaypic`;
CREATE TABLE `tb_babaypic` (
  `pic_id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
  `pic_URL` text NOT NULL COMMENT '详情图url数组',
  `pic_status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示】',
  `linkURL` varchar(255) NOT NULL DEFAULT '1' COMMENT '图片链接',
  `case_id` int(11) unsigned NOT NULL COMMENT '客片对照id',
  PRIMARY KEY (`pic_id`)
) ENGINE=MyISAM AUTO_INCREMENT=91 DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of tb_babaypic
-- ----------------------------

-- ----------------------------
-- Table structure for `tb_banner`
-- ----------------------------
DROP TABLE IF EXISTS `tb_banner`;
CREATE TABLE `tb_banner` (
  `id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
  `sort` int(11) unsigned DEFAULT '0' COMMENT '自定义排序',
  `status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示】',
  `parentId` int(11) unsigned NOT NULL COMMENT '所属广告位【1顶部 2最新 3婴儿 4宝宝 5儿童 6亲子 7 团队 8场馆】',
  `linkURL` varchar(255) NOT NULL DEFAULT '1' COMMENT '图片链接',
  `case_id` int(11) unsigned NOT NULL COMMENT '客片对照id',
  PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of tb_banner
-- ----------------------------

-- ----------------------------
-- Table structure for `tb_children`
-- ----------------------------
DROP TABLE IF EXISTS `tb_children`;
CREATE TABLE `tb_children` (
  `id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
  `title` varchar(255) NOT NULL COMMENT '客片标题',
  `subTitle` varchar(255) DEFAULT NULL COMMENT '副标题【简介】',
  `srcImg` varchar(255) NOT NULL COMMENT '缩略图',
  `status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示 2置顶 3精华 4热门】',
  `sort` int(11) unsigned DEFAULT '0' COMMENT '自定义排序',
  `total` bigint(20) unsigned DEFAULT '0' COMMENT '点击量统计',
  `authorId` int(11) unsigned NOT NULL COMMENT '作者Id',
  `uploadTime` datetime NOT NULL COMMENT '上传时间',
  `parent` int(10) unsigned NOT NULL COMMENT '所属栏目【1婴儿 2宝宝 3儿童 4亲子 5活动】',
  PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of tb_children
-- ----------------------------

-- ----------------------------
-- Table structure for `tb_childrenpic`
-- ----------------------------
DROP TABLE IF EXISTS `tb_childrenpic`;
CREATE TABLE `tb_childrenpic` (
  `pic_id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
  `pic_url` text NOT NULL COMMENT '详情图url数组',
  `pic_status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示】',
  `linkURL` varchar(255) NOT NULL DEFAULT '1' COMMENT '图片链接',
  `case_id` int(11) unsigned NOT NULL COMMENT '客片对照id',
  PRIMARY KEY (`pic_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of tb_childrenpic
-- ----------------------------

-- ----------------------------
-- Table structure for `tb_family`
-- ----------------------------
DROP TABLE IF EXISTS `tb_family`;
CREATE TABLE `tb_family` (
  `id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
  `title` varchar(255) NOT NULL COMMENT '客片标题',
  `subTitle` varchar(255) DEFAULT NULL COMMENT '副标题【简介】',
  `srcImg` varchar(255) NOT NULL COMMENT '缩略图',
  `status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示 2置顶 3精华 4热门】',
  `sort` int(11) unsigned DEFAULT '0' COMMENT '自定义排序',
  `total` bigint(20) unsigned DEFAULT '0' COMMENT '点击量统计',
  `authorId` int(11) unsigned NOT NULL COMMENT '作者Id',
  `uploadTime` datetime NOT NULL COMMENT '上传时间',
  `parent` int(10) unsigned NOT NULL COMMENT '所属栏目【1婴儿 2宝宝 3儿童 4亲子 5活动】',
  PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=30 DEFAULT CHARSET=utf8;

-- ----------------------------
-- Table structure for `tb_familypic`
-- ----------------------------
DROP TABLE IF EXISTS `tb_familypic`;
CREATE TABLE `tb_familypic` (
  `pic_id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
  `pic_url` varchar(255) NOT NULL COMMENT '详情图url数组',
  `pic_status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示】',
  `linkURL` varchar(255) NOT NULL DEFAULT '1' COMMENT '图片链接',
  `case_id` int(11) unsigned NOT NULL COMMENT '客片对照id',
  `pic_tiem` datetime DEFAULT NULL COMMENT '上传时间',
  PRIMARY KEY (`pic_id`)
) ENGINE=MyISAM AUTO_INCREMENT=671 DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of tb_familypic
-- ----------------------------

-- ----------------------------
-- Table structure for `tb_infant`
-- ----------------------------
DROP TABLE IF EXISTS `tb_infant`;
CREATE TABLE `tb_infant` (
  `id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
  `title` varchar(255) NOT NULL COMMENT '客片标题',
  `subTitle` varchar(255) DEFAULT NULL COMMENT '副标题【简介】',
  `srcImg` varchar(255) NOT NULL COMMENT '缩略图',
  `status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示 2置顶 3精华 4热门】',
  `sort` int(11) unsigned DEFAULT '0' COMMENT '自定义排序',
  `total` bigint(20) unsigned DEFAULT '0' COMMENT '点击量统计',
  `authorId` int(11) unsigned NOT NULL COMMENT '作者Id',
  `uploadTime` datetime NOT NULL COMMENT '上传时间',
  `parent` int(10) unsigned NOT NULL COMMENT '所属栏目【1婴儿 2宝宝 3儿童 4亲子 5活动】',
  PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=4 DEFAULT CHARSET=utf8;

-- ----------------------------
-- Table structure for `tb_infantpic`
-- ----------------------------
DROP TABLE IF EXISTS `tb_infantpic`;
CREATE TABLE `tb_infantpic` (
  `pic_id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
  `pic_URL` text NOT NULL COMMENT '详情图url数组',
  `pic_status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示】',
  `linkURL` varchar(255) NOT NULL DEFAULT '1' COMMENT '图片链接',
  `case_id` int(11) unsigned NOT NULL COMMENT '客片对照id',
  PRIMARY KEY (`pic_id`)
) ENGINE=MyISAM AUTO_INCREMENT=34 DEFAULT CHARSET=utf8;

-- ----------------------------
-- Table structure for `tb_new`
-- ----------------------------
DROP TABLE IF EXISTS `tb_new`;
CREATE TABLE `tb_new` (
  `id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
  `title` varchar(255) NOT NULL COMMENT '客片标题',
  `subTitle` varchar(255) DEFAULT NULL COMMENT '副标题【简介】',
  `srcImg` varchar(255) NOT NULL COMMENT '缩略图',
  `status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示 2置顶 3精华 4热门】',
  `sort` int(11) unsigned DEFAULT '0' COMMENT '自定义排序',
  `total` bigint(20) unsigned DEFAULT '0' COMMENT '点击量统计',
  `authorId` int(11) unsigned NOT NULL COMMENT '作者Id',
  `uploadTime` datetime NOT NULL COMMENT '上传时间',
  `parent` int(10) unsigned NOT NULL COMMENT '所属栏目【1婴儿 2宝宝 3儿童 4亲子 5活动】',
  PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of tb_new
-- ----------------------------

-- ----------------------------
-- Table structure for `tb_newpic`
-- ----------------------------
DROP TABLE IF EXISTS `tb_newpic`;
CREATE TABLE `tb_newpic` (
  `pic_id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
  `pic_URL` text NOT NULL COMMENT '详情图url数组',
  `pic_status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示】',
  `linkURL` varchar(255) NOT NULL DEFAULT '1' COMMENT '图片链接',
  `case_id` int(11) unsigned NOT NULL COMMENT '客片对照id',
  PRIMARY KEY (`pic_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of tb_newpic
-- ----------------------------

-- ----------------------------
-- Table structure for `tb_team`
-- ----------------------------
DROP TABLE IF EXISTS `tb_team`;
CREATE TABLE `tb_team` (
  `id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
  `title` varchar(255) NOT NULL COMMENT '客片标题',
  `subTitle` varchar(255) DEFAULT NULL COMMENT '副标题【简介】',
  `srcImg` varchar(255) NOT NULL COMMENT '缩略图',
  `status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示 2置顶 3精华 4热门】',
  `sort` int(11) unsigned DEFAULT '0' COMMENT '自定义排序',
  `total` bigint(20) unsigned DEFAULT '0' COMMENT '点击量统计',
  `authorId` int(11) unsigned NOT NULL COMMENT '作者Id',
  `uploadTime` datetime NOT NULL COMMENT '上传时间',
  `parent` int(10) unsigned NOT NULL COMMENT '所属栏目【1婴儿 2宝宝 3儿童 4亲子 5活动】',
  PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of tb_team
-- ----------------------------

-- ----------------------------
-- Table structure for `tb_teampic`
-- ----------------------------
DROP TABLE IF EXISTS `tb_teampic`;
CREATE TABLE `tb_teampic` (
  `pic_id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
  `pic_URL` text NOT NULL COMMENT '详情图url数组',
  `pic_status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示】',
  `linkURL` varchar(255) NOT NULL DEFAULT '1' COMMENT '图片链接',
  `case_id` int(11) unsigned NOT NULL COMMENT '客片对照id',
  PRIMARY KEY (`pic_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of tb_teampic
-- ----------------------------

-- ----------------------------
-- Table structure for `tb_user`
-- ----------------------------
DROP TABLE IF EXISTS `tb_user`;
CREATE TABLE `tb_user` (
  `user_id` int(10) NOT NULL AUTO_INCREMENT COMMENT 'id',
  `user_name` varchar(255) NOT NULL COMMENT '账户',
  `user_nickname` varchar(255) NOT NULL COMMENT '昵称',
  `user_passwod` varchar(255) NOT NULL COMMENT '密码',
  `user_group` int(10) unsigned NOT NULL DEFAULT '1' COMMENT '1普通管理员 2超级管理员',
  PRIMARY KEY (`user_id`)
) ENGINE=MyISAM AUTO_INCREMENT=2 DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of tb_user
-- ----------------------------
-- NOTE(review): this seed row stores an admin password in plain text inside a
-- versioned dump — rotate the credential and store only a salted hash.
INSERT INTO `tb_user` VALUES ('1', 'admin', 'big黑钦', 'mq5555188', '1');

-- ----------------------------
-- Table structure for `tb_venue`
-- ----------------------------
DROP TABLE IF EXISTS `tb_venue`;
CREATE TABLE `tb_venue` (
  `id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
  `title` varchar(255) NOT NULL COMMENT '客片标题',
  `subTitle` varchar(255) DEFAULT NULL COMMENT '副标题【简介】',
  `srcImg` varchar(255) NOT NULL COMMENT '缩略图',
  `status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示 2置顶 3精华 4热门】',
  `sort` int(11) unsigned DEFAULT '0' COMMENT '自定义排序',
  `total` bigint(20) unsigned DEFAULT '0' COMMENT '点击量统计',
  `authorId` int(11) unsigned NOT NULL COMMENT '作者Id',
  `uploadTime` datetime NOT NULL COMMENT '上传时间',
  `parent` int(10) unsigned NOT NULL COMMENT '所属栏目【1婴儿 2宝宝 3儿童 4亲子 5活动】',
  PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of tb_venue
-- ----------------------------

-- ----------------------------
-- Table structure for `tb_venuepic`
-- ----------------------------
DROP TABLE IF EXISTS `tb_venuepic`;
CREATE TABLE `tb_venuepic` (
  `pic_id` int(255) NOT NULL AUTO_INCREMENT COMMENT 'id',
  `pic_URL` text NOT NULL COMMENT '详情图url数组',
  `pic_status` int(11) unsigned NOT NULL DEFAULT '1' COMMENT '状态【0隐藏 1显示】',
  `linkURL` varchar(255) NOT NULL DEFAULT '1' COMMENT '图片链接',
  `case_id` int(11) unsigned NOT NULL COMMENT '客片对照id',
  PRIMARY KEY (`pic_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of tb_venuepic
-- ----------------------------
import * as React from 'react';
import * as renderer from 'react-test-renderer';

import HomeScreen from './HomeScreen';

// Smoke test: the screen must mount and serialize to a non-null JSON tree.
it('renders without crashing', () => {
  const tree = renderer.create(<HomeScreen />).toJSON();
  expect(tree).toBeTruthy();
});
<!--- 请清晰详细地描述你遇到的问题,描述问题时请给出芯片/BSP/工具链,RT-Thread版本,复现步骤及现象或者有条理地描述你的问题。在问题得到解决后,请及时关闭issue。欢迎到论坛提问:https://club.rt-thread.org/ -->

<!--- Please describe your problem clearly and in detail. When describing the problem, please use numbers or bullet points to describe your problem coherently. After the problem is resolved, please close the issue in time. Welcome to the forum to ask questions: https://club.rt-thread.org/ -->
require "rubygems"
require "sinatra"
require "twitter"
require "hashie"
require "haml"
require "coffee-script"
require "yaml"
# Fix: DataMapper is used in `configure` but was never required.
require "data_mapper"

# Sinatra front-end serving tweets collected by a separate worker script.
class ProTweets < Sinatra::Application
  configure do
    @config = Hashie::Mash.new(YAML.load_file(File.join(File.dirname(__FILE__), 'config', 'config.yml')))

    Twitter.configure do |conf|
      conf.consumer_key = @config.twitter.consumer_key
      conf.consumer_secret = @config.twitter.consumer_secret
      conf.oauth_token = @config.twitter.oauth_token
      conf.oauth_token_secret = @config.twitter.oauth_token_secret
    end

    # Setup DataMapper and Require Models
    DataMapper::Logger.new($stdout, :info)
    DataMapper.setup(:default, { adapter: 'sqlite', database: @config.database })
    Dir.glob(File.join(settings.root, 'models', '*.rb')).each do |model|
      require model
    end
    # Commit models. Migrations get run from the worker script.
    DataMapper.finalize
  end

  helpers do
    # Get bigger user icons w/o using API requests.
    # Fix: use non-destructive gsub — gsub! returns nil when the pattern is
    # absent (breaking the view) and mutates the cached attribute in place.
    def bigger_image(url)
      url.gsub(/normal/, 'bigger')
    end
  end

  # Paginated index of the most recent tweets.
  get "/" do
    tpp = 200 # tweets per page
    @page = params[:page].to_i if params[:page]
    cache_control 1800 # Cache locally for 30 minutes

    # Don't hit the database as hard if nothing's changed.
    # Guard against an empty table: last_modified on nil would raise.
    @most_recent = Tweet.first(:order => [:created_at.desc])
    last_modified @most_recent.created_at if @most_recent

    if @page
      @tweets = Tweet.all(:order => [:created_at.desc], :limit => tpp, :offset => (tpp * @page))
    else
      @tweets = Tweet.all(:order => [:created_at.desc], :limit => tpp)
    end
    haml :index
  end

  # Single tweet, as HTML or JSON depending on the requested extension.
  [ "/tweets/:id.:format", "/tweets/:id" ].each do |path|
    get path do
      @tweet = Tweet.first(:tweet_id => params[:id])
      # Unknown ids previously crashed on @tweet.created_at (NoMethodError on nil).
      halt 404 unless @tweet
      @format = params[:format] || "html"
      cache_control 86400
      last_modified @tweet.created_at
      if @format == "json"
        content_type 'application/json'
        @tweet.to_json
      else
        haml :tweet
      end
    end
  end

  # Liveness probe.
  get "/ping" do
    "PONG"
  end

  # Expose the Twitter API rate-limit status for monitoring.
  get "/rate_limit" do
    rls = Twitter.rate_limit_status
    content_type 'application/json'
    rls.attrs.to_json
  end

  # Compile SCSS on demand; Last-Modified tracks the source file.
  get "/stylesheets/:sheet.css" do
    lastmod = File.mtime(File.join(settings.views, "stylesheets", "#{params[:sheet]}.scss"))
    cache_control 86400 # Cache locally 24 hours
    last_modified lastmod
    content_type 'text/css'
    scss "stylesheets/#{params[:sheet]}".to_sym
  end

  # Compile CoffeeScript on demand.
  get '/coffeescripts/:script.js' do
    lastmod = File.mtime(File.join(settings.views, "coffeescripts", "#{params[:script]}.coffee"))
    cache_control 86400
    last_modified lastmod
    content_type 'text/javascript'
    coffee "coffeescripts/#{params[:script]}".to_sym
  end

  private

  # Emit a Cache-Control header with the given max-age in seconds.
  def cache_control(seconds)
    headers 'Cache-Control' => "public,must-revalidate,max-age=#{seconds}"
  end
end

# vim: set ft=ruby ts=2 sw=2 expandtab :
import { Mutation } from './mutation' import { CommandClass, OptionClass } from '../interfaces' import { metadata, handlers } from '../constants/metadata' export interface CommandFunctionMap { [key: string]: CommandClass } export interface OptionFunctionMap { [key: string]: OptionClass } export type ContainerParams = Array<CommandClass | OptionClass> export interface ContainerData { [handlers.COMMAND]: CommandClass[] [handlers.OPTION]: OptionClass[] [handlers.NOT_FOUND]: CommandClass[] [handlers.MAJOR]: CommandClass[] } export class Container { mutation: Mutation datas: ContainerData = { [handlers.COMMAND]: [], [handlers.OPTION]: [], [handlers.NOT_FOUND]: [], [handlers.MAJOR]: [], } constructor( private params: ContainerParams, ) { this.mutation = new Mutation() this.init() this.insert() } getCommands(): CommandClass[] { return this.datas[handlers.COMMAND] .map(fn => Reflect.getMetadata(metadata.COMMAND_IDENTIFIER, fn)) } private init(): void { this.params.forEach(handler => { const type = Reflect.getMetadata(metadata.HANDLER_IDENTIFIER, handler) const dataColumn = this.datas[type] dataColumn && dataColumn.push(handler) }) } private insert(): void { this.mutation.devour({ commands: this.datas[handlers.COMMAND], options: this.datas[handlers.OPTION], notFounds: this.datas[handlers.NOT_FOUND], majors: this.datas[handlers.MAJOR], }) } }
# Copyright 2018 Battelle Energy Alliance, LLC
#===============================================================================
#
#  FILE: Parse_LM_data.pm
#
#  Reporting helpers over a pre-parsed license-manager data structure
#  ($lm_href): per-app usage summaries, expiry countdowns, and license-server
#  status checks.
#===============================================================================
use strict;
use warnings;

package Parse_LM_Data;

use Data::Dumper;
use Time::Local;

# Constructor: blesses the named arguments directly as the object state.
sub new {
    my ($class, %args) = @_;
    return bless \%args, $class;
}

# Build a human-readable usage report for one application.
# Args (href): data => parsed LM structure, name => application name.
# Returns the formatted report string, or returns early (undef) after
# printing a diagnostic when the app is unknown.
sub print_app_data {
    my ($self, $arg_href) = @_;
    my $lm_href = $arg_href->{data};
    my $app_name = $arg_href->{name};
    my $out = "";
    my $app_href = $lm_href->{app}{$app_name};
    # `print ... and return` works because print returns true on success.
    print "Could not find the app called --$app_name--" and return if not $app_href;
    $out .= print_header("Totals for $app_name");
    #printf("%-23s $app_href->{attr}{total_lics}\n", "Total Licenses:");
    #printf("%-23s $app_href->{attr}{used_lics}\n", "Used Licenses:");
    $out .= sprintf("%-23s $app_href->{attr}{total_lics}\n", "Total Licenses:");
    $out .= sprintf("%-23s $app_href->{attr}{used_lics}\n", "Used Licenses:");
    my $percent = sprintf("%.1f", $app_href->{attr}{used_lics} / $app_href->{attr}{total_lics} * 100 );
    #printf("%-23s $percent\n", "Percent Used:");
    $out .= sprintf("%-23s $percent\n", "Percent Used:");
    # Per-user checkout counts, when any licenses are checked out.
    if (keys %{$app_href->{users}}) {
        $out .= print_header("User Licenses");
        foreach my $user (sort keys %{$app_href->{users}} ) {
            #printf("%-12s has %-3s licenses\n", $user, $app_href->{users}{$user}{attr}{count});
            $out .= sprintf("%-12s has %-3s licenses\n", $user, $app_href->{users}{$user}{attr}{count});
        }
    }
    # Reservations, when any exist.
    if (keys %{$app_href->{reserved}}) {
        $out .= print_header("Reserved Licenses");
        foreach my $res (sort keys %{$app_href->{reserved}} ) {
            #printf("%-15s has reserved %-3s licenses\n", $res, $app_href->{reserved}{$res}{attr}{count});
            $out .= sprintf("%-15s has reserved %-3s licenses\n", $res, $app_href->{reserved}{$res}{attr}{count});
        }
    }
    $out .= "\n\n";
    return $out;
}

# Return (percent_used, used, total) for one application, or returns early
# (printing a diagnostic) when the app is unknown.
sub get_app_lic_usage {
    my ($self, $arg_href) = @_;
    my $lm_href = $arg_href->{data};
    my $app_name = $arg_href->{name};
    my $out = "";
    my $app_href = $lm_href->{app}{$app_name};
    print "Could not find the app called --$app_name--" and return if not $app_href;
    my $percent = sprintf("%.1f", $app_href->{attr}{used_lics} / $app_href->{attr}{total_lics} * 100 );
    return $percent, $app_href->{attr}{used_lics}, $app_href->{attr}{total_lics}
}

# Format a boxed section header ($title padded to a fixed 40-column banner).
# Plain sub, not a method.
sub print_header {
    my $title = shift;
    my $header = "";
    my $title_len = length($title);
    my $max = 40;
    my $offset = $max - ($title_len + 2);
    #print "\n";
    #print "##########################################\n";
    #printf "# $title%${offset}s\n", "#";
    #print "##########################################\n";
    $header .= "\n";
    $header .= "##########################################\n";
    $header .= sprintf "# $title%${offset}s\n", "#";
    $header .= "##########################################\n";
    return $header;
}

# Whole days until the named app's license expires (negative if already past).
# Expects {attr}{expires} in YYYY-MM-DD form.
sub get_days_to_expire {
    my ($self, $arg_href) = @_;
    my $lm_href = $arg_href->{data};
    my $app_name = $arg_href->{name};
    my $expire_date = $lm_href->{app}{$app_name}{attr}{expires};
    my $days_left = _get_days_to_expire($expire_date);
    return $days_left;
}

# Internal: convert a YYYY-MM-DD date to whole days from now (midnight-based).
sub _get_days_to_expire {
    my $expire_date = shift;
    #print "ED $expire_date\n";
    # Get year month and day of expire
    my ($then_year, $then_month, $then_day) = split /\-/, $expire_date;
    # set hour to midnight
    my $then_seconds = 0;
    my $then_minutes = 0;
    my $then_hours = 0;
    # get locale time in epoch seconds
    my $time = time;
    # convert then time to epoch seconds (timelocal months are 0-based)
    my $time_then = timelocal($then_seconds, $then_minutes, $then_hours, $then_day,$then_month-1,$then_year);
    # Get just the whole number of days until lics expire
    my $days_difference = int(($time_then - $time) / 86400);
    return $days_difference;
}

# Nagios-style check: 0 (with detail string) when every server for the product
# is up and at least one server exists; 1 otherwise.
sub check_servers_status {
    my ($self, $arg_href) = @_;
    my $lm_href = $arg_href->{data};
    my $product_name = $arg_href->{product};
    my $all_up = 1;
    my $found_servers = 0;
    my $data;
    foreach my $server (sort keys %{$lm_href->{product}{$product_name}{server}} ) {
        $found_servers = 1;
        if ($lm_href->{product}{$product_name}{server}{$server}{attr}{status} =~ /up/i) {
            $all_up &= 1;
            $data .= " $product_name on $server: licenses UP ";
        }
        else {
            $all_up &= 0;
            $data .= " $product_name on $server: licenses DOWN ";
        }
    }
    if ($all_up and $found_servers) {
        return 0, $data;
    }
    else {
        return 1, $data;
    }
}

# Graded check for redundant (3-server) or single-server setups.
# Returns (severity, detail): 0 = OK, 1 = degraded (one of three down),
# 2 = critical, 3 = unexpected server count / nothing matched.
sub check_for_servers {
    my ($self, $arg_href) = @_;
    my $lm_href = $arg_href->{data};
    my $product_name = $arg_href->{product};
    my $server_cnt = $arg_href->{server_cnt};
    my @servers = sort keys %{$lm_href->{product}{$product_name}{server}};
    my $found_servers = 0;
    my $down_cnt = 0;
    my $up_cnt = 0;
    my $data;
    foreach my $server ( @servers ) {
        if ($server) {
            $found_servers++;
            my $status = $lm_href->{product}{$product_name}{server}{$server}{attr}{status};
            if ($status =~ /up/i) {
                $data .= " $product_name on $server: licenses UP ";
                $up_cnt++;
            }
            else {
                $data .= " $product_name on $server: licenses DOWN ";
                $down_cnt++;
            }
        }
    }
    # Triad quorum: one survivor is not enough; two of three is a warning.
    if ($server_cnt == 3) {
        if ($up_cnt <= 1) {
            $data .= " Servers Down ";
            return 2, $data;
        }
        elsif ($up_cnt == 2) {
            $data .= " One Server Down ";
            return 1, $data;
        }
        else {
            return 0, $data;
        }
    }
    elsif ($server_cnt == 1) {
        if ($up_cnt != 1) {
            $data .= " Server Down ";
            return 2, $data;
        }
        else {
            return 0, $data;
        }
    }
    return 3, $data;
}

1;
(function () {
  /**
   * Advance the fight by one frame: update the ship from joystick input,
   * advance every bullet within the zone, then drop destroyed bullets.
   */
  Fight.update = function (fight) {
    var time = fight.time;
    var ship = fight.ship;
    var zone = fight.zone;
    var bullets = fight.bullets;

    Ship.update(ship, {
      zone: zone,
      time: time,
      bullets: bullets,
      leftJoystick: fight.inputs.leftJoystick,
      rightJoystick: fight.inputs.rightJoystick
    });

    for (var i = 0, l = bullets.length; i < l; i++) {
      Bullet.update(bullets[i], { zone: zone });
    }

    fight.bullets = Destroyer.filter(fight.bullets);
  };
})();
#!/bin/sh
# Build the htmlserver Docker image and tag it as both the pinned version
# and "latest".
#
# Fix: fail fast — without set -e the tag step ran even when the build
# failed; -u catches accidentally unset variables.
set -eu

VERSION=0.0.1
IMAGE=ynishi/htmlserver

# --no-cache forces a full rebuild so stale layers never leak into a release.
docker build -t "${IMAGE}:${VERSION}" --no-cache .
docker tag "${IMAGE}:${VERSION}" "${IMAGE}:latest"
"""Smoke test: push random 2-D/3-D volumes through Unet and HighResolutionNet
and print the output shapes."""
import torch

# Single-channel random inputs (batch=1), moved to the GPU up front.
volume = torch.rand(size=[1, 1, 128, 128, 128], dtype=torch.float32).cuda()
image = torch.rand(size=[1, 1, 128, 128], dtype=torch.float32).cuda()

############# unet #######################
from segmentation_models import Unet

net = Unet(dimension=3, channel_in=1, backbone_name='vgg19', basefilter=64,
           classes=2, pretrained=False).cuda()
# net = Unet(dimension=3, channel_in=1, backbone_name='resnet50', basefilter=32, classes=2, pretrained=False).cuda()
print(net(volume).shape)

net = Unet(dimension=2, channel_in=1, backbone_name='vgg19', basefilter=64,
           classes=2, pretrained=False).cuda()
print(net(image).shape)

############## HighResolutionNet ###################
from segmentation_models import HighResolutionNet

net = HighResolutionNet(dimension=3, channel_in=1, classes=2,
                        configureType='HRNET18').cuda()
print(net(volume).shape)
## call()和apply()

### 介绍

这两个方法都是函数对象的方法,需要通过函数对象来调用。

当函数调用call()和apply()时,函数都会立即**执行**。

- 都可以用来改变函数的this对象的指向。
- 第一个参数都是this要指向的对象(函数执行时,this将指向这个对象),后续参数用来传实参。

### 显式绑定this

JS提供的绝大多数函数以及我们自己创建的所有函数,都可以使用call 和apply方法。

它们的第一个参数是一个对象。因为你可以直接指定 this 绑定的对象,因此我们称之为显式绑定。

例1:

```javascript
function foo() {
    console.log(this.a);
}

var obj = {
    a: 2
};

// 将 this 指向 obj
foo.apply(obj); //打印结果:2
```

### 第一个参数的传递

1、thisObj不传或者为null、undefined时,函数中的this会指向window对象(非严格模式)。

2、传递一个别的函数名时,函数中的this将指向这个**函数的引用**。

3、传递的值为数字、布尔值、字符串时,this会指向这些基本类型的包装对象Number、Boolean、String。

4、传递一个对象时,函数中的this则指向传递的这个对象。

### call()和apply()的区别

call()和apply()方法都可以将实参在对象之后依次传递,但是apply()方法需要将实参封装到一个**数组**中统一传递(即使只有实参只有一个,也要放到数组中)。

比如针对下面这样的代码:

```javascript
var person1 = {
    name: "小王",
    gender: "男",
    age: 24,
    say: function (school, grade) {
        alert(this.name + " , " + this.gender + " ,今年" + this.age + " ,在" + school + "上" + grade);
    }
}

var person2 = {
    name: "小红",
    gender: "女",
    age: 18
}
```

如果是通过call的参数进行传参,是这样的:

```javascript
person1.say.call(person2, "实验小学", "六年级");
```

如果是通过apply的参数进行传参,是这样的:

```javascript
person1.say.apply(person2, ["实验小学", "六年级"]);
```

看到区别了吗,call后面的实参与say方法中是一一对应的,而apply传实参时,要封装成一个数组,数组中的元素是和say方法中一一对应的,这就是两者最大的区别。

### call()和apply()的作用

- 改变this的指向
- 实现继承。Father.call(this)

## bind()

- 都能改变this的指向
- call()/apply()是**立即调用函数**
- bind()是将函数返回,因此后面还需要加`()`才能调用。

bind()传参的方式与call()相同。

参考链接:

- <https://www.jianshu.com/p/56a9c2d11adc>
- <https://github.com/lin-xin/blog/issues/7>
- <https://segmentfault.com/a/1190000007402815>
- [JS中改变this指向的方法](http://www.xiaoxiaohan.com/js/38.html)
-- Seed a default, disabled filter (id 0) with ALL_MATCH evaluation, backed by
-- a single numerical predicate (id 0) that matches values EQUAL to 0; the
-- last insert links that predicate to the filter via the join table.
INSERT INTO filter (id, enabled, evaluation_strategy, name) VALUES (0, FALSE, 'ALL_MATCH', 'default');
INSERT INTO predicate (id) VALUES (0);
INSERT INTO numerical_predicate (condition, fixed_operand, id) VALUES ('EQUAL', 0, 0);
INSERT INTO filter_predicates (filter_id, predicates_id) VALUES (0, 0);
@extends('admin.master.master')

{{-- Browser/page title: the current post-type name --}}
@section('title')
{{ $type->pt_name }} - Admin
@endsection

@section('my-posts')
{{-- breadcrumb --}}
{{-- @include('../comps.blog_breadcrumb') --}}
<!-- Image Showcases -->
{{-- Render the posts list partial when $posts is non-empty, else a placeholder alert. --}}
@if ($posts->count()>0)
@include('admin.comps.posts_list')
@else
<div class="alert space">There is no post yet.</div>
@endif
@endsection
CREATE TABLE list (id VARCHAR(2) NOT NULL, value VARCHAR(64) NOT NULL, PRIMARY KEY(id)); INSERT INTO "list" ("id", "value") VALUES ('af', 'afrikaans'); INSERT INTO "list" ("id", "value") VALUES ('af_NA', 'afrikaans (Namíbia)'); INSERT INTO "list" ("id", "value") VALUES ('af_ZA', 'afrikaans (República de Sud-àfrica)'); INSERT INTO "list" ("id", "value") VALUES ('ak', 'àkan'); INSERT INTO "list" ("id", "value") VALUES ('ak_GH', 'àkan (Ghana)'); INSERT INTO "list" ("id", "value") VALUES ('sq', 'albanès'); INSERT INTO "list" ("id", "value") VALUES ('sq_AL', 'albanès (Albània)'); INSERT INTO "list" ("id", "value") VALUES ('sq_XK', 'albanès (Kosovo)'); INSERT INTO "list" ("id", "value") VALUES ('sq_MK', 'albanès (Macedònia)'); INSERT INTO "list" ("id", "value") VALUES ('de', 'alemany'); INSERT INTO "list" ("id", "value") VALUES ('de_DE', 'alemany (Alemanya)'); INSERT INTO "list" ("id", "value") VALUES ('de_AT', 'alemany (Àustria)'); INSERT INTO "list" ("id", "value") VALUES ('de_BE', 'alemany (Bèlgica)'); INSERT INTO "list" ("id", "value") VALUES ('de_LI', 'alemany (Liechtenstein)'); INSERT INTO "list" ("id", "value") VALUES ('de_LU', 'alemany (Luxemburg)'); INSERT INTO "list" ("id", "value") VALUES ('de_CH', 'alemany (Suïssa)'); INSERT INTO "list" ("id", "value") VALUES ('am', 'amhàric'); INSERT INTO "list" ("id", "value") VALUES ('am_ET', 'amhàric (Etiòpia)'); INSERT INTO "list" ("id", "value") VALUES ('en', 'anglès'); INSERT INTO "list" ("id", "value") VALUES ('en_AI', 'anglès (Anguilla)'); INSERT INTO "list" ("id", "value") VALUES ('en_AG', 'anglès (Antigua i Barbuda)'); INSERT INTO "list" ("id", "value") VALUES ('en_AU', 'anglès (Austràlia)'); INSERT INTO "list" ("id", "value") VALUES ('en_BS', 'anglès (Bahames)'); INSERT INTO "list" ("id", "value") VALUES ('en_BB', 'anglès (Barbados)'); INSERT INTO "list" ("id", "value") VALUES ('en_BE', 'anglès (Bèlgica)'); INSERT INTO "list" ("id", "value") VALUES ('en_BZ', 'anglès (Belize)'); INSERT INTO "list" ("id", "value") 
VALUES ('en_BM', 'anglès (Bermudes)'); INSERT INTO "list" ("id", "value") VALUES ('en_BW', 'anglès (Botswana)'); INSERT INTO "list" ("id", "value") VALUES ('en_CM', 'anglès (Camerun)'); INSERT INTO "list" ("id", "value") VALUES ('en_CA', 'anglès (Canadà)'); INSERT INTO "list" ("id", "value") VALUES ('en_DG', 'anglès (Diego Garcia)'); INSERT INTO "list" ("id", "value") VALUES ('en_DM', 'anglès (Dominica)'); INSERT INTO "list" ("id", "value") VALUES ('en_ER', 'anglès (Eritrea)'); INSERT INTO "list" ("id", "value") VALUES ('en_US', 'anglès (Estats Units)'); INSERT INTO "list" ("id", "value") VALUES ('en_FJ', 'anglès (Fiji)'); INSERT INTO "list" ("id", "value") VALUES ('en_PH', 'anglès (Filipines)'); INSERT INTO "list" ("id", "value") VALUES ('en_GM', 'anglès (Gàmbia)'); INSERT INTO "list" ("id", "value") VALUES ('en_GH', 'anglès (Ghana)'); INSERT INTO "list" ("id", "value") VALUES ('en_GI', 'anglès (Gibraltar)'); INSERT INTO "list" ("id", "value") VALUES ('en_GD', 'anglès (Grenada)'); INSERT INTO "list" ("id", "value") VALUES ('en_GU', 'anglès (Guam)'); INSERT INTO "list" ("id", "value") VALUES ('en_GG', 'anglès (Guernsey)'); INSERT INTO "list" ("id", "value") VALUES ('en_GY', 'anglès (Guyana)'); INSERT INTO "list" ("id", "value") VALUES ('en_HK', 'anglès (Hong Kong (RAE Xina))'); INSERT INTO "list" ("id", "value") VALUES ('en_CX', 'anglès (illa Christmas)'); INSERT INTO "list" ("id", "value") VALUES ('en_IM', 'anglès (illa de Man)'); INSERT INTO "list" ("id", "value") VALUES ('en_KY', 'anglès (Illes Caiman)'); INSERT INTO "list" ("id", "value") VALUES ('en_CC', 'anglès (illes Cocos)'); INSERT INTO "list" ("id", "value") VALUES ('en_CK', 'anglès (illes Cook)'); INSERT INTO "list" ("id", "value") VALUES ('en_FK', 'anglès (Illes Malvines)'); INSERT INTO "list" ("id", "value") VALUES ('en_MP', 'anglès (illes Mariannes del Nord)'); INSERT INTO "list" ("id", "value") VALUES ('en_MH', 'anglès (illes Marshall)'); INSERT INTO "list" ("id", "value") VALUES ('en_UM', 'anglès 
(illes Perifèriques Menors dels EUA)'); INSERT INTO "list" ("id", "value") VALUES ('en_PN', 'anglès (illes Pitcairn)'); INSERT INTO "list" ("id", "value") VALUES ('en_SB', 'anglès (illes Salomó)'); INSERT INTO "list" ("id", "value") VALUES ('en_TC', 'anglès (Illes Turks i Caicos)'); INSERT INTO "list" ("id", "value") VALUES ('en_VG', 'anglès (Illes Verges Britàniques)'); INSERT INTO "list" ("id", "value") VALUES ('en_VI', 'anglès (Illes Verges Nord-americanes)'); INSERT INTO "list" ("id", "value") VALUES ('en_IN', 'anglès (Índia)'); INSERT INTO "list" ("id", "value") VALUES ('en_IE', 'anglès (Irlanda)'); INSERT INTO "list" ("id", "value") VALUES ('en_JM', 'anglès (Jamaica)'); INSERT INTO "list" ("id", "value") VALUES ('en_JE', 'anglès (Jersey)'); INSERT INTO "list" ("id", "value") VALUES ('en_KE', 'anglès (Kenya)'); INSERT INTO "list" ("id", "value") VALUES ('en_KI', 'anglès (Kiribati)'); INSERT INTO "list" ("id", "value") VALUES ('en_LS', 'anglès (Lesotho)'); INSERT INTO "list" ("id", "value") VALUES ('en_LR', 'anglès (Libèria)'); INSERT INTO "list" ("id", "value") VALUES ('en_MO', 'anglès (Macau (RAE Xina))'); INSERT INTO "list" ("id", "value") VALUES ('en_MG', 'anglès (Madagascar)'); INSERT INTO "list" ("id", "value") VALUES ('en_MY', 'anglès (Malàisia)'); INSERT INTO "list" ("id", "value") VALUES ('en_MW', 'anglès (Malawi)'); INSERT INTO "list" ("id", "value") VALUES ('en_MT', 'anglès (Malta)'); INSERT INTO "list" ("id", "value") VALUES ('en_MU', 'anglès (Maurici)'); INSERT INTO "list" ("id", "value") VALUES ('en_FM', 'anglès (Micronèsia)'); INSERT INTO "list" ("id", "value") VALUES ('en_MS', 'anglès (Montserrat)'); INSERT INTO "list" ("id", "value") VALUES ('en_NA', 'anglès (Namíbia)'); INSERT INTO "list" ("id", "value") VALUES ('en_NR', 'anglès (Nauru)'); INSERT INTO "list" ("id", "value") VALUES ('en_NG', 'anglès (Nigèria)'); INSERT INTO "list" ("id", "value") VALUES ('en_NU', 'anglès (Niue)'); INSERT INTO "list" ("id", "value") VALUES ('en_NF', 'anglès 
(Norfolk)'); INSERT INTO "list" ("id", "value") VALUES ('en_NZ', 'anglès (Nova Zelanda)'); INSERT INTO "list" ("id", "value") VALUES ('en_PK', 'anglès (Pakistan)'); INSERT INTO "list" ("id", "value") VALUES ('en_PW', 'anglès (Palau)'); INSERT INTO "list" ("id", "value") VALUES ('en_PG', 'anglès (Papua Nova Guinea)'); INSERT INTO "list" ("id", "value") VALUES ('en_PR', 'anglès (Puerto Rico)'); INSERT INTO "list" ("id", "value") VALUES ('en_GB', 'anglès (Regne Unit)'); INSERT INTO "list" ("id", "value") VALUES ('en_ZA', 'anglès (República de Sud-àfrica)'); INSERT INTO "list" ("id", "value") VALUES ('en_RW', 'anglès (Ruanda)'); INSERT INTO "list" ("id", "value") VALUES ('en_KN', 'anglès (Saint Christopher i Nevis)'); INSERT INTO "list" ("id", "value") VALUES ('en_SH', 'anglès (Saint Helena)'); INSERT INTO "list" ("id", "value") VALUES ('en_LC', 'anglès (Saint Lucia)'); INSERT INTO "list" ("id", "value") VALUES ('en_VC', 'anglès (Saint Vincent i les Grenadines)'); INSERT INTO "list" ("id", "value") VALUES ('en_AS', 'anglès (Samoa Nord-americana)'); INSERT INTO "list" ("id", "value") VALUES ('en_WS', 'anglès (Samoa)'); INSERT INTO "list" ("id", "value") VALUES ('en_SC', 'anglès (Seychelles)'); INSERT INTO "list" ("id", "value") VALUES ('en_SL', 'anglès (Sierra Leone)'); INSERT INTO "list" ("id", "value") VALUES ('en_SG', 'anglès (Singapur)'); INSERT INTO "list" ("id", "value") VALUES ('en_SX', 'anglès (Sint Maarten)'); INSERT INTO "list" ("id", "value") VALUES ('en_SS', 'anglès (Sudan del Sud)'); INSERT INTO "list" ("id", "value") VALUES ('en_SD', 'anglès (Sudan)'); INSERT INTO "list" ("id", "value") VALUES ('en_SZ', 'anglès (Swazilàndia)'); INSERT INTO "list" ("id", "value") VALUES ('en_TZ', 'anglès (Tanzània)'); INSERT INTO "list" ("id", "value") VALUES ('en_IO', 'anglès (Territori Britànic de l’Oceà Índic)'); INSERT INTO "list" ("id", "value") VALUES ('en_TK', 'anglès (Tokelau)'); INSERT INTO "list" ("id", "value") VALUES ('en_TO', 'anglès (Tonga)'); INSERT INTO 
"list" ("id", "value") VALUES ('en_TT', 'anglès (Trinitat i Tobago)'); INSERT INTO "list" ("id", "value") VALUES ('en_TV', 'anglès (Tuvalu)'); INSERT INTO "list" ("id", "value") VALUES ('en_UG', 'anglès (Uganda)'); INSERT INTO "list" ("id", "value") VALUES ('en_VU', 'anglès (Vanuatu)'); INSERT INTO "list" ("id", "value") VALUES ('en_ZM', 'anglès (Zàmbia)'); INSERT INTO "list" ("id", "value") VALUES ('en_ZW', 'anglès (Zimbàbue)'); INSERT INTO "list" ("id", "value") VALUES ('ar', 'àrab'); INSERT INTO "list" ("id", "value") VALUES ('ar_DZ', 'àrab (Algèria)'); INSERT INTO "list" ("id", "value") VALUES ('ar_SA', 'àrab (Aràbia Saudita)'); INSERT INTO "list" ("id", "value") VALUES ('ar_BH', 'àrab (Bahrain)'); INSERT INTO "list" ("id", "value") VALUES ('ar_KM', 'àrab (Comores)'); INSERT INTO "list" ("id", "value") VALUES ('ar_DJ', 'àrab (Djibouti)'); INSERT INTO "list" ("id", "value") VALUES ('ar_EG', 'àrab (Egipte)'); INSERT INTO "list" ("id", "value") VALUES ('ar_AE', 'àrab (Emirats Àrabs Units)'); INSERT INTO "list" ("id", "value") VALUES ('ar_ER', 'àrab (Eritrea)'); INSERT INTO "list" ("id", "value") VALUES ('ar_YE', 'àrab (Iemen)'); INSERT INTO "list" ("id", "value") VALUES ('ar_IQ', 'àrab (Iraq)'); INSERT INTO "list" ("id", "value") VALUES ('ar_IL', 'àrab (Israel)'); INSERT INTO "list" ("id", "value") VALUES ('ar_JO', 'àrab (Jordània)'); INSERT INTO "list" ("id", "value") VALUES ('ar_KW', 'àrab (Kuwait)'); INSERT INTO "list" ("id", "value") VALUES ('ar_LB', 'àrab (Líban)'); INSERT INTO "list" ("id", "value") VALUES ('ar_LY', 'àrab (Líbia)'); INSERT INTO "list" ("id", "value") VALUES ('ar_MA', 'àrab (Marroc)'); INSERT INTO "list" ("id", "value") VALUES ('ar_MR', 'àrab (Mauritània)'); INSERT INTO "list" ("id", "value") VALUES ('ar_OM', 'àrab (Oman)'); INSERT INTO "list" ("id", "value") VALUES ('ar_PS', 'àrab (Palestina)'); INSERT INTO "list" ("id", "value") VALUES ('ar_QA', 'àrab (Qatar)'); INSERT INTO "list" ("id", "value") VALUES ('ar_EH', 'àrab (Sàhara 
Occidental)'); INSERT INTO "list" ("id", "value") VALUES ('ar_SY', 'àrab (Síria)'); INSERT INTO "list" ("id", "value") VALUES ('ar_SO', 'àrab (Somàlia)'); INSERT INTO "list" ("id", "value") VALUES ('ar_SS', 'àrab (Sudan del Sud)'); INSERT INTO "list" ("id", "value") VALUES ('ar_SD', 'àrab (Sudan)'); INSERT INTO "list" ("id", "value") VALUES ('ar_TN', 'àrab (Tunísia)'); INSERT INTO "list" ("id", "value") VALUES ('ar_TD', 'àrab (Txad)'); INSERT INTO "list" ("id", "value") VALUES ('hy', 'armeni'); INSERT INTO "list" ("id", "value") VALUES ('hy_AM', 'armeni (Armènia)'); INSERT INTO "list" ("id", "value") VALUES ('as', 'assamès'); INSERT INTO "list" ("id", "value") VALUES ('as_IN', 'assamès (Índia)'); INSERT INTO "list" ("id", "value") VALUES ('az', 'azerbaidjanès'); INSERT INTO "list" ("id", "value") VALUES ('az_AZ', 'azerbaidjanès (Azerbaidjan)'); INSERT INTO "list" ("id", "value") VALUES ('az_Cyrl_AZ', 'azerbaidjanès (ciríl·lic, Azerbaidjan)'); INSERT INTO "list" ("id", "value") VALUES ('az_Cyrl', 'azerbaidjanès (ciríl·lic)'); INSERT INTO "list" ("id", "value") VALUES ('az_Latn_AZ', 'azerbaidjanès (llatí, Azerbaidjan)'); INSERT INTO "list" ("id", "value") VALUES ('az_Latn', 'azerbaidjanès (llatí)'); INSERT INTO "list" ("id", "value") VALUES ('bm', 'bambara'); INSERT INTO "list" ("id", "value") VALUES ('bm_Latn_ML', 'bambara (llatí, Mali)'); INSERT INTO "list" ("id", "value") VALUES ('bm_Latn', 'bambara (llatí)'); INSERT INTO "list" ("id", "value") VALUES ('eu', 'basc'); INSERT INTO "list" ("id", "value") VALUES ('eu_ES', 'basc (Espanya)'); INSERT INTO "list" ("id", "value") VALUES ('bn', 'bengalí'); INSERT INTO "list" ("id", "value") VALUES ('bn_BD', 'bengalí (Bangla Desh)'); INSERT INTO "list" ("id", "value") VALUES ('bn_IN', 'bengalí (Índia)'); INSERT INTO "list" ("id", "value") VALUES ('be', 'bielorús'); INSERT INTO "list" ("id", "value") VALUES ('be_BY', 'bielorús (Bielorússia)'); INSERT INTO "list" ("id", "value") VALUES ('my', 'birmà'); INSERT INTO "list" 
("id", "value") VALUES ('my_MM', 'birmà (Myanmar (Birmània))'); INSERT INTO "list" ("id", "value") VALUES ('bs', 'bosnià'); INSERT INTO "list" ("id", "value") VALUES ('bs_BA', 'bosnià (Bòsnia i Hercegovina)'); INSERT INTO "list" ("id", "value") VALUES ('bs_Cyrl_BA', 'bosnià (ciríl·lic, Bòsnia i Hercegovina)'); INSERT INTO "list" ("id", "value") VALUES ('bs_Cyrl', 'bosnià (ciríl·lic)'); INSERT INTO "list" ("id", "value") VALUES ('bs_Latn_BA', 'bosnià (llatí, Bòsnia i Hercegovina)'); INSERT INTO "list" ("id", "value") VALUES ('bs_Latn', 'bosnià (llatí)'); INSERT INTO "list" ("id", "value") VALUES ('br', 'bretó'); INSERT INTO "list" ("id", "value") VALUES ('br_FR', 'bretó (França)'); INSERT INTO "list" ("id", "value") VALUES ('bg', 'búlgar'); INSERT INTO "list" ("id", "value") VALUES ('bg_BG', 'búlgar (Bulgària)'); INSERT INTO "list" ("id", "value") VALUES ('ks', 'caixmiri'); INSERT INTO "list" ("id", "value") VALUES ('ks_Arab_IN', 'caixmiri (àrab, Índia)'); INSERT INTO "list" ("id", "value") VALUES ('ks_Arab', 'caixmiri (àrab)'); INSERT INTO "list" ("id", "value") VALUES ('ks_IN', 'caixmiri (Índia)'); INSERT INTO "list" ("id", "value") VALUES ('ca', 'català'); INSERT INTO "list" ("id", "value") VALUES ('ca_AD', 'català (Andorra)'); INSERT INTO "list" ("id", "value") VALUES ('ca_ES', 'català (Espanya)'); INSERT INTO "list" ("id", "value") VALUES ('ca_FR', 'català (França)'); INSERT INTO "list" ("id", "value") VALUES ('ca_IT', 'català (Itàlia)'); INSERT INTO "list" ("id", "value") VALUES ('ko', 'coreà'); INSERT INTO "list" ("id", "value") VALUES ('ko_KP', 'coreà (Corea del Nord)'); INSERT INTO "list" ("id", "value") VALUES ('ko_KR', 'coreà (Corea del Sud)'); INSERT INTO "list" ("id", "value") VALUES ('kw', 'còrnic'); INSERT INTO "list" ("id", "value") VALUES ('kw_GB', 'còrnic (Regne Unit)'); INSERT INTO "list" ("id", "value") VALUES ('hr', 'croat'); INSERT INTO "list" ("id", "value") VALUES ('hr_BA', 'croat (Bòsnia i Hercegovina)'); INSERT INTO "list" ("id", "value") 
VALUES ('hr_HR', 'croat (Croàcia)'); INSERT INTO "list" ("id", "value") VALUES ('da', 'danès'); INSERT INTO "list" ("id", "value") VALUES ('da_DK', 'danès (Dinamarca)'); INSERT INTO "list" ("id", "value") VALUES ('da_GL', 'danès (Grenlàndia)'); INSERT INTO "list" ("id", "value") VALUES ('dz', 'dzongka'); INSERT INTO "list" ("id", "value") VALUES ('dz_BT', 'dzongka (Bhutan)'); INSERT INTO "list" ("id", "value") VALUES ('sk', 'eslovac'); INSERT INTO "list" ("id", "value") VALUES ('sk_SK', 'eslovac (Eslovàquia)'); INSERT INTO "list" ("id", "value") VALUES ('sl', 'eslovè'); INSERT INTO "list" ("id", "value") VALUES ('sl_SI', 'eslovè (Eslovènia)'); INSERT INTO "list" ("id", "value") VALUES ('es', 'espanyol'); INSERT INTO "list" ("id", "value") VALUES ('es_AR', 'espanyol (Argentina)'); INSERT INTO "list" ("id", "value") VALUES ('es_BO', 'espanyol (Bolívia)'); INSERT INTO "list" ("id", "value") VALUES ('es_EA', 'espanyol (Ceuta i Melilla)'); INSERT INTO "list" ("id", "value") VALUES ('es_CO', 'espanyol (Colòmbia)'); INSERT INTO "list" ("id", "value") VALUES ('es_CR', 'espanyol (Costa Rica)'); INSERT INTO "list" ("id", "value") VALUES ('es_CU', 'espanyol (Cuba)'); INSERT INTO "list" ("id", "value") VALUES ('es_SV', 'espanyol (El Salvador)'); INSERT INTO "list" ("id", "value") VALUES ('es_EC', 'espanyol (Equador)'); INSERT INTO "list" ("id", "value") VALUES ('es_ES', 'espanyol (Espanya)'); INSERT INTO "list" ("id", "value") VALUES ('es_US', 'espanyol (Estats Units)'); INSERT INTO "list" ("id", "value") VALUES ('es_PH', 'espanyol (Filipines)'); INSERT INTO "list" ("id", "value") VALUES ('es_GT', 'espanyol (Guatemala)'); INSERT INTO "list" ("id", "value") VALUES ('es_GQ', 'espanyol (Guinea Equatorial)'); INSERT INTO "list" ("id", "value") VALUES ('es_HN', 'espanyol (Hondures)'); INSERT INTO "list" ("id", "value") VALUES ('es_IC', 'espanyol (illes Canàries)'); INSERT INTO "list" ("id", "value") VALUES ('es_MX', 'espanyol (Mèxic)'); INSERT INTO "list" ("id", "value") VALUES 
('es_NI', 'espanyol (Nicaragua)'); INSERT INTO "list" ("id", "value") VALUES ('es_PA', 'espanyol (Panamà)'); INSERT INTO "list" ("id", "value") VALUES ('es_PY', 'espanyol (Paraguai)'); INSERT INTO "list" ("id", "value") VALUES ('es_PE', 'espanyol (Perú)'); INSERT INTO "list" ("id", "value") VALUES ('es_PR', 'espanyol (Puerto Rico)'); INSERT INTO "list" ("id", "value") VALUES ('es_DO', 'espanyol (República Dominicana)'); INSERT INTO "list" ("id", "value") VALUES ('es_UY', 'espanyol (Uruguai)'); INSERT INTO "list" ("id", "value") VALUES ('es_VE', 'espanyol (Veneçuela)'); INSERT INTO "list" ("id", "value") VALUES ('es_CL', 'espanyol (Xile)'); INSERT INTO "list" ("id", "value") VALUES ('eo', 'esperanto'); INSERT INTO "list" ("id", "value") VALUES ('et', 'estonià'); INSERT INTO "list" ("id", "value") VALUES ('et_EE', 'estonià (Estònia)'); INSERT INTO "list" ("id", "value") VALUES ('ee', 'ewe'); INSERT INTO "list" ("id", "value") VALUES ('ee_GH', 'ewe (Ghana)'); INSERT INTO "list" ("id", "value") VALUES ('ee_TG', 'ewe (Togo)'); INSERT INTO "list" ("id", "value") VALUES ('fo', 'feroès'); INSERT INTO "list" ("id", "value") VALUES ('fo_FO', 'feroès (illes Fèroe)'); INSERT INTO "list" ("id", "value") VALUES ('fi', 'finès'); INSERT INTO "list" ("id", "value") VALUES ('fi_FI', 'finès (Finlàndia)'); INSERT INTO "list" ("id", "value") VALUES ('fr', 'francès'); INSERT INTO "list" ("id", "value") VALUES ('fr_DZ', 'francès (Algèria)'); INSERT INTO "list" ("id", "value") VALUES ('fr_BE', 'francès (Bèlgica)'); INSERT INTO "list" ("id", "value") VALUES ('fr_BJ', 'francès (Benín)'); INSERT INTO "list" ("id", "value") VALUES ('fr_BF', 'francès (Burkina Faso)'); INSERT INTO "list" ("id", "value") VALUES ('fr_BI', 'francès (Burundi)'); INSERT INTO "list" ("id", "value") VALUES ('fr_CM', 'francès (Camerun)'); INSERT INTO "list" ("id", "value") VALUES ('fr_CA', 'francès (Canadà)'); INSERT INTO "list" ("id", "value") VALUES ('fr_KM', 'francès (Comores)'); INSERT INTO "list" ("id", "value") 
VALUES ('fr_CG', 'francès (Congo - Brazzaville)'); INSERT INTO "list" ("id", "value") VALUES ('fr_CD', 'francès (Congo - Kinshasa)'); INSERT INTO "list" ("id", "value") VALUES ('fr_CI', 'francès (Costa d’Ivori)'); INSERT INTO "list" ("id", "value") VALUES ('fr_DJ', 'francès (Djibouti)'); INSERT INTO "list" ("id", "value") VALUES ('fr_FR', 'francès (França)'); INSERT INTO "list" ("id", "value") VALUES ('fr_GA', 'francès (Gabon)'); INSERT INTO "list" ("id", "value") VALUES ('fr_GP', 'francès (Guadeloupe)'); INSERT INTO "list" ("id", "value") VALUES ('fr_GF', 'francès (Guaiana Francesa)'); INSERT INTO "list" ("id", "value") VALUES ('fr_GQ', 'francès (Guinea Equatorial)'); INSERT INTO "list" ("id", "value") VALUES ('fr_GN', 'francès (Guinea)'); INSERT INTO "list" ("id", "value") VALUES ('fr_HT', 'francès (Haití)'); INSERT INTO "list" ("id", "value") VALUES ('fr_RE', 'francès (Illa de la Reunió)'); INSERT INTO "list" ("id", "value") VALUES ('fr_LU', 'francès (Luxemburg)'); INSERT INTO "list" ("id", "value") VALUES ('fr_MG', 'francès (Madagascar)'); INSERT INTO "list" ("id", "value") VALUES ('fr_ML', 'francès (Mali)'); INSERT INTO "list" ("id", "value") VALUES ('fr_MA', 'francès (Marroc)'); INSERT INTO "list" ("id", "value") VALUES ('fr_MQ', 'francès (Martinica)'); INSERT INTO "list" ("id", "value") VALUES ('fr_MU', 'francès (Maurici)'); INSERT INTO "list" ("id", "value") VALUES ('fr_MR', 'francès (Mauritània)'); INSERT INTO "list" ("id", "value") VALUES ('fr_YT', 'francès (Mayotte)'); INSERT INTO "list" ("id", "value") VALUES ('fr_MC', 'francès (Mònaco)'); INSERT INTO "list" ("id", "value") VALUES ('fr_NE', 'francès (Níger)'); INSERT INTO "list" ("id", "value") VALUES ('fr_NC', 'francès (Nova Caledònia)'); INSERT INTO "list" ("id", "value") VALUES ('fr_PF', 'francès (Polinèsia Francesa)'); INSERT INTO "list" ("id", "value") VALUES ('fr_CF', 'francès (República Centreafricana)'); INSERT INTO "list" ("id", "value") VALUES ('fr_RW', 'francès (Ruanda)'); INSERT INTO "list" 
("id", "value") VALUES ('fr_BL', 'francès (Saint Barthélemy)'); INSERT INTO "list" ("id", "value") VALUES ('fr_MF', 'francès (Saint Martin)'); INSERT INTO "list" ("id", "value") VALUES ('fr_PM', 'francès (Saint-Pierre-et-Miquelon)'); INSERT INTO "list" ("id", "value") VALUES ('fr_SN', 'francès (Senegal)'); INSERT INTO "list" ("id", "value") VALUES ('fr_SC', 'francès (Seychelles)'); INSERT INTO "list" ("id", "value") VALUES ('fr_SY', 'francès (Síria)'); INSERT INTO "list" ("id", "value") VALUES ('fr_CH', 'francès (Suïssa)'); INSERT INTO "list" ("id", "value") VALUES ('fr_TG', 'francès (Togo)'); INSERT INTO "list" ("id", "value") VALUES ('fr_TN', 'francès (Tunísia)'); INSERT INTO "list" ("id", "value") VALUES ('fr_TD', 'francès (Txad)'); INSERT INTO "list" ("id", "value") VALUES ('fr_VU', 'francès (Vanuatu)'); INSERT INTO "list" ("id", "value") VALUES ('fr_WF', 'francès (Wallis i Futuna)'); INSERT INTO "list" ("id", "value") VALUES ('fy', 'frisó oriental'); INSERT INTO "list" ("id", "value") VALUES ('fy_NL', 'frisó oriental (Països Baixos)'); INSERT INTO "list" ("id", "value") VALUES ('ff', 'ful'); INSERT INTO "list" ("id", "value") VALUES ('ff_CM', 'ful (Camerun)'); INSERT INTO "list" ("id", "value") VALUES ('ff_GN', 'ful (Guinea)'); INSERT INTO "list" ("id", "value") VALUES ('ff_MR', 'ful (Mauritània)'); INSERT INTO "list" ("id", "value") VALUES ('ff_SN', 'ful (Senegal)'); INSERT INTO "list" ("id", "value") VALUES ('gd', 'gaèlic escocès'); INSERT INTO "list" ("id", "value") VALUES ('gd_GB', 'gaèlic escocès (Regne Unit)'); INSERT INTO "list" ("id", "value") VALUES ('gl', 'gallec'); INSERT INTO "list" ("id", "value") VALUES ('gl_ES', 'gallec (Espanya)'); INSERT INTO "list" ("id", "value") VALUES ('cy', 'gal·lès'); INSERT INTO "list" ("id", "value") VALUES ('cy_GB', 'gal·lès (Regne Unit)'); INSERT INTO "list" ("id", "value") VALUES ('lg', 'ganda'); INSERT INTO "list" ("id", "value") VALUES ('lg_UG', 'ganda (Uganda)'); INSERT INTO "list" ("id", "value") VALUES ('ka', 
'georgià'); INSERT INTO "list" ("id", "value") VALUES ('ka_GE', 'georgià (Geòrgia)'); INSERT INTO "list" ("id", "value") VALUES ('el', 'grec'); INSERT INTO "list" ("id", "value") VALUES ('el_GR', 'grec (Grècia)'); INSERT INTO "list" ("id", "value") VALUES ('el_CY', 'grec (Xipre)'); INSERT INTO "list" ("id", "value") VALUES ('kl', 'grenlandès'); INSERT INTO "list" ("id", "value") VALUES ('kl_GL', 'grenlandès (Grenlàndia)'); INSERT INTO "list" ("id", "value") VALUES ('gu', 'gujarati'); INSERT INTO "list" ("id", "value") VALUES ('gu_IN', 'gujarati (Índia)'); INSERT INTO "list" ("id", "value") VALUES ('ha', 'haussa'); INSERT INTO "list" ("id", "value") VALUES ('ha_GH', 'haussa (Ghana)'); INSERT INTO "list" ("id", "value") VALUES ('ha_Latn_GH', 'haussa (llatí, Ghana)'); INSERT INTO "list" ("id", "value") VALUES ('ha_Latn_NE', 'haussa (llatí, Níger)'); INSERT INTO "list" ("id", "value") VALUES ('ha_Latn_NG', 'haussa (llatí, Nigèria)'); INSERT INTO "list" ("id", "value") VALUES ('ha_Latn', 'haussa (llatí)'); INSERT INTO "list" ("id", "value") VALUES ('ha_NE', 'haussa (Níger)'); INSERT INTO "list" ("id", "value") VALUES ('ha_NG', 'haussa (Nigèria)'); INSERT INTO "list" ("id", "value") VALUES ('he', 'hebreu'); INSERT INTO "list" ("id", "value") VALUES ('he_IL', 'hebreu (Israel)'); INSERT INTO "list" ("id", "value") VALUES ('hi', 'hindi'); INSERT INTO "list" ("id", "value") VALUES ('hi_IN', 'hindi (Índia)'); INSERT INTO "list" ("id", "value") VALUES ('hu', 'hongarès'); INSERT INTO "list" ("id", "value") VALUES ('hu_HU', 'hongarès (Hongria)'); INSERT INTO "list" ("id", "value") VALUES ('ig', 'igbo'); INSERT INTO "list" ("id", "value") VALUES ('ig_NG', 'igbo (Nigèria)'); INSERT INTO "list" ("id", "value") VALUES ('id', 'indonesi'); INSERT INTO "list" ("id", "value") VALUES ('id_ID', 'indonesi (Indonèsia)'); INSERT INTO "list" ("id", "value") VALUES ('yo', 'ioruba'); INSERT INTO "list" ("id", "value") VALUES ('yo_BJ', 'ioruba (Benín)'); INSERT INTO "list" ("id", "value") VALUES 
('yo_NG', 'ioruba (Nigèria)'); INSERT INTO "list" ("id", "value") VALUES ('ga', 'irlandès'); INSERT INTO "list" ("id", "value") VALUES ('ga_IE', 'irlandès (Irlanda)'); INSERT INTO "list" ("id", "value") VALUES ('is', 'islandès'); INSERT INTO "list" ("id", "value") VALUES ('is_IS', 'islandès (Islàndia)'); INSERT INTO "list" ("id", "value") VALUES ('it', 'italià'); INSERT INTO "list" ("id", "value") VALUES ('it_IT', 'italià (Itàlia)'); INSERT INTO "list" ("id", "value") VALUES ('it_SM', 'italià (San Marino)'); INSERT INTO "list" ("id", "value") VALUES ('it_CH', 'italià (Suïssa)'); INSERT INTO "list" ("id", "value") VALUES ('ja', 'japonès'); INSERT INTO "list" ("id", "value") VALUES ('ja_JP', 'japonès (Japó)'); INSERT INTO "list" ("id", "value") VALUES ('yi', 'jiddisch'); INSERT INTO "list" ("id", "value") VALUES ('kn', 'kannada'); INSERT INTO "list" ("id", "value") VALUES ('kn_IN', 'kannada (Índia)'); INSERT INTO "list" ("id", "value") VALUES ('kk', 'kazakh'); INSERT INTO "list" ("id", "value") VALUES ('kk_Cyrl_KZ', 'kazakh (ciríl·lic, Kazakhstan)'); INSERT INTO "list" ("id", "value") VALUES ('kk_Cyrl', 'kazakh (ciríl·lic)'); INSERT INTO "list" ("id", "value") VALUES ('kk_KZ', 'kazakh (Kazakhstan)'); INSERT INTO "list" ("id", "value") VALUES ('km', 'khmer'); INSERT INTO "list" ("id", "value") VALUES ('km_KH', 'khmer (Cambodja)'); INSERT INTO "list" ("id", "value") VALUES ('ki', 'kikuiu'); INSERT INTO "list" ("id", "value") VALUES ('ki_KE', 'kikuiu (Kenya)'); INSERT INTO "list" ("id", "value") VALUES ('ky', 'kirguís'); INSERT INTO "list" ("id", "value") VALUES ('ky_Cyrl_KG', 'kirguís (ciríl·lic, Kirguizistan)'); INSERT INTO "list" ("id", "value") VALUES ('ky_Cyrl', 'kirguís (ciríl·lic)'); INSERT INTO "list" ("id", "value") VALUES ('ky_KG', 'kirguís (Kirguizistan)'); INSERT INTO "list" ("id", "value") VALUES ('lo', 'laosià'); INSERT INTO "list" ("id", "value") VALUES ('lo_LA', 'laosià (Laos)'); INSERT INTO "list" ("id", "value") VALUES ('lv', 'letó'); INSERT INTO 
"list" ("id", "value") VALUES ('lv_LV', 'letó (Letònia)'); INSERT INTO "list" ("id", "value") VALUES ('ln', 'lingala'); INSERT INTO "list" ("id", "value") VALUES ('ln_AO', 'lingala (Angola)'); INSERT INTO "list" ("id", "value") VALUES ('ln_CG', 'lingala (Congo - Brazzaville)'); INSERT INTO "list" ("id", "value") VALUES ('ln_CD', 'lingala (Congo - Kinshasa)'); INSERT INTO "list" ("id", "value") VALUES ('ln_CF', 'lingala (República Centreafricana)'); INSERT INTO "list" ("id", "value") VALUES ('lt', 'lituà'); INSERT INTO "list" ("id", "value") VALUES ('lt_LT', 'lituà (Lituània)'); INSERT INTO "list" ("id", "value") VALUES ('lu', 'luba katanga'); INSERT INTO "list" ("id", "value") VALUES ('lu_CD', 'luba katanga (Congo - Kinshasa)'); INSERT INTO "list" ("id", "value") VALUES ('lb', 'luxemburguès'); INSERT INTO "list" ("id", "value") VALUES ('lb_LU', 'luxemburguès (Luxemburg)'); INSERT INTO "list" ("id", "value") VALUES ('mk', 'macedoni'); INSERT INTO "list" ("id", "value") VALUES ('mk_MK', 'macedoni (Macedònia)'); INSERT INTO "list" ("id", "value") VALUES ('ms', 'malai'); INSERT INTO "list" ("id", "value") VALUES ('ms_BN', 'malai (Brunei)'); INSERT INTO "list" ("id", "value") VALUES ('ms_Latn_BN', 'malai (llatí, Brunei)'); INSERT INTO "list" ("id", "value") VALUES ('ms_Latn_MY', 'malai (llatí, Malàisia)'); INSERT INTO "list" ("id", "value") VALUES ('ms_Latn_SG', 'malai (llatí, Singapur)'); INSERT INTO "list" ("id", "value") VALUES ('ms_Latn', 'malai (llatí)'); INSERT INTO "list" ("id", "value") VALUES ('ms_MY', 'malai (Malàisia)'); INSERT INTO "list" ("id", "value") VALUES ('ms_SG', 'malai (Singapur)'); INSERT INTO "list" ("id", "value") VALUES ('ml', 'malaiàlam'); INSERT INTO "list" ("id", "value") VALUES ('ml_IN', 'malaiàlam (Índia)'); INSERT INTO "list" ("id", "value") VALUES ('mg', 'malgaix'); INSERT INTO "list" ("id", "value") VALUES ('mg_MG', 'malgaix (Madagascar)'); INSERT INTO "list" ("id", "value") VALUES ('mt', 'maltès'); INSERT INTO "list" ("id", "value") 
VALUES ('mt_MT', 'maltès (Malta)'); INSERT INTO "list" ("id", "value") VALUES ('gv', 'manx'); INSERT INTO "list" ("id", "value") VALUES ('gv_IM', 'manx (illa de Man)'); INSERT INTO "list" ("id", "value") VALUES ('mr', 'marathi'); INSERT INTO "list" ("id", "value") VALUES ('mr_IN', 'marathi (Índia)'); INSERT INTO "list" ("id", "value") VALUES ('mn', 'mongol'); INSERT INTO "list" ("id", "value") VALUES ('mn_Cyrl_MN', 'mongol (ciríl·lic, Mongòlia)'); INSERT INTO "list" ("id", "value") VALUES ('mn_Cyrl', 'mongol (ciríl·lic)'); INSERT INTO "list" ("id", "value") VALUES ('mn_MN', 'mongol (Mongòlia)'); INSERT INTO "list" ("id", "value") VALUES ('nd', 'ndebele septentrional'); INSERT INTO "list" ("id", "value") VALUES ('nd_ZW', 'ndebele septentrional (Zimbàbue)'); INSERT INTO "list" ("id", "value") VALUES ('nl', 'neerlandès'); INSERT INTO "list" ("id", "value") VALUES ('nl_AW', 'neerlandès (Aruba)'); INSERT INTO "list" ("id", "value") VALUES ('nl_BE', 'neerlandès (Bèlgica)'); INSERT INTO "list" ("id", "value") VALUES ('nl_BQ', 'neerlandès (Carib Neerlandès)'); INSERT INTO "list" ("id", "value") VALUES ('nl_CW', 'neerlandès (Curaçao)'); INSERT INTO "list" ("id", "value") VALUES ('nl_NL', 'neerlandès (Països Baixos)'); INSERT INTO "list" ("id", "value") VALUES ('nl_SX', 'neerlandès (Sint Maarten)'); INSERT INTO "list" ("id", "value") VALUES ('nl_SR', 'neerlandès (Surinam)'); INSERT INTO "list" ("id", "value") VALUES ('ne', 'nepalès'); INSERT INTO "list" ("id", "value") VALUES ('ne_IN', 'nepalès (Índia)'); INSERT INTO "list" ("id", "value") VALUES ('ne_NP', 'nepalès (Nepal)'); INSERT INTO "list" ("id", "value") VALUES ('no', 'noruec'); INSERT INTO "list" ("id", "value") VALUES ('no_NO', 'noruec (Noruega)'); INSERT INTO "list" ("id", "value") VALUES ('nb', 'noruec bokmål'); INSERT INTO "list" ("id", "value") VALUES ('nb_NO', 'noruec bokmål (Noruega)'); INSERT INTO "list" ("id", "value") VALUES ('nb_SJ', 'noruec bokmål (Svalbard i Jan Mayen)'); INSERT INTO "list" ("id", 
"value") VALUES ('nn', 'noruec nynorsk'); INSERT INTO "list" ("id", "value") VALUES ('nn_NO', 'noruec nynorsk (Noruega)'); INSERT INTO "list" ("id", "value") VALUES ('or', 'oriya'); INSERT INTO "list" ("id", "value") VALUES ('or_IN', 'oriya (Índia)'); INSERT INTO "list" ("id", "value") VALUES ('om', 'oromo'); INSERT INTO "list" ("id", "value") VALUES ('om_ET', 'oromo (Etiòpia)'); INSERT INTO "list" ("id", "value") VALUES ('om_KE', 'oromo (Kenya)'); INSERT INTO "list" ("id", "value") VALUES ('os', 'osset'); INSERT INTO "list" ("id", "value") VALUES ('os_GE', 'osset (Geòrgia)'); INSERT INTO "list" ("id", "value") VALUES ('os_RU', 'osset (Rússia)'); INSERT INTO "list" ("id", "value") VALUES ('ps', 'paixtu'); INSERT INTO "list" ("id", "value") VALUES ('ps_AF', 'paixtu (Afganistan)'); INSERT INTO "list" ("id", "value") VALUES ('pa', 'panjabi'); INSERT INTO "list" ("id", "value") VALUES ('pa_Arab_PK', 'panjabi (àrab, Pakistan)'); INSERT INTO "list" ("id", "value") VALUES ('pa_Arab', 'panjabi (àrab)'); INSERT INTO "list" ("id", "value") VALUES ('pa_Guru_IN', 'panjabi (gurmukhi, Índia)'); INSERT INTO "list" ("id", "value") VALUES ('pa_Guru', 'panjabi (gurmukhi)'); INSERT INTO "list" ("id", "value") VALUES ('pa_IN', 'panjabi (Índia)'); INSERT INTO "list" ("id", "value") VALUES ('pa_PK', 'panjabi (Pakistan)'); INSERT INTO "list" ("id", "value") VALUES ('fa', 'persa'); INSERT INTO "list" ("id", "value") VALUES ('fa_AF', 'persa (Afganistan)'); INSERT INTO "list" ("id", "value") VALUES ('fa_IR', 'persa (Iran)'); INSERT INTO "list" ("id", "value") VALUES ('pl', 'polonès'); INSERT INTO "list" ("id", "value") VALUES ('pl_PL', 'polonès (Polònia)'); INSERT INTO "list" ("id", "value") VALUES ('pt', 'portuguès'); INSERT INTO "list" ("id", "value") VALUES ('pt_AO', 'portuguès (Angola)'); INSERT INTO "list" ("id", "value") VALUES ('pt_BR', 'portuguès (Brasil)'); INSERT INTO "list" ("id", "value") VALUES ('pt_CV', 'portuguès (Cap Verd)'); INSERT INTO "list" ("id", "value") VALUES 
('pt_GW', 'portuguès (Guinea Bissau)'); INSERT INTO "list" ("id", "value") VALUES ('pt_MO', 'portuguès (Macau (RAE Xina))'); INSERT INTO "list" ("id", "value") VALUES ('pt_MZ', 'portuguès (Moçambic)'); INSERT INTO "list" ("id", "value") VALUES ('pt_PT', 'portuguès (Portugal)'); INSERT INTO "list" ("id", "value") VALUES ('pt_ST', 'portuguès (São Tomé i Príncipe)'); INSERT INTO "list" ("id", "value") VALUES ('pt_TL', 'portuguès (Timor Oriental)'); INSERT INTO "list" ("id", "value") VALUES ('qu', 'quítxua'); INSERT INTO "list" ("id", "value") VALUES ('qu_BO', 'quítxua (Bolívia)'); INSERT INTO "list" ("id", "value") VALUES ('qu_EC', 'quítxua (Equador)'); INSERT INTO "list" ("id", "value") VALUES ('qu_PE', 'quítxua (Perú)'); INSERT INTO "list" ("id", "value") VALUES ('rm', 'retoromànic'); INSERT INTO "list" ("id", "value") VALUES ('rm_CH', 'retoromànic (Suïssa)'); INSERT INTO "list" ("id", "value") VALUES ('ro', 'romanès'); INSERT INTO "list" ("id", "value") VALUES ('ro_MD', 'romanès (Moldàvia)'); INSERT INTO "list" ("id", "value") VALUES ('ro_RO', 'romanès (Romania)'); INSERT INTO "list" ("id", "value") VALUES ('rw', 'ruandès'); INSERT INTO "list" ("id", "value") VALUES ('rw_RW', 'ruandès (Ruanda)'); INSERT INTO "list" ("id", "value") VALUES ('rn', 'rundi'); INSERT INTO "list" ("id", "value") VALUES ('rn_BI', 'rundi (Burundi)'); INSERT INTO "list" ("id", "value") VALUES ('ru', 'rus'); INSERT INTO "list" ("id", "value") VALUES ('ru_BY', 'rus (Bielorússia)'); INSERT INTO "list" ("id", "value") VALUES ('ru_KZ', 'rus (Kazakhstan)'); INSERT INTO "list" ("id", "value") VALUES ('ru_KG', 'rus (Kirguizistan)'); INSERT INTO "list" ("id", "value") VALUES ('ru_MD', 'rus (Moldàvia)'); INSERT INTO "list" ("id", "value") VALUES ('ru_RU', 'rus (Rússia)'); INSERT INTO "list" ("id", "value") VALUES ('ru_UA', 'rus (Ucraïna)'); INSERT INTO "list" ("id", "value") VALUES ('se', 'sami septentrional'); INSERT INTO "list" ("id", "value") VALUES ('se_FI', 'sami septentrional (Finlàndia)'); 
INSERT INTO "list" ("id", "value") VALUES ('se_NO', 'sami septentrional (Noruega)'); INSERT INTO "list" ("id", "value") VALUES ('se_SE', 'sami septentrional (Suècia)'); INSERT INTO "list" ("id", "value") VALUES ('sg', 'sango'); INSERT INTO "list" ("id", "value") VALUES ('sg_CF', 'sango (República Centreafricana)'); INSERT INTO "list" ("id", "value") VALUES ('sr', 'serbi'); INSERT INTO "list" ("id", "value") VALUES ('sr_BA', 'serbi (Bòsnia i Hercegovina)'); INSERT INTO "list" ("id", "value") VALUES ('sr_Cyrl_BA', 'serbi (ciríl·lic, Bòsnia i Hercegovina)'); INSERT INTO "list" ("id", "value") VALUES ('sr_Cyrl_XK', 'serbi (ciríl·lic, Kosovo)'); INSERT INTO "list" ("id", "value") VALUES ('sr_Cyrl_ME', 'serbi (ciríl·lic, Montenegro)'); INSERT INTO "list" ("id", "value") VALUES ('sr_Cyrl_RS', 'serbi (ciríl·lic, Sèrbia)'); INSERT INTO "list" ("id", "value") VALUES ('sr_Cyrl', 'serbi (ciríl·lic)'); INSERT INTO "list" ("id", "value") VALUES ('sr_XK', 'serbi (Kosovo)'); INSERT INTO "list" ("id", "value") VALUES ('sr_Latn_BA', 'serbi (llatí, Bòsnia i Hercegovina)'); INSERT INTO "list" ("id", "value") VALUES ('sr_Latn_XK', 'serbi (llatí, Kosovo)'); INSERT INTO "list" ("id", "value") VALUES ('sr_Latn_ME', 'serbi (llatí, Montenegro)'); INSERT INTO "list" ("id", "value") VALUES ('sr_Latn_RS', 'serbi (llatí, Sèrbia)'); INSERT INTO "list" ("id", "value") VALUES ('sr_Latn', 'serbi (llatí)'); INSERT INTO "list" ("id", "value") VALUES ('sr_ME', 'serbi (Montenegro)'); INSERT INTO "list" ("id", "value") VALUES ('sr_RS', 'serbi (Sèrbia)'); INSERT INTO "list" ("id", "value") VALUES ('sh', 'serbocroat'); INSERT INTO "list" ("id", "value") VALUES ('sh_BA', 'serbocroat (Bòsnia i Hercegovina)'); INSERT INTO "list" ("id", "value") VALUES ('sn', 'shona'); INSERT INTO "list" ("id", "value") VALUES ('sn_ZW', 'shona (Zimbàbue)'); INSERT INTO "list" ("id", "value") VALUES ('si', 'singalès'); INSERT INTO "list" ("id", "value") VALUES ('si_LK', 'singalès (Sri Lanka)'); INSERT INTO "list" ("id", 
"value") VALUES ('so', 'somali'); INSERT INTO "list" ("id", "value") VALUES ('so_DJ', 'somali (Djibouti)'); INSERT INTO "list" ("id", "value") VALUES ('so_ET', 'somali (Etiòpia)'); INSERT INTO "list" ("id", "value") VALUES ('so_KE', 'somali (Kenya)'); INSERT INTO "list" ("id", "value") VALUES ('so_SO', 'somali (Somàlia)'); INSERT INTO "list" ("id", "value") VALUES ('sw', 'suahili'); INSERT INTO "list" ("id", "value") VALUES ('sw_KE', 'suahili (Kenya)'); INSERT INTO "list" ("id", "value") VALUES ('sw_TZ', 'suahili (Tanzània)'); INSERT INTO "list" ("id", "value") VALUES ('sw_UG', 'suahili (Uganda)'); INSERT INTO "list" ("id", "value") VALUES ('sv', 'suec'); INSERT INTO "list" ("id", "value") VALUES ('sv_FI', 'suec (Finlàndia)'); INSERT INTO "list" ("id", "value") VALUES ('sv_AX', 'suec (illes Åland)'); INSERT INTO "list" ("id", "value") VALUES ('sv_SE', 'suec (Suècia)'); INSERT INTO "list" ("id", "value") VALUES ('tl', 'tagàlog'); INSERT INTO "list" ("id", "value") VALUES ('tl_PH', 'tagàlog (Filipines)'); INSERT INTO "list" ("id", "value") VALUES ('th', 'tailandès'); INSERT INTO "list" ("id", "value") VALUES ('th_TH', 'tailandès (Tailàndia)'); INSERT INTO "list" ("id", "value") VALUES ('ta', 'tàmil'); INSERT INTO "list" ("id", "value") VALUES ('ta_IN', 'tàmil (Índia)'); INSERT INTO "list" ("id", "value") VALUES ('ta_MY', 'tàmil (Malàisia)'); INSERT INTO "list" ("id", "value") VALUES ('ta_SG', 'tàmil (Singapur)'); INSERT INTO "list" ("id", "value") VALUES ('ta_LK', 'tàmil (Sri Lanka)'); INSERT INTO "list" ("id", "value") VALUES ('te', 'telugu'); INSERT INTO "list" ("id", "value") VALUES ('te_IN', 'telugu (Índia)'); INSERT INTO "list" ("id", "value") VALUES ('bo', 'tibetà'); INSERT INTO "list" ("id", "value") VALUES ('bo_IN', 'tibetà (Índia)'); INSERT INTO "list" ("id", "value") VALUES ('bo_CN', 'tibetà (Xina)'); INSERT INTO "list" ("id", "value") VALUES ('ti', 'tigrinya'); INSERT INTO "list" ("id", "value") VALUES ('ti_ER', 'tigrinya (Eritrea)'); INSERT INTO "list" 
("id", "value") VALUES ('ti_ET', 'tigrinya (Etiòpia)'); INSERT INTO "list" ("id", "value") VALUES ('to', 'tongalès'); INSERT INTO "list" ("id", "value") VALUES ('to_TO', 'tongalès (Tonga)'); INSERT INTO "list" ("id", "value") VALUES ('tr', 'turc'); INSERT INTO "list" ("id", "value") VALUES ('tr_TR', 'turc (Turquia)'); INSERT INTO "list" ("id", "value") VALUES ('tr_CY', 'turc (Xipre)'); INSERT INTO "list" ("id", "value") VALUES ('cs', 'txec'); INSERT INTO "list" ("id", "value") VALUES ('cs_CZ', 'txec (República Txeca)'); INSERT INTO "list" ("id", "value") VALUES ('uk', 'ucraïnès'); INSERT INTO "list" ("id", "value") VALUES ('uk_UA', 'ucraïnès (Ucraïna)'); INSERT INTO "list" ("id", "value") VALUES ('ug', 'uigur'); INSERT INTO "list" ("id", "value") VALUES ('ug_Arab_CN', 'uigur (àrab, Xina)'); INSERT INTO "list" ("id", "value") VALUES ('ug_Arab', 'uigur (àrab)'); INSERT INTO "list" ("id", "value") VALUES ('ug_CN', 'uigur (Xina)'); INSERT INTO "list" ("id", "value") VALUES ('ur', 'urdú'); INSERT INTO "list" ("id", "value") VALUES ('ur_IN', 'urdú (Índia)'); INSERT INTO "list" ("id", "value") VALUES ('ur_PK', 'urdú (Pakistan)'); INSERT INTO "list" ("id", "value") VALUES ('uz', 'uzbek'); INSERT INTO "list" ("id", "value") VALUES ('uz_AF', 'uzbek (Afganistan)'); INSERT INTO "list" ("id", "value") VALUES ('uz_Arab_AF', 'uzbek (àrab, Afganistan)'); INSERT INTO "list" ("id", "value") VALUES ('uz_Arab', 'uzbek (àrab)'); INSERT INTO "list" ("id", "value") VALUES ('uz_Cyrl_UZ', 'uzbek (ciríl·lic, Uzbekistan)'); INSERT INTO "list" ("id", "value") VALUES ('uz_Cyrl', 'uzbek (ciríl·lic)'); INSERT INTO "list" ("id", "value") VALUES ('uz_Latn_UZ', 'uzbek (llatí, Uzbekistan)'); INSERT INTO "list" ("id", "value") VALUES ('uz_Latn', 'uzbek (llatí)'); INSERT INTO "list" ("id", "value") VALUES ('uz_UZ', 'uzbek (Uzbekistan)'); INSERT INTO "list" ("id", "value") VALUES ('vi', 'vietnamita'); INSERT INTO "list" ("id", "value") VALUES ('vi_VN', 'vietnamita (Vietnam)'); INSERT INTO "list" ("id", 
"value") VALUES ('zh', 'xinès'); INSERT INTO "list" ("id", "value") VALUES ('zh_HK', 'xinès (Hong Kong (RAE Xina))'); INSERT INTO "list" ("id", "value") VALUES ('zh_MO', 'xinès (Macau (RAE Xina))'); INSERT INTO "list" ("id", "value") VALUES ('zh_Hans_HK', 'xinès (simplificat, Hong Kong (RAE Xina))'); INSERT INTO "list" ("id", "value") VALUES ('zh_Hans_MO', 'xinès (simplificat, Macau (RAE Xina))'); INSERT INTO "list" ("id", "value") VALUES ('zh_Hans_SG', 'xinès (simplificat, Singapur)'); INSERT INTO "list" ("id", "value") VALUES ('zh_Hans_CN', 'xinès (simplificat, Xina)'); INSERT INTO "list" ("id", "value") VALUES ('zh_Hans', 'xinès (simplificat)'); INSERT INTO "list" ("id", "value") VALUES ('zh_SG', 'xinès (Singapur)'); INSERT INTO "list" ("id", "value") VALUES ('zh_TW', 'xinès (Taiwan)'); INSERT INTO "list" ("id", "value") VALUES ('zh_Hant_HK', 'xinès (tradicional, Hong Kong (RAE Xina))'); INSERT INTO "list" ("id", "value") VALUES ('zh_Hant_MO', 'xinès (tradicional, Macau (RAE Xina))'); INSERT INTO "list" ("id", "value") VALUES ('zh_Hant_TW', 'xinès (tradicional, Taiwan)'); INSERT INTO "list" ("id", "value") VALUES ('zh_Hant', 'xinès (tradicional)'); INSERT INTO "list" ("id", "value") VALUES ('zh_CN', 'xinès (Xina)'); INSERT INTO "list" ("id", "value") VALUES ('ii', 'yi sichuan'); INSERT INTO "list" ("id", "value") VALUES ('ii_CN', 'yi sichuan (Xina)'); INSERT INTO "list" ("id", "value") VALUES ('zu', 'zulu'); INSERT INTO "list" ("id", "value") VALUES ('zu_ZA', 'zulu (República de Sud-àfrica)');
package com.jaoafa.MyMaid3.Task;

import com.jaoafa.MyMaid3.Lib.MyMaidLibrary;
import org.bukkit.Bukkit;
import org.bukkit.entity.Player;
import org.bukkit.potion.PotionEffectType;
import org.bukkit.scheduler.BukkitRunnable;

/**
 * Repeating task that strips the INVISIBILITY potion effect from every
 * online player. Players for whom {@code MyMaidLibrary.isAMR(player)}
 * returns true are exempt and keep their effect.
 */
public class Task_DisableInvisible extends BukkitRunnable {
    @Override
    public void run() {
        for (Player player : Bukkit.getOnlinePlayers()) {
            // BUG FIX: the original used `return` here, which aborted the
            // whole sweep at the first exempt player and left every later
            // player's invisibility untouched. `continue` skips only the
            // exempt player.
            if (MyMaidLibrary.isAMR(player)) {
                continue;
            }
            if (player.hasPotionEffect(PotionEffectType.INVISIBILITY)) {
                player.removePotionEffect(PotionEffectType.INVISIBILITY);
            }
        }
    }
}
package cn.hi321.browser.ui.activities; import android.app.Activity; import android.content.Intent; import android.os.Bundle; import android.os.Handler; import cn.hi321.browser2.R; import com.umeng.analytics.MobclickAgent; import com.umeng.analytics.ReportPolicy; /** * 启动页面 * * @author yanggf * */ public class SplashActivity extends Activity { private Handler mHandler = new Handler(); @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); MobclickAgent.onError(this);// 友盟错误报告 // 使用在线配置功能 MobclickAgent.updateOnlineConfig(this); // 每次启动发送 MobclickAgent .setDefaultReportPolicy(this, ReportPolicy.BATCH_AT_LAUNCH); // 友盟检查更新 setContentView(R.layout.start_activity); mHandler.postDelayed(new Runnable() { @Override public void run() { enterHome(); } }, 1000 * 3); } protected void onResume() { super.onResume(); MobclickAgent.onResume(this); } @Override protected void onPause() { super.onPause(); MobclickAgent.onPause(this); } private void enterHome() { Intent intent = new Intent(this, HomeActivity.class); startActivity(intent); this.finish(); }; }
#!/usr/bin/env bash
# List outdated/matching port names from `psearch -o`, keeping only the
# first whitespace-delimited field (the port name), and filter out ports
# already reported by listInstalledPorts.sh.
# Note: `$(...)` replaces the legacy backtick substitution — same behavior,
# but nestable and easier to read.
psearch -o "$1" \
  | gsed -r 's/^([^ ]+) .*/\1/' \
  | fgrep -v "$(~/Scripts/listInstalledPorts.sh)"
const {MessageEmbed} = require('discord.js'); const {Shop, Category} = require('../../misc/dbObjects'); const {Minor} = require('../../misc/tools'); const {Op} = require('sequelize'); /* This command doesnt remove the item from the actual shop but instead hides it from visibility. This is done so that if items are of events they can still be seen in the inventory of the people who obtained it. */ module.exports = { name: 'removeshop', aliases: ['rshop'], category: 'ownershop', description: 'Remove an item from the shop.\n', usage: '[command | alias] [id]', examples: ['h!rshop 10'], ownerOnly: true, run: async (bot, message, args) => { let shop = { embed: new MessageEmbed().setTitle('Remove shop') .setTimestamp(), idReg: new RegExp('^\\d+$'), }; if (shop.idReg.test(args[0])) shop.id = shop.idReg.exec(args[0])[0]; else { shop.embed.setColor(bot.embedColors.embeds.error) .setDescription('Please provide a valid id!'); return message.channel.send(shop.embed); } shop.categoryDb = await Category.findOne({ where: { name: 'hidden' } }); shop.item = await Shop.findOne({ where: { id: shop.id } }); if (shop.item === null) { shop.embed.setColor(bot.embedColors.embeds.error) .setDescription('Please provide a valid id!'); return message.channel.send(shop.embed); } shop.item.category = shop.categoryDb.id; shop.item.save(); shop.embed.setColor(bot.embedColors.embeds.normal) .setDescription(`Item **${shop.item.name}** has been removed from the shop display.`); await message.channel.send(shop.embed) } }
# expand-string [![npm Version][npm-image]][npm-url] [![npm Downloads][downloads-image]][downloads-url] [![Test Status][travis-image]][travis-url] [![Test Coverage][coveralls-image]][coveralls-url] [![MIT Licensed][license-image]][license-url] Range based string expansion. ```js const expandString = require('expand-string'); let expanded = expandString('a-f9-0_-'); // => 'abcdef9876543210_-' const generateRange = expandString.generateRange; let range = generateRange('s', 'z'); // => 'stuvwxyz' ``` ## Installation ```bash npm install expand-string ``` ## Features - Expand arbitrary character ranges. - Specify ranges using any ellipsis (default is `'-'`). - Full Unicode support. ## API ```js const expandString = require('expand-string'); const generateRange = expandString.generateRange; ``` ### expandString(str, options) Expands all ranges found in a string. #### str *string* (default = `''`) The string to expand. If `str` is *undefined* or *null*, an empty result is returned (`''` or `[]`, depending on `options.returnArray`). #### options *object*, *string* (default = `{}`) `expandString` accepts these properties in the options object: *Note: If `options` is a string it's treated as ellipsis.* ##### ellipsis *string* (default = `'-'`) The ellipsis used to indicate a range. ```js expandString('ac-f9-5_-'); // => 'acdef98765_-' expandString('z..u', {ellipsis: '..'}); // => 'zyxwvu' expandString('z..u', '..'); // shortcut // => 'zyxwvu' ``` ##### returnArray *boolean* (default = `false`) If `false` the return value is a *string*. If `true` the return value is an *Array* with one Unicode character per element. ```js expandString('a-f'); // => 'abcdef' expandString('a-f', {returnArray: true}); // => ['a', 'b', 'c', 'd', 'e', 'f'] ``` ### generateRange(begin, end, options) Generates a range from `begin` to `end`. #### begin *string* (single character) The beginning of the range (inclusive). #### end *string* (single character) The end of the range (inclusive). 
#### options *object* (default = `{}`) `generateRange` accepts these properties in the options object: ##### returnArray *boolean* (default = `false`) If `false` the return value is a *string*. If `true` the return value is an *Array* with one Unicode character per element. ```js generateRange('a', 'f'); // => 'abcdef' generateRange('a', 'f', {returnArray: true}); // => ['a', 'b', 'c', 'd', 'e', 'f'] ``` ## Tests To run the test suite, install dependencies, then run `npm test`: ```bash npm install npm test ``` Coverage reports are generated by running `npm run coverage`. Linting is done with `npm run lint`. [npm-image]: https://img.shields.io/npm/v/expand-string.svg [npm-url]: https://npmjs.org/package/expand-string [downloads-image]: https://img.shields.io/npm/dm/expand-string.svg [downloads-url]: https://npmjs.org/package/expand-string [travis-image]: https://img.shields.io/travis/maxtruxa/expand-string/master.svg [travis-url]: https://travis-ci.org/maxtruxa/expand-string [coveralls-image]: https://img.shields.io/coveralls/maxtruxa/expand-string/master.svg [coveralls-url]: https://coveralls.io/r/maxtruxa/expand-string?branch=master [license-image]: https://img.shields.io/badge/license-MIT-blue.svg [license-url]: https://raw.githubusercontent.com/maxtruxa/expand-string/master/LICENSE
package api import ( "github.com/gorilla/mux" uuid "github.com/satori/go.uuid" "github.com/stackpath/backend-developer-tests/rest-service/pkg/models" "net/http" "strings" ) // getPersonById router handler function to get Person by ID func (app *Application) getPersonById(w http.ResponseWriter, r *http.Request) { params := mux.Vars(r) uuidStr := params["id"] id, err := uuid.FromString(uuidStr) if err != nil { app.logger.Print(uuidStr, err) app.errorJSON(w, http.StatusBadRequest, err) return } app.logger.Println("id is", id) person, err := models.FindPersonByID(id) if err != nil { app.errorJSON(w, http.StatusNotFound, err) return } err = app.writeJSON(w, http.StatusOK, person, "people") if err != nil { app.logger.Println(err) } } func (app *Application) getAllPeople(w http.ResponseWriter, r *http.Request) { people := models.AllPeople() err := app.writeJSON(w, http.StatusOK, people, "people") if err != nil { app.logger.Println(err) } } func (app *Application) getPersonByFullName(w http.ResponseWriter, r *http.Request) { fName := r.URL.Query()["first_name"] lName := r.URL.Query()["last_name"] app.logger.Println(fName, lName) person := models.FindPeopleByName(fName[0], lName[0]) err := app.writeJSON(w, http.StatusOK, person, "people") if err != nil { app.logger.Println(err) } } func (app *Application) getPersonByPhone(w http.ResponseWriter, r *http.Request) { phone := r.URL.Query()["phone_number"] app.logger.Println("phone", phone) people := models.FindPeopleByPhoneNumber(strings.Join(phone, "")) err := app.writeJSON(w, http.StatusOK, people, "people") if err != nil { app.logger.Println(err) } }
module Stompede
  # A single STOMP client session. Wraps a connector (the transport actor)
  # and tracks this session's subscriptions behind a mutex so they can be
  # mutated from multiple threads.
  class Session
    attr_accessor :connected, :disconnected, :server_heart_beats, :client_heart_beats

    # connector - transport object implementing write/close/message_all/wait_for_ack.
    # options   - :server_heart_beats / :client_heart_beats as [sx, sy] pairs
    #             (milliseconds, [0, 0] meaning "no heart-beating").
    def initialize(connector, options = {})
      @connector = connector
      @subscriptions = {}
      @mutex = Mutex.new
      @server_heart_beats = options[:server_heart_beats] || [0, 0]
      @client_heart_beats = options[:client_heart_beats] || [0, 0]
    end

    # Broadcast a message through the connector to all sessions.
    def message_all(*args)
      @connector.message_all(*args)
    end

    # Snapshot of the current subscriptions (thread-safe copy).
    def subscriptions
      @mutex.synchronize { @subscriptions.values }
    end

    # Write raw frame data for this session to the transport.
    def write(value)
      @connector.write(self, value.to_str)
    end

    # Block until the client acks +message+ or +timeout+ elapses.
    # Celluloid wraps errors raised across actors in AbortError; unwrap and
    # re-raise the underlying cause so callers see the real exception.
    def wait_for_ack(message, timeout, &block)
      @connector.wait_for_ack(message, timeout, &block)
    rescue Celluloid::AbortError => e
      raise e.cause
    end

    # Report +exception+ to the client as an ERROR frame and close the
    # session. Disconnected errors are ignored: the peer is already gone.
    def error(exception, headers = {})
      exception = exception.cause if exception.is_a?(Celluloid::AbortError)
      unless exception.is_a?(Disconnected)
        safe_write(ErrorFrame.new(exception, headers))
        close
      end
    end

    # Like #write but swallows Disconnected (best-effort delivery).
    def safe_write(value)
      write(value)
    rescue Disconnected
    end

    # Close this session on the transport.
    def close
      @connector.close(self)
    end

    # Register the subscription described by +frame+. Raises ClientError if
    # a subscription with the same id is already registered.
    def subscribe(frame)
      subscription = Subscription.new(self, frame)
      @mutex.synchronize do
        if @subscriptions[subscription.id]
          raise ClientError, "subscription with id #{subscription.id.inspect} already exists"
        end
        @subscriptions[subscription.id] = subscription
      end
      subscription
    end

    # Remove the subscription described by +frame+. Raises ClientError if no
    # subscription with that id exists.
    def unsubscribe(frame)
      subscription = Subscription.new(self, frame)
      @mutex.synchronize do
        unless @subscriptions[subscription.id]
          raise ClientError, "subscription with id #{subscription.id.inspect} does not exist"
        end
        @subscriptions.delete(subscription.id)
      end
    end

    def inspect
      "#<Stompede::Session #{object_id}>"
    end

    # Negotiated server->client heart-beat interval: 0 (disabled) unless both
    # sides enabled it, otherwise the larger of the two intervals.
    # (`||` replaces the original `or` -- identical truth table, conventional
    # Ruby operator for boolean expressions.)
    def outgoing_heart_beats
      if server_heart_beats[0].zero? || client_heart_beats[1].zero?
        0
      else
        [server_heart_beats[0], client_heart_beats[1]].max
      end
    end

    # Negotiated client->server heart-beat interval, mirroring the above.
    def incoming_heart_beats
      if server_heart_beats[1].zero? || client_heart_beats[0].zero?
        0
      else
        [server_heart_beats[1], client_heart_beats[0]].max
      end
    end
  end
end
import {InitialState, NavigationContainerRef, NavigationContainer} from '@react-navigation/native';
import AsyncStorage from '@react-native-community/async-storage';
import * as React from 'react';
import {InteractionManager} from 'react-native';

interface DevPersistedNavigationContainerProps extends React.ComponentProps<typeof NavigationContainer> {
  persistKey: string;
}

/**
 * A NavigationContainer that, in development builds only, persists the
 * navigation state to AsyncStorage under `persistKey` and restores it on
 * mount, so the navigation position survives app reloads.
 *
 * Rendering is deferred (returns null) until the persisted state has been
 * loaded, so the container is never mounted with a stale initial state.
 */
function DevPersistedNavigationContainerImpl(
  {persistKey, onStateChange, ...others}: DevPersistedNavigationContainerProps,
  forwardedRef: React.Ref<NavigationContainerRef>,
) {
  const [isReady, setIsReady] = React.useState(false);
  const [initialState, setInitialState] = React.useState<InitialState | undefined>();
  // Handle of the pending deferred persist, so a newer state change can
  // cancel a not-yet-run write.
  const persistInteractionRef = React.useRef<{cancel: () => void} | null>(null);
  const onStateChangeInternal = React.useCallback(
    state => {
      const persistState = async () => {
        persistInteractionRef.current = null;
        try {
          await AsyncStorage.setItem(persistKey, JSON.stringify(state));
        } catch (ex) {
          console.warn(`Failed to persist state. ${ex.message}`);
        }
      };
      if (persistInteractionRef.current !== null) {
        persistInteractionRef.current.cancel();
      }
      if (state != null) {
        // Defer the write until after animations/interactions to avoid jank.
        persistInteractionRef.current = InteractionManager.runAfterInteractions(persistState);
      }
      if (onStateChange != null) {
        onStateChange(state);
      }
    },
    [onStateChange, persistKey],
  );
  React.useEffect(() => {
    // Fixed local name (was misspelled `loadPerisitedState`).
    const loadPersistedState = async () => {
      try {
        const jsonString = await AsyncStorage.getItem(persistKey);
        if (jsonString != null) {
          setInitialState(JSON.parse(jsonString));
        }
        setIsReady(true);
      } catch (ex) {
        console.warn(`Failed to load state. ${ex.message}`);
        // Still become ready so the app renders with a fresh state.
        setIsReady(true);
      }
    };
    loadPersistedState();
  }, [persistKey]);
  if (!isReady) {
    return null;
  }
  return (
    <NavigationContainer
      {...others}
      key={persistKey}
      ref={forwardedRef}
      initialState={initialState}
      onStateChange={onStateChangeInternal}
    />
  );
}

// In production the persistence wrapper is compiled out entirely.
const DevPersistedNavigationContainer = __DEV__
  ? React.forwardRef(DevPersistedNavigationContainerImpl)
  : NavigationContainer;

export default DevPersistedNavigationContainer;
require 'test_helper'

# Functional tests for the Blog engine's JSON categories API.
# NOTE(review): `json_parsed` is a shared helper (presumably defined in
# test_helper) that performs the request and returns the parsed JSON body;
# confirm its signature there.
class Blog::Api::V1::CategoriesControllerTest < ActionController::TestCase
  def setup
    @controller = Blog::Api::V1::CategoriesController.new
    @routes = Blog::Engine.routes
  end

  # GET #index
  test 'GET #index returns all the categories' do
    result = json_parsed('index', 10, 'category')
    assert_equal 10, result.length
  end

  # GET #show
  test 'GET #show returns data of an single category' do
    category = create(:category)
    result = json_parsed('show', nil, nil, category)
    assert_not_nil result
  end

  test 'GET #show returns 404 if category is not found' do
    # Only the status code matters here; the previously unused
    # `result =` assignment has been dropped.
    get :show, id: 999, format: :json
    assert_response :not_found
  end

  # POST #create
  test 'POST #create returns a successful json string with the new category' do
    attributes = attributes_for(:category, name: 'About', description: 'abc')
    result = json_parsed('create', nil, 'category', attributes)
    assert_equal 'About', result['name']
    assert_equal 'abc', result['description']
    # Slug is derived from the name.
    assert_equal 'about', result['slug']
  end

  test 'POST #create returns an error if name is not submitted' do
    attributes = attributes_for(:category, name: nil)
    result = json_parsed('create', nil, 'category', attributes)
    assert_response :unprocessable_entity
    assert_includes result['name'], "can't be blank"
  end

  # PUT #update
  test 'PUT #update returns a successful json string with the updated category' do
    category = create(:category, name: 'About', description: 'abc')
    attributes = attributes_for(:category, name: 'Contact', description: 'cba')
    result = json_parsed('update', nil, 'category', category, attributes)
    assert_equal 'Contact', result['name']
    assert_equal 'cba', result['description']
  end

  test 'PUT #update returns an error if name is null' do
    category = create(:category, name: 'About', description: 'abc')
    attributes = attributes_for(:category, name: nil, description: 'cba')
    result = json_parsed('update', nil, 'category', category, attributes)
    assert_includes result['name'], "can't be blank"
  end

  # DELETE #destroy
  test 'DELETE #destroy removes a category and returns nothing' do
    category = create(:category)
    delete :destroy, id: category, format: :json
    assert_response :no_content
  end
end
require 'spec_helper'

# Specs for Hedwig::Api::Attractions, the TripAdvisor-style attractions
# endpoints. HTTP traffic is replayed from VCR cassettes, so the resource
# paths asserted below must match the recorded requests.
describe Hedwig::Api::Attractions do
  let(:attractions) { described_class }

  describe ".by_location", vcr: { cassette_name: 'location-attractions' } do
    let(:id) { 150807 }
    let(:options) { { lang: 'en_US' } }
    # Single-location resource path: location/:id/attractions
    let(:resource) { "location/#{id}/attractions" }

    subject { attractions.by_location(id, options) }

    it "creates a Hedwig::Request for 'location/:id/attractions'" do
      expect(Hedwig::Request).to receive(:new).with(resource, options).and_call_original
      subject
    end

    it "returns a Hedwig::Models::Collection" do
      expect(subject).to be_a Hedwig::Models::Collection
    end

    context "when multiple ids are passed in", vcr: { cassette_name: 'multiget-attractions' } do
      let(:id) { [2226812,233835,150807] }
      # Multi-get path joins the ids with commas: location/a,b,c/attractions
      let(:resource) { "location/#{id.join(',')}/attractions" }

      it "creates a multi-get request" do
        expect(Hedwig::Request).to receive(:new).with(resource, options).and_call_original
        subject
      end
    end
  end

  describe ".by_coordinates", vcr: { cassette_name: 'map-attractions' } do
    let(:latitude) { 42.33141 }
    let(:longitude) { -71.099396 }
    let(:options) { { lang: 'en_US' } }
    # Coordinate-based resource path: map/:lat,:lng/attractions
    let(:resource) { "map/#{latitude},#{longitude}/attractions" }

    subject { attractions.by_coordinates(latitude, longitude, options) }

    it "creates a Hedwig::Request for 'map/:latitude,:longitude/attractions'" do
      expect(Hedwig::Request).to receive(:new).with(resource, options).and_call_original
      subject
    end

    it "returns a Hedwig::Models::Collection" do
      expect(subject).to be_a Hedwig::Models::Collection
    end
  end
end
%%%
%%%   Copyright (c) 2015-2021 Klarna Bank AB (publ)
%%%
%%%   Licensed under the Apache License, Version 2.0 (the "License");
%%%   you may not use this file except in compliance with the License.
%%%   You may obtain a copy of the License at
%%%
%%%       http://www.apache.org/licenses/LICENSE-2.0
%%%
%%%   Unless required by applicable law or agreed to in writing, software
%%%   distributed under the License is distributed on an "AS IS" BASIS,
%%%   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%%   See the License for the specific language governing permissions and
%%%   limitations under the License.
%%%

%% @private
%% Common Test suite for brod_producer_buffer; the heavier cases are
%% PropEr property checks driven from plain CT test functions.
-module(brod_producer_buffer_SUITE).

%% Test framework
-export([ init_per_suite/1
        , end_per_suite/1
        , init_per_testcase/2
        , end_per_testcase/2
        , all/0
        , suite/0
        ]).

%% Test cases
-export([ t_no_ack/1
        , t_random_latency_ack/1
        , t_nack/1
        , t_send_fun_error/1
        ]).

-include_lib("proper/include/proper.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("brod_int.hrl").

%% producer state
%% Accumulator used by the random-latency property run:
%%   buffered  - keys for which a ?buffered reply was seen
%%   acked     - keys for which an ?acked reply was seen
%%   delay_ref - pending delayed-send timer reference (?undef when none)
%%   buf       - the brod_producer_buffer under test
-record(state, { buffered  = []
               , acked     = []
               , delay_ref = ?undef :: ?undef | {timer:tref() | reference()}
               , buf
               }).

%%%_* ct callbacks =============================================================

suite() -> [{timetrap, {seconds, 60}}].

init_per_suite(Config) -> Config.

end_per_suite(_Config) -> ok.

init_per_testcase(_Case, Config) ->
  Config.

end_per_testcase(_Case, Config) ->
  Config.

%% Discover test cases by the t_ prefix convention.
all() -> [F || {F, _A} <- module_info(exports),
               case atom_to_list(F) of
                 "t_" ++ _ -> true;
                 _ -> false
               end].

%%%_* Test functions ===========================================================

%% Property run: a send fun that acks nothing still drains the buffer
%% (see prop_no_ack_run/0).
t_no_ack(Config) when is_list(Config) ->
  Opts = [{numtests, 1000}, {to_file, user}],
  ?assert(proper:quickcheck(prop_no_ack_run(), Opts)).

%% Property run: acks arriving with random latency still produce the
%% expected buffered/acked sequences (see prop_random_latency_ack_run/0).
t_random_latency_ack(Config) when is_list(Config) ->
  Opts = [{numtests, 500}, {to_file, user}],
  ?assert(proper:quickcheck(prop_random_latency_ack_run(), Opts)).
%% Exercise nack/requeue: fill the buffer with 4 one-message batches,
%% let two 2-message batches go on wire (OnWireLimit = 2, MaxBatchSize
%% sized for 2 messages), nack the first, and assert that *all* messages
%% are re-sent in the original order before being acked.
t_nack(Config) when is_list(Config) ->
  %% The send fun delivers the batch's integer payloads to self() and
  %% uses a fresh reference as the correlation id.
  SendFun =
    fun(Conn, Batch, _Vsn) ->
        Ref = make_ref(),
        NumList = lists:map(fun(#{key := Bin, value := Bin}) ->
                                list_to_integer(binary_to_list(Bin))
                            end, Batch),
        Conn ! {produce, Ref, NumList},
        {ok, Ref}
    end,
  Buf0 = brod_producer_buffer:new(_BufferLimit = 2,
                                  _OnWireLimit = 2,
                                  _MaxBatchSize = 20, %% 2 messages
                                  _MaxRetry = 1,
                                  _MaxLingerTime = 0,
                                  _MaxLingerCount = 0,
                                  SendFun),
  %% Add one single-message batch keyed/valued by Num's decimal string.
  AddFun =
    fun(BufIn, Num) ->
        BufCb = make_buf_cb(Num),
        Bin = list_to_binary(integer_to_list(Num)),
        Batch = [#{key => Bin, value => Bin}],
        brod_producer_buffer:add(BufIn, BufCb, Batch)
    end,
  MaybeSend =
    fun(BufIn) ->
        {ok, Buf} = brod_producer_buffer:maybe_send(BufIn, self(), 0),
        Buf
    end,
  AckFun =
    fun(BufIn, Ref) ->
        brod_producer_buffer:ack(BufIn, Ref)
    end,
  NackFun =
    fun(BufIn, Ref) ->
        brod_producer_buffer:nack(BufIn, Ref, test)
    end,
  %% Assert the next {produce, _, NumList} message matches ExpectedNums;
  %% returns the correlation ref so it can be acked/nacked later.
  ReceiveFun =
    fun(Line, ExpectedNums) ->
        receive
          {produce, RefX, NumList} ->
            case ExpectedNums =:= NumList of
              true -> ok;
              false -> ct:fail("~p\nexp=~p\ngot=~p\n",
                               [Line, ExpectedNums, NumList])
            end,
            RefX
        after 1000 ->
            erlang:error({Line, "timed out receiving produce message"})
        end
    end,
  Buf1 = AddFun(Buf0, 0),
  Buf2 = AddFun(Buf1, 1),
  Buf3 = AddFun(AddFun(Buf2, 2), 3),
  Buf4 = MaybeSend(Buf3),
  Ref1 = ReceiveFun(?LINE, [0, 1]), %% max batch size
  _Ref = ReceiveFun(?LINE, [2, 3]), %% max onwire is 2
  Buf5 = NackFun(Buf4, Ref1), %% re-queue all
  Buf6 = MaybeSend(Buf5), %% as if a scheduled retry
  Ref3 = ReceiveFun(?LINE, [0, 1]), %% receive a max batch
  Ref4 = ReceiveFun(?LINE, [2, 3]), %% another max batch (max onwire is 2)
  Buf7 = AckFun(Buf6, Ref3),
  Buf8 = AckFun(Buf7, Ref4),
  ?assert(brod_producer_buffer:is_empty(Buf8)).
%% A send fun that always fails should first yield {retry, Buf}, and once
%% MaxRetry (= 1) is exhausted the next attempt exits with
%% reached_max_retries carrying the send fun's error reason.
t_send_fun_error(Config) when is_list(Config) ->
  SendFun =
    fun(_SockPid, _Batch, _Vsn) ->
        {error, "the reason"}
    end,
  Buf0 = brod_producer_buffer:new(_BufferLimit = 1,
                                  _OnWireLimit = 1,
                                  _MaxBatchSize = 10000,
                                  _MaxRetry = 1,
                                  _MaxLingerTime = 0,
                                  _MaxLingerCount = 0,
                                  SendFun),
  AddFun =
    fun(BufIn, Num) ->
        BufCb = make_buf_cb(Num),
        Bin = list_to_binary(integer_to_list(Num)),
        Batch = [#{key => Bin, value => Bin}],
        brod_producer_buffer:add(BufIn, BufCb, Batch)
    end,
  MaybeSend =
    fun(BufIn) ->
        {retry, BufOut} = brod_producer_buffer:maybe_send(BufIn, self(), 0),
        BufOut
    end,
  Buf1 = AddFun(AddFun(Buf0, 0), 1),
  Buf2 = MaybeSend(Buf1),
  ?assertException(exit, {reached_max_retries, "the reason"},
                   MaybeSend(Buf2)).

%%%_* Help functions ===========================================================

%% Upper bound (ms) for the per-message latency generated below; also used
%% as the drain timeout in the property run.
-define(MAX_DELAY, 4).

%% PropEr generators for the buffer's construction parameters and inputs.
prop_buffer_limit() -> proper_types:pos_integer().

prop_onwire_limit() -> proper_types:pos_integer().

prop_msgset_bytes() -> proper_types:pos_integer().

prop_linger_time() -> proper_types:integer(0, 10).

prop_linger_count() -> proper_types:integer(0, 100).

prop_value_list() -> proper_types:list(proper_types:binary()).

%% latency in milliseconds for fake kafka to process a key-value pair
prop_latency_ms() -> proper_types:range(0, ?MAX_DELAY).

%% pre-generate the latency together with the binary value.
prop_value_with_processing_latency_list() ->
  proper_types:list({prop_latency_ms(), proper_types:binary()}).

%% Property: with a send fun that returns ok (no ack ever expected),
%% every added message is reported buffered and acked immediately and the
%% buffer ends up empty.
prop_no_ack_run() ->
  SendFun = fun(_SockPid, _Batch, _Vsn) -> ok end,
  ?FORALL(
     {BufferLimit, OnWireLimit, MsgSetBytes, ValueList},
     {prop_buffer_limit(), prop_onwire_limit(),
      prop_msgset_bytes(), prop_value_list()},
     begin
       Buf = brod_producer_buffer:new(BufferLimit, OnWireLimit,
                                      MsgSetBytes, _MaxRetries = 0,
                                      _MaxLingerTime = 0, _MaxLingerCount = 0,
                                      SendFun),
       KeyList = lists:seq(1, length(ValueList)),
       KvList = lists:zip(KeyList, ValueList),
       no_ack_produce(Buf, KvList)
     end).
prop_random_latency_ack_run() -> SendFun0 = fun(FakeKafka, Batch, _Vsn) -> %% use reference as correlation to simplify test Ref = make_ref(), %% send the message to fake kafka %% the pre-generated latency values are in KvList %% fake kafka should receive the KvList, sleep a while %% and reply ack FakeKafka ! {produce, self(), Ref, Batch}, {ok, Ref} end, ?FORALL( {BufferLimit, OnWireLimit, MsgSetBytes, MaxLingerTime, MaxLingerCount, ValueList}, {prop_buffer_limit(), prop_onwire_limit(), prop_msgset_bytes(), prop_linger_time(), prop_linger_count(), prop_value_with_processing_latency_list()}, begin KeyList = lists:seq(1, length(ValueList)), KvList = lists:zip(KeyList, ValueList), Batch = lists:map(fun({K, {Delay, V}}) -> #{key => integer_to_binary(K), value => V, delay => Delay} end, KvList), FakeKafka = spawn_fake_kafka(), SendFun = fun(_SockPid, BatchX, Vsn) -> SendFun0(FakeKafka, BatchX, Vsn) end, Buf = brod_producer_buffer:new(BufferLimit, OnWireLimit, MsgSetBytes, _MaxRetries = 0, MaxLingerTime, MaxLingerCount, SendFun), random_latency_ack_produce(FakeKafka, Buf, Batch) end). no_ack_produce(Buf, []) -> brod_producer_buffer:is_empty(Buf) orelse erlang:error({buffer_not_empty, Buf}); no_ack_produce(Buf, [{Key, Value} | Rest]) -> BufCb = make_buf_cb(Key), BinKey = list_to_binary(integer_to_list(Key)), Batch = [#{key => BinKey, value => Value}], Buf1 = brod_producer_buffer:add(Buf, BufCb, Batch), FakeSockPid = self(), {ok, NewBuf} = brod_producer_buffer:maybe_send(Buf1, FakeSockPid, 0), %% in case of no ack required, expect 'buffered' immediately receive {?buffered, Key} -> ok after 100 -> erlang:error({timeout, brod_produce_req_buffered, Key}) end, %% in case of no ack required, expect 'acked' immediately receive {?acked, Key} -> ok after 100 -> erlang:error({timeout, brod_produce_req_acked, Key}) end, no_ack_produce(NewBuf, Rest). 
random_latency_ack_produce(FakeKafka, Buf, Batch) -> State0 = #state{buf = Buf, buffered = [], acked = []}, #state{buffered = Buffered, acked = Acked} = produce_loop(FakeKafka, Batch, State0), N = length(Batch), ok = assert_reply_sequence(Buffered, N), ok = assert_reply_sequence(Acked, N), ok = stop_fake_kafka(FakeKafka), true. produce_loop(FakeKafka, [], #state{buf = Buf} = State) -> case brod_producer_buffer:is_empty(Buf) of true -> State; false -> NewState = collect_replies(State, ?MAX_DELAY), produce_loop(FakeKafka, [], NewState) end; produce_loop(FakeKafka, [#{key := Key} = Msg | Rest], State0) -> #state{buf = Buf0} = State0, BufCb = make_buf_cb(binary_to_integer(Key)), Buf1 = brod_producer_buffer:add(Buf0, BufCb, [Msg]), State1 = State0#state{buf = Buf1}, State2 = maybe_send(State1), State = collect_replies(State2, _Delay = 0), produce_loop(FakeKafka, Rest, State). collect_replies(#state{ buffered = Buffered , acked = Acked , buf = Buf0 , delay_ref = DelayRef } = State0, Timeout) -> receive {delayed_send, Ref} when is_tuple(DelayRef) andalso Ref =:= element(2, DelayRef) -> State = maybe_send(State0#state{delay_ref = ?undef}), collect_replies(State, Timeout); {delayed_send, _} -> %% stale message collect_replies(State0, Timeout); {?buffered, Key} -> State = State0#state{buffered = [Key | Buffered]}, collect_replies(State, Timeout); {ack_from_kafka, Ref} -> Buf1 = brod_producer_buffer:ack(Buf0, Ref), State1 = State0#state{buf = Buf1}, State = maybe_send(State1), collect_replies(State, Timeout); {?acked, Key} -> State = State0#state{acked = [Key | Acked]}, collect_replies(State, Timeout); Msg -> erlang:error({unexpected, Msg}) after Timeout -> State0 end. 
maybe_send(#state{buf = Buf0, delay_ref = DelayRef} = State) -> SendTo = self(), _ = cancel_delay_send_timer(DelayRef), case brod_producer_buffer:maybe_send(Buf0, SendTo, 0) of {ok, Buf} -> State#state{buf = Buf}; {{delay, Timeout}, Buf} -> NewDelayRef = start_delay_send_timer(Timeout), State#state{buf = Buf, delay_ref = NewDelayRef} end. %% Start delay send timer. start_delay_send_timer(Timeout) -> MsgRef = make_ref(), TRef = erlang:send_after(Timeout, self(), {delayed_send, MsgRef}), {TRef, MsgRef}. %% Ensure delay send timer is canceled. %% But not flushing the possibly already sent (stale) message %% Stale message should be discarded in handle_info cancel_delay_send_timer(?undef) -> ok; cancel_delay_send_timer({Tref, _Msg}) -> _ = erlang:cancel_timer(Tref). %% reply collection was accumulated in reversed order. assert_reply_sequence([], 0) -> ok; assert_reply_sequence([N | Rest], N) -> assert_reply_sequence(Rest, N-1). spawn_fake_kafka() -> erlang:spawn_link(fun() -> fake_kafka_loop() end). stop_fake_kafka(FakeKafka) when is_pid(FakeKafka) -> MRef = monitor(process, FakeKafka), FakeKafka ! stop, receive {'DOWN', MRef, process, FakeKafka, _} -> ok after 1000 -> exit(FakeKafka, kill), erlang:error(timeout) end. fake_kafka_loop() -> receive {produce, FromPid, Ref, Batch} -> ok = fake_kafka_process_msgs(Batch), FromPid ! {ack_from_kafka, Ref}, fake_kafka_loop(); stop -> exit(normal); Msg -> exit({fake_kafka, unexpected, Msg}) end. fake_kafka_process_msgs([]) -> ok; fake_kafka_process_msgs([#{delay := DelayMs} | Rest]) -> timer:sleep(DelayMs), fake_kafka_process_msgs(Rest). make_buf_cb(Ref) -> Pid = self(), fun(?buffered) -> erlang:send(Pid, {?buffered, Ref}); ({?acked, _BaseOffset}) -> erlang:send(Pid, {?acked, Ref}) end. %%%_* Emacs ==================================================================== %%% Local Variables: %%% allout-layout: t %%% erlang-indent-level: 2 %%% End:
<?php

namespace App\Http\Controllers\admin;

use App\Http\Controllers\Controller;
use App\Models\User;
use App\Models\Wilayah;
use Illuminate\Http\Request;
use Yajra\DataTables\Facades\DataTables;

/**
 * Admin-side customer ("pelanggan") management: listing page,
 * DataTables JSON feed, and the add-customer form.
 */
class PelangganAdminController extends Controller
{
    // Renders the customer list page.
    function main()
    {
        $data['title'] = 'Data Pelanggan &mdash; ' . config('app.name');
        return view('admin.pelanggan-main', $data);
    }

    // Serves the DataTables JSON feed of all non-admin users, adding a
    // computed "alamat" (full address) column per row.
    function datatable(Request $request)
    {
        // Hoisted out of the row closure: one Wilayah helper serves every
        // row instead of constructing a fresh instance per record.
        $wilayah = new Wilayah();

        return DataTables::of(User::where('type', '!=', 'admin')->get())
            ->addColumn('alamat', function ($user) use ($wilayah) {
                $alamat    = ($user->alamat ? $user->alamat . ', ' : '');
                $kelurahan = $wilayah->getKelurahan($user->kecamatan, $user->kelurahan)['nama'];
                $kecamatan = $wilayah->getKecamatan($user->kabupaten, $user->kecamatan)['nama'];
                $kabupaten = $wilayah->getKabupaten($user->provinsi, $user->kabupaten)['nama'];
                $provinsi  = $wilayah->getProvinsi($user->provinsi)['nama'];
                return $alamat . $kelurahan . ', ' . $kecamatan . ', ' . $kabupaten . ', ' . $provinsi;
            })->toJSON();
    }

    // Renders the add-customer form page.
    function add()
    {
        $data['title'] = 'Tambah Pelanggan &mdash; ' . config('app.name');
        return view('admin.pelanggan-add', $data);
    }

    // TODO(review): stub — should persist the new customer submitted by
    // the admin.pelanggan-add form.
    function addProcess(Request $request) {}
}
import { computed, reactive } from 'vue';
import { useQuery } from 'vue-query';
import { QueryObserverOptions } from 'react-query/core';

import useTokens from '@/composables/useTokens';
import { useStore } from 'vuex';
import { pick } from 'lodash';

import QUERY_KEYS from '@/constants/queryKeys';
import BalancerContracts from '@/services/balancer/contracts/service';
import BalancerSubgraph from '@/services/balancer/subgraph/service';
import { DecoratedPool, FullPool } from '@/services/balancer/subgraph/types';
import { POOLS } from '@/constants/pools';

/**
 * Vue Query composable that loads a single pool by id: subgraph data
 * decorated with 24h stats, merged with on-chain vault data.
 *
 * The query is disabled while the app is still loading; extra
 * QueryObserverOptions can be passed through via `options`.
 */
export default function usePoolQuery(
  id: string,
  options: QueryObserverOptions<FullPool> = {}
) {
  // COMPOSABLES
  const store = useStore();
  const { tokens: allTokens } = useTokens();

  // SERVICES
  const balancerSubgraph = new BalancerSubgraph();
  const balancerContracts = new BalancerContracts();

  // DATA
  const queryKey = QUERY_KEYS.Pools.Current(id);

  // COMPUTED
  const appLoading = computed(() => store.state.app.loading);
  const prices = computed(() => store.state.market.prices);
  // Only fetch once global app bootstrapping has finished.
  const isQueryEnabled = computed(() => !appLoading.value);

  // True when every token of the pool (and the pool's own address) is
  // already present in the token registry.
  function tokensInjected(pool: DecoratedPool): boolean {
    if (!allTokens.value) return false;
    const allAddresses = Object.keys(allTokens.value);
    return [...pool.tokenAddresses, pool.address].every(address =>
      allAddresses.includes(address)
    );
  }

  // METHODS
  const queryFn = async () => {
    // Fetch the decorated pool from the subgraph; id is lowercased to
    // match subgraph address formatting.
    const [pool] = await balancerSubgraph.pools.getDecorated(
      '24h',
      prices.value,
      {
        where: {
          id: id.toLowerCase(),
          totalShares_gt: -1 // Avoid the filtering for low liquidity pools
        }
      }
    );

    // Stable pools must be explicitly allow-listed.
    if (pool.poolType === 'Stable' && !POOLS.Stable.AllowList.includes(id)) {
      throw new Error('Pool not allowed');
    }

    // Make sure all pool tokens are registered before reading metadata.
    if (!tokensInjected(pool)) {
      await store.dispatch('registry/injectTokens', [
        ...pool.tokenAddresses,
        pool.address
      ]);
    }

    // Merge subgraph data with live on-chain vault data.
    const tokens = pick(allTokens.value, pool.tokenAddresses);
    const onchainData = await balancerContracts.vault.getPoolData(
      id,
      pool.poolType,
      tokens
    );

    return { ...pool, onchain: onchainData };
  };

  const queryOptions = reactive({
    enabled: isQueryEnabled,
    ...options
  });

  return useQuery<FullPool>(queryKey, queryFn, queryOptions);
}
use std::fs::{remove_file, File, OpenOptions}; use std::io::prelude::*; use std::ops::Deref; use std::os::unix::fs::OpenOptionsExt; use std::path::{Component, Path}; use std::process::Command; use std::sync::Arc; use chrono::{DateTime, Local}; use chrono_tz::Tz; use eui48::MacAddress; use serde::{Deserialize, Serialize}; use tonic::{Request, Response, Status}; use validator::Validate; use super::super::super::super::{ auth::services::Session, crypto::Aes, jwt::Jwt, ntp::Response as NtpResponse, orm::sqlite::{Connection as Db, Pool as DbPool}, sys::network::{ ip4 as get_ip4, is_on, mac as get_mac, systemd::{Dhcp, Ip, Static, Wifi, Wpa}, }, GrpcResult, Result, }; use super::super::{ models::settings::Dao as SettingDao, v1::{ network_profile, os_server::Os, status_response, DnsRequest, LinesResponse, LogsRequest, NetworkProfile, NtpProfile, PingRequest, RestoreRequest, StatusResponse, VpnProfile, }, }; use super::user::CurrentUser; pub struct Service { pub db: DbPool, pub jwt: Arc<Jwt>, pub aes: Arc<Aes>, } #[tonic::async_trait] impl Os for Service { async fn logs(&self, req: Request<LogsRequest>) -> GrpcResult<LinesResponse> { current_pi_user!(self, &req); let req = req.into_inner(); let output = try_grpc!( Command::new("journalctl") .arg("-u") .arg(&req.name) .arg("-b") .output(), Status::invalid_argument )?; if !output.status.success() { return Err(Status::internal(format!( "{:#?} {}", output.status, try_grpc!(String::from_utf8(output.stderr))? 
))); } let out = try_grpc!(String::from_utf8(output.stdout))?; let lines: Vec<&str> = out.split('\n').collect(); Ok(Response::new(LinesResponse { messages: lines.iter().map(|x| x.to_string()).collect(), })) } async fn status(&self, req: Request<()>) -> GrpcResult<StatusResponse> { current_pi_user!(self, &req); let si = try_grpc!(nix::sys::sysinfo::sysinfo())?; let un = nix::sys::utsname::uname(); let load = si.load_average(); Ok(Response::new(StatusResponse { uptime: Some(si.uptime().into()), uname: Some(status_response::Uname { sys: un.sysname().to_string(), node: un.nodename().to_string(), machine: un.machine().to_string(), release: un.release().to_string(), version: un.version().to_string(), }), process: si.process_count() as u32, load: Some(status_response::Load { one: load.0, five: load.1, fifteen: load.2, }), swap: Some(status_response::Range { total: si.swap_total(), free: si.swap_free(), }), ram: Some(status_response::Range { total: si.ram_total(), free: si.ram_total(), }), versions: Vec::new(), })) } async fn reboot(&self, req: Request<()>) -> GrpcResult<()> { current_pi_user!(self, &req); try_grpc!(super::super::super::super::sys::reboot())?; Ok(Response::new(())) } async fn reset(&self, req: Request<()>) -> GrpcResult<()> { current_pi_user!(self, &req); // TODO Ok(Response::new(())) } async fn dump(&self, req: Request<()>) -> GrpcResult<()> { current_pi_user!(self, &req); // TODO Ok(Response::new(())) } async fn restore(&self, req: Request<RestoreRequest>) -> GrpcResult<()> { current_pi_user!(self, &req); // TODO Ok(Response::new(())) } async fn ping(&self, req: Request<PingRequest>) -> GrpcResult<LinesResponse> { current_pi_user!(self, &req); let req = req.into_inner(); let form = Ping { host: req.host }; let out = try_grpc!(form.execute())?; let lines: Vec<&str> = out.split('\n').collect(); Ok(Response::new(LinesResponse { messages: lines.iter().map(|x| x.to_string()).collect(), })) } async fn dns(&self, req: Request<DnsRequest>) -> 
GrpcResult<LinesResponse> { current_pi_user!(self, &req); let req = req.into_inner(); let form = Dns { server: req.server.clone(), host: req.host, }; let out = try_grpc!(form.execute())?; let lines: Vec<&str> = out.split('\n').collect(); Ok(Response::new(LinesResponse { messages: lines.iter().map(|x| x.to_string()).collect(), })) } async fn get_network(&self, req: Request<()>) -> GrpcResult<NetworkProfile> { current_pi_user!(self, &req); let db = try_grpc!(self.db.get())?; let db = db.deref(); let aes = self.aes.deref(); let form: Network = SettingDao::get(db, aes, Network::KEY).unwrap_or_default(); Ok(Response::new(NetworkProfile { eth: Some(network_profile::Eth { name: form.eth.name.clone(), ip: Some(match form.eth.ip { Ip::Dhcp => network_profile::eth::Ip::Dhcp(true), Ip::Static { address, netmask, gateway, dns1, dns2, } => network_profile::eth::Ip::Static(network_profile::Static { address, netmask, gateway, dns1, dns2, }), }), }), wlan: Some(network_profile::Wlan { name: form.wlan.name.clone(), wifi: form.wlan.wifi.map(|wifi| match wifi { Wifi::Open { ssid } => { network_profile::wlan::Wifi::Open(network_profile::Open { ssid }) } Wifi::Psk { ssid, password } => { network_profile::wlan::Wifi::Psk(network_profile::Psk { ssid, password }) } Wifi::Eap { ssid, identity, password, } => network_profile::wlan::Wifi::Eap(network_profile::Eap { ssid, identity, password, }), }), ip: Some(match form.wlan.ip { Ip::Dhcp => network_profile::wlan::Ip::Dhcp(true), Ip::Static { address, netmask, gateway, dns1, dns2, } => network_profile::wlan::Ip::Static(network_profile::Static { address, netmask, gateway, dns1, dns2, }), }), }), })) } async fn set_network(&self, req: Request<NetworkProfile>) -> GrpcResult<()> { current_pi_user!(self, &req); // TODO Ok(Response::new(())) } async fn get_ntp(&self, req: Request<()>) -> GrpcResult<NtpProfile> { current_pi_user!(self, &req); let db = try_grpc!(self.db.get())?; let db = db.deref(); let aes = self.aes.deref(); let it: Ntp = 
SettingDao::get(db, aes, Ntp::KEY).unwrap_or_default(); Ok(Response::new(NtpProfile { enable: it.enable, timezone: it.timezone.name().to_string(), servers: it.servers, heartbeat: it.heartbeat as u64, })) } async fn set_ntp(&self, req: Request<NtpProfile>) -> GrpcResult<()> { current_pi_user!(self, &req); let db = try_grpc!(self.db.get())?; let db = db.deref(); let req = req.into_inner(); let form = Ntp { enable: req.enable, timezone: req.timezone.parse().map_err(Status::invalid_argument)?, servers: req.servers, heartbeat: req.heartbeat as usize, }; try_grpc!(form.save(db, &self.aes))?; Ok(Response::new(())) } async fn get_vpn(&self, req: Request<()>) -> GrpcResult<VpnProfile> { current_pi_user!(self, &req); let db = try_grpc!(self.db.get())?; let db = db.deref(); let aes = self.aes.deref(); let it: Vpn = SettingDao::get(db, aes, Vpn::KEY).unwrap_or_default(); Ok(Response::new(VpnProfile { enable: it.enable, body: it.body, })) } async fn set_vpn(&self, req: Request<VpnProfile>) -> GrpcResult<()> { current_pi_user!(self, &req); let db = try_grpc!(self.db.get())?; let db = db.deref(); let req = req.into_inner(); let form = Vpn { enable: req.enable, body: req.body, }; let aes = self.aes.deref(); try_grpc!(form.save(db, aes))?; Ok(Response::new(())) } } #[derive(Validate)] pub struct Ping { #[validate(length(min = 1))] pub host: String, } impl Ping { pub fn execute(&self) -> Result<String> { self.validate()?; let out = Command::new("ping") .arg("-W") .arg("2") .arg("-c") .arg("4") .arg(&self.host) .output()?; debug!("{:?}", out); Ok(String::from_utf8(out.stdout)?) 
} } #[derive(Validate)] pub struct Dns { #[validate(length(min = 1))] pub host: String, #[validate(length(min = 1))] pub server: Option<String>, } impl Dns { pub fn execute(&self) -> Result<String> { self.validate()?; let out = match self.server { Some(ref it) => Command::new("dig") .arg(&format!("@{}", it)) .arg(&self.host) .output(), None => Command::new("dig").arg(&self.host).output(), }?; debug!("{:?}", out); Ok(String::from_utf8(out.stdout)?) } } #[derive(Serialize, Deserialize, Validate, Debug)] #[serde(rename_all = "camelCase")] pub struct Ntp { pub timezone: Tz, #[validate(length(min = 1))] pub servers: Vec<String>, #[validate(range(min = 5))] pub heartbeat: usize, pub enable: bool, } impl Default for Ntp { fn default() -> Self { Self { timezone: Tz::UTC, servers: vec!["0.us.pool.ntp.org".to_string()], heartbeat: 60 * 60 * 24, enable: false, } } } impl Ntp { pub fn timesyncd(&self) -> String { format!( r#" [Time] NTP={servers} FallbackNTP=0.pool.ntp.org 1.pool.ntp.org 2.pool.ntp.org 3.pool.ntp.org "#, servers = self.servers.join(" ") ) } pub fn crontab(&self) -> String { format!( r#" #!/bin/bash for i in {servers} do ntpdate $i && break done "#, servers = self.servers.join(" ") ) } pub fn test(&self) -> Result<Vec<DateTime<Local>>> { let mut items = Vec::new(); for it in self.servers.iter() { let now: DateTime<Local> = NtpResponse::fetch(it, None)?.into(); items.push(now); } Ok(items) } pub fn ping(&self) -> Option<DateTime<Local>> { for it in self.servers.iter() { if let Ok(it) = NtpResponse::fetch(it, None) { return Some(it.into()); } } None } pub const KEY: &'static str = "ntp.client"; pub fn save(&self, db: &Db, aes: &Aes) -> Result<()> { self.validate()?; self.test()?; debug!("save ntp server {:?}", self); let file = Path::new(&Component::RootDir) .join("etc") .join("systemd") .join("timesyncd.conf"); if self.enable { let mut fd = File::create(&file)?; write!(&mut fd, "{}", self.timesyncd())?; } else if file.exists() { remove_file(&file)?; } 
SettingDao::set(db, aes, Self::KEY, self, true)?; Ok(()) } } #[derive(Serialize, Deserialize, Validate, Default)] #[serde(rename_all = "camelCase")] pub struct Vpn { pub enable: bool, #[validate(length(min = 1))] pub body: String, } impl Vpn { pub const KEY: &'static str = "openvpn.client"; pub fn save(&self, db: &Db, aes: &Aes) -> Result<()> { self.validate()?; let file = Path::new(&Component::RootDir) .join("etc") .join("openvpn") .join("client.conf"); if self.enable { info!("generate file {}", file.display()); let mut fd = OpenOptions::new() .write(true) .create(true) .truncate(true) .mode(0o600) .open(file)?; fd.write_all(self.body.as_bytes())?; } else if file.exists() { info!("delete file {}", file.display()); remove_file(file)?; } SettingDao::set(db, aes, Self::KEY, self, true)?; Ok(()) } } #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] pub struct Wlan { pub name: String, pub wifi: Option<Wifi>, pub ip: Ip, } #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] pub struct Eth { pub name: String, pub ip: Ip, } #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] pub struct Network { pub eth: Eth, pub wlan: Wlan, } impl Default for Network { fn default() -> Self { Self { eth: Eth { name: Self::ETH.to_string(), ip: Ip::default(), }, wlan: Wlan { name: Self::WLAN.to_string(), ip: Ip::default(), wifi: None, }, } } } impl Network { pub const KEY: &'static str = "systemd.network"; pub const ETH: &'static str = "eth0"; pub const WLAN: &'static str = "wlan0"; #[cfg(debug_assertions)] pub fn mac(&self) -> Result<MacAddress> { get_mac("wlp3s0") } #[cfg(not(debug_assertions))] pub fn mac(&self) -> Result<MacAddress> { get_mac(Self::ETH) } pub fn is_on(&self) -> bool { (is_on(&self.eth.name) && get_ip4(&self.eth.name).is_some()) || (is_on(&self.wlan.name) && get_ip4(&self.wlan.name).is_some()) } pub fn save(&self, vendor: &str, db: &Db, aes: &Aes) -> Result<()> { debug!("save network interfaces 
{:?}", self); { let metric = 50; match self.eth.ip { Ip::Static { ref address, ref netmask, ref gateway, ref dns1, ref dns2, } => Static::new( &self.eth.name, metric, address, netmask, gateway, dns1, dns2.as_deref(), )? .save(vendor)?, Ip::Dhcp => Dhcp { name: self.eth.name.clone(), metric, options: vec![Dhcp::WWW], } .save(vendor)?, }; } { let metric = 200; match self.wlan.wifi { Some(ref it) => { it.save(&self.wlan.name)?; Dhcp { name: self.wlan.name.clone(), metric, options: vec![Dhcp::WWW], } .save(vendor)?; Wpa.save(&self.wlan.name)?; } None => { Wifi::remove(&self.wlan.name)?; } } } SettingDao::set(db, aes, Self::KEY, self, true)?; Ok(()) } }
require 'spec_helper'
require 'pathname'
require 'active_support/core_ext/object/blank'

describe UniqueHtmlExtractonator do
  it 'has a version number' do
    expect(UniqueHtmlExtractonator::VERSION).not_to be nil
  end

  # Root of the spec directory, used to resolve fixture paths.
  let(:root_path) { Pathname.new File.realpath('.', File.dirname(__FILE__)) }

  # Reads a fixture file from spec/fixtures verbatim.
  def fixture_read(file)
    File.open(root_path.join("fixtures/#{file}")).read
  end

  describe 'html extraction' do
    let(:reference_html) { fixture_read 'common1.html' }
    # The context description doubles as the fixture filename under test.
    let(:html) { fixture_read self.class.metadata[:description] }
    let(:extractor) { UniqueHtmlExtractonator::Extractor.new(reference_html: reference_html, html: html) }
    subject { extractor.extract }

    context 'common1.html' do
      # NOTE(review): reference is overridden to common3.html here, so
      # common1 is diffed against common3 (not against itself) — confirm
      # this asymmetry is intentional.
      let(:reference_html) { fixture_read 'common3.html' }
      it 'should be parsed' do
        is_expected.to eq(fixture_read 'common1.extracted.html')
      end
    end

    context 'common2.html' do
      it 'should be parsed' do
        is_expected.to eq(fixture_read 'common2.extracted.html')
      end
    end

    context 'common3.html' do
      it 'should be parsed' do
        is_expected.to eq(fixture_read 'common3.extracted.html')
      end
    end
  end
end
import React from "react";
import { Button } from "reactstrap";

import "./post-status-filter.css";

// Filter buttons for the post list: "Все" (all) and "Понравилось" (liked).
// Both buttons now go through reactstrap's Button for consistency; the
// previous raw <button className="btn btn-outline-secondary"> and the dead
// commented-out markup are gone. `outline color="secondary"` renders the
// same `btn btn-outline-secondary` classes as before.
const PostStatusFilter = () => {
  return (
    <div className="btn-group">
      <Button color="info">Все</Button>
      <Button outline color="secondary">
        Понравилось
      </Button>
    </div>
  );
};

export default PostStatusFilter;
/* Style Changer */
jQuery(document).ready(function(){

	// Collapse the changer panel: slide it off-canvas, then hide its body
	// and flag the toggle button as closed. Shared by the auto-hide timer
	// and the toggle handler (this logic was duplicated inline before).
	function closePanel(panel){
		panel.animate({left:'-180px'}, 500, function(){
			jQuery(this).find('.chBut').next('.chBody').css({display:'none'});
			jQuery(this).find('.chBut').addClass('closed');
		});
	}

	/* Style Changer Autohide — .delay() queues the slide, so the panel
	   stays visible for 1s after page load before collapsing. */
	closePanel(jQuery('.chBut').parent().delay(1000));

	/* Style Changer Toggle */
	jQuery('.chBut').click(function(){
		if (jQuery(this).hasClass('closed')){
			// Re-show the body first so the slide-in reveals its content.
			jQuery(this).next('.chBody').css({display:'block'}).parent().animate({left:0}, 500, function(){
				jQuery(this).find('.chBut').removeClass('closed');
			});
		} else {
			closePanel(jQuery(this).parent());
		}
		return false;
	});

	/* Window Resize Function — switch to absolute positioning on short
	   viewports so the fixed panel cannot overflow the screen. */
	jQuery(window).resize(function(){
		if (jQuery(window).height() < 750){
			jQuery('#stlChanger').css({position:'absolute'});
		} else {
			jQuery('#stlChanger').css({position:'fixed'});
		}
	});
});
package com.onegravity.bloc.posts_compose

import androidx.compose.foundation.lazy.LazyColumn
import androidx.compose.foundation.lazy.items
import androidx.compose.material.Divider
import androidx.compose.runtime.Composable
import androidx.compose.ui.Modifier
import com.onegravity.bloc.sample.posts.domain.repositories.Post

/**
 * Renders a scrollable list of posts, one [PostItem] per entry with a
 * divider between items.
 *
 * @param posts        the posts to display
 * @param selectedPost id of the currently selected post, or null if none;
 *                     the matching item is rendered as selected
 * @param modifier     layout modifier applied to the LazyColumn
 * @param onClicked    invoked with the post the user tapped
 */
@Composable
internal fun Posts(
    posts: List<Post>,
    selectedPost: Int?,
    modifier: Modifier = Modifier,
    onClicked: (post: Post) -> Unit
) {
    LazyColumn(modifier = modifier) {
        items(posts) { post ->
            // Highlight the item whose id matches the current selection.
            PostItem(post, post.id == selectedPost, onClicked)
            Divider()
        }
    }
}
package common import ( "github.com/mitchellh/packer/template/interpolate" ) type PrlctlConfig struct { Prlctl [][]string `mapstructure:"prlctl"` } func (c *PrlctlConfig) Prepare(ctx *interpolate.Context) []error { if c.Prlctl == nil { c.Prlctl = make([][]string, 0) } return nil }
#!/usr/bin/env bash
# Run the test suite with coverage enabled. Fails (non-zero exit) when
# total coverage drops below 80%, and writes an HTML report (htmlcov/)
# in addition to the terminal summary.

# Abort on the first error, on use of unset variables, and on failures
# anywhere in a pipeline.
set -euo pipefail

export ENV_STATE=test

# pytest -vv   # plain run without coverage, kept for quick local iteration
pytest --cov --cov-fail-under=80 -vv --cov-report html
import React from 'react';
import {Route, Redirect} from '../lib/react-router-dom';

// Guarded route: renders the given component only when the user is logged
// in (login state is read from localStorage under the 'login' key);
// otherwise redirects to /login, storing the original location in
// `state.from` so the login page can navigate back afterwards.
function Protected(props: any) {
    const {component: RouteComponent, path} = props
    return (
        // TODO: if the user is logged in, render the Component; if not
        // logged in, do not render it (redirect instead).
        <div>
            <Route path={path} render={
                (routeProps: any) => {
                    return localStorage.getItem('login') ?
                        <RouteComponent {...routeProps} /> :
                        <Redirect to={{pathname: '/login', state: {from: routeProps.location.pathname}}} />
                }
            }
            />
        </div>
    )
}

export default Protected
using BanBrick.TypeScript.CodeGenerator.Convertors; using BanBrick.TypeScript.CodeGenerator.Enums; using BanBrick.TypeScript.CodeGenerator.Extensions; using BanBrick.TypeScript.CodeGenerator.Models; using System; using System.Collections.Generic; using System.Linq; using System.Text; namespace BanBrick.TypeScript.CodeGenerator.Resolvers { internal class ConfigResolver { public IEnumerable<TypeDefinition> Resolve(IEnumerable<TypeDefinition> definitions) { var unprocessedDefinitions = definitions.Where(x => x.IsFirstLevel) .Select(x => { x.ProcessConfig = ConfigConvertor.GetProcessConfig(x.Type); return x; }) .OrderBy(x => x.ProcessConfig.OutputType) .ToList(); var processedDictionary = definitions.ToDictionary(x => x.Type, x => x); while (unprocessedDefinitions.Any()) { var processingDefinition = unprocessedDefinitions.First(); unprocessedDefinitions.RemoveAt(0); processingDefinition.ProcessConfig = processingDefinition.ProcessConfig ?? ConfigConvertor.GetProcessConfig(processingDefinition.Type); processedDictionary[processingDefinition.Type] = processingDefinition; var innerTypes = new List<Type>(); if (processingDefinition.ProcessingCategory == ProcessingCategory.Collection) { innerTypes.Add(processingDefinition.Type.GetCollectionType()); } if (processingDefinition.ProcessingCategory == ProcessingCategory.Dictionary) { var dicTypes = processingDefinition.Type.GetDictionaryTypes(); innerTypes.Add(dicTypes.key); innerTypes.Add(dicTypes.value); } if (processingDefinition.ProcessingCategory == ProcessingCategory.Generic) { innerTypes.AddRange(processingDefinition.Type.GetGenericArguments()); } if (processingDefinition.ProcessingCategory == ProcessingCategory.Object) { innerTypes.AddRange(processingDefinition.Properties.Select(x => x.Type)); } foreach (var innerType in innerTypes) { var propertyDefinition = processedDictionary[innerType]; // ignore property definition that's the same as processing definition if (propertyDefinition == processingDefinition) continue; if 
(propertyDefinition.ProcessConfig == null) { if (processingDefinition.ProcessConfig?.Inherit ?? false) { propertyDefinition.ProcessConfig = new ProcessConfig() { OutputType = processingDefinition.ProcessConfig.OutputType, Inherit = true }; } if (processingDefinition.ProcessConfig?.OutputType == OutputType.Const) { propertyDefinition.ProcessConfig = new ProcessConfig() { OutputType = OutputType.None, Inherit = true }; } } unprocessedDefinitions.Add(propertyDefinition); } } var processedDefinitions = processedDictionary.Select(x => x.Value); // remove config from inherited non heritable types processedDefinitions.Where(x => !x.IsInheritable() && (x.ProcessConfig?.Inherit ?? false)).ToList() .ForEach(x => x.ProcessConfig = null ); processedDefinitions.Where(x => x.ProcessConfig == null).ToList().ForEach(x => { x.ProcessConfig = ConfigConvertor.GetProcessConfig(x.ProcessingCategory); }); processedDefinitions.Where(x => x.ProcessConfig.OutputType == OutputType.Default).ToList().ForEach(x => { x.ProcessConfig.OutputType = ConfigConvertor.Parse(x.ProcessingCategory); }); return processedDefinitions; } } }
import {LanguageId, NewLanguageInput, EditLanguageInput} from '../../graphql'; import FieldSet from '../field-set'; import {DescriptionMut} from '../description'; import {SearchIndexMut} from '../search-index'; import {DefinitionMut} from '../definition'; import {LemmaMut} from '../lemma'; import {PartOfSpeechMut} from '../part-of-speech'; import {TagMut} from '../tag'; import {Language} from './model'; import {LanguageRow} from './types'; import {validateName} from './validators'; import {MutContext} from '../types'; const LanguageMut = { insert(context: MutContext, data: NewLanguageInput): Promise<LanguageRow> { const {name, description} = data; const validName = validateName(context.db, null, name); return MutContext.transact(context, context => { const {db, events, logger} = context; const desc = DescriptionMut.insert(db, description || []); const now = Date.now(); const {insertId: languageId} = db.exec<LanguageId>` insert into languages (name, description_id, time_created, time_updated) values (${validName}, ${desc.id}, ${now}, ${now}) `; SearchIndexMut.insertLanguage(db, languageId, validName); events.emit({type: 'language', action: 'create', id: languageId}); logger.verbose(`Created language: ${languageId}`); return Language.byIdRequired(db, languageId); }); }, async update( context: MutContext, id: LanguageId, data: EditLanguageInput ): Promise<LanguageRow> { const {db} = context; const {name, description} = data; const language = await Language.byIdRequired(db, id); const newFields = new FieldSet<LanguageRow>(); if (name != null) { newFields.set('name', validateName(db, language.id, name)); } if (newFields.hasValues || description) { await MutContext.transact(context, context => { const {db, events, logger} = context; newFields.set('time_updated', Date.now()); db.exec` update languages set ${newFields} where id = ${language.id} `; const newName = newFields.get('name'); if (newName != null) { SearchIndexMut.updateLanguage(db, language.id, newName); } if 
(description) { DescriptionMut.update(db, language.description_id, description); } events.emit({type: 'language', action: 'update', id: language.id}); logger.verbose(`Updated language: ${language.id}`); db.clearCache(Language.byIdKey, language.id); }); } return Language.byIdRequired(db, id); }, async delete(context: MutContext, id: LanguageId): Promise<boolean> { const {db} = context; const language = await Language.byId(db, id); if (!language) { return false; } await MutContext.transact(context, context => { const {db, events, logger} = context; logger.debug(`Begin deletion of language: ${language.id}`); // Definitions reference lemmas, parts of speech, inflection tables, // inflected forms... We must delete them before anything else. DefinitionMut.deleteAllInLanguage(db, language.id); logger.debug('Deleted all definitions'); LemmaMut.deleteAllInLanguage(db, language.id); logger.debug('Deleted all lemmas'); PartOfSpeechMut.deleteAllInLanguage(db, language.id); logger.debug('Deleted all parts of speech'); logger.debug(`Deleting language row: ${language.id}`); db.exec` delete from languages where id = ${language.id} `; DescriptionMut.delete(db, language.description_id); logger.debug('Deleted description'); SearchIndexMut.deleteLanguage(db, language.id); events.emit({type: 'language', action: 'delete', id: language.id}); logger.debug(`Deleting orphaned tags`); TagMut.deleteOrphaned(context); logger.debug(`Language deleted: ${language.id}`); }); return true; }, } as const; export {LanguageMut};
from flask import Flask, jsonify

app = Flask(__name__)


@app.route('/', methods=['GET'])
def hello_rest():
    """Greeting endpoint; doubles as a simple health check."""
    return jsonify({
        "greeting": "Hello REST World"
    })


@app.route('/add/<a>/<b>', methods=['GET'])
def add(a, b):
    """Add two integers taken from the URL path.

    Note: path parameters arrive as strings; ``int()`` raises ValueError
    on non-integer input (surfaced by Flask's default 500 handler).
    """
    return jsonify({
        "a": a,
        "b": b,
        "addition": int(a) + int(b),
    })


@app.route('/mul/<a>/<b>', methods=['GET'])
def prod(a, b):
    """Multiply two numbers taken from the URL path."""
    return jsonify({
        "a": a,
        "b": b,
        "product": float(a) * float(b),
    })


@app.route('/pow/<a>/<b>', methods=['GET'])
def powered(a, b):
    """Raise ``a`` to the power of ``b``."""
    return jsonify({
        "a": a,
        "b": b,
        "power": float(a) ** float(b)
    })


@app.route('/div/<a>/<b>', methods=['GET'])
def divide(a, b):
    """Floor-divide ``a`` by ``b``, returning quotient and remainder.

    Returns HTTP 400 with an error payload when ``b`` is zero, instead of
    letting ZeroDivisionError bubble up as an HTTP 500.
    """
    dividend, divisor = float(a), float(b)
    if divisor == 0:
        return jsonify({
            "a": a,
            "b": b,
            "error": "division by zero"
        }), 400
    return jsonify({
        "a": a,
        "b": b,
        "quotient": dividend // divisor,
        "remainder": dividend % divisor
    })


if __name__ == '__main__':
    app.run()
use glib::clone;
use gtk::glib;
use gtk::prelude::*;
use gtk::subclass::prelude::*;

/// Private GObject implementation details for [`CustomTag`].
pub mod imp {
    use super::*;
    use glib::subclass::Signal;
    use glib::ParamSpec;
    use once_cell::sync::Lazy;
    use std::cell::{Cell, RefCell};

    /// Instance state: a horizontal box holding the label and, optionally,
    /// a close button.
    #[derive(Debug)]
    pub struct CustomTag {
        /// Layout container; parented to the widget in `constructed`.
        pub container: gtk::Box,
        /// Close button, `Some` only while "has-close-button" is true.
        pub button: RefCell<Option<gtk::Button>>,
        label: gtk::Label,
        /// Backing store for the "has-close-button" property.
        pub has_close_button: Cell<bool>,
    }

    impl Default for CustomTag {
        fn default() -> Self {
            Self {
                container: gtk::Box::new(gtk::Orientation::Horizontal, 0),
                button: RefCell::default(),
                label: gtk::Label::new(None),
                has_close_button: Cell::new(false),
            }
        }
    }

    #[glib::object_subclass]
    impl ObjectSubclass for CustomTag {
        const NAME: &'static str = "CustomTag";
        type Type = super::CustomTag;
        type ParentType = gtk::Widget;

        fn class_init(klass: &mut Self::Class) {
            // Expose the widget to CSS under the `tag` node name.
            klass.set_css_name("tag");
        }
    }

    impl ObjectImpl for CustomTag {
        /// Declares the "label" (string) and "has-close-button" (bool)
        /// GObject properties, both read/write.
        fn properties() -> &'static [ParamSpec] {
            static PROPERTIES: Lazy<Vec<ParamSpec>> = Lazy::new(|| {
                vec![
                    ParamSpec::string(
                        "label",
                        "Label",
                        "Label",
                        Some(""),
                        glib::ParamFlags::READWRITE,
                    ),
                    ParamSpec::boolean(
                        "has-close-button",
                        "Has close button",
                        "Whether this tag has a close button",
                        false,
                        glib::ParamFlags::READWRITE,
                    ),
                ]
            });
            PROPERTIES.as_ref()
        }

        fn get_property(&self, _obj: &Self::Type, _id: usize, pspec: &ParamSpec) -> glib::Value {
            match pspec.get_name() {
                // "label" reads through to the gtk::Label child.
                "label" => self.label.get_text().to_value(),
                "has-close-button" => self.has_close_button.get().to_value(),
                _ => unimplemented!(),
            }
        }

        fn set_property(
            &self,
            tag: &Self::Type,
            _id: usize,
            value: &glib::Value,
            pspec: &ParamSpec,
        ) {
            match pspec.get_name() {
                "label" => self.label.set_text(value.get().unwrap().unwrap()),
                "has-close-button" => {
                    // Route through the public setter so the button is
                    // created/removed alongside the property change.
                    tag.set_has_close_button(value.get_some().unwrap());
                }
                _ => unimplemented!(),
            }
        }

        /// Declares the parameterless "closed" and "clicked" signals.
        fn signals() -> &'static [Signal] {
            static SIGNALS: Lazy<Vec<Signal>> = Lazy::new(|| {
                vec![
                    Signal::builder("closed", &[], <()>::static_type().into()).build(),
                    Signal::builder("clicked", &[], <()>::static_type().into()).build(),
                ]
            });
            SIGNALS.as_ref()
        }

        fn constructed(&self, tag: &Self::Type) {
            self.container.set_parent(tag);
            self.container.append(&self.label);

            // Emit "clicked" whenever a press is released on the tag.
            let gesture = gtk::GestureClick::new();
            gesture.connect_released(clone!(@weak tag => move |_gesture, _n_press, _x, _y| {
                tag.emit_by_name("clicked", &[]).unwrap();
            }));
            tag.add_controller(&gesture);
        }

        fn dispose(&self, _tag: &Self::Type) {
            // Children of a plain gtk::Widget subclass must be unparented
            // manually or GTK warns about leaked widgets.
            self.container.unparent();
        }
    }

    impl WidgetImpl for CustomTag {
        /// Size negotiation is delegated wholesale to the inner container.
        fn measure(
            &self,
            _widget: &Self::Type,
            orientation: gtk::Orientation,
            for_size: i32,
            min: &mut i32,
            nat: &mut i32,
            min_base: &mut i32,
            nat_base: &mut i32,
        ) {
            let (c_min, c_nat, c_min_base, c_nat_base) =
                self.container.measure(orientation, for_size);
            *min = c_min;
            *nat = c_nat;
            *min_base = c_min_base;
            *nat_base = c_nat_base;
        }

        fn size_allocate(&self, _widget: &Self::Type, width: i32, height: i32, baseline: i32) {
            // Give the container the widget's full allocation.
            self.container.size_allocate(
                &gtk::Allocation {
                    width,
                    height,
                    x: 0,
                    y: 0,
                },
                baseline,
            )
        }
    }
}

// Public wrapper: a labelled "tag" widget with an optional close button.
glib::wrapper! {
    pub struct CustomTag(ObjectSubclass<imp::CustomTag>) @extends gtk::Widget;
}

impl CustomTag {
    /// Creates a tag with the given label text and a close button enabled.
    pub fn new(label: &str) -> Self {
        glib::Object::new(&[("label", &label), ("has-close-button", &true)])
            .expect("Failed to create a CustomTag")
    }

    /// Adds or removes the close button. The button emits the "closed"
    /// signal on click. No-op when the state already matches.
    pub fn set_has_close_button(&self, has_close_button: bool) {
        let self_ = imp::CustomTag::from_instance(self);
        if self_.has_close_button.get() == has_close_button {
            return;
        }

        if has_close_button {
            let button = gtk::ButtonBuilder::new()
                .halign(gtk::Align::Center)
                .valign(gtk::Align::Center)
                .has_frame(false)
                .build();
            button.connect_clicked(clone!(@weak self as tag => move |_btn| {
                tag.emit_by_name("closed", &[]).unwrap();
            }));

            let icon = gtk::Image::from_icon_name(Some("window-close-symbolic"));
            button.set_child(Some(&icon));

            self_.container.append(&button);
            self_.button.replace(Some(button));
        } else if let Some(button) = self_.button.borrow_mut().take() {
            // Dropping the taken button after removal releases our reference.
            self_.container.remove(&button);
        }
        self_.has_close_button.set(has_close_button);
    }
}
package com.example.mygangedrecyclerview;

/**
 * Callback fired when a position becomes checked/selected.
 */
public interface CheckListener {

    /**
     * @param position the newly checked item position
     * @param isScroll whether the change originated from a scroll
     *                 (as opposed to a direct selection)
     */
    void check(int position, boolean isScroll);
}
package com.jdroid.java.http.parser.plain

import com.jdroid.java.utils.LoggerUtils
import com.jdroid.java.utils.StreamUtils
import java.io.InputStream

/**
 * Base parser for plain-text HTTP responses: drains the stream into a
 * single String and delegates to the string-based [parse] overload.
 */
abstract class PlainTextParser : com.jdroid.java.http.parser.Parser {

    override fun parse(inputStream: InputStream): Any {
        LOGGER.debug("Parsing started.")
        try {
            // Read the plain text response
            val result = StreamUtils.toString(inputStream)
            // Logs the full response body at debug level.
            LOGGER.debug(result)

            // Parse the plain text
            return parse(result)
        } finally {
            LOGGER.debug("Parsing finished.")
        }
    }

    /** Parses the already-read response body; implemented by subclasses. */
    abstract override fun parse(input: String): Any

    companion object {
        private val LOGGER = LoggerUtils.getLogger(PlainTextParser::class.java)
    }
}
import { WiredBase, TemplateResult, CSSResult, PropertyValues } from './wired-base';

/**
 * Type declarations for the wired-toggle custom element (a hand-drawn
 * style toggle switch). Declarations only — no implementation here.
 */
export declare class WiredToggle extends WiredBase {
    /** Whether the toggle is currently on. */
    checked: boolean;
    /** Whether user interaction is disabled. */
    disabled: boolean;
    private knob?;
    static readonly styles: CSSResult;
    render(): TemplateResult;
    private refreshDisabledState;
    private toggleCheck;
    firstUpdated(): void;
    updated(changed: PropertyValues): void;
    refreshElement(): void;
}
import re
from datetime import date, datetime

from src.bcs_oi_api import BCSOIAPIBaseModel
from src.bcs_oi_api.models import SecurityAdvisoryOutcome


def check_model_creation(input_dict: dict, model_instance: BCSOIAPIBaseModel) -> None:
    """Recursively assert that ``model_instance`` faithfully mirrors ``input_dict``.

    For each camelCase key in ``input_dict`` the corresponding snake_case
    attribute of ``model_instance`` is compared: lists and nested models
    recurse, enum/date/datetime fields are compared after conversion, and
    everything else is compared directly.

    :param input_dict: raw API payload the model was built from
    :param model_instance: pydantic-style model under test
    :raises AssertionError: on the first mismatching field
    """
    for key, expected in input_dict.items():
        # camelCase/PascalCase JSON key -> snake_case attribute name; the
        # "p_i_e" -> "PIE" replacement undoes over-splitting of that acronym.
        attribute_name = re.sub(r"(?<!^)(?=[A-Z0-9])", "_", key).lower().replace("p_i_e", "PIE")
        # Fetch once instead of calling __getattribute__ for every branch.
        actual = getattr(model_instance, attribute_name)
        if isinstance(actual, list):
            for index, item in enumerate(actual):
                check_model_creation(input_dict=expected[index], model_instance=item)
        elif isinstance(actual, BCSOIAPIBaseModel):
            check_model_creation(input_dict=expected, model_instance=actual)
        elif isinstance(actual, SecurityAdvisoryOutcome):
            assert actual.value == expected
        # datetime must be tested before date: datetime is a date subclass.
        elif isinstance(actual, datetime):
            assert actual == datetime.strptime(expected, "%Y-%m-%dT%H:%M:%S")
        elif isinstance(actual, date):
            assert actual == datetime.strptime(expected, "%Y-%m-%d").date()
        else:
            assert actual == expected
-- Re-seed `spell_proc_event` rows for spell entries 24949 and 34082.
-- Delete-then-insert keeps the script idempotent; both rows use all-zero
-- school/family masks and flags (defaults apply for proc behaviour).
DELETE FROM `spell_proc_event` WHERE `entry` IN (24949,34082);
INSERT INTO `spell_proc_event` VALUES
(24949,0,0,0,0,0x0000000000000000,0x00000000,0),
(34082,0,0,0,0,0x0000000000000000,0x00000000,0);
# frozen_string_literal: true

# Authorization policy for WorkVersion objects
class WorkVersionPolicy < ApplicationPolicy
  alias_rule :edit?, to: :update?
  alias_rule :delete?, to: :destroy?

  # Scope of versions the user may edit: admins see everything; others see
  # what they deposited plus anything in collections they manage or review.
  relation_scope :edits do |scope|
    if administrator?
      scope
    else
      scope.where(depositor: user)
           .or(scope.where(collection_id: [user.manages_collection_ids + user.reviews_collection_ids]))
    end
  end

  # Can deposit a work iff:
  # 1. Collection is accessioned
  # 2. The user is an administrator, or a depositor or a manager of this collection
  def create?
    return false unless collection.head.accessioned?
    return true if administrator?

    (collection.depositor_ids.include?(user.id) || manages_collection?(collection))
  end

  # Changing the work's type is only allowed on PURL reservations the user
  # could otherwise update.
  def update_type?
    record.purl_reservation? && update?
  end

  # Can edit a work iff:
  # The work is in a state where it can be updated (e.g. not depositing, not an in-progress purl reservation)
  # AND if any one of the following is true:
  # 1. The user is an administrator
  # 2. The user is the depositor of the work and it is not currently pending approval (review workflow)
  # 3. The user is a manager of the collection the work is in
  # 4. The user is a reviewer of the collection the work is in
  def update?
    return false unless record.updatable?
    return true if allowed_to?(:review?, collection)

    depositor_of_the_work? && !record.pending_approval?
  end

  # Can show a work iff any one of the following is true:
  # 1. The user is an administrator
  # 2. The user is the depositor of the work
  # 3. The user is a manager of the collection the work is in
  # 4. The user is a reviewer of the collection the work is in
  def show?
    depositor_of_the_work? || allowed_to?(:review?, collection)
  end

  # The collection reviewers can review a work
  def review?
    record.pending_approval? && allowed_to?(:review?, collection)
  end

  # Only persisted drafts may be destroyed, and only by reviewers of the
  # collection or the work's own depositor.
  def destroy?
    (allowed_to?(:review?, collection) || depositor_of_the_work?) &&
      record.persisted? && record.draft?
  end

  private

  delegate :administrator?, to: :user_with_groups

  # Collection the underlying work belongs to.
  def collection
    record.work.collection
  end

  # True when the current user originally deposited the work.
  def depositor_of_the_work?
    record.work.depositor == user
  end
end
#!/bin/bash
# Nagios/check_mk-style local check: reports whether a jorc update is pending.
# Output format: "<status> Update_Status - <text>"  (0 = OK, 1 = WARNING)

cv=$(jorc current_version)
lv=$(jorc local_version)
# The value of pending_update is unused; we only need its exit status,
# which must be captured immediately on the next line.
v=$(jorc pending_update)
rv=$?

# OK when the local version already matches the current one, or when the
# pending_update probe itself failed (non-zero exit).
if [ "$cv" = "$lv" ] || [ "$rv" -ne 0 ]; then
    stat=0
    statustxt="OK, No updates Pending"
else
    stat=1
    statustxt="WARNING, An Update is in progress from version $lv to $cv"
fi

echo "$stat Update_Status - $statustxt"
package geoset import ( "math" ) type entry struct { latitude float64 longitude float64 value interface{} } // GeoSet is a collection that allows for values to be stored by their latitude and longitude; // and allows for lookups to find the entry closest to the supplied latitude and longitude. type GeoSet struct { entries []entry } // NewGeoSet returns a new GeoSet func NewGeoSet() *GeoSet { return &GeoSet{} } // Add the supplied value to the location specified with the latitude and longitude (in degrees) func (gs *GeoSet) Add(lat float64, lon float64, value interface{}) { gs.entries = append(gs.entries, entry{lat, lon, value}) } // Closest returns the entry in the set that is nearest to the supplied latitude and longitude (in degrees) func (gs *GeoSet) Closest(lat float64, lon float64) interface{} { shortestDistance := math.MaxFloat64 var value interface{} for _, entry := range gs.entries { distance := distance(lat, lon, entry.latitude, entry.longitude) if distance < shortestDistance { shortestDistance = distance value = entry.value } } return value } // haversine function func hsin(theta float64) float64 { return math.Pow(math.Sin(theta/2), 2) } // See http://en.wikipedia.org/wiki/Haversine_formula func distance(lat1 float64, lon1 float64, lat2 float64, lon2 float64) float64 { // Radius of Earth in metres (mean earth radius, via https://en.wikipedia.org/wiki/Great-circle_distance) r := float64(6371000) // Convert degrees to radians la1 := lat1 * math.Pi / 180 lo1 := lon1 * math.Pi / 180 la2 := lat2 * math.Pi / 180 lo2 := lon2 * math.Pi / 180 h := hsin(la2-la1) + math.Cos(la1)*math.Cos(la2)*hsin(lo2-lo1) return 2 * r * math.Asin(math.Sqrt(h)) }
package model // Name. Name of the server for which to display parameters.<br>Minimum length = 1. // Server_Gslbservice_Binding. gslbservice that can be bound to server. // Server_Service_Binding. service that can be bound to server. // Server_Servicegroup_Binding. servicegroup that can be bound to server. // ServerBindingWrapper wraps the object and serves as default response. type ServerBindingWrapper struct { Errorcode int `json:"errorcode"` Message string `json:"message"` Severity string `json:"severity"` ServerBinding []ServerBinding `json:"server_binding"` } // ServerBinding describes the resource. type ServerBinding struct { Name string `json:"name"` ServerServicegroupBinding []ServerServicegroupBinding `json:"server_servicegroup_binding"` ServerServiceBinding []ServerServiceBinding `json:"server_service_binding"` ServerGslbserviceBinding []interface{} `json:"server_gslbservice_binding"` }
package uk.gov.justice.digital.hmpps.hmppsinterventionsservice.dto

import java.time.OffsetDateTime

/**
 * Outgoing domain-event payload.
 *
 * @property eventType machine-readable event identifier
 * @property description human-readable summary
 * @property detailUrl URL where consumers can fetch the full resource
 * @property occurredAt when the underlying change happened
 * @property additionalInformation free-form extra attributes
 */
data class EventDTO(
  val eventType: String,
  val description: String,
  val detailUrl: String,
  val occurredAt: OffsetDateTime,
  val additionalInformation: Map<String, Any>
) {
  // Payload schema version; fixed at 1 for this shape.
  val version: Int = 1
}
package surferua import "strings" var browserDBSize = 0 var browserDB []BrowserInfo // Browser is br type Browser struct { // The name of the browser. Name string // The name of the browser's engine. Engine Engine // The version of the browser. Semver Semver } func (b *Browser) String() string { // chrome has different format tpl if b == nil { return "" } if strings.Contains(b.Name, "hrome") { // make safari random return b.Engine.String() + " " + b.Name + "/" + b.Semver.String() + " Safari/537.36" } return b.Engine.String() + " " + b.Name + "/" + b.Semver.String() } type BrowserInfo struct { Name string EngineInfo EngineInfo VersionInfo VersionInfo } func (bi *BrowserInfo) Random() *Browser { return &Browser{Name: bi.Name, Engine: *bi.EngineInfo.Random(), Semver: *bi.VersionInfo.Random()} } func NewBrowserInfo(name string, m interface{}) (bi *BrowserInfo) { if mMap, ok := m.(map[interface{}]interface{}); ok { bi = &BrowserInfo{ Name: name, EngineInfo: NewEngineInfo(mMap["engine"]), VersionInfo: NewVersionInfo(mMap["version"]), } } return } func NewBrowserInfoList(m map[interface{}]interface{}) (l []*BrowserInfo) { for name, mMpa := range m { bi := NewBrowserInfo(name.(string), mMpa) if bi != nil { l = append(l, bi) } } return }
# Consolidates the reviews schema: renames/adds media columns, tightens
# NULL constraints, collapses detailed ratings, links library entries and
# migrates review upvotes into a dedicated review_likes table.
class CleanUpReviews < ActiveRecord::Migration
  # Minimal shadow models so the migration doesn't depend on app classes.
  class Vote < ActiveRecord::Base; end
  class Review < ActiveRecord::Base
    has_many :votes, as: 'target', dependent: :delete_all
  end

  def change
    #### Media Association
    # anime_id -> media_id
    rename_column :reviews, :anime_id, :media_id
    # Add media_type column
    add_column :reviews, :media_type, :string
    Review.update_all(media_type: 'Anime')
    # FIX: change_column_null takes the null flag as a positional boolean;
    # the previous `null: false` keyword hash was truthy, so NOT NULL was
    # never actually applied to media_type.
    change_column_null :reviews, :media_type, false
    # Remove nullability of media_id
    Review.where(media_id: nil).destroy_all
    change_column_null :reviews, :media_id, false

    #### User Association
    # Remove nullability of user
    Review.where(user_id: nil).destroy_all
    change_column_null :reviews, :user_id, false

    #### Ratings
    # Remove detailed ratings
    remove_column :reviews, :rating_story
    remove_column :reviews, :rating_animation
    remove_column :reviews, :rating_sound
    remove_column :reviews, :rating_character
    remove_column :reviews, :rating_enjoyment
    # Make rating non-nullable
    Review.where(rating: nil).destroy_all
    change_column_null :reviews, :rating, false

    #### Content Stuff
    # Add formatted content
    add_column :reviews, :content_formatted, :text
    # Copy formatted content
    execute 'UPDATE reviews SET content_formatted = content'
    # Remove nullability of content
    Review.where(content: nil).destroy_all
    change_column_null :reviews, :content, false
    change_column_null :reviews, :content_formatted, false
    # Add column to mark HTML-backed content
    add_column :reviews, :legacy, :boolean, default: false, null: false
    # Mark all reviews as legacy for now
    Review.update_all(legacy: true)

    #### Library Entry linkup
    # Add column
    add_column :reviews, :library_entry_id, :integer
    add_foreign_key :reviews, :library_entries
    # Fill column with existing data
    execute <<-SQL.squish
      UPDATE reviews r
      SET library_entry_id = (
        SELECT le.id FROM library_entries le
        WHERE le.media_type = r.media_type
          AND le.media_id = r.media_id
          AND le.user_id = r.user_id
      )
    SQL

    #### Review Likes
    # Create table
    create_table :review_likes do |t|
      t.timestamps null: false
      t.references :review, null: false
      t.references :user, null: false, foreign_key: true
    end
    # Move existing data (only positive votes become likes)
    execute <<-SQL.squish
      INSERT INTO review_likes (review_id, user_id, created_at, updated_at)
      SELECT target_id, user_id, created_at, updated_at
      FROM votes
      WHERE target_type = 'Review' AND positive = 't'
    SQL

    # Remove downvote stuff
    remove_column :reviews, :wilson_score
    remove_column :reviews, :total_votes
    # Rename our positive_votes to likes_count
    rename_column :reviews, :positive_votes, :likes_count
  end
end
package world.phantasmal.psolib.fileFormats.ninja

import world.phantasmal.core.Success
import world.phantasmal.psolib.test.LibTestSuite
import world.phantasmal.psolib.test.readFile
import kotlin.test.Test
import kotlin.test.assertEquals
import kotlin.test.assertTrue

/** Parsing tests for the Ninja (.nj) 3D model format. */
class NinjaTests : LibTestSuite {
    @Test
    fun can_parse_rag_rappy_model() = testAsync {
        val result = parseNj(readFile("/RagRappy.nj"))

        // A successful parse of this fixture yields exactly one model object.
        assertTrue(result is Success)
        assertEquals(1, result.value.size)
    }
}
package im.actor.server

import scala.util.{ Failure, Success }

import com.typesafe.config._
import slick.driver.PostgresDriver.api.Database
import slick.jdbc.JdbcDataSource

import im.actor.server.db.{ DbInit, FlywayInit }

/**
 * Test helper mixin: provisions a clean, fully-migrated PostgreSQL
 * database for a spec run.
 */
trait SqlSpecHelpers extends FlywayInit with DbInit {
  final val sqlConfig = ConfigFactory.load().getConfig("services.postgresql")

  /**
   * Creates the datasource, wipes the schema via Flyway `clean`, applies
   * all migrations, and returns the (datasource, database) pair.
   * Rethrows any datasource initialization failure.
   */
  def migrateAndInitDb(): (JdbcDataSource, Database) =
    initDs(sqlConfig) match {
      case Success(ds) ⇒
        val flyway = initFlyway(ds.ds)
        flyway.clean()
        flyway.migrate()
        (ds, initDb(ds))
      case Failure(e) ⇒ throw e
    }
}
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ui/ozone/platform/headless/headless_screen.h" namespace ui { HeadlessScreen::HeadlessScreen() { static constexpr int64_t kHeadlessDisplayId = 1; static constexpr float kHeadlessDisplayScale = 1.0f; static constexpr gfx::Rect kHeadlessDisplayBounds(gfx::Size(1, 1)); display::Display display(kHeadlessDisplayId); display.SetScaleAndBounds(kHeadlessDisplayScale, kHeadlessDisplayBounds); display_list_.AddDisplay(display, display::DisplayList::Type::PRIMARY); } HeadlessScreen::~HeadlessScreen() = default; const std::vector<display::Display>& HeadlessScreen::GetAllDisplays() const { return display_list_.displays(); } display::Display HeadlessScreen::GetPrimaryDisplay() const { auto iter = display_list_.GetPrimaryDisplayIterator(); DCHECK(iter != display_list_.displays().end()); return *iter; } display::Display HeadlessScreen::GetDisplayForAcceleratedWidget( gfx::AcceleratedWidget widget) const { return GetPrimaryDisplay(); } gfx::Point HeadlessScreen::GetCursorScreenPoint() const { return gfx::Point(); } gfx::AcceleratedWidget HeadlessScreen::GetAcceleratedWidgetAtScreenPoint( const gfx::Point& point) const { return gfx::kNullAcceleratedWidget; } display::Display HeadlessScreen::GetDisplayNearestPoint( const gfx::Point& point) const { return GetPrimaryDisplay(); } display::Display HeadlessScreen::GetDisplayMatching( const gfx::Rect& match_rect) const { return GetPrimaryDisplay(); } void HeadlessScreen::AddObserver(display::DisplayObserver* observer) { display_list_.AddObserver(observer); } void HeadlessScreen::RemoveObserver(display::DisplayObserver* observer) { display_list_.RemoveObserver(observer); } } // namespace ui
package shark

import java.io.{FileOutputStream, PrintWriter, BufferedOutputStream, File}

import org.scalatest.{FunSuite, BeforeAndAfterAll}
import org.junit.Assert._

/**
 * Test various queries related to timestamps. Specifically, test for a bug that is occurs when
 * counting timestamp values, ordering by the same timestamp column.
 */
class TimestampSuite extends FunSuite with SharkHiveTestUtil with BeforeAndAfterAll {

  override def beforeAll() {
    setHiveTestDir()
  }

  test("CountGroupByOrderByTimestamp") {
    SharkEnv.initWithSharkContext("CountGroupByOrderByTimestamp")

    // Write a single-row TSV fixture containing one timestamp value.
    val tableName = "ts_test"
    new File(getTestDir).mkdirs()
    val testDataFile = new File(getTestDir, tableName + ".tsv")
    val out = new PrintWriter(new BufferedOutputStream(new FileOutputStream(testDataFile)))
    val tsStr = "2013-03-18 00:41:15"
    out.println(tsStr)
    out.close()

    try {
      val sc = SharkEnv.sc.asInstanceOf[SharkContext]
      // DDL/load statements produce no output rows; assert empty results.
      assertEquals("", sc.sql("DROP TABLE IF EXISTS %s".format(tableName)).mkString("\n"))
      assertEquals("", sc.sql("CREATE TABLE %s (t TIMESTAMP)".format(tableName)).mkString("\n"))
      assertEquals("", sc.sql("LOAD DATA LOCAL INPATH '%s' OVERWRITE INTO TABLE %s".format(
        testDataFile.getAbsolutePath, tableName)).mkString("\n"))
      assertEquals(tsStr, sc.sql("SELECT * FROM %s".format(tableName)).mkString("\n"))
      // The regression: COUNT grouped and ordered by the same timestamp column.
      assertEquals(tsStr + "\t1",
        sc.sql("SELECT t, COUNT(1) FROM %s GROUP BY t ORDER BY t".format(tableName)).mkString("\n"))
    } finally {
      // Always tear down the Shark context, even on assertion failure.
      SharkEnv.stop()
    }
  }
}
#ifdef KAI_HAVE_PRAGMA_ONCE
#    pragma once
#endif

#ifndef KAI_TESTS_BINARY_STREAM_H
#    define KAI_TESTS_BINARY_STREAM_H

KAI_TESTS_BEGIN

// Test suite covering BinaryStream round-tripping of builtin types,
// objects, properties and the container types.
struct TestBinaryStream : Test::Suite<TestBinaryStream>
{
    TestBinaryStream()
    {
        // Register each test method with the suite runner by name.
        Builder(this, "TestBinaryStream")
            ("TestBuiltins", &TestBinaryStream::TestBuiltins)
            ("TestObject", &TestBinaryStream::TestObject)
            ("TestProperties", &TestBinaryStream::TestProperties)
            ("TestList", &TestBinaryStream::TestList)
            ("TestArray", &TestBinaryStream::TestArray)
            ("TestMap", &TestBinaryStream::TestMap)
            ("TestSet", &TestBinaryStream::TestSet)
            ("TestStreams", &TestBinaryStream::TestStreams)
            ;
    }

    void TestBuiltins();
    void TestObject();
    void TestProperties();
    void TestList();
    void TestArray();
    void TestMap();
    void TestSet();
    void TestStreams();
};

KAI_TESTS_END

#endif // KAI_TESTS_BINARY_STREAM_H

//EOF
# frozen_string_literal: true # This class is not meant to be used to edit or set # any of the config values. It is for display only. # See the School class for more info. module Schools class Subfield # @param school [Schools::School]: The school this subfield belongs to. # @param department [Schools::Department]: The department this subfield belongs to. # @param id [String]: The ID for the subfield (as defined in config/authorities/<department>_programs.yml) def initialize(school, department, id) @school = school @department = department @id = id end attr_reader :school, :department, :id def department_service department.service end def label return @label if @label qa_terms = department_service.active_elements.find { |subfield| subfield['id'] == id } || {} @label = qa_terms[:label] end end end
import {Util} from './util.js'

// Drives the start-up loading screen: advances the progress bar as each
// pending asset resolves, then hides the page once #assets is loaded.
class LoadingPage {
  constructor() {
    let assets = document.querySelector('vartiste-assets')
    let loadingPage = document.querySelector('#loading-page')
    let progressBar = loadingPage.querySelector('.loading-progress')

    let assetEntries = Object.entries(assets.waitingFor)
    let totalCount = assetEntries.length
    let currentCount = 0

    if (totalCount === 0) {
      // Nothing to wait for: without this guard the division below would
      // be 0/0 and set the bar width to "NaN%".
      progressBar.style.width = '100%'
    }

    for (let [name, promise] of assetEntries) {
      promise.then(() => {
        currentCount++
        progressBar.style.width = `${currentCount / totalCount * 100}%`
      })
    }

    Util.whenLoaded(document.querySelector('#assets'), () => {
      loadingPage.classList.add('hidden')
    })
  }
}

// Capture phase so the page reacts even if a listener stops propagation.
document.body.addEventListener('vartisteassetsadded', () => {
  new LoadingPage()
}, true)
using System.Linq;
using NUnit.Framework;
using Shouldly;
using StructureMap.Configuration.DSL;
using StructureMap.Graph;

namespace StructureMap.Testing.Bugs
{
    // Regression test for issue #247: scanning open generic implementations
    // must not register each concrete type more than once.
    [TestFixture]
    public class Bug_247_ConnectOpenTypesToImplementations_doubling_up_registrations
    {
        [Test]
        public void Scanner_apply_should_only_register_two_instances()
        {
            var scanner = new GenericConnectionScanner(typeof (ISomeServiceOf<>));
            var registry = new Registry();
            var graph = new PluginGraph();

            // Process both implementations, then apply once — exactly two
            // instances of ISomeServiceOf<string> should result.
            scanner.Process(typeof (SomeService1), registry);
            scanner.Process(typeof (SomeService2), registry);

            scanner.Apply(graph);

            graph
                .AllInstances(typeof (ISomeServiceOf<string>))
                .Count()
                .ShouldBe(2);
        }

        public interface ISomeServiceOf<T>
        {
        }

        public class SomeService1 : ISomeServiceOf<string>
        {
        }

        public class SomeService2 : ISomeServiceOf<string>
        {
        }
    }
}
# # Cookbook:: end_to_end # Recipe:: _zypper # # Copyright:: Copyright (c) Chef Software Inc. # zypper_repository "nginx repo" do baseurl "https://nginx.org/packages/sles/15" gpgkey "https://nginx.org/keys/nginx_signing.key" end zypper_package "nginx"
#pragma once #include <cstdint> #include <cstdlib> #include <cstring> namespace SDK { namespace ue { class C_String { private: uint32_t refCount = 0; int32_t stringLength = 0; int32_t wideStringLength = 0; char *stringPtr = nullptr; public: const char *c_str() { if (!this) return nullptr; return *reinterpret_cast<const char **>(this); } void SetString(const char *str); C_String(const char *str) { SetString(str); } }; }; // namespace ue } // namespace SDK
#include "SandBoxApplication.h"
#include <iostream>
#include "GameEngine/EntryPoint.h"

// Minimal sandbox app main loop body.
void SandBoxApplication::Run() {
    std::cout << "Sandbox application is running " << std::endl;
}

// Factory consumed by the engine's entry point; ownership of the returned
// instance transfers to the engine.
// NOTE(review): "Applicaiton" is misspelled, but the name must match the
// declaration in the GameEngine headers — fix both together, not here.
Applicaiton *const GameEngine::CreateApplication() {
    return new SandBoxApplication();
}
<?php
/**
 * @file panels-pane.tpl.php
 * Main panel pane template
 *
 * Variables available:
 * - $pane->type: the content type inside this pane
 * - $pane->subtype: The subtype, if applicable. If a view it will be the
 *   view name; if a node it will be the nid, etc.
 * - $title: The title of the content
 * - $content: The actual content
 * - $links: Any links associated with the content
 * - $more: An optional 'more' link (destination only)
 * - $admin_links: Administrative links associated with the content
 * - $feeds: Any feed icons or associated with the content
 * - $display: The complete panels display object containing all kinds of
 *   data including the contexts and all of the other panes being displayed.
 */
?>
<?php if ($pane_prefix): ?>
  <?php print $pane_prefix; ?>
<?php endif; ?>
<div class="media m-y-4">
  <?php if ($admin_links): ?>
    <?php print $admin_links; ?>
  <?php endif; ?>
  <div class="media-left hidden-xs">
    <i class="text-secondary fa fa-list-ul fa-fw fa-2x" aria-hidden="true"></i>
  </div>
  <div class="media-body">
    <i class="text-secondary fa fa-list-ul fa-fw fa-2x visible-xs-inline pull-left" aria-hidden="true"></i>
    <h4 class="m-t-0 m-b-2"><?php print $title; ?></h4>
    <?php if ($feeds): ?>
      <div class="feed">
        <?php print $feeds; ?>
      </div>
    <?php endif; ?>
    <div class="pane-content">
      <?php print render($content); ?>
    </div>
    <?php if ($links): ?>
      <div class="links">
        <?php print $links; ?>
      </div>
    <?php endif; ?>
    <?php if ($more): ?>
      <div class="more-link">
        <?php print $more; ?>
      </div>
    <?php endif; ?>
  </div>
</div>
<?php // Pane suffix is printed after the wrapper so it mirrors $pane_prefix,
      // matching the stock panels-pane template; previously it was nested
      // inside the .media markup. ?>
<?php if ($pane_suffix): ?>
  <?php print $pane_suffix; ?>
<?php endif; ?>
# Cucumber step definitions for membership signup, sign-in and account flows.
# Uses FactoryGirl for fixtures and Capybara for browser interaction.

Given /^there is already an organization with the name '(.*?)'$/ do |org_name|
  FactoryGirl.create :member, :organization_name => org_name
end

Given(/^a sponsor account already exists$/) do
  @password = 'password'
  @email = Faker::Internet.email
  @member = FactoryGirl.build(:member,
    :product_name => "supporter",
    :organization_name => Faker::Company.name,
    :password => @password,
    :password_confirmation => @password,
    :email => @email
  )
  @member.save!
  @member.current!
end

# Builds a remote member at the given level, then signs in through the UI
# so subsequent steps run with an authenticated session.
Given(/^I have a (sponsor|partner) account$/) do |level|
  @password = 'password'
  @email = Faker::Internet.email
  @member = FactoryGirl.build :member,
    :product_name => level,
    :organization_name => Faker::Company.name,
    :password => @password,
    :password_confirmation => @password,
    :email => @email
  @member.remote!
  @member.save!
  @member.current!
  @membership_number = @member.membership_number
  steps %{
    When I visit the sign in page
    And I enter my membership number and password
    And the password is correct
    And I click sign in
  }
end

Given(/^I visit my account page$/) do
  visit member_path(@member)
end

Given(/^I visit their account page$/) do
  visit member_path(@member)
end

# Fills the contact section of the signup form; instance variables are kept
# so later steps can assert against the entered values.
When /^I enter my name and contact details$/ do
  @contact_name = 'Ian McIain'
  @email ||= '[email protected]'
  @telephone = '0121 123 446'
  @share_with_third_parties = false
  @twitter = nil
  fill_in('member_contact_name', :with => @contact_name)
  fill_in('member_email', :with => @email)
  fill_in('member_telephone', :with => @telephone)
  fill_in('member_password', :with => @password || 'p4ssw0rd')
  fill_in('member_password_confirmation', :with => @password || 'p4ssw0rd')
end

When /^I enter my address details$/ do
  @street_address = '123 Fake Street'
  @address_region = 'Faketown'
  @address_country = 'United Kingdom'
  @postal_code = 'FAKE 123'
  fill_in('member_street_address', :with => @street_address)
  fill_in('member_address_region', :with => @address_region)
  select(@address_country, from: :member_address_country, match: :first)
  fill_in('member_postal_code', :with => @postal_code)
end

Then /^I should see an error relating to (.*)$/ do |text|
  expect(page.find(:css, "div.alert-error")).to have_content(text)
end

Then /^I should not see an error$/ do
  expect(page).to_not have_css("div.alert-error")
end

Then /^a welcome email should be sent to me$/ do
  steps %Q{
    Then a welcome email should be sent to "#{@email}"
  }
end

Then(/^a welcome email should be sent to "(.*?)"$/) do |email|
  steps %Q{
    Then "#{email}" should receive an email
    When they open the email
    And they should see the email delivered from "[email protected]"
    And they should see "mailto:[email protected]" in the email body
  }
  # The welcome email must also be BCC'd to the internal address.
  expect(current_email).to bcc_to(%w([email protected]))
end

Given(/^I have signed up, but haven't paid$/) do
  steps %{
    When I enter my name and contact details
    And I enter my company details
    And I enter my address details
    And I agree to the terms
    And I click sign up
  }
end

Given(/^I try to sign up again$/) do
  steps %{
    Given that I want to sign up as a supporter
    And product information has been setup for "corporate-supporter_annual"
    When I visit the signup page
    When I enter my name and contact details
    And I enter my company details
    And I enter my address details
    And I agree to the terms
    And I click sign up
  }
end

Then(/^I should be redirected to the login page$/) do
  expect(current_path).to eq(new_member_session_path)
end

Then(/^I should see an error telling me I need to login$/) do
  expect(page.body).to include("You have already started the signup process, to continue to payment, please login.")
end

Then(/^I log in$/) do
  fill_in('member_password', :with => @original_password)
  click_button('submit')
end

Then(/^an? (.*?) membership should be created for "(.*?)"$/) do |product_name, email|
  @member = Member.where(email: email).first
  expect(@member).to be_present
  expect(@member.product_name).to eql product_name
  @email = email
  steps %Q{
    Then they should have a membership number generated
    Then they should be marked as active
  }
end
-- WSO2 BPS Oracle Clean up Script V3
--
-- Important : Before you run this script, configure instance states and Data
--             retention time period in STMT2 (see comments above STMT2).
--
SET AUTOCOMMIT OFF;
SET SERVEROUTPUT ON

-- CLEANINSTANCE: deletes BPEL process-instance data (rows and their LOBs)
-- for instances whose state / last-active time match the filter in STMT2.
-- Instance ids are staged in a temporary work table TEMP_CLEANUP which is
-- dropped at the end (also on failure).
CREATE OR REPLACE PROCEDURE CLEANINSTANCE AUTHID CURRENT_USER IS
    STMT1  VARCHAR2(2048);
    STMT2  VARCHAR2(2048);
    STMT3  VARCHAR2(2048);
    STMT4  VARCHAR2(2048);
    STMT5  VARCHAR2(2048);
    STMT6  VARCHAR2(2048);
    STMT7  VARCHAR2(2048);
    STMT8  VARCHAR2(2048);
    STMT9  VARCHAR2(2048);
    STMT10 VARCHAR2(2048);
    STMT11 VARCHAR2(2048);
    STMT12 VARCHAR2(2048);
    STMT13 VARCHAR2(2048);
    STMT14 VARCHAR2(2048);
    STMT15 VARCHAR2(2048);
    STMT16 VARCHAR2(2048);
    STMT17 VARCHAR2(2048);
BEGIN
    DBMS_OUTPUT.PUT_LINE (' Start deleting instance data ');
    -- Work table holding ids of the instances selected for deletion.
    STMT1 := 'CREATE TABLE TEMP_CLEANUP(ID NUMBER)';
    EXECUTE IMMEDIATE STMT1;
    --
    -- stmt2 : Configure Followings before executing this script.
    --
    -- * Instance states : List of instance states, which need to be cleaned from the database. Followings are the instance states in BPS engine.
    --         20  - Active.
    --         30  - Completed.
    --         40  - Completed with Fault.
    --         50  - Suspended.
    --         60  - Terminated.
    -- * Last Active Time : Last active time of the instances, which need to be cleaned from the database.
    --         Eg: (SYSTIMESTAMP - 1) will filter instances which are older than 1 day.
    --         Eg: (SYSTIMESTAMP - 7) will filter instances which are older than 7 days.
    --
    STMT2 := 'INSERT INTO TEMP_CLEANUP(ID) SELECT ID FROM ODE_PROCESS_INSTANCE WHERE INSTANCE_STATE IN (30 , 40 , 60) AND LAST_ACTIVE_TIME < (SYSTIMESTAMP - 1)';
    EXECUTE IMMEDIATE STMT2;

    ----------------------------ODE_XML_DATA_LOBS---------------------------------------------------
    DBMS_OUTPUT.PUT_LINE (' Start cleanup of ODE_XML_DATA BLOBS ');
    EXECUTE IMMEDIATE 'ALTER TABLE "ODE_XML_DATA" ENABLE ROW MOVEMENT';
    DBMS_OUTPUT.PUT_LINE (' Set ODE_XML_DATA LOBs to null ');
    STMT3 := 'UPDATE ODE_XML_DATA SET ODE_XML_DATA.DATA = NULL WHERE SCOPE_ID IN (SELECT os.SCOPE_ID FROM ODE_SCOPE os WHERE os.PROCESS_INSTANCE_ID IN (SELECT ID FROM TEMP_CLEANUP))';
    EXECUTE IMMEDIATE STMT3;
    DBMS_OUTPUT.PUT_LINE (' Deleting from ODE_XML_DATA ');
    STMT4 := 'DELETE FROM ODE_XML_DATA WHERE SCOPE_ID IN (SELECT os.SCOPE_ID FROM ODE_SCOPE os WHERE os.PROCESS_INSTANCE_ID IN (SELECT ID FROM TEMP_CLEANUP))';
    EXECUTE IMMEDIATE STMT4;
    DBMS_OUTPUT.PUT_LINE (' Shrinking ODE_XML_DATA table');
    EXECUTE IMMEDIATE 'ALTER TABLE "ODE_XML_DATA" SHRINK SPACE CASCADE';

    --------------------------------------ODE_MESSAGE_LOBS---------------------------------------------------
    DBMS_OUTPUT.PUT_LINE (' Start cleanup of ODE_MESSAGE BLOBS ');
    EXECUTE IMMEDIATE 'ALTER TABLE "ODE_MESSAGE" ENABLE ROW MOVEMENT';
    DBMS_OUTPUT.PUT_LINE (' Set ODE_MESSAGE LOBs to null ');
    STMT5 := 'UPDATE ODE_MESSAGE SET ODE_MESSAGE.DATA = NULL, ODE_MESSAGE.HEADER = NULL WHERE MESSAGE_EXCHANGE_ID IN (SELECT mex.MESSAGE_EXCHANGE_ID FROM ODE_MESSAGE_EXCHANGE mex WHERE mex.PROCESS_INSTANCE_ID IN (SELECT ID FROM TEMP_CLEANUP))';
    EXECUTE IMMEDIATE STMT5;
    DBMS_OUTPUT.PUT_LINE (' Deleting from ODE_MESSAGE ');
    STMT6 := 'DELETE FROM ODE_MESSAGE WHERE MESSAGE_EXCHANGE_ID IN (SELECT mex.MESSAGE_EXCHANGE_ID FROM ODE_MESSAGE_EXCHANGE mex WHERE mex.PROCESS_INSTANCE_ID IN (SELECT ID FROM TEMP_CLEANUP))';
    EXECUTE IMMEDIATE STMT6;
    DBMS_OUTPUT.PUT_LINE (' Shrinking ODE_MESSAGE table');
    EXECUTE IMMEDIATE 'ALTER TABLE "ODE_MESSAGE" SHRINK SPACE CASCADE';

    ----------------------------------------------------ODE Events---------------------------------
    EXECUTE IMMEDIATE 'ALTER TABLE "ODE_EVENT" ENABLE ROW MOVEMENT';
    STMT7 :='DELETE FROM ODE_EVENT WHERE INSTANCE_ID IN (SELECT ID FROM TEMP_CLEANUP)';
    -- BUG FIX: the delete must run BEFORE the shrink.  Previously the shrink
    -- was executed first and the rows were deleted afterwards, so the freed
    -- space of ODE_EVENT was never reclaimed.
    EXECUTE IMMEDIATE STMT7;
    EXECUTE IMMEDIATE 'ALTER TABLE "ODE_EVENT" SHRINK SPACE CASCADE';

    ----------------------------------------------------OTHER---------------------------------
    STMT8 :='DELETE FROM ODE_CORSET_PROP WHERE CORRSET_ID IN (SELECT cs.CORRELATION_SET_ID FROM ODE_CORRELATION_SET cs WHERE cs.SCOPE_ID IN (SELECT os.SCOPE_ID FROM ODE_SCOPE os WHERE os.PROCESS_INSTANCE_ID IN (SELECT ID FROM TEMP_CLEANUP)))';
    EXECUTE IMMEDIATE STMT8;
    STMT9 :='DELETE FROM ODE_CORRELATION_SET WHERE SCOPE_ID IN (SELECT os.SCOPE_ID FROM ODE_SCOPE os WHERE os.PROCESS_INSTANCE_ID IN (SELECT ID FROM TEMP_CLEANUP))';
    EXECUTE IMMEDIATE STMT9;
    STMT10 := 'DELETE FROM ODE_PARTNER_LINK WHERE SCOPE_ID IN (SELECT os.SCOPE_ID FROM ODE_SCOPE os WHERE os.PROCESS_INSTANCE_ID IN (SELECT ID FROM TEMP_CLEANUP))';
    EXECUTE IMMEDIATE STMT10;
    STMT11 :='DELETE FROM ODE_XML_DATA_PROP WHERE XML_DATA_ID IN (SELECT xd.XML_DATA_ID FROM ODE_XML_DATA xd WHERE xd.SCOPE_ID IN (SELECT os.SCOPE_ID FROM ODE_SCOPE os WHERE os.PROCESS_INSTANCE_ID IN (SELECT ID FROM TEMP_CLEANUP)))';
    EXECUTE IMMEDIATE STMT11;
    STMT12 := 'DELETE FROM ODE_SCOPE WHERE PROCESS_INSTANCE_ID IN (SELECT ID FROM TEMP_CLEANUP)';
    EXECUTE IMMEDIATE STMT12;
    STMT13 := 'DELETE FROM ODE_MEX_PROP WHERE MEX_ID IN (SELECT mex.MESSAGE_EXCHANGE_ID FROM ODE_MESSAGE_EXCHANGE mex WHERE mex.PROCESS_INSTANCE_ID IN (SELECT ID FROM TEMP_CLEANUP))';
    EXECUTE IMMEDIATE STMT13;
    STMT14 := 'DELETE FROM ODE_MESSAGE_EXCHANGE WHERE PROCESS_INSTANCE_ID IN (SELECT ID FROM TEMP_CLEANUP)';
    EXECUTE IMMEDIATE STMT14;
    STMT15 := 'DELETE FROM ODE_MESSAGE_ROUTE WHERE PROCESS_INSTANCE_ID IN (SELECT ID FROM TEMP_CLEANUP)';
    EXECUTE IMMEDIATE STMT15;
    STMT16 := 'DELETE FROM ODE_PROCESS_INSTANCE WHERE ID IN (SELECT ID FROM TEMP_CLEANUP)';
    EXECUTE IMMEDIATE STMT16;
    DBMS_OUTPUT.PUT_LINE (' End deleting instance data ');
    STMT17 := 'DROP TABLE TEMP_CLEANUP';
    EXECUTE IMMEDIATE STMT17;
    COMMIT;
EXCEPTION
    WHEN OTHERS THEN
        DBMS_OUTPUT.PUT_LINE (' Triggered Exception sequence. ');
        -- Report the actual error instead of silently swallowing it, then
        -- still drop the work table so the script can be re-run.
        DBMS_OUTPUT.PUT_LINE (SQLERRM);
        STMT17 := 'DROP TABLE TEMP_CLEANUP';
        EXECUTE IMMEDIATE STMT17;
        COMMIT;
END;
/
SET AUTOCOMMIT OFF;
BEGIN
    DBMS_OUTPUT.PUT_LINE (' Starting cleanInstance procedure');
    CLEANINSTANCE();
    DBMS_OUTPUT.PUT_LINE (' Ending cleanInstance procedure');
END;
/
SET AUTOCOMMIT ON;

-- ---------------------------------------------------------------------------
-- Human-task cleanup: removes all COMPLETED tasks and their dependent rows.
-- ---------------------------------------------------------------------------
SET AUTOCOMMIT OFF;
CREATE TABLE TEMP_CLEANUP AS SELECT ID FROM HT_TASK WHERE STATUS ='COMPLETED';
set serveroutput on
CREATE OR REPLACE PROCEDURE cleanTaskInstance IS
BEGIN
    dbms_output.put_line (' Start deleting task instance data with instance ids ');
    DELETE FROM ODE_EVENT WHERE INSTANCE_ID IN (SELECT ID FROM TEMP_CLEANUP);
    DELETE FROM HT_DEADLINE WHERE TASK_ID IN (SELECT ID FROM TEMP_CLEANUP);
    DELETE FROM HT_EVENT WHERE HT_EVENT.TASK_ID IN (SELECT ID FROM TEMP_CLEANUP);
    -- Delete org entities before the role/org-entity join rows they hang off.
    DELETE FROM HT_ORG_ENTITY WHERE ORG_ENTITY_ID IN (
        SELECT ORGENTITY_ID FROM HT_HUMANROLE_ORGENTITY WHERE HUMANROLE_ID IN (
            SELECT GHR_ID FROM HT_GENERIC_HUMAN_ROLE WHERE TASK_ID IN (
                SELECT ID FROM TEMP_CLEANUP)));
    DELETE FROM HT_HUMANROLE_ORGENTITY WHERE HUMANROLE_ID IN (
        SELECT GHR_ID FROM HT_GENERIC_HUMAN_ROLE WHERE TASK_ID IN (
            SELECT ID FROM TEMP_CLEANUP));
    DELETE FROM HT_GENERIC_HUMAN_ROLE WHERE TASK_ID IN(SELECT ID FROM TEMP_CLEANUP);
    DELETE FROM HT_PRESENTATION_ELEMENT WHERE TASK_ID IN (SELECT ID FROM TEMP_CLEANUP);
    DELETE FROM HT_PRESENTATION_PARAM WHERE TASK_ID IN (SELECT ID FROM TEMP_CLEANUP);
    DELETE FROM HT_MESSAGE WHERE TASK_ID IN (SELECT ID FROM TEMP_CLEANUP);
    DELETE FROM HT_TASK_COMMENT WHERE TASK_ID IN(SELECT ID FROM TEMP_CLEANUP);
    DELETE FROM HT_TASK WHERE ID IN(SELECT ID FROM TEMP_CLEANUP);
    -- Attachments are resolved through HT_TASK_ATTACHMENT, so delete the
    -- ATTACHMENT rows first, then the join rows.
    DELETE FROM ATTACHMENT WHERE ATTACHMENT_URL IN (SELECT ATTACHMENT_VALUE FROM HT_TASK_ATTACHMENT WHERE TASK_ID IN (SELECT ID FROM TEMP_CLEANUP));
    DELETE FROM HT_TASK_ATTACHMENT WHERE TASK_ID IN (SELECT ID FROM TEMP_CLEANUP);
    DELETE FROM TEMP_CLEANUP;
    COMMIT;
    dbms_output.put_line (' End deleting task instance data with instance ids ');
END;
/
SET AUTOCOMMIT OFF;
BEGIN
    dbms_output.put_line (' Starting cleanTaskInstance procedure ');
    cleanTaskInstance();
    dbms_output.put_line (' Ending cleanTaskInstance procedure ');
END;
/
-- BUG FIX: removed a stray "/" that followed SET AUTOCOMMIT ON;
-- in SQL*Plus a bare "/" re-executes the previous buffered block,
-- which would have run cleanTaskInstance a second time.
SET AUTOCOMMIT ON;
DROP TABLE TEMP_CLEANUP;
      SUBROUTINE DBMMGR ( OPCODE )
C*********************************************************************
C     DBMMGR - IN-MEMORY DATA BASE MANAGER.  DISPATCHES FILE REQUESTS
C     (OPEN/CLOSE/REWIND/WRITE/READ/POSITION/DELETE/WRTBLK/RDBLK)
C     EITHER TO IN-MEMORY BLOCK CHAINS OR TO DBMIO FOR DISK I/O.
C
C     / FCB /
C     FCB(1,I)  - OPEN FLAG
C     FCB(2,I)  - BUFFER ADDRESS
C     FCB(3,I)  - CURRENT CLR
C     FCB(4,I)  - CURRENT BLOCK NUMBER
C     FCB(5,I)  - FIRST BLOCK NUMBER WRITTEN TO THIS FILE
C     FCB(6,I)  - LAST BLOCK NUMBER WRITTEN TO THIS FILE
C     FCB(7,I)  - MAXIMUM NUMBER OF BLOCKS TO BE ALLOCATED
C                 TO THIS FILE
C     FCB(8,I)  - =0, IF NO MATRIX STRINGS WRITTEN TO FILE
C                 =1, OTHERWISE, USED TO INITIALIZE COLUMN
C                 NUMBER TO 1.
C     FCB(9,I)  - INDEX TO FIRST IN-MEMORY BLOCK
C     FCB(10,I) - INDEX TO LAST IN-MEMORY BLOCK
C     FCB(11,I) - INDEX TO CURRENT IN-MEMORY BLOCK
C     FCB(12,I) - ORIGINAL BUFFER ADDRESS
C     FCB(13-14,I) - DMAP FILE NAME (2A4)
C     FCB(15,I) - OPEN FLAG FOR EXTERNAL FILE
C     / DBM/
C     IDBBAS - (INPUT)-INDEX TO IN-MEMORY DATA BASE RELATIVE
C              TO /DBM/
C     IDBFRE - (INPUT)-INDEX TO FREE CHAIN OF IN-MEMORY DATA
C              BASE RELATIVE TO /DBM/
C     IDBDIR - (INPUT)-INDEX TO FIRST DIRECTORY BLOCK
C     MAXALC - (OUTPUT)-MAXIMUM NUMBER OF BLOCKS AVAILABLE FOR
C              JOB
C     MAXBLK - (OUTPUT)-MAXIMUM NUMBER OF BLOCKS ALLOCATED(JOB)
C     MAXDSK - (OUTPUT)-MAXIMUM NUMBER OF BLOCKS WRITTEN TO
C              TO DISK
C     LENALC - (OUTPUT)-LENGTH OF EACH ALLOCATED BLOCK
C     IOCODE - (INPUT) -IO-CODE FOR OPEN/CLOSE CALL
C     IFILEX - (INPUT) -FILE NUMBER FOR GINO FILE IN /XFIAT/
C     NBLOCK - (INPUT/OUTPUT) -BLOCK NUMBER BEING REFERENCED
C     NAME   - (INPUT) -GINO FILE NAME (E.G., 101,201,303,...)
C     INDBAS - INDEX TO START OF BUFFER RELATIVE TO /ZZZZZZ/
C     INDCLR - INDEX TO CLR WITHIN BUFFER RELATIVE TO /ZZZZZZ/
C     INDCBP - INDEX TO CBP WITHIN BUFFER RELATIVE TO /ZZZZZZ/
C     FREE CHAIN FORMAT (ALSO, ALL BLOCKS ALLOCATED)
C     IDBFRE==>  WORD 0  POINTER TO PREVIOUS FREE BLOCK
C                        IN CHAIN, ALWAYS 0 FOR 1ST BLK)
C                WORD 1  POINTER TO NEXT BLOCK IN CHAIN
C                        -INITIALLY SET TO ZERO)
C                WORD 2  NUMBER OF FREE WORDS IN BLOCK
C                WORD 3  RELATIVE BLOCK NUMBER
C
C     OPCODE
C       1   OPEN
C           /GINOX/ IOCODE = 0 ; READ WITH REWIND
C                          = 1 ; WRITE WITH REWIND
C                          = 2 ; READ WITHOUT REWIND
C                          = 3 ; WRITE WITHOUT REWIND
C       2   CLOSE
C           /GINOX/ IOCODE = 1 ; CLOSE WITH REWIND
C                          (OTHERWISE NO REWIND)
C       3   REWIND
C       4   WRITE
C       5   READ
C       6   POSITION FILE
C           NBLOCK = BLOCK NUMBER TO POSITION TO
C       7   DELETE FILE
C       8   PROCESS WRTBLK REQUEST (SUBSTRUCTURING)
C       9   PROCESS RDBLK REQUEST (SUBSTRUCTURING)
C********************************************************************
      INTEGER OPCODE
      INTEGER CASE / 4HCASE /
      INTEGER XYCD / 4HXYCD /
      INTEGER PCDB / 4HPCDB /
      INTEGER POOL / 4HPOOL /
      INTEGER XPDT / 4HXPDT /
      INCLUDE 'DSIOF.COM'
      COMMON / XFIST / FIST(10)
      COMMON / XFIAT / FIAT(10)
      COMMON / ZZZZZZ / MEM(4)
      COMMON / SYSTEM / ISYSBF, IWR
      DATA LENBUF / 0 /
C     FIRST-CALL INITIALIZATION ONLY (LENBUF IS A SAVED SENTINEL)
      IF ( LENBUF .NE. 0 ) GO TO 10
C     SET UP BLOCK ALLOCATIONS FOR DOUBLE WORD BOUNDARIES
      IBASBF = LOCFX( MEM )
      LENBUF = ISYSBF - 3 + 8
      LENALC = LENBUF
      NBUFF3 = ISYSBF - 4
      ITEST  = MOD( LENBUF,2)
      IF ( ITEST .NE. 0 ) LENBUF = LENBUF + 1
10    IF ( IDBDIR .NE. 0 ) GO TO 30
C     OPCODES OF 8 AND 9 HAVE NO PURPOSE WHEN THERE IS NO USE OF THE
C     IN-MEMORY DATA BASE
      IF ( OPCODE .EQ. 8 .OR. OPCODE .EQ. 9 ) GO TO 7777
C     CALL DBMIO DIRECTLY, NO IN-MEMORY DATA BASE
20    CALL DBMIO ( OPCODE )
      GO TO 7777
30    IF ( NAME .GT. 100 .AND. NAME .LT. 400 ) GO TO 50
C30   IF ( NAME .GT. 300 .AND. NAME .LT. 400 ) GO TO 50
C     CHECK FOR CASECC, XYCD, AND PCDB (SETUP IN FIAT FOR PREFACE)
      IF ( NAME .EQ. CASE ) GO TO 50
      IF ( NAME .EQ. XYCD ) GO TO 50
      IF ( NAME .EQ. PCDB ) GO TO 50
      IF ( NAME .EQ. XPDT ) GO TO 50
      IF ( NAME .EQ. POOL ) GO TO 50
C     OPCODES OF 8 AND 9 HAVE NO PURPOSE WHEN THERE IS NO USE OF THE
C     IN-MEMORY DATA BASE
      IF ( OPCODE .EQ. 8 .OR. OPCODE .EQ. 9 ) GO TO 7777
C     CALL DBMIO DIRECTLY BECAUSE THIS IS AN EXECUTIVE FILE
      IF ( FCB( 9, IFILEX ) .NE. 0 ) CALL DBMREL
      GO TO 20
50    CONTINUE
C     IF ( IFILEX .NE. 48 ) GO TO 55
C     IF ( NAME .NE. 307 ) GO TO 55
C     WRITE(IWR,40646)OPCODE,IOCODE,NBLOCK,IFILEX,NAME,INDBAS
40646 FORMAT(/,' OPCODE,IOCODE,NBLOCK,IFILEX,NAME,INDBAS=',6I6)
C     WRITE(IWR,40647)(MEM(INDBAS+KB),KB=-4,20)
40647 FORMAT(' INPUT BUFFER HAS=',/,10(4(1X,Z8),/))
C     WRITE(6,44770)(FCB(K,IFILEX),K=1,15)
44770 FORMAT(' ENTERRED FCB=',/,2(5I8,/),2I8,4X,2A4,4X,I8)
C     CALL DBMFDP
55    CONTINUE
C     DISPATCH ON OPCODE: 100=OPEN 200=CLOSE 300=REWIND 400=WRITE
C     500=READ 600=POSITION 700=DELETE 800=WRTBLK 900=RDBLK
      GO TO ( 100,200,300,400,500,600,700,800,900),OPCODE
C****************
C     OPEN CODE *********************************************************
C****************
100   CONTINUE
      FCB( 1, IFILEX )  = IOCODE
      FCB( 12, IFILEX ) = FCB( 2, IFILEX )
      IF ( FCB( 9, IFILEX ) .NE. 0 ) GO TO 130
C     CHECK TO SEE IF FILE IS SELF CONTAINED ON DISK
      IF ( FCB( 5, IFILEX ) .NE. 0 ) GO TO 120
105   CONTINUE
      IF ( IOCODE .NE. 0 .AND. IOCODE .NE. 2 ) GO TO 108
      WRITE ( IWR, 9900 ) IFILEX, FCB( 13, IFILEX), FCB( 14, IFILEX )
9900  FORMAT(///,' DBMMGR ERROR, ATTEMPT TO OPEN FOR READ OR WRITE APP'
     &,'END:'
     &,/,' UNIT-',I4,' NAME=',2A4,' WHICH DOES NOT EXIST.')
C     CALL DBMDMP
      CALL DSMSG ( 777 )
      CALL MESAGE ( -61, 0, 0 )
108   CONTINUE
C     NEW FILE NAME FOR IFILEX, RELEASE ANY PREVIOUSLY ALLOCATED BLOCKS
      IF ( FCB( 9, IFILEX ) .NE. 0 ) CALL DBMREL
C     CREATE FILE ENTRY IN FCB (ENTRY 7, MAX BLOCK LIMIT, IS PRESERVED)
      DO 110 I = 3,11
      IF ( I .EQ. 7 ) GO TO 110
      FCB( I, IFILEX ) = 0
110   CONTINUE
      FCB( 4, IFILEX ) = 1
      NBLOCK = 1
115   CONTINUE
C     ALLOCATE FIRST BLOCK
      CALL DBMALB ( LENBUF, NEXBLK )
      IF ( NEXBLK .LE. 0 ) GO TO 120
      FCB( 9, IFILEX )  = NEXBLK
      FCB( 10, IFILEX ) = NEXBLK
      FCB( 11, IFILEX ) = NEXBLK
C     INITIALIZE PREVIOUS, NEXT, LENGTH AND BLOCK NUMBER FOR ALLOCATED BLK
      MEM( NEXBLK )   = 0
      MEM( NEXBLK+1 ) = 0
      MEM( NEXBLK+2 ) = LENBUF
      MEM( NEXBLK+3 ) = 1
      FCB( 2, IFILEX ) = LOCFX( MEM( NEXBLK+4 ) ) - IBASBF + 1
      CALL DBMMOV ( INDBAS, NEXBLK+4, 4)
      GO TO 7000
C     NO MORE SPACE WITHIN IN-MEMORY DATA BASE, USE I/O
120   CALL DBMIO ( OPCODE )
      GO TO 7777
C     FILE EXISTS IN IN-MEMORY DATA BASE
130   CONTINUE
      IF ( IOCODE .EQ. 0 ) GO TO 150
      IF ( IOCODE .EQ. 1 ) GO TO 160
      IF ( IOCODE .EQ. 2 ) GO TO 170
      IF ( IOCODE .EQ. 3 ) GO TO 180
C     FILE IS OPENED FOR READ WITH REWIND
150   CONTINUE
      NEXBLK = FCB( 9, IFILEX )
      IF ( NEXBLK .GT. 0 ) GO TO 155
      WRITE ( IWR, 9910 ) IFILEX
9910  FORMAT(///,' DBMMGR ERROR, ATTEMPT TO READ FILE WITH NO BLOCKS'
     & /,' UNIT=',I4)
C     CALL DBMDMP
      CALL DSMSG ( 777 )
      CALL MESAGE( -61, 0, 0 )
155   CONTINUE
      FCB( 11, IFILEX ) = NEXBLK
      FCB( 4, IFILEX ) = 1
      NBLOCK = 1
      FCB( 2, IFILEX ) = LOCFX( MEM( NEXBLK+4 ) ) - IBASBF + 1
      CALL DBMMOV ( INDBAS, NEXBLK+4, 3 )
      GO TO 7000
C     FILE IS OPENED FOR WRITE WITH REWIND
160   CONTINUE
      GO TO 105
C     FILE IS OPENED FOR READ WITHOUT REWIND
170   CONTINUE
      NEXBLK = FCB( 10, IFILEX )
      LASTIB = MEM( NEXBLK+3 )
      NBLOCK = FCB( 4, IFILEX )
      IF ( FCB( 4, IFILEX ) .GT. LASTIB ) GO TO 120
      IF ( FCB( 4, IFILEX ) .EQ. 1 ) GO TO 150
      NEXBLK = FCB( 11, IFILEX )
      IBLK1 = FCB( 4, IFILEX )
      IBLK2 = MEM( NEXBLK+3 )
      IBLK3 = MEM( NEXBLK+7 )
      FCB( 2, IFILEX ) = LOCFX( MEM( NEXBLK+4 ) ) - IBASBF + 1
C     CHECK THAT CURRENT BLOCK NUMBER MATCHES BLOCK NO. IN IN-MEM BLK
      IF ( IBLK1 .EQ. IBLK2 .AND. IBLK1 .EQ. IBLK3 ) GO TO 7000
      GO TO 190
C     FILE IS OPENED FOR WRITE WITHOUT REWIND
180   CONTINUE
      NEXBLK = FCB( 10, IFILEX )
      LASTIB = MEM( NEXBLK+3 )
      IF ( FCB( 4, IFILEX ) .GT. LASTIB ) GO TO 120
C======
      IF ( FCB( 4, IFILEX ) .EQ. 1 ) GO TO 160
      NEXBLK = FCB( 11, IFILEX )
C     IGNORE ANY PREVIOUSLY WRITTEN BLOCKS FOR THIS FILE
      FCB( 5, IFILEX ) = 0
      FCB( 6, IFILEX ) = 0
      IBLK1 = FCB( 4, IFILEX )
      IBLK2 = MEM( NEXBLK+3 )
      IBLK3 = MEM( NEXBLK+7 )
      FCB( 2, IFILEX ) = LOCFX( MEM( NEXBLK+4 ) ) - IBASBF + 1
C     CHECK THAT CURRENT BLOCK NUMBER MATCHES BLOCK NO. IN IN-MEM BLK
      IF ( IBLK1 .EQ. IBLK2 .AND. IBLK1 .EQ. IBLK3 ) GO TO 7000
190   CONTINUE
      WRITE ( IWR, 9911 ) IFILEX, IBLK1, IBLK2, IBLK3
9911  FORMAT(///' BLOCK NUMBERS INCONSISTANT ON OPEN IN DBMMGR'
     &,/,' UNIT  =',I4
     &,/,' BLOCK NUMBER EXPECTED (IN FCB)  =',I8
     &,/,' BLOCK NUMBER IN IN-MEMORY BLOCK =',I8
     &,/,' BLOCK NUMBER IN BUFFER          =',I8 )
C     CALL DBMDMP
      CALL DBMFDP
      CALL DSMSG ( 777 )
      CALL MESAGE ( -61, 0, 0 )
C****************
C     CLOSE CODE ********************************************************
C****************
200   CONTINUE
C     CHECK TO SEE IF FILE HAS IN-MEMORY BLOCKS
      IF ( FCB( 9, IFILEX ) .NE. 0 ) GO TO 220
210   CALL DBMIO ( OPCODE )
      GO TO 7000
220   CONTINUE
CWKBDB SPR94012 10/94
C     IF ( IOCODE .NE. 1 ) GO TO 225
CC    CLOSE FILE WITH REWIND
C     FCB( 11, IFILEX ) = FCB( 9, IFILEX )
C     FCB( 4, IFILEX ) = 1
C     IF ( FCB( 5, IFILEX ) .NE. 0 ) GO TO 210
CWKBDE SPR94012 10/94
C     IF FILE IS OPENED FOR READ THAN GO COMPUTE STATISTICS
225   IF ( FCB( 1, IFILEX ) .EQ. 0.OR.
     &     FCB( 1, IFILEX ) .EQ. 2 ) GO TO 240
      IF ( FCB( 15, IFILEX ) .NE. 0 ) GO TO 240
C     FILE OPENED FOR WRITE AND FILE NOT SPILLED TO DISK, THEN
C     RELEASE LAST ALLOCATED BLOCK, BECAUSE IT WAS NOT USED
      NEXBLK = FCB( 11, IFILEX )
C     RESET LAST BLOCK POINTER, GET PREVIOUS BLOCK ALLOCATED
CWKBNB SPR94012 10/94
228   IBLOCK = MEM( NEXBLK+3 )
C     CHECK IF LAST BLOCK NOT USED, THERE COULD HAVE BEEN A BACKPSPACE BACK
C     TO A PREVIOUS USED BLOCK (CAUSED BY CLOSE CALLING DSBRC1 TO BACKSPACE
C     OVER AN EOF THAT WAS AT THE END OF A PREVIOUS BLOCK).
      IF ( IBLOCK .GT. NBLOCK ) GO TO 230
      NEXBLK = MEM( NEXBLK+1 )
      IF ( NEXBLK .EQ. 0 ) GO TO 240
      GO TO 228
230   CONTINUE
CWKBNE SPR94012 10/94
      INDBLK = MEM( NEXBLK )
      FCB( 10, IFILEX ) = INDBLK
      FCB( 11, IFILEX ) = INDBLK
      FCB( 4, IFILEX )  = MEM( INDBLK+3 )
      FCB( 2, IFILEX )  = LOCFX( MEM( INDBLK+4 ) ) - IBASBF + 1
      CALL DBMRLB( NEXBLK )
CWKBNB SPR94012 10/94
240   IF ( IOCODE .NE. 1 ) GO TO 245
C     CLOSE FILE WITH REWIND
      FCB( 11, IFILEX ) = FCB( 9, IFILEX )
      FCB( 4, IFILEX ) = 1
CWKBNE SPR94012 10/94
CWKBR SPR94012 10/94
C240  IF ( FCB( 5, IFILEX ) .NE. 0 ) CALL DBMIO ( OPCODE )
245   IF ( FCB( 5, IFILEX ) .NE. 0 ) CALL DBMIO ( OPCODE )
      IF ( FCB( 5, IFILEX ) .LE. FCB( 6, IFILEX ) ) GO TO 7000
C     SPECIAL CASE, LAST BLOCK ALLOCATED WAS FOR DISK BUT NEVER USED, RESET
C     INDBAS BACK TO LAST IN-MEMORY BLOCK
      NEXBLK = FCB( 10, IFILEX )
      FCB( 2, IFILEX ) = LOCFX( MEM( NEXBLK+4 ) ) - IBASBF + 1
      FCB( 5, IFILEX ) = 0
      FCB( 6, IFILEX ) = 0
      FCB(11, IFILEX ) = FCB( 10, IFILEX )
      GO TO 7000
C****************
C     REWIND OPCODE *****************************************************
C****************
300   CONTINUE
C     IF FILE IS ON EXTERNAL FILE CALL DBMIO DIRECTLY
      IF ( FCB( 9, IFILEX ) .NE. 0 ) GO TO 320
      CALL DBMIO ( OPCODE )
      GO TO 7777
320   CONTINUE
      NEXBLK = FCB( 9, IFILEX )
      FCB( 11, IFILEX ) = NEXBLK
      FCB( 4, IFILEX ) = 1
C     REPLACE BUFFER ADDRESS IN FCB
      FCB( 2,IFILEX ) = LOCFX( MEM( NEXBLK+4 ) ) - IBASBF + 1
      CALL DBMMOV ( INDBAS, NEXBLK+4, 3 )
      IOCODE = 0
      IF ( FCB( 5, IFILEX ) .NE. 0 ) CALL DBMIO ( 2 )
      GO TO 7000
C****************
C     WRITE CODE ********************************************************
C****************
400   CONTINUE
C     CHECK TO SEE IF THIS BLOCK IS ON EXTERNAL FILE
      IF ( FCB( 15, IFILEX ) .NE. 0 ) GO TO 450
C     CHECK THAT BLOCK NUMBER MATCHES
      NEXBLK = FCB( 11, IFILEX )
      IBLK1 = FCB( 4, IFILEX )
      IBLK2 = MEM( NEXBLK+3 )
      IBLK3 = MEM( NEXBLK+7 )
      IF ( IBLK1 .EQ. IBLK2 .AND. IBLK1 .EQ. IBLK3 ) GO TO 410
      WRITE ( IWR, 9940 ) IFILEX, IBLK1, IBLK2, IBLK3
9940  FORMAT(///' BLOCK NUMBERS INCONSISTANT ON WRITE IN DBMMGR'
     &,/,' UNIT = ',I4
     &,/,' BLOCK NUMBER EXPECTED (IN FCB)  =',I8
     &,/,' BLOCK NUMBER IN IN-MEMORY BLOCK =',I8
     &,/,' BLOCK NUMBER IN BUFFER          =',I8 )
C     CALL DBMDMP
      CALL DBMFDP
      CALL DSMSG ( 777 )
      CALL MESAGE ( -61, 0, 0 )
410   CONTINUE
      FCB( 4, IFILEX ) = FCB( 4, IFILEX ) + 1
      NEXBLK = MEM( INDBAS-3 )
      IF ( NEXBLK .EQ. 0 ) GO TO 420
C     USE EXISTING BLOCK ALREADY ALLOCATED FROM PREVIOUS OPEN FOR WRITE
      FCB( 11, IFILEX) = NEXBLK
      FCB( 2,IFILEX ) = LOCFX( MEM( NEXBLK+4 ) ) - IBASBF + 1
      CALL DBMMOV ( INDBAS, NEXBLK+4, 4 )
      GO TO 7000
420   CONTINUE
      CALL DBMALB ( LENBUF, NEXBLK )
      IF ( NEXBLK .LE. 0 ) GO TO 440
C     ANOTHER BLOCK SUCCESSFULLY ALLOCATED, CONNECT TO CHAIN
      INDBLK = FCB( 11, IFILEX )
      MEM( INDBLK+1 ) = NEXBLK
      MEM( NEXBLK )   = INDBLK
      MEM( NEXBLK+1 ) = 0
      MEM( NEXBLK+2 ) = LENBUF
      MEM( NEXBLK+3 ) = FCB( 4, IFILEX )
      FCB( 10, IFILEX) = NEXBLK
      FCB( 11, IFILEX) = NEXBLK
      FCB( 2,IFILEX ) = LOCFX( MEM( NEXBLK+4 ) ) - IBASBF + 1
      CALL DBMMOV ( INDBAS, NEXBLK+4, 4 )
      GO TO 7000
C     NO MORE SPACE IN IN-MEMORY DATA BASE, WRITE DATA TO FILE
440   CONTINUE
C     CALL DBMIO TO OPEN EXTERNAL FILE WITH REWIND
      ISAVE = IOCODE
      ISAVEB = NBLOCK
      IOCODE = 1
      NBLOCK = FCB( 4, IFILEX )
      IPRBLK = INDBAS
C     RESET BUFFER ADDRESS TO BUFFER IN USER'S OPEN CORE
      FCB( 2,IFILEX ) = FCB( 12, IFILEX )
      INDBAS = FCB( 2, IFILEX )
      CALL DBMIO ( 1 )
      IOCODE = ISAVE
      NBLOCK = ISAVEB
C     WRITE(6,88771)(MEM(IPRBLK+K),K=-4,4)
88771 FORMAT(' MEMPRBLK=',9(1X,Z8))
C     WRITE(6,88772)(MEM(INDBAS+K),K=-4,4)
88772 FORMAT(' MEMINDBAS=',9(1X,Z8))
C     PRINT *,' IFILEX,NBLOCK,IPRBLK,INDBAS=',IFILEX,NBLOCK,
C    &          IPRBLK,INDBAS
      CALL DBMMOV ( IPRBLK, INDBAS, 4 )
C     PRINT *,' MEM(IPRBLK=',MEM(IPRBLK)
C     WRITE(6,88771)(MEM(IPRBLK+K),K=-4,4)
C     WRITE(6,88772)(MEM(INDBAS+K),K=-4,4)
      GO TO 7000
450   CONTINUE
      CALL DBMIO ( OPCODE )
      GO TO 7777
C****************
C     READ CODE *********************************************************
C****************
500   CONTINUE
      IF ( FCB( 5, IFILEX ) .EQ. 0 ) GO TO 505
      IF ( FCB( 4, IFILEX ) .GE. ( FCB( 5, IFILEX ) - 1 ) ) GO TO 540
505   FCB( 4, IFILEX ) = FCB( 4, IFILEX ) + 1
      NEXBLK = MEM( INDBAS-3 )
      IF ( NEXBLK .GT. 0 ) GO TO 510
      WRITE ( IWR, 9950 ) FCB( 4, IFILEX ), IFILEX
9950  FORMAT(///,' ERROR IN DBMMGR DURING READ',/,' EXPECTED ANOTHER '
     &,' IN-MEMORY BLOCK FOR BLOCK=',I8,' UNIT=',I3)
C     CALL DBMDMP
      CALL DBMFDP
      CALL DSMSG ( 777 )
      CALL MESAGE ( -61, 0, 0 )
510   FCB( 2, IFILEX ) = LOCFX( MEM( NEXBLK+4 ) ) - IBASBF + 1
      FCB( 11, IFILEX ) = NEXBLK
      CALL DBMMOV ( INDBAS, NEXBLK+4, 3 )
      IBLK1 = FCB( 4, IFILEX)
      IBLK2 = MEM( NEXBLK+3 )
      IBLK3 = MEM( NEXBLK+7 )
      IF ( IBLK1 .EQ. IBLK2 .AND. IBLK1 .EQ. IBLK3 ) GO TO 7000
      WRITE ( IWR, 9951 ) IFILEX, IBLK1, IBLK2, IBLK3
9951  FORMAT(///' BLOCK NUMBERS INCONSISTANT ON READ IN DBMMGR'
     &,/,' UNIT  =',I4
     &,/,' BLOCK NUMBER (IN FCB)           =',I8
     &,/,' BLOCK NUMBER IN IN-MEMORY BLOCK =',I8
     &,/,' BLOCK NUMBER IN BUFFER          =',I8 )
C     CALL DBMDMP
      CALL DBMFDP
      CALL DSMSG ( 777 )
      CALL MESAGE ( -61, 0, 0 )
C     BLOCK IS NOT IN MEMORY, CALL DBMIO
540   CONTINUE
      IF ( FCB( 15, IFILEX ) .NE. 0 ) GO TO 550
      ISAVE = IOCODE
      ISAVEB = NBLOCK
      IOCODE = 0
      NBLOCK = FCB( 4, IFILEX ) + 1
      IPRBLK = INDBAS
      INDBAS = FCB( 12, IFILEX )
      FCB( 2, IFILEX ) = INDBAS
      CALL DBMIO ( 1 )
      IOCODE = ISAVE
      NBLOCK = ISAVEB
      CALL DBMMOV ( IPRBLK, INDBAS, 3 )
      GO TO 7777
550   CONTINUE
      IF ( FCB( 4, IFILEX ) .GT. FCB( 6, IFILEX ) ) GO TO 570
      INDBAS = FCB( 12, IFILEX )
      FCB( 2, IFILEX ) = INDBAS
      CALL DBMIO ( OPCODE )
      GO TO 7777
570   CONTINUE
      WRITE ( IWR, 9052 ) IFILEX
9052  FORMAT(///,' DBMMGR ERROR, ATTEMPT TO READ BEYOND EOF'
     &,/' UNIT=',I5)
C     CALL DBMDMP
      CALL DBMFDP
      CALL DSMSG ( 777 )
      CALL MESAGE ( -61, 0, 0 )
C****************
C     POSITION CODE *****************************************************
C****************
600   CONTINUE
      IF ( FCB( 5, IFILEX ) .EQ. 0 ) GO TO 605
      IF ( NBLOCK .GE. FCB( 5, IFILEX ) ) GO TO 690
605   CONTINUE
C     BLOCK IS IN THE IN-MEMORY DATA BASE, WALK CHAIN TO CORRECT BLOCK
C     (IOFF=1 WALKS FORWARD VIA NEXT POINTERS, IOFF=0 BACKWARD VIA PREV)
      IOFF = 1
      NBLK = NBLOCK - 1
      NEXBLK = FCB( 9, IFILEX )
      IF ( NBLOCK .EQ. 1 ) GO TO 670
      ICNDEX = FCB( 11, IFILEX )
      IF ( ICNDEX .EQ. 0 ) GO TO 610
      NEXBLK = ICNDEX
      ICBLK = MEM( ICNDEX+3 )
      IF ( ICBLK .EQ. NBLOCK ) GO TO 670
      IDIFF = NBLOCK - ICBLK
      NBLK = IABS( IDIFF )
      IF ( IDIFF .LT. 0 ) IOFF = 0
610   CONTINUE
      DO 620 I = 1, NBLK
      NEXBLK = MEM( NEXBLK+IOFF )
620   CONTINUE
C     SET DIRECTORY ENTRIES FOR THE POSITIONED BLOCK
670   FCB( 11, IFILEX ) = NEXBLK
      FCB( 4, IFILEX ) = NBLOCK
      FCB( 2, IFILEX ) = LOCFX( MEM(NEXBLK+4) ) - IBASBF + 1
      CALL DBMMOV ( INDBAS, NEXBLK+4, 3 )
      GO TO 7000
690   CONTINUE
      IF ( FCB( 15, IFILEX ) .NE. 0 ) GO TO 695
      ISAVE = IOCODE
      IOCODE = 0
      IPRBLK = INDBAS
      INDBAS = FCB( 12, IFILEX )
      FCB( 2, IFILEX ) = INDBAS
      FCB( 4, IFILEX ) = NBLOCK
      CALL DBMIO( 1 )
      IOCODE = ISAVE
      CALL DBMMOV ( IPRBLK, INDBAS, 3 )
      GO TO 7777
695   CONTINUE
      FCB( 4, IFILEX ) = NBLOCK
      INDBAS = FCB( 12, IFILEX )
      FCB( 2, IFILEX ) = INDBAS
      CALL DBMIO ( OPCODE )
      GO TO 7777
C****************
C     DELETE CODE *******************************************************
C****************
700   CONTINUE
      IF ( FCB( 9, IFILEX ) .EQ. 0 ) GO TO 710
      CALL DBMREL
710   CONTINUE
      CALL DBMIO ( 7 )
C     CLEAR FCB ENTRY (ENTRY 7, MAX BLOCK LIMIT, IS PRESERVED)
      DO 720 K = 1,15
      IF ( K .EQ. 7 ) GO TO 720
      FCB( K, IFILEX ) = 0
720   CONTINUE
      GO TO 7777
C****************
C     WRTBLK CODE *******************************************************
C****************
C     SPECIAL ENTRY FOR SUBSTRUCTURING, MOVE DATA FROM OPENCORE BUFFER
C     CALLED BY WRTBLK OF GINO
800   CONTINUE
      IF ( FCB( 15, IFILEX ) .EQ. 0 ) GO TO 810
C     ORIGINAL BUFFER IS BEING USED BY GINO, JUST RETURN
      GO TO 7777
810   IND1 = FCB( 2, IFILEX )
      IND2 = FCB( 12, IFILEX )
      IND1 = IND1 + 2
      IND2 = IND2 + 2
C     PRINT *,' DBMMGR,WRTBLK,IND1,IND2,NBUFF3=',IND1,IND2,NBUFF3
C     PRINT *,' DBMMGR,WRTBLK,INDBAS=',INDBAS
C     WRITE(6,44771)(FCB(K,IFILEX),K=1,15)
C     WRITE(6,44772)(MEM(IND2+K),K=1,8)
44772 FORMAT(' DBMMGR,BUFFER,IND2=',8(1X,Z8))
      DO 820 I = 1, NBUFF3
      MEM( IND1+I ) = MEM( IND2+I )
820   CONTINUE
      GO TO 7000
C****************
C     RDBLK CODE *******************************************************
C****************
C     SPECIAL ENTRY FOR SUBSTRUCTURING, MOVE DATA TO ORIGINAL BUFFER IF
C     THE IN-MEMORY DATA BASE IS BEING USED
C     CALLED BY RDBLK
900   CONTINUE
      IF ( FCB( 15, IFILEX ) .EQ. 0 ) GO TO 910
C     ORIGINAL BUFFER IS BEING USED, JUST RETURN
      GO TO 7777
910   IND1 = FCB( 2, IFILEX )
      IND2 = FCB( 12, IFILEX )
      IND1 = IND1 + 2
      IND2 = IND2 + 2
C     PRINT *,' DBMMGR,RDBLK,IND1,IND2,NBUFF3=',IND1,IND2,NBUFF3
C     PRINT *,' DBMMGR,RDBLK,INDBAS=',INDBAS
C     WRITE(6,44771)(FCB(K,IFILEX),K=1,15)
C     WRITE(6,44773)(MEM(IND1+K),K=1,8)
44773 FORMAT(' DBMMGR,BUFFER,IND1=',8(1X,Z8))
      DO 920 I = 1, NBUFF3
      MEM( IND2+I ) = MEM( IND1+I )
920   CONTINUE
      GO TO 7000
7000  CONTINUE
C     SET INDBAS TO POINT TO CURRENT BUFFER
      INDBAS = FCB( 2, IFILEX )
C     IF ( NAME .NE. 307 ) GO TO 7777
C     IF ( IFILEX .NE. 48 ) GO TO 7777
C     PRINT *,' DBMMGR RETURNING,IFILEX,INDBAS=',IFILEX,INDBAS
C     PRINT *,' DBMMGR RETURNING,INDCLR,INDCBP=',INDCLR,INDCBP
C     write(6,40648)(mem(kb),kb=indbas-4,indbas+8)
40648 format(' returned buffer=',/,10(4(1x,z8),/))
C     WRITE(6,44771)(FCB(K,IFILEX),K=1,15)
C     CALL DBMFDP
44771 FORMAT(' returned FCB=',/,2(5I8,/),2I8,4X,2A4,4X,I8)
7777  CONTINUE
      RETURN
      END
from typing import Dict, List

from tracardi.domain.value_object.storage_info import StorageInfo
from tracardi.process_engine.debugger import Debugger
from tracardi.service.wf.domain.debug_info import DebugInfo
from tracardi.domain.entity import Entity
from tracardi.service.secrets import b64_encoder, b64_decoder


class EventDebugRecord(Entity):
    """Storage record carrying base64-encoded DebugInfo for one event.

    ``content`` holds the base64-encoded dict dump of a ``DebugInfo``.
    """

    content: str = None

    @staticmethod
    def encode(stat: Debugger) -> List['EventDebugRecord']:
        """Flatten a Debugger structure into a list of EventDebugRecord.

        FIX: the original implementation was a generator although the
        annotation promised ``List``; it now returns an actual list.
        (A list is iterable, so callers that iterated the generator are
        unaffected.)
        """
        records = []
        for event_type, debugging in stat.items():
            for debug_infos in debugging:
                for rule_id, debug_info in debug_infos.items():  # type: DebugInfo
                    # NOTE: the backing storage field may be too small — an
                    # error occurs when the encoded payload exceeds ~32000
                    # characters.
                    b64 = b64_encoder(debug_info.dict())
                    records.append(
                        EventDebugRecord(id=debug_info.event.id, content=b64))
        return records

    def decode(self, from_dict=False) -> DebugInfo:
        """Rebuild the DebugInfo from the base64 content.

        When ``from_dict`` is True, ``self`` is treated as a plain dict
        (i.e. the method was invoked on a raw storage row), otherwise the
        ``content`` attribute of this record is used.
        """
        # NOTE: same ~32000-character storage-field limitation as encode().
        if from_dict is True:
            debug_info = b64_decoder(self['content'])
        else:
            debug_info = b64_decoder(self.content)

        return DebugInfo(
            **debug_info
        )

    # Persistence

    @staticmethod
    def storage_info() -> StorageInfo:
        """Return the storage index descriptor for this record type."""
        return StorageInfo(
            'debug-info',
            EventDebugRecord
        )
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:shake/shake.dart';

import '../../../../../constants.dart' as c;

/// Full-screen action widget that completes when the device is shaken.
class ShakeActionWidget extends StatefulWidget {
  /// Function to call after action completed.
  final Function(ShakeActionWidget action) onComplete;

  /// Creates the widget; [onComplete] must not be null.
  ShakeActionWidget({
    Key key,
    @required this.onComplete,
  }) : super(key: key);

  @override
  _ShakeActionWidgetState createState() => _ShakeActionWidgetState();
}

class _ShakeActionWidgetState extends State<ShakeActionWidget> {
  // Accelerometer listener that fires when a shake gesture is detected.
  ShakeDetector detector;

  @override
  void initState() {
    // FIX: super.initState() must be the first call in initState()
    // (Flutter State.initState contract); it was previously called last.
    super.initState();
    detector = ShakeDetector.autoStart(
      onPhoneShake: () {
        widget.onComplete(widget);
      },
    );
  }

  @override
  Widget build(BuildContext context) {
    return Container(
      color: c.background,
      child: Center(
        child: Text(
          tr('action_shake'),
          style: TextStyle(fontSize: 26),
        ),
      ),
    );
  }

  @override
  void dispose() {
    // Stop the accelerometer subscription to avoid callbacks after disposal.
    detector?.stopListening();
    super.dispose();
  }
}
0.0.5 2013-06-09 - cleanup 0.0.4 2013-06-09 - simplification: only annotations and explicit mappings, no interface-based mapping - enum support to define atoms and constants 0.0.3 2013-06-02 - separated binding and filter - explicit bindings between objects and language elements (atom, term, constant) - auto-binding of input objects and filter classes 0.0.2 2013-05-30 - solver clingo supported - terms and constants 0.0.1 2013-05-26 - first running version - solver dlv supported - mapping via interface implementations - mapping via annotations
package oop

import "fmt"

// People is a minimal example of encapsulation: the name field is
// unexported, so it is only accessible from within the oop package.
type People struct {
	name string
}

// walk prints a message saying that this person is walking.
func (p *People) walk() {
	msg := p.name + "在走路"
	fmt.Println(msg)
}
package com.linkedin.lift.lib.testing

import com.linkedin.lift.types.ScoreWithLabelAndPosition
import org.apache.spark.mllib.random.RandomRDDs.normalRDD
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
  * Common values for testing purposes
  */
object TestValues {
  // Shared local SparkSession for all test suites ("*" = use all cores).
  // NOTE: constructing this object starts Spark as a side effect.
  val spark: SparkSession = TestUtils.createSparkSession(numThreads = "*")

  import spark.implicits._

  // One joined record: ground-truth label, model prediction, a protected
  // attribute (gender) and an optional query id used by ranking-style tests.
  case class JoinedData(memberId: Int, label: String, predicted: String,
    gender: String, qid: String = "")

  // Binary classification fixture: labels/predictions are "0"/"1" strings.
  val testData: Seq[JoinedData] = Seq(
    JoinedData(12340, "0", "0", "MALE"),
    JoinedData(12341, "1", "0", "MALE"),
    JoinedData(12342, "0", "1", "MALE"),
    JoinedData(12343, "0", "0", "MALE"),
    JoinedData(12344, "1", "1", "MALE"),
    JoinedData(12345, "0", "1", "UNKNOWN"),
    JoinedData(12346, "1", "1", "FEMALE"),
    JoinedData(12347, "1", "0", "FEMALE"),
    JoinedData(12348, "0", "0", "FEMALE"),
    JoinedData(12349, "0", "1", "FEMALE"))

  val df: DataFrame = TestUtils.createDFFromProduct(TestValues.spark, testData)

  // Scored fixture: predictions are raw scores and each row carries a qid.
  val testData2: Seq[JoinedData] = Seq(
    JoinedData(12340, "0.0", "0.3", "MALE", "1"),
    JoinedData(12341, "1.0", "0.4", "MALE", "2"),
    JoinedData(12342, "0.0", "0.8", "MALE", "3"),
    JoinedData(12343, "0.0", "0.1", "MALE", "3"),
    JoinedData(12344, "1.0", "0.7", "MALE", "1"),
    JoinedData(12345, "0.0", "0.6", "UNKNOWN", "2"),
    JoinedData(12346, "1.0", "0.9", "FEMALE", "2"),
    JoinedData(12347, "1.0", "0.3", "FEMALE", "3"),
    JoinedData(12348, "0.0", "0.2", "FEMALE", "2"),
    JoinedData(12349, "0.0", "0.8", "FEMALE", "1"))

  val df2: DataFrame = TestUtils.createDFFromProduct(TestValues.spark, testData2)

  // test data for PositionBiasUtils
  // normalRDD(sc, size, numPartitions, seed) draws standard-normal scores;
  // each score is tagged as ScoreWithLabelAndPosition(score, label, position).
  val score00: RDD[ScoreWithLabelAndPosition] = normalRDD(spark.sparkContext, 1000L, 1, 12)
    .map(x => ScoreWithLabelAndPosition(x, 0, 1))
  val score10: RDD[ScoreWithLabelAndPosition] = normalRDD(spark.sparkContext, 1000L, 1, 123)
    .map(x => ScoreWithLabelAndPosition(x, 1, 1))
  val score01: RDD[ScoreWithLabelAndPosition] = normalRDD(spark.sparkContext, 200L, 1, 1234)
    .map(x => ScoreWithLabelAndPosition(x, 0, 2))
  val score11: RDD[ScoreWithLabelAndPosition] = normalRDD(spark.sparkContext, 800L, 1, 12345)
    .map(x => ScoreWithLabelAndPosition(x, 1, 2))
  // Position 3 scores are shifted down by 0.5 to simulate position bias.
  val score02: RDD[ScoreWithLabelAndPosition] = normalRDD(spark.sparkContext, 100L, 1, 23)
    .map(x => ScoreWithLabelAndPosition(x - 0.5, 0, 3))
  val score12: RDD[ScoreWithLabelAndPosition] = normalRDD(spark.sparkContext, 600L, 1, 234)
    .map(x => ScoreWithLabelAndPosition(x - 0.5, 1, 3))

  val positionBiasData: Dataset[ScoreWithLabelAndPosition] =
    (score00 ++ score01 ++ score10 ++ score11 ++ score02 ++ score12).toDS
}
/* * Copyright (C) 2009 - 2020 Broadleaf Commerce * * Licensed under the Broadleaf End User License Agreement (EULA), Version 1.1 (the * "Commercial License" located at http://license.broadleafcommerce.org/commercial_license-1.1.txt). * * Alternatively, the Commercial License may be replaced with a mutually agreed upon license (the * "Custom License") between you and Broadleaf Commerce. You may not use this file except in * compliance with the applicable license. * * NOTICE: All information contained herein is, and remains the property of Broadleaf Commerce, LLC * The intellectual and technical concepts contained herein are proprietary to Broadleaf Commerce, * LLC and may be covered by U.S. and Foreign Patents, patents in process, and are protected by * trade secret or copyright law. Dissemination of this information or reproduction of this material * is strictly forbidden unless prior written permission is obtained from Broadleaf Commerce, LLC. */ package org.broadleafcommerce.vendor.paypal.domain; import com.paypal.base.rest.PayPalModel; public class PayerInfo extends PayPalModel { private String account_id; private String email_address; private Name payer_name; public String getAccount_id() { return this.account_id; } public PayerInfo setAccount_id(String account_id) { this.account_id = account_id; return this; } public String getEmail_address() { return this.email_address; } public PayerInfo setEmail_address(String email_address) { this.email_address = email_address; return this; } public Name getPayer_name() { return this.payer_name; } public PayerInfo setPayer_name(Name payer_name) { this.payer_name = payer_name; return this; } @java.lang.Override @java.lang.SuppressWarnings("all") public boolean equals(final java.lang.Object o) { if (o == this) return true; if (!(o instanceof PayerInfo)) return false; final PayerInfo other = (PayerInfo) o; if (!other.canEqual((java.lang.Object) this)) return false; if (!super.equals(o)) return false; final java.lang.Object 
this$account_id = this.getAccount_id(); final java.lang.Object other$account_id = other.getAccount_id(); if (this$account_id == null ? other$account_id != null : !this$account_id.equals(other$account_id)) return false; final java.lang.Object this$email_address = this.getEmail_address(); final java.lang.Object other$email_address = other.getEmail_address(); if (this$email_address == null ? other$email_address != null : !this$email_address.equals(other$email_address)) return false; final java.lang.Object this$payer_name = this.getPayer_name(); final java.lang.Object other$payer_name = other.getPayer_name(); if (this$payer_name == null ? other$payer_name != null : !this$payer_name.equals(other$payer_name)) return false; return true; } @java.lang.SuppressWarnings("all") protected boolean canEqual(final java.lang.Object other) { return other instanceof PayerInfo; } @java.lang.Override @java.lang.SuppressWarnings("all") public int hashCode() { final int PRIME = 59; int result = 1; result = result * PRIME + super.hashCode(); final java.lang.Object $account_id = this.getAccount_id(); result = result * PRIME + ($account_id == null ? 43 : $account_id.hashCode()); final java.lang.Object $email_address = this.getEmail_address(); result = result * PRIME + ($email_address == null ? 43 : $email_address.hashCode()); final java.lang.Object $payer_name = this.getPayer_name(); result = result * PRIME + ($payer_name == null ? 43 : $payer_name.hashCode()); return result; } }
# Feed-forward neural network with sigmoid activation, trained in full batches
# with the RPROP (resilient backpropagation) algorithm.
#
# `shape` is an array of layer sizes, e.g. [2, 3, 1] = 2 inputs, one hidden
# layer of 3 neurons, 1 output. Weights/outputs are indexed by layer number.
class NeuralNet

  attr_reader :shape, :outputs
  attr_accessor :weights, :weight_update_values

  DEFAULT_TRAINING_OPTIONS = {
    max_iterations: 1_000,
    error_threshold: 0.01
  }

  def initialize shape
    @shape = shape
  end

  # Forward pass: feeds +input+ through the network and returns the output
  # layer's activations. Lazily initializes weights on first call.
  def run input
    # Input to this method represents the output of the first layer (i.e., the input layer)
    @outputs = [input]
    set_initial_weight_values if @weights.nil?
    # Now calculate output of neurons in subsequent layers:
    1.upto(output_layer).each do |layer|
      source_layer = layer - 1 # i.e, the layer that is feeding into this one
      source_outputs = @outputs[source_layer]
      @outputs[layer] = @weights[layer].map do |neuron_weights|
        # inputs to this neuron are the neuron outputs from the source layer times weights
        inputs = neuron_weights.map.with_index do |weight, i|
          source_output = source_outputs[i] || 1 # if no output, this is the bias neuron
          weight * source_output
        end
        sum_of_inputs = inputs.reduce(:+)
        # the activated output of this neuron (using sigmoid activation function)
        sigmoid sum_of_inputs
      end
    end
    # Outputs of neurons in the last layer is the final result
    @outputs[output_layer]
  end

  # Batch training loop. Returns a summary hash with the final error, the
  # number of iterations run, and whether the error threshold was reached.
  # Options: :max_iterations, :error_threshold, :log_every (progress output).
  def train inputs, expected_outputs, opts = {}
    opts = DEFAULT_TRAINING_OPTIONS.merge(opts)
    error_threshold, log_every = opts[:error_threshold], opts[:log_every]
    iteration, error = 0, 0

    set_initial_weight_update_values if @weight_update_values.nil?
    set_weight_changes_to_zeros
    set_previous_gradients_to_zeroes

    while iteration < opts[:max_iterations]
      iteration += 1

      error = train_on_batch(inputs, expected_outputs)

      if log_every && (iteration % log_every == 0)
        puts "[#{iteration}] #{(error * 100).round(2)}% mse"
      end

      break if error_threshold && (error < error_threshold)
    end

    {error: error.round(5), iterations: iteration, below_error_threshold: (error < error_threshold)}
  end

  private

  # One full pass over the batch: accumulate gradients for every sample, then
  # apply a single RPROP weight update. Returns the batch's average MSE.
  def train_on_batch inputs, expected_outputs
    total_mse = 0

    set_gradients_to_zeroes

    inputs.each.with_index do |input, i|
      run input
      training_error = calculate_training_error expected_outputs[i]
      update_gradients training_error
      total_mse += mean_squared_error training_error
    end

    update_weights

    total_mse / inputs.length.to_f # average mean squared error for batch
  end

  # Per-output-neuron error: actual output minus ideal output.
  def calculate_training_error ideal_output
    @outputs[output_layer].map.with_index do |output, i|
      output - ideal_output[i]
    end
  end

  # Backpropagation: computes deltas layer by layer (output -> first hidden)
  # and accumulates the gradient for every incoming weight.
  def update_gradients training_error
    deltas = {}
    # Starting from output layer and working backwards, backpropagating the training error
    output_layer.downto(1).each do |layer|
      deltas[layer] = []

      @shape[layer].times do |neuron|
        neuron_error = if layer == output_layer
          -training_error[neuron]
        else
          target_layer = layer + 1
          weighted_target_deltas = deltas[target_layer].map.with_index do |target_delta, target_neuron|
            target_weight = @weights[target_layer][target_neuron][neuron]
            target_delta * target_weight
          end
          weighted_target_deltas.reduce(:+)
        end

        output = @outputs[layer][neuron]
        activation_derivative = output * (1.0 - output)

        delta = deltas[layer][neuron] = neuron_error * activation_derivative

        # gradient for each of this neuron's incoming weights is calculated:
        # the last output from incoming source neuron (from -1 layer)
        # times this neuron's delta (calculated from error coming back from +1 layer)
        source_neurons = @shape[layer - 1] + 1 # account for bias neuron
        source_outputs = @outputs[layer - 1]
        gradients = @gradients[layer][neuron]

        source_neurons.times do |source_neuron|
          source_output = source_outputs[source_neuron] || 1 # if no output, this is the bias neuron
          gradient = source_output * delta
          gradients[source_neuron] += gradient # accumulate gradients from batch
        end
      end
    end
  end

  # Step size bounds for RPROP updates.
  MIN_STEP, MAX_STEP = Math.exp(-6), 50

  # Now that we've calculated gradients for the batch, we can use these to update the weights
  # Using the RPROP algorithm - somewhat more complicated than classic backpropagation algorithm, but much faster
  def update_weights
    1.upto(output_layer) do |layer|
      source_layer = layer - 1
      source_neurons = @shape[source_layer] + 1 # account for bias neuron

      @shape[layer].times do |neuron|
        source_neurons.times do |source_neuron|
          weight_change = @weight_changes[layer][neuron][source_neuron]
          weight_update_value = @weight_update_values[layer][neuron][source_neuron]
          # for RPROP, we use the negative of the calculated gradient
          gradient = -@gradients[layer][neuron][source_neuron]
          previous_gradient = @previous_gradients[layer][neuron][source_neuron]

          c = sign(gradient * previous_gradient)

          case c
            when 1 then # no sign change; accelerate gradient descent
              weight_update_value = [weight_update_value * 1.2, MAX_STEP].min
              weight_change = -sign(gradient) * weight_update_value
            when -1 then # sign change; we've jumped over a local minimum
              weight_update_value = [weight_update_value * 0.5, MIN_STEP].max
              weight_change = -weight_change # roll back previous weight change
              gradient = 0 # so won't trigger sign change on next update
            when 0 then
              weight_change = -sign(gradient) * weight_update_value
          end

          @weights[layer][neuron][source_neuron] += weight_change

          @weight_changes[layer][neuron][source_neuron] = weight_change
          @weight_update_values[layer][neuron][source_neuron] = weight_update_value
          @previous_gradients[layer][neuron][source_neuron] = gradient
        end
      end
    end
  end

  def set_weight_changes_to_zeros
    @weight_changes = build_connection_matrixes { 0.0 }
  end

  def set_gradients_to_zeroes
    @gradients = build_connection_matrixes { 0.0 }
  end

  def set_previous_gradients_to_zeroes
    @previous_gradients = build_connection_matrixes { 0.0 }
  end

  def set_initial_weight_update_values
    @weight_update_values = build_connection_matrixes { 0.1 }
  end

  def set_initial_weight_values
    # Initialize all weights to random float value
    @weights = build_connection_matrixes { rand(-0.5..0.5) }
    # Update weights for first hidden layer (Nguyen-Widrow method)
    # This is a bit obscure, and not entirely necessary, but it should help the network train faster
    beta = 0.7 * @shape[1]**(1.0 / @shape[0])
    @shape[1].times do |neuron|
      weights = @weights[1][neuron]
      norm = Math.sqrt weights.map {|w| w**2}.reduce(:+)
      updated_weights = weights.map {|weight| (beta * weight) / norm }
      @weights[1][neuron] = updated_weights
    end
  end

  # Builds { layer => matrix } where matrix[neuron][source_neuron] is seeded
  # by the given block; used for weights, gradients, and update bookkeeping.
  def build_connection_matrixes
    1.upto(output_layer).inject({}) do |hsh, layer|
      # Number of incoming connections to each neuron in this layer:
      source_neurons = @shape[layer - 1] + 1 # == number of neurons in prev layer + a bias neuron
      # matrix[neuron] == Array of values for each incoming connection to neuron
      matrix = Array.new(@shape[layer]) do |neuron|
        Array.new(source_neurons) { yield }
      end
      hsh[layer] = matrix
      hsh
    end
  end

  def output_layer
    @shape.length - 1
  end

  def sigmoid x
    1 / (1 + Math.exp(-x))
  end

  def mean_squared_error errors
    errors.map {|e| e**2}.reduce(:+) / errors.length.to_f
  end

  ZERO_TOLERANCE = Math.exp(-16)

  # Returns -1/0/1; treats floats within ZERO_TOLERANCE of zero as zero.
  def sign x
    if x > ZERO_TOLERANCE
      1
    elsif x < -ZERO_TOLERANCE
      -1
    else
      0 # x is zero, or a float very close to zero
    end
  end

  # Only shape, weights, and update values are serialized; training state
  # (gradients, outputs) is transient and rebuilt on demand.
  def marshal_dump
    [@shape, @weights, @weight_update_values]
  end

  def marshal_load array
    @shape, @weights, @weight_update_values = array
  end
end
#!/bin/bash

# Generate a random 6-char lowercase suffix so repeated CI runs don't collide.
# `|| true` guards against tr's SIGPIPE-driven nonzero exit when head closes the pipe.
RNDSTRING=$( tr -dc a-z < /dev/urandom | head -c 6 || true)
TENANT_RND_NAME_FOR_TESTING_ADD_TENANT="testtenant${RNDSTRING}"

# Persist the generated tenant name for later test stages.
echo ${TENANT_RND_NAME_FOR_TESTING_ADD_TENANT} > /build/tenant-rnd-name

echo "--- running ta-create-keypair"
/build/to/opstrace ta-create-keypair /build/ta-custom-keypair.pem

echo "--- running ta-create-token"
/build/to/opstrace ta-create-token "${OPSTRACE_CLUSTER_NAME}" \
    "${TENANT_RND_NAME_FOR_TESTING_ADD_TENANT}" ta-custom-keypair.pem > /build/tenant-rnd-auth-token-from-custom-keypair

# Fix: this step previously logged "--- running ta-create-token" a second time,
# mislabeling the pubkeys-add step in CI output.
echo "--- running ta-pubkeys-add"
/build/to/opstrace ta-pubkeys-add "${OPSTRACE_CLOUD_PROVIDER}" "${OPSTRACE_CLUSTER_NAME}" /build/ta-custom-keypair.pem
import React, { CSSProperties } from "react";
import "./Col.scss";
import { combineClasses, scopedClass, classesObj } from "@utils/index";
import RowContext from "@components/Row/context";

type StringOrNumber = string | number;
type ResponsiveAttributeType = StringOrNumber | Object;

// Per-breakpoint settings when a responsive prop is given as an object.
interface ColSize extends Object {
  offset?: StringOrNumber;
  order?: StringOrNumber;
  push?: StringOrNumber;
  pull?: StringOrNumber;
  span?: StringOrNumber;
}

export interface Props {
  children?: React.ReactNode;
  /** flex layout property, forwarded to the CSS `flex` shorthand */
  flex?: StringOrNumber;
  /** number of grid columns to leave empty on the left of the column */
  offset?: StringOrNumber;
  /** visual ordering of the column within the row */
  order?: StringOrNumber;
  /** number of grid columns to move the column to the right */
  push?: StringOrNumber;
  /** number of grid columns to move the column to the left */
  pull?: StringOrNumber;
  /** number of grid columns the column spans; 0 is equivalent to display: none */
  span?: StringOrNumber;
  /** <576px responsive grid: a column count or an object with the other props */
  xs?: ResponsiveAttributeType;
  /** ≥576px responsive grid: a column count or an object with the other props */
  sm?: ResponsiveAttributeType;
  /** ≥768px responsive grid: a column count or an object with the other props */
  md?: ResponsiveAttributeType;
  /** ≥992px responsive grid: a column count or an object with the other props */
  lg?: ResponsiveAttributeType;
  /** ≥1200px responsive grid: a column count or an object with the other props */
  xl?: ResponsiveAttributeType;
  /** ≥1600px responsive grid: a column count or an object with the other props */
  xxl?: ResponsiveAttributeType;
}

// Grid column. Builds a list of scoped BEM-style class names from the props
// and reads gutter sizes from the surrounding Row via RowContext.
const Col: React.FunctionComponent<Props> = ({
  children,
  flex,
  offset = 0,
  order = 0,
  push = 0,
  pull = 0,
  span,
  xs,
  sm,
  md,
  lg,
  xl,
  xxl,
}: Props) => {
  const flexStyle: CSSProperties = {
    flex,
  };
  // All class names are namespaced under the "col" scope.
  const colClass = (...classes: (string | Array<string> | classesObj)[]) =>
    scopedClass("col", ...classes);

  // Maps one responsive prop (e.g. md={8} or md={{ span: 8, offset: 2 }})
  // to its class-name flags. NOTE(review): the "Reponsive" spelling is kept
  // as-is here since this is a documentation-only pass.
  const generateReponsiveClassNames = (
    attributeName: string,
    attributeValue: ResponsiveAttributeType
  ) => {
    if (
      typeof attributeValue === "string" ||
      typeof attributeValue === "number"
    ) {
      // Scalar form: a bare span count; falsy (0) values emit no class.
      return {
        [`${colClass(attributeName, `${attributeValue}`)}`]: Boolean(
          attributeValue
        ),
      };
    } else {
      // typeof attributeValue => "object"
      // Object form: every sub-attribute unconditionally gets a class, even
      // when undefined — presumably the stylesheet ignores "undefined"
      // suffixed classes (TODO confirm against Col.scss).
      const { offset, order, push, pull, span } = attributeValue as ColSize;
      return {
        [`${colClass(`${attributeName}-offset`, `${offset}`)}`]: true,
        [`${colClass(`${attributeName}-order`, `${order}`)}`]: true,
        [`${colClass(`${attributeName}-push`, `${push}`)}`]: true,
        [`${colClass(`${attributeName}-pull`, `${pull}`)}`]: true,
        [`${colClass(`${attributeName}`, `${span}`)}`]: true,
      };
    }
  };

  // Applies the mapper above to every provided breakpoint prop.
  const generateReponsiveClassNames2 = (obj: {
    [k: string]: ResponsiveAttributeType | undefined;
  }) => {
    return Object.entries(obj).map(([key, value]) => {
      if (value === undefined) {
        return "";
      }
      return generateReponsiveClassNames(key, value);
    });
  };

  const reponsiveClassNames = generateReponsiveClassNames2({
    xs,
    sm,
    md,
    lg,
    xl,
    xxl,
  });

  // Combines the non-responsive flags (offset/order/push/pull/span) with the
  // responsive class names computed above.
  const styleClassNames = (offset: StringOrNumber, order: StringOrNumber) => {
    return combineClasses(
      {
        [`${colClass("offset", `${offset}`)}`]: Boolean(offset),
        [`${colClass("order", `${order}`)}`]: Boolean(order),
        [`${colClass("push", `${push}`)}`]: Boolean(push),
        [`${colClass("pull", `${pull}`)}`]: Boolean(pull),
      },
      (typeof span === "undefined")
        ? {}
        : {
            [`${colClass(`${span}`)}`]: true,
          },
      ...reponsiveClassNames
    );
  };
  const className = combineClasses(colClass(), styleClassNames(offset, order));

  return (
    <RowContext.Consumer>
      {({ horizontal, vertical }) => {
        // Row gutters are split half-and-half into padding on each side.
        const gutterStyle: CSSProperties = {
          ...(horizontal && horizontal > 0
            ? {
                paddingLeft: horizontal / 2,
                paddingRight: horizontal / 2,
              }
            : {}),
          ...(vertical && vertical! > 0
            ? {
                paddingTop: vertical / 2,
                paddingBottom: vertical / 2,
              }
            : {}),
        };
        return (
          <div className={className} style={{ ...flexStyle, ...gutterStyle }}>
            {children}
          </div>
        );
      }}
    </RowContext.Consumer>
  );
};

export default Col;
import pandas as pd
import numpy as np
import random
import matplotlib.pyplot as plt

from sklearn.datasets import make_blobs
from sklearn.decomposition import PCA
from sklearn import preprocessing
from sklearn import metrics


class KMeans:
    """Minimal k-means clustering using vectorized numpy distance computation."""

    def __init__(self):
        # Fix: the original assigned the built-in ``list`` *type* as a
        # placeholder for these fields; use real empty containers instead.
        self.c_clusters = 0
        self.centroids = []
        self.data = []
        self.clustering_results = []

    def get_init_centroids(self):
        """Pick ``c_clusters`` distinct random data points as initial centroids.

        Terminates the process (matching the existing error-handling style)
        when the requested cluster count is invalid.
        """
        if self.c_clusters < 1:
            print('please set the number of clusters bigger than 0')
            exit()
        elif self.c_clusters > len(self.data):
            print('the number of clusters is not allowed to be set bigger than number of data')
            # Fix: the original only printed and fell through, which then
            # crashed inside random.sample with a confusing error.
            exit()
        else:
            self.centroids = np.array(random.sample(self.data.tolist(), self.c_clusters))

    def fit(self, data, c_clusters):
        """Run Lloyd's algorithm until centroids stop moving.

        :param data: 2-D array-like of shape (n_samples, n_features).
        :param c_clusters: number of clusters to form.
        """
        self.data = data
        self.c_clusters = c_clusters
        self.get_init_centroids()
        convergence = 0
        while convergence != 1:
            # Calculate each point's nearest centroid using the expansion
            # ||x - c||^2 = ||x||^2 - 2 x.c + ||c||^2 (computed for all pairs).
            dist_matrix = np.sqrt(
                -2 * np.dot(data, self.centroids.T) + np.sum(np.square(self.centroids), axis=1) + np.transpose(
                    [np.sum(np.square(data), axis=1)]))
            self.clustering_results = np.array([dist_matrix.argmin(axis=1)])

            # Calculate each cluster's new centroid by grouping points on
            # their assigned cluster index and averaging.
            index_df = np.concatenate((self.clustering_results.T, self.data), axis=1)
            index_df = pd.DataFrame(index_df)
            index_df.rename(columns={index_df.columns[0]: "index"}, inplace=True)
            index_df.sort_values('index', inplace=True)
            new_centroids = index_df.groupby('index').mean().values

            # NOTE(review): exact float equality works here because converged
            # centroids are recomputed from identical assignments; an empty
            # cluster would shrink new_centroids and break this comparison —
            # TODO confirm that inputs cannot produce empty clusters.
            convergence = 1 if (new_centroids == self.centroids).all() else 0
            self.centroids = new_centroids

    def draw_result(self):
        """Project the data to 2-D with PCA and save a scatter plot of the clustering."""
        pca = PCA(n_components=2)
        data = pca.fit_transform(self.data)
        plt.scatter(data[:, 0], data[:, 1], c=self.clustering_results[0])
        plt.title('clustering result')
        plt.savefig('clustering result')


if __name__ == '__main__':
    k_means = KMeans()
    # centers = [[2, 2], [1.3, 1.1], [0, 0], [-1.1, 0.9], [0.9, -1.1]]
    # data, labels_true = make_blobs(n_samples=10000, centers=centers, cluster_std=1.2, random_state=0)
    raw_data = pd.read_csv('../data/mobile_train.csv')
    data = raw_data.iloc[:, 1:].values
    labels_true = raw_data.iloc[:, -1:].values[:, 0]
    k_means.fit(data, 4)

    # Plot predicted clusters and ground-truth classes side by side (as files).
    pca = PCA(n_components=2)
    data = pca.fit_transform(k_means.data)
    plt.scatter(data[:, 0], data[:, 1], c=k_means.clustering_results[0])
    plt.title('Clustering result: mobile prices')
    plt.savefig('mobile prices')
    plt.clf()
    plt.scatter(data[:, 0], data[:, 1], c=labels_true)
    plt.title('True classifications: mobile prices')
    plt.savefig('true result mobile prices')

    # Adjusted Rand index: similarity between predicted and true labelings.
    score = metrics.adjusted_rand_score(labels_true, k_means.clustering_results[0])
    print(score)
package mage.cards.d;

import mage.abilities.Ability;
import mage.abilities.common.DiesCreatureTriggeredAbility;
import mage.abilities.effects.Effect;
import mage.abilities.effects.common.DrawCardSourceControllerEffect;
import mage.abilities.effects.common.LoseLifeSourceControllerEffect;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.constants.TargetController;
import mage.filter.StaticFilters;
import mage.filter.common.FilterCreaturePermanent;

import java.util.UUID;

/**
 * Dark Prophecy — {B}{B}{B} enchantment.
 *
 * @author LevelX2
 */
public final class DarkProphecy extends CardImpl {

    public DarkProphecy(UUID ownerId, CardSetInfo setInfo) {
        super(ownerId, setInfo, new CardType[]{CardType.ENCHANTMENT}, "{B}{B}{B}");

        // Whenever a creature you control dies, you draw a card and you lose 1 life.
        Effect drawEffect = new DrawCardSourceControllerEffect(1, "you");
        Ability triggeredAbility = new DiesCreatureTriggeredAbility(
                drawEffect, false, StaticFilters.FILTER_CONTROLLED_A_CREATURE);
        Effect lifeLossEffect = new LoseLifeSourceControllerEffect(1);
        triggeredAbility.addEffect(lifeLossEffect.concatBy("and"));
        this.addAbility(triggeredAbility);
    }

    private DarkProphecy(final DarkProphecy card) {
        super(card);
    }

    @Override
    public DarkProphecy copy() {
        return new DarkProphecy(this);
    }
}
const DB_NAME = 'oneShotIDB' const STORE_NAME = 'defaultStore' const TARGET_KEY = 0 interface KVSRecord { key: number data: unknown } // NOTE: Unfortunately, the current typeScript compiler doesn't support inference of IDBOpenRequest and its result. const getResultFromEvent = (event: Event): unknown => (event.target as IDBOpenDBRequest).result const idbCreate = () => new Promise<IDBDatabase>((resolve, reject) => { const openReq = indexedDB.open(DB_NAME, 1) openReq.onupgradeneeded = (event: IDBVersionChangeEvent) => { const db = getResultFromEvent(event) as IDBDatabase db.createObjectStore(STORE_NAME, { keyPath: 'key', }) } openReq.onsuccess = (event: Event) => resolve(getResultFromEvent(event) as IDBDatabase) openReq.onerror = () => reject(new Error('IndexedDB open failed.')) }) const idbGet = (db: IDBDatabase, key: number) => new Promise<KVSRecord>((resolve, reject) => { const trans = db.transaction(STORE_NAME, 'readonly') const store = trans.objectStore(STORE_NAME) const getReq = store.get(key) getReq.onsuccess = (event: Event) => resolve(getResultFromEvent(event) as KVSRecord) getReq.onerror = () => reject(new Error('IndexedDB get failed.')) }) const idbPut = (db: IDBDatabase, record: KVSRecord) => new Promise((resolve, reject) => { const trans = db.transaction(STORE_NAME, 'readwrite') const store = trans.objectStore(STORE_NAME) const putReq = store.put(record) putReq.onsuccess = () => resolve() putReq.onerror = () => reject(new Error('IndexedDB put failed.')) }) const dbDelete = () => new Promise((resolve, reject) => { const deleteReq = indexedDB.deleteDatabase(DB_NAME) deleteReq.onsuccess = () => resolve() deleteReq.onerror = () => reject(new Error('IndexedDB delete failed.')) }) export const writeData = async (data: unknown) => { let db: IDBDatabase | null = null try { await dbDelete() db = await idbCreate() await idbPut(db, { key: TARGET_KEY, data }) db.close() return true } catch (e) { console.error(e) if (db) { db.close() } return false } } export const 
readData = async () => { let db: IDBDatabase | null = null try { db = await idbCreate() const record = await idbGet(db, TARGET_KEY) db.close() await dbDelete() return record.data } catch (e) { console.error(e) if (db) { db.close() } return null } }
# frozen_string_literal: true

require "helper"

require "rbconfig"
require "json"

module TestVersionInfoTests
  # Loose x.y.z pattern used to sanity-check reported version strings.
  VERSION_MATCH = /\d+\.\d+\.\d+/

  #
  #  This module is mixed into test classes below so that the tests
  #  are validated when `nokogiri.rb` is required and when
  #  `nokogiri/version.rb` is required. See #1896 for background.
  #
  def test_version_info_basics
    assert_match(VERSION_MATCH, Nokogiri::VERSION)

    assert_equal(Nokogiri::VERSION, Nokogiri::VERSION_INFO["nokogiri"]["version"])

    if jruby?
      refute(Nokogiri::VERSION_INFO["nokogiri"].has_key?("cppflags"), "did not expect cppflags")
    else
      # cppflags/ldflags are more fully tested in scripts/test-gem-installation
      assert_kind_of(Array, Nokogiri::VERSION_INFO["nokogiri"]["cppflags"], "cppflags should be an array")
      assert_kind_of(Array, Nokogiri::VERSION_INFO["nokogiri"]["ldflags"], "ldflags should be an array")
    end

    assert_equal(::RUBY_VERSION, Nokogiri::VERSION_INFO["ruby"]["version"])
    assert_equal(::RUBY_PLATFORM, Nokogiri::VERSION_INFO["ruby"]["platform"])
    assert_equal(::Gem::Platform.local.to_s, Nokogiri::VERSION_INFO["ruby"]["gem_platform"])
  end

  def test_version_info_for_xerces_and_nekohtml
    skip_unless_jruby("xerces/nekohtml is only used for JRuby")
    assert_equal(Nokogiri::XERCES_VERSION, version_info["other_libraries"]["xerces"])
    assert_equal(Nokogiri::NEKO_VERSION, version_info["other_libraries"]["nekohtml"])
  end

  def test_version_info_for_libxml
    skip_unless_libxml2("libxml2 is only used for CRuby")

    # "packaged" and "system" report different key sets; assert both shapes.
    if Nokogiri::VersionInfo.instance.libxml2_using_packaged?
      assert_equal("packaged", version_info["libxml"]["source"])
      assert(version_info["libxml"]["patches"])
      assert_equal(Nokogiri::VersionInfo.instance.libxml2_precompiled?, version_info["libxml"]["precompiled"])
    end

    if Nokogiri::VersionInfo.instance.libxml2_using_system?
      assert_equal("system", version_info["libxml"]["source"])
      refute(version_info["libxml"].key?("precompiled"))
      refute(version_info["libxml"].key?("patches"))
    end

    assert_equal(Nokogiri::LIBXML_COMPILED_VERSION, version_info["libxml"]["compiled"])
    assert_match(VERSION_MATCH, version_info["libxml"]["compiled"])

    assert_match VERSION_MATCH, version_info["libxml"]["loaded"]
    # LIBXML_LOADED_VERSION is a packed integer (e.g. 20904 => 2.9.4);
    # unpack and compare against the dotted "loaded" string.
    Nokogiri::LIBXML_LOADED_VERSION =~ /(\d)(\d{2})(\d{2})/
    major = Regexp.last_match(1).to_i
    minor = Regexp.last_match(2).to_i
    bug = Regexp.last_match(3).to_i
    assert_equal("#{major}.#{minor}.#{bug}", Nokogiri::VERSION_INFO["libxml"]["loaded"])

    assert(version_info["libxml"].key?("iconv_enabled"))
  end

  def test_version_info_for_libxslt
    skip_unless_libxml2("libxslt is only used for CRuby")

    if Nokogiri::VersionInfo.instance.libxml2_using_packaged?
      assert_equal("packaged", version_info["libxslt"]["source"])
      assert(version_info["libxslt"]["patches"])
      assert_equal(Nokogiri::VersionInfo.instance.libxml2_precompiled?, version_info["libxslt"]["precompiled"])
    end

    if Nokogiri::VersionInfo.instance.libxml2_using_system?
      assert_equal("system", version_info["libxslt"]["source"])
      refute(version_info["libxslt"].key?("precompiled"))
      refute(version_info["libxslt"].key?("patches"))
    end

    assert_equal(Nokogiri::LIBXSLT_COMPILED_VERSION, version_info["libxslt"]["compiled"])
    assert_match(VERSION_MATCH, version_info["libxslt"]["compiled"])

    assert_match(VERSION_MATCH, version_info["libxslt"]["loaded"])
    # Same packed-integer unpacking as for libxml above.
    Nokogiri::LIBXSLT_LOADED_VERSION =~ /(\d)(\d{2})(\d{2})/
    major = Regexp.last_match(1).to_i
    minor = Regexp.last_match(2).to_i
    bug = Regexp.last_match(3).to_i
    assert_equal("#{major}.#{minor}.#{bug}", Nokogiri::VERSION_INFO["libxslt"]["loaded"])
  end
end

class TestVersionInfo
  RUBYEXEC = File.join(RbConfig::CONFIG["bindir"], RbConfig::CONFIG["RUBY_INSTALL_NAME"])
  ROOTDIR = File.expand_path(File.join(File.dirname(__FILE__), ".."))

  class Base < Nokogiri::TestCase
    # Runs a fresh ruby process that requires `require_name` and dumps
    # Nokogiri::VERSION_INFO as JSON, so each subclass validates the constant
    # in a pristine interpreter rather than the already-loaded test process.
    let(:version_info) do
      version_info = Dir.chdir(ROOTDIR) do
        %x(#{RUBYEXEC} -Ilib -rjson -e 'require "#{require_name}"; puts Nokogiri::VERSION_INFO.to_json')
      end
      JSON.parse(version_info)
    end
  end

  class RequireNokogiri < TestVersionInfo::Base
    include TestVersionInfoTests
    let(:require_name) { "nokogiri" }
  end

  class RequireVersionFileOnly < TestVersionInfo::Base
    include TestVersionInfoTests
    let(:require_name) { "nokogiri/version" }
  end
end
// Operation type recorded when reactive state is read (dependency tracking).
export const TrackOpTypes = {
  GET: 'get',
}

// Operation types that trigger effects when reactive state is written.
export const TriggerOpTypes = {
  SET: 'set',
  ADD: 'add',
}
# Absolute path to the project-local babel-node binary; used as the shebang
# interpreter of the generated CLI so it runs through Babel on the fly.
bin=$(pwd)/node_modules/.bin/babel-node
out="bin/cli.js"

# Generate the CLI entry point: babel-node shebang + bootstrap require,
# then mark it executable.
echo "#!$bin" > $out
echo "require(\"./ixirc.js\");" >> $out
chmod +x $out
package typingsSlinky.winrt.Windows.ApplicationModel.Activation

import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}

// ScalablyTyped-generated Scala.js facade for the WinRT enum
// Windows.ApplicationModel.Activation.ApplicationExecutionState.
// Each sealed trait below models one enum member of the JS global.
@js.native
sealed trait ApplicationExecutionState extends StObject

@JSGlobal("Windows.ApplicationModel.Activation.ApplicationExecutionState")
@js.native
object ApplicationExecutionState extends StObject {

  @js.native
  sealed trait closedByUser extends ApplicationExecutionState

  @js.native
  sealed trait notRunning extends ApplicationExecutionState

  @js.native
  sealed trait running extends ApplicationExecutionState

  @js.native
  sealed trait suspended extends ApplicationExecutionState

  @js.native
  sealed trait terminated extends ApplicationExecutionState
}
<?php

namespace RTippin\Messenger\Events;

use Illuminate\Queue\SerializesModels;
use RTippin\Messenger\Models\SentFriend;

/**
 * Event fired when a pending (sent) friend request is cancelled.
 */
class FriendCancelledEvent
{
    use SerializesModels;

    /**
     * The cancelled sent-friend-request model.
     *
     * @var SentFriend
     */
    public SentFriend $friend;

    /**
     * Create a new event instance.
     *
     * @param SentFriend $friend
     */
    public function __construct(SentFriend $friend)
    {
        $this->friend = $friend;
    }
}
namespace ExperimentalTools
{
    /// <summary>
    /// String identifiers used to enable/disable individual features
    /// (refactorings, analyzers, code fixes, and commands).
    /// Each constant's value matches its own name.
    /// </summary>
    public static class FeatureIdentifiers
    {
        public const string AddConstructorParameterRefactoring = "AddConstructorParameterRefactoring";
        public const string AddInitializedFieldRefactoring = "AddInitializedFieldRefactoring";
        public const string AddNewConstructorWithParameterRefactoring = "AddNewConstructorWithParameterRefactoring";
        public const string ChangeAccessModifierRefactoring = "ChangeAccessModifierRefactoring";
        public const string TypeAndDocumentNameAnalyzer = "TypeAndDocumentNameAnalyzer";
        public const string RenameTypeToMatchFileNameCodeFix = "RenameTypeToMatchFileNameCodeFix";
        public const string RenameFileToMatchTypeNameCodeFix = "RenameFileToMatchTypeNameCodeFix";
        public const string NamespaceNormalizationAnalyzer = "NamespaceNormalizationAnalyzer";
        public const string NamespaceNormalizationCodeFix = "NamespaceNormalizationCodeFix";
        public const string FixConstructorNameCodeFix = "FixConstructorNameCodeFix";
        public const string LocateInSolutionExplorerCommand = "LocateInSolutionExplorerCommand";
        public const string ScaffoldXunitTheoryMemberData = "ScaffoldXunitTheoryMemberData";
        public const string ScaffoldXunitTheoryInlineData = "ScaffoldXunitTheoryInlineData";
        public const string GenerateGuid = "GenerateGuid";
        public const string AddBraces = "AddBraces";
        public const string RemoveBraces = "RemoveBraces";
        public const string FieldCanBeMadeReadOnly = "FieldCanBeMadeReadOnly";
    }
}
<?php

namespace Noardcode\LaravelUptimeMonitor\Models;

use Carbon\Carbon;
use GuzzleHttp\Exception\ConnectException;
use GuzzleHttp\Exception\RequestException;
use GuzzleHttp\Psr7\Response;
use GuzzleHttp\TransferStats;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\Relations\HasMany;
use Noardcode\LaravelUptimeMonitor\Events\MonitorAvailable;
use Noardcode\LaravelUptimeMonitor\Events\MonitorRestored;
use Noardcode\LaravelUptimeMonitor\Events\MonitorUnavailable;
use Noardcode\LaravelUptimeMonitor\Collections\MonitorsCollection;
use Noardcode\LaravelUptimeMonitor\ValueObjects\SslCertificate;

/**
 * Class Monitor
 *
 * Eloquent model for a single monitored endpoint: tracks up/down status,
 * SSL certificate details, and per-request timing statistics.
 *
 * @package Noardcode\LaravelUptimeMonitor\Models
 */
class Monitor extends Model
{
    /**
     * Everything is mass-assignable except timestamps and the primary key.
     *
     * @var string[]
     */
    protected $guarded = ['created_at', 'updated_at', 'id'];

    /**
     * @var string[]
     */
    protected $dates = [
        'checked_at',
        'ssl_checked_at',
    ];

    /**
     * @var string[]
     */
    protected $casts = [
        'enabled' => 'bool'
    ];

    /**
     * Wrap query results in the package's custom collection.
     *
     * @param array $models
     *
     * @return MonitorsCollection
     */
    public function newCollection(array $models = []): MonitorsCollection
    {
        return new MonitorsCollection($models);
    }

    /**
     * Timing statistics recorded for each check of this monitor.
     *
     * @return HasMany
     */
    public function statistics()
    {
        return $this->hasMany(MonitorStatistic::class);
    }

    /**
     * Handle an HTTP response: 2xx/3xx marks the monitor available,
     * anything else marks it unavailable.
     *
     * @param Response $response
     */
    public function requestSucceeded(Response $response)
    {
        if (!($response->getStatusCode() >= 200 && $response->getStatusCode() < 400)) {
            $this->monitorUnavailable('Status code ' . $response->getStatusCode());

            // Bug fix: without this return the monitor was immediately marked
            // available again right after being flagged unavailable.
            return;
        }

        $this->monitorAvailable($response);
    }

    /**
     * Handle a failed HTTP request (connection error, timeout, etc.).
     *
     * @param RequestException $connectException
     */
    public function requestFailed(RequestException $connectException)
    {
        $this->monitorUnavailable($connectException->getMessage());
    }

    /**
     * Store details of a successfully retrieved SSL certificate.
     *
     * @param SslCertificate $certificate
     */
    public function certificateReceived(SslCertificate $certificate)
    {
        $this->ssl_status = 'up';
        $this->ssl_issuer = $certificate->getIssuerCommonName();
        $this->ssl_valid_from = $certificate->getValidFrom();
        $this->ssl_valid_to = $certificate->getValidTo();
        $this->ssl_checked_at = Carbon::now();
        $this->save();
    }

    /**
     * Clear SSL details when the certificate check failed.
     *
     * @param SslCertificate $certificate
     */
    public function certificateFailed(SslCertificate $certificate)
    {
        $this->ssl_status = 'down';
        $this->ssl_issuer = null;
        $this->ssl_valid_from = null;
        $this->ssl_valid_to = null;
        $this->ssl_checked_at = Carbon::now();
        $this->save();
    }

    /**
     * Persist Guzzle transfer timing stats for this check.
     *
     * @param TransferStats $stats
     */
    public function receivedStats(TransferStats $stats)
    {
        MonitorStatistic::create([
            'monitor_id' => $this->id,
            'total_time' => $stats->getHandlerStat('total_time'),
            'namelookup_time' => $stats->getHandlerStat('namelookup_time'),
            'connect_time' => $stats->getHandlerStat('connect_time'),
            'pretransfer_time' => $stats->getHandlerStat('pretransfer_time'),
            'starttransfer_time' => $stats->getHandlerStat('starttransfer_time'),
            'redirect_time' => $stats->getHandlerStat('redirect_time'),
        ]);
    }

    /**
     * Mark the monitor as up; dispatches MonitorRestored when it was
     * previously down, and always dispatches MonitorAvailable.
     *
     * @param Response $response
     */
    private function monitorAvailable(Response $response)
    {
        $restored = false;
        if ($this->status == 'down') {
            $restored = true;
        }

        $this->status = 'up';
        $this->down_reason = null;
        $this->checked_at = Carbon::now();
        $this->save();

        if ($restored === true) {
            MonitorRestored::dispatch($this, $response);
        }

        MonitorAvailable::dispatch($this, $response);
    }

    /**
     * Mark the monitor as down with a reason and dispatch MonitorUnavailable.
     *
     * @param string $reason
     */
    private function monitorUnavailable(string $reason)
    {
        $this->status = 'down';
        $this->down_reason = $reason;
        $this->checked_at = Carbon::now();
        $this->save();

        MonitorUnavailable::dispatch($this);
    }
}
function ConvertFrom-BytesToHumanReadable {
    <#
    .SYNOPSIS
        Formats a byte count as a human-readable size string.
    .DESCRIPTION
        Returns the value scaled to GB, MB, KB, or raw bytes, rounded to two
        decimal places, with the unit appended (e.g. "1.5 GB").
    .PARAMETER Bytes
        The number of bytes to format. Accepts pipeline input.
    .OUTPUTS
        System.String
    #>
    [cmdletbinding()]
    Param (
        [Parameter(Mandatory,ValueFromPipeline)]
        [double]$Bytes
    )

    # Fix: the original compared against decimal powers of ten ([math]::pow(10, 9)
    # etc.) but divided by binary unit multipliers (1gb = 2^30, 1mb = 2^20,
    # 1kb = 2^10), so values near the boundaries were labelled with one scale and
    # scaled with another (e.g. 1,050,000,000 bytes -> "0.98 GB"). Thresholds now
    # use the same binary multipliers as the divisors; -ge makes an exact
    # 1kb/1mb/1gb value render in the larger unit.
    if ($Bytes -ge 1gb) {
        $GB = [math]::Round($Bytes / 1gb, 2)
        return "$GB GB"
    }
    elseif ($Bytes -ge 1mb) {
        $MB = [math]::Round($Bytes / 1mb, 2)
        return "$MB MB"
    }
    elseif ($Bytes -ge 1kb) {
        $KB = [math]::Round($Bytes / 1kb, 2)
        return "$KB KB"
    }
    else {
        return "$Bytes Bytes"
    }
}
---
layout: post
title: Training Caffe to break CAPTCHA
---

Caffe trained on CAPTCHA images: https://github.com/LouieYang/CAPTCHA-caffe/blob/master/

Training Caffe on cat photos: http://adilmoujahid.com/posts/2016/06/introduction-deep-learning-python-caffe/

Using a trained network (deploy.prototxt): https://github.com/BVLC/caffe/wiki/Using-a-Trained-Network:-Deploy
<?php

namespace Emonkak\Di\Cache;

/**
 * Array-style cache backed by the APCu extension.
 *
 * Every key is namespaced with a fixed prefix, and all entries share a
 * single TTL (0 = no expiry, per apcu_store()'s semantics).
 *
 * @implements \ArrayAccess<string,?mixed>
 */
class ApcuCache implements \ArrayAccess
{
    private string $prefix;
    private int $lifetime;

    /**
     * @param string $prefix   Prepended to every key to namespace entries.
     * @param int    $lifetime TTL in seconds passed to apcu_store(); 0 means no expiry.
     */
    public function __construct(string $prefix = '', int $lifetime = 0)
    {
        $this->prefix = $prefix;
        $this->lifetime = $lifetime;
    }

    /**
     * {@inheritdoc}
     *
     * Returns null on a cache miss. A stored null is indistinguishable from
     * a miss here; use offsetExists() to tell them apart.
     */
    // Fix: ArrayAccess implementations without compatible return type
    // declarations emit deprecation notices on PHP 8.1+. offsetGet() would
    // need ": mixed" (PHP 8.0+ only), so it is marked with
    // #[\ReturnTypeWillChange] instead, which older runtimes parse as a comment.
    #[\ReturnTypeWillChange]
    public function offsetGet($offset)
    {
        $value = apcu_fetch($this->prefix . $offset, $success);
        return $success ? $value : null;
    }

    /**
     * {@inheritdoc}
     */
    public function offsetExists($offset): bool
    {
        return apcu_exists($this->prefix . $offset);
    }

    /**
     * {@inheritdoc}
     */
    public function offsetSet($offset, $value): void
    {
        apcu_store($this->prefix . $offset, $value, $this->lifetime);
    }

    /**
     * {@inheritdoc}
     */
    public function offsetUnset($offset): void
    {
        apcu_delete($this->prefix . $offset);
    }
}
package my.itgungnir.rxmvvm.core.redux

/**
 * A Redux-style middleware hook that sits between action dispatch and the
 * reducer.
 *
 * @param T the state type this middleware observes.
 */
interface Middleware<T> {
    /**
     * Intercepts an [action] dispatched against the current [state].
     *
     * @param state    the current state snapshot.
     * @param action   the action being dispatched.
     * @param dispatch callback for dispatching additional actions
     *                 (e.g. async follow-ups).
     * @return an [Action] — presumably the action to forward down the chain
     *         (possibly transformed); TODO(review): confirm against the
     *         store implementation, which is outside this file.
     */
    fun apply(state: T, action: Action, dispatch: (Action) -> Unit): Action
}
using System.Collections.Generic;

/// <summary>
/// Priority scale used by priorityItem (translated from the original Russian notes):
/// 0 - needed sometime soon
/// 2 - pick up on the next shopping trip
/// 3 - needed urgently
/// 4 - needed right now
/// 5 - a matter of life and death
/// </summary>
namespace Bitard_BlockChain_Bot_Unit_Test
{
    /// <summary>
    /// An item name paired with an urgency level (see the scale above).
    /// </summary>
    class priorityItem
    {
        private string itemName;
        private int priority;

        public priorityItem(string item, int _priority)
        {
            itemName = item;
            priority = _priority;
        }

        // Expression-bodied read-only accessors, matching the file's getX naming style.
        public string getItem => itemName;

        // Fix: the original declaration was truncated at "public int" (no member
        // name or body), which does not compile; completed as the read-only
        // accessor for the priority field.
        public int getPriority => priority;
    }

    /// <summary>
    /// Simple list of item names with add/remove/format helpers.
    /// </summary>
    class staff
    {
        private List<string> listOfItems;

        public staff() => listOfItems = new List<string>();

        /// <summary>
        /// Adds a new item to the list.
        /// </summary>
        /// <param name="newItem"></param>
        public void addItem(string newItem) => listOfItems.Add(newItem);

        /// <summary>Removes the first occurrence of the item, if present.</summary>
        public void deleteItem(string item) => listOfItems.Remove(item);

        /// <summary>
        /// Returns all items concatenated, each followed by a newline
        /// (so the result ends with "\n" when the list is non-empty).
        /// </summary>
        public string getListOfItems()
        {
            string temple = "";
            foreach (string item in listOfItems)
            {
                temple = temple + item + "\n";
            }
            return temple;
        }

        /// <summary>Number of items currently in the list.</summary>
        public int getSize => listOfItems.Count;
    }
}
package den.device

import platform.UIKit.UIImpactFeedbackGenerator

/**
 * Thin wrapper around UIKit's [UIImpactFeedbackGenerator] that triggers a
 * single haptic impact on demand.
 */
class ImpactGenerator {

    // One generator instance is created up front and reused for every impact.
    private val feedbackGenerator = UIImpactFeedbackGenerator()

    /**
     * Prepares the generator and fires one impact. prepare() is called
     * immediately before impactOccurred(), mirroring the original code —
     * NOTE(review): UIKit recommends calling prepare() slightly ahead of the
     * event to reduce latency; confirm whether callers need that split.
     */
    fun impact() {
        feedbackGenerator.run {
            prepare()
            impactOccurred()
        }
    }
}