hexsha (string, 40) | size (int64, 5-129k) | content (string, 5-129k) | avg_line_length (float64, 4-98.3) | max_line_length (int64, 5-660) | alphanum_fraction (float64, 0.25-0.98) |
---|---|---|---|---|---|
703d51054bde8a2caf434860ddad29694bdf287f | 171 | export modinfo = {
type: "command"
desc: "UnDeek"
alias: {"undeek"}
func: getDoPlayersFunction (v) ->
pcall ->
tr = v.Character["Nice thing"]
tr.Parent = nil
} | 19 | 34 | 0.625731 |
8f1160e962acc744d6e319a09219d5de21dc2dbf | 164 | Dorothy!
VisualSettingView = require "View.Control.Unit.VisualSetting"
--MessageBox = require "Control.Basic.MessageBox"
Class VisualSettingView,
__init:=>
| 23.428571 | 62 | 0.768293 |
950fcda6435b984842b808ce32635a03dc22bcd9 | 3,237 | --- Handle outgoing responses
-- @author RyanSquared <[email protected]>
-- @classmod data.Response
http =
cookies: require "http.cookies"
headers: require "http.headers"
class Response
--- Return a Response handler
-- @tparam http.stream stream lua-http server -> client stream
-- @tparam App tbsp_app Turbospoon app
-- @tparam Request request Associated request object
new: (stream, tbsp_app, request)=>
assert stream
assert tbsp_app
@stream = stream
@app = tbsp_app
@cookies = {}
@session = request.session
--- Add a cookie to be sent back to the client
-- @tparam table cookie
-- **Must have the following fields:**
--
-- - `max_age` - Number of seconds for the cookie to live (`0` == session)
-- - `key` - Name to store the cookie by
-- - `value` - Value for the cookie, can be any RFC-safe string
--
-- **Can also have the following fields:**
--
-- - `domain` - PSL-compatible domain name where the cookie is valid
-- - `path` - URI path where the cookie is valid
-- - `secure` - Whether the cookie should only be sent over encrypted (HTTPS) connections
-- - `http_only` - Whether the cookie should be inaccessible to client-side scripts
-- - `same_site` (`"strict"` or `"lax"`) - Same Site cookie policy
add_cookie: (cookie)=>
assert cookie.max_age, "Missing cookie.max_age"
assert cookie.key, "Missing cookie.key"
assert cookie.value, "Missing cookie.value"
@cookies[cookie.key] = cookie
--- Remove a cookie based on key
-- @tparam string key
remove_cookie: (key)=>
@cookies[key] = nil
--- Create the session cookie so it is sent as a header
-- @tparam number age Age for cookie (default 28 days)
_make_session_cookie: (age = 60 * 60 * 24 * 28)=>
-- no need to worry about optimizing ^ because most Lua interpreters
-- implement folding of static values automatically
return if not @session
@cookies.session =
key: "session"
value: @app.jwt\encode @session
max_age: age
http_only: true
-- ::TODO:: redirect()
--- Send HTTP headers to the client
-- @tparam number status HTTP status code
-- @tparam http.headers headers Optional headers to send to client
write_headers: (status, headers = http.headers.new!)=>
assert status
-- if headers are *not* of http.headers, they should be instead a
-- key-value mapping of tables
headers_mt = getmetatable headers
if headers_mt != http.headers.mt
new_headers = http.headers.new!
for k, v in pairs headers
new_headers\append k, v
headers = new_headers
if not headers\has "content-type" then
headers\append "content-type", "text/plain"
@_make_session_cookie!
for _, cookie in pairs @cookies
headers\append "set-cookie", http.cookies.bake_cookie cookie
headers\upsert ":status", tostring status
@stream\write_headers headers, @method == "HEAD"
-- ::TODO:: write_chunk()
--- Send text as the body of the message
-- @tparam string text
write_body: (text)=>
return if @method == "HEAD"
@stream\write_chunk tostring(text), true
--- Send HTTP headers and body text at the same time
-- @tparam string text
-- @tparam number status HTTP status
-- @tparam http.headers headers HTTP headers
write_response: (text, status = 200, headers)=>
@write_headers status, headers
@write_body text
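-- A minimal usage sketch (not part of the original file), assuming a lua-http
-- server stream `stream`, a Turbospoon app `app` and a parsed `request` object
-- are already available; it shows the intended call order: add cookies first,
-- then send headers and body in one call.
handle = (stream, app, request) ->
  res = Response stream, app, request
  res\add_cookie key: "theme", value: "dark", max_age: 0
  res\write_response "hello, world", 200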
| 32.049505 | 76 | 0.699722 |
0308929850a91a6e4c354021749ce95735826f30 | 129 | export modinfo = {
type: "command"
desc: "Tests Output2"
alias: {"T2"}
func: (Msg,Speaker) ->
Output2(Msg,{Colors.Green})
} | 18.428571 | 29 | 0.635659 |
b6b55ef8ec532122ba00a22b01362e29086a57ba | 5,747 |
import insert, concat from table
import get_fields from require "lapis.util"
unpack = unpack or table.unpack
query_parts = {"where", "group", "having", "order", "limit", "offset"}
rebuild_query_clause = (parsed) ->
buffer = {}
if joins = parsed.join
for {join_type, join_clause} in *joins
insert buffer, join_type
insert buffer, join_clause
for p in *query_parts
clause = parsed[p]
continue unless clause and clause != ""
p = "order by" if p == "order"
p = "group by" if p == "group"
insert buffer, p
insert buffer, clause
concat buffer, " "
flatten_iter = (iter) ->
current_page = iter!
idx = 1
->
if current_page
with current_page[idx]
idx += 1
unless current_page[idx]
current_page = iter!
idx = 1
class Paginator
new: (@model, clause="", ...) =>
@db = @model.__class.db
param_count = select "#", ...
opts = if param_count > 0
last = select param_count, ...
if type(last) == "table" and not @db.is_encodable last
param_count -= 1
last
elseif type(clause) == "table"
opts = clause
clause = ""
opts
@per_page = @model.per_page
@per_page = opts.per_page if opts
@_clause = if param_count > 0
@db.interpolate_query clause, ...
else
clause
@opts = opts
select: (...) =>
@model\select ...
prepare_results: (items) =>
if pr = @opts and @opts.prepare_results
pr items
else
items
each_item: =>
flatten_iter @each_page!
class OffsetPaginator extends Paginator
per_page: 10
each_page: (page=1) =>
->
results = @get_page page
if next results
page += 1
results
get_all: =>
@prepare_results @select @_clause, @opts
-- 1 indexed page
get_page: (page) =>
page = (math.max 1, tonumber(page) or 0) - 1
limit = @db.interpolate_query " LIMIT ? OFFSET ?",
@per_page, @per_page * page, @opts
@prepare_results @select @_clause .. limit, @opts
num_pages: =>
math.ceil @total_items! / @per_page
has_items: =>
parsed = @db.parse_clause(@_clause)
parsed.limit = "1"
parsed.offset = nil
parsed.order = nil
tbl_name = @db.escape_identifier @model\table_name!
res = @db.query "SELECT 1 FROM #{tbl_name} #{rebuild_query_clause parsed}"
not not unpack res
total_items: =>
unless @_count
parsed = @db.parse_clause(@_clause)
parsed.limit = nil
parsed.offset = nil
parsed.order = nil
if parsed.group
error "OffsetPaginator: can't calculate total items in a query with group by"
tbl_name = @db.escape_identifier @model\table_name!
query = "COUNT(*) AS c FROM #{tbl_name} #{rebuild_query_clause parsed}"
@_count = unpack(@db.select query).c
@_count
class OrderedPaginator extends Paginator
order: "ASC" -- default sort order
per_page: 10
valid_orders = {
asc: true
desc: true
}
new: (model, @field, ...) =>
super model, ...
if @opts and @opts.order
@order = @opts.order
@opts.order = nil
each_page: =>
tuple = {}
->
tuple = { @get_page unpack tuple, 2 }
if next tuple[1]
tuple[1]
get_page: (...) =>
@get_ordered @order, ...
after: (...) =>
@get_ordered "ASC", ...
before: (...) =>
@get_ordered "DESC", ...
get_ordered: (order, ...) =>
parsed = assert @db.parse_clause @_clause
has_multi_fields = type(@field) == "table" and not @db.is_raw @field
order_lower = order\lower!
unless valid_orders[order_lower]
error "OrderedPaginator: invalid query order: #{order}"
table_name = @model\table_name!
prefix = @db.escape_identifier(table_name) .. "."
escaped_fields = if has_multi_fields
[prefix .. @db.escape_identifier f for f in *@field]
else
{ prefix .. @db.escape_identifier @field }
if parsed.order
error "OrderedPaginator: order should not be provided for #{@@__name}"
if parsed.offset or parsed.limit
error "OrderedPaginator: offset and limit should not be provided for #{@@__name}"
parsed.order = table.concat ["#{f} #{order}" for f in *escaped_fields], ", "
if ...
op = switch order\lower!
when "asc"
">"
when "desc"
"<"
pos_count = select "#", ...
if pos_count > #escaped_fields
error "OrderedPaginator: passed in too many values for paginated query (expected #{#escaped_fields}, got #{pos_count})"
order_clause = if 1 == pos_count
order_clause = "#{escaped_fields[1]} #{op} #{@db.escape_literal (...)}"
else
positions = {...}
buffer = {"("}
for i in ipairs positions
unless escaped_fields[i]
error "passed in too many values for paginated query (expected #{#escaped_fields}, got #{pos_count})"
insert buffer, escaped_fields[i]
insert buffer, ", "
buffer[#buffer] = nil
insert buffer, ") "
insert buffer, op
insert buffer, " ("
for pos in *positions
insert buffer, @db.escape_literal pos
insert buffer, ", "
buffer[#buffer] = nil
insert buffer, ")"
concat buffer
if parsed.where
parsed.where = "#{order_clause} and (#{parsed.where})"
else
parsed.where = order_clause
parsed.limit = tostring @per_page
query = rebuild_query_clause parsed
res = @select query, @opts
final = res[#res]
res = @prepare_results res
if has_multi_fields
res, get_fields final, unpack @field
else
res, get_fields final, @field
{ :OffsetPaginator, :OrderedPaginator, :Paginator}
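-- Hedged usage sketch (not part of the original file); `Users` is a made-up
-- Lapis model and the where-clause values are illustrative. OffsetPaginator
-- pages with LIMIT/OFFSET, OrderedPaginator keys pages off an ordered column.
pager = OffsetPaginator Users, "where status = ?", "active", per_page: 25
first_page = pager\get_page 1   -- rows 1..25
pages = pager\num_pages!

by_id = OrderedPaginator Users, "id", "where status = ?", "active"
rows, last_id = by_id\get_page! -- first page plus the cursor for the next one
more = by_id\after last_id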
| 23.650206 | 127 | 0.598747 |
169be27c068e99f7ff83b3932c5a6950003d80ab | 125 | parent = ...
members = {
"split",
}
M = {}
for name in *members
M[name] = require(parent.."._"..name)[name]
return M | 13.888889 | 47 | 0.552 |
7171faf696995a56d2600482871077e358595946 | 164 | export modinfo = {
type: "command"
desc: "Name"
alias: {"SN"}
func: (Msg,Speaker) ->
Output(string.format("This script's name is: %s",Name),{Colors.Orange})
} | 23.428571 | 73 | 0.640244 |
a7bab34cafa39f9f1aaa7277de7f411f70ff718d | 387 |
db = require "lapis.db"
import assert_env from require "lapis.environment"
truncate_tables = (...) ->
assert_env "test", for: "truncate_tables"
tables = for t in *{...}
if type(t) == "table"
t\table_name!
else
t
-- truncate is slow, so delete is used instead
-- db.truncate unpack tables
for table in *tables
db.delete table
{
:truncate_tables
}
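-- Hedged spec usage sketch (not part of the original file); `Users` is a
-- made-up model, "user_tokens" a plain table name, and the require path is an
-- assumption. Models are accepted because table_name! is called on them.
truncate = require("lapis.spec.db").truncate_tables  -- assumed require path
before_each ->
  truncate Users, "user_tokens"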
| 17.590909 | 50 | 0.638243 |
f5e50b6f81e44fbae85dbe63851abb997543af85 | 129 | export modinfo = {
type: "command"
desc: "Tests Output4"
alias: {"T4"}
func: (Msg,Speaker) ->
Output4(Msg,{Colors.Green})
} | 18.428571 | 29 | 0.635659 |
dd5ea4e7edbf9d79d2eea01192856afb770284da | 18,058 |
-- Copyright (C) 2018-2020 DBotThePony
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
-- in the Software without restriction, including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-- of the Software, and to permit persons to whom the Software is furnished to do so,
-- subject to the following conditions:
-- The above copyright notice and this permission notice shall be included in all copies
-- or substantial portions of the Software.
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
-- INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-- PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
-- FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-- OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-- DEALINGS IN THE SOFTWARE.
fixme_init = {
'/advdupe2/'
'duplicator.lua'
}
DPP2.FIXME_HookSpawns = DPP2.FIXME_HookSpawns or fixme_init
for init in *fixme_init
if not table.qhasValue(DPP2.FIXME_HookSpawns, init)
table.insert(DPP2.FIXME_HookSpawns, init)
log_blacklist = {
'logic_collision_pair'
'phys_constraint'
'phys_hinge'
'phys_constraintsystem'
'phys_lengthconstraint'
}
DPP2.PlayerSpawnedSomething = (ply, ent, advancedCheck = false) ->
return if ent.__dpp2_hit
ent.__dpp2_hit = true
ent.__dpp2_dupe_fix = nil
return false if ent\GetEFlags()\band(EFL_KILLME) ~= 0
if ply\DPP2IsBanned()
SafeRemoveEntity(ent)
return false
classname = ent\GetClass()
if not DPP2.SpawnRestrictions\Ask(classname, ply)
hook.Run('DPP_SpawnRestrictionHit', ply, ent)
DPP2.NotifyError(ply, 5, 'message.dpp2.restriction.spawn', classname)
DPP2.LogSpawn('message.dpp2.log.spawn.tried_generic', ply, color_red, DPP2.textcolor, ent)
SafeRemoveEntity(ent)
return false
if DPP2.PerEntityLimits.IS_INCLUSIVE\GetBool()
check = false
if entry = DPP2.PerEntityLimits\Get(classname, ply\GetUserGroup())
check = not entry.limit or entry.limit >= #ply\DPP2GetAllEntsByClass(classname)
if not check
hook.Run('DPP_SpawnLimitHit', ply, ent)
DPP2.NotifyError(ply, 5, 'message.dpp2.limit.spawn', classname)
DPP2.LogSpawn('message.dpp2.log.spawn.tried_generic', ply, color_red, DPP2.textcolor, ent)
SafeRemoveEntity(ent)
return false
else
if entry = DPP2.PerEntityLimits\Get(classname, ply\GetUserGroup())
if entry.limit and entry.limit <= #ply\DPP2GetAllEntsByClass(classname)
hook.Run('DPP_SpawnLimitHit', ply, ent)
DPP2.NotifyError(ply, 5, 'message.dpp2.limit.spawn', classname)
DPP2.LogSpawn('message.dpp2.log.spawn.tried_generic', ply, color_red, DPP2.textcolor, ent)
SafeRemoveEntity(ent)
return false
fixme = false
i = 1
info = debug.getinfo(i)
while info
for fix in *DPP2.FIXME_HookSpawns
if string.find(info.src or info.short_src, fix)
fixme = true
break
i += 1
info = debug.getinfo(i)
if not fixme
return if not advancedCheck and not DPP2.QueueAntispam(ply, ent)
if not advancedCheck and DPP2.ENABLE_ANTISPAM\GetBool() and DPP2.ANTISPAM_COLLISIONS\GetBool() and ent\GetSolid() ~= SOLID_NONE
-- TODO: Point position calculation near plane, for accurate results
-- using OBBMins and OBBMaxs
timer.Simple 0, ->
return if not IsValid(ply) or not IsValid(ent)
mins, maxs = ent\WorldSpaceAABB()
if mins and maxs and mins ~= vector_origin and maxs ~= vector_origin
for ent2 in *ents.FindInBox(mins, maxs)
if ent2 ~= ent and not ent2\IsPlayer() and not ent2\IsNPC() and (not ent2\IsWeapon() or not ent2\GetOwner()\IsValid()) and ent2\GetSolid() ~= SOLID_NONE
ent\DPP2Ghost()
DPP2.NotifyHint(ply, 5, 'message.dpp2.warn.collisions')
break
else
return if not DPP2.AntispamCheck(ply, true, ent, nil, true)
ent.__dpp2_dupe_fix = engine.TickCount() + 5 if DPP2.ENABLE_ANTISPAM_ALTERNATIVE_DUPE\GetBool()
if DPP2.ENABLE_ANTIPROPKILL\GetBool() and DPP2.ANTIPROPKILL_TRAP\GetBool() and ent\GetSolid() ~= SOLID_NONE
timer.Simple 0, -> DPP2.APKTriggerPhysgunDrop(ply, ent) if IsValid(ply) and IsValid(ent)
ent\DPP2SetOwner(ply)
eclass = ent\GetClass()
if DPP2.NO_ROPE_WORLD\GetBool() and eclass == 'keyframe_rope'
start, endpoint = ent\GetInternalVariable('m_hStartPoint'), ent\GetInternalVariable('m_hEndPoint')
if start == endpoint and not IsValid(start)
ent\Remove()
DPP2.LogSpawn('message.dpp2.log.spawn.tried_plain', ply, color_red, DPP2.textcolor, 'keyframe_rope')
return false
if not eclass or not table.qhasValue(log_blacklist, eclass)
if not eclass or not eclass\startsWith('prop_')
DPP2.LogSpawn('message.dpp2.log.spawn.generic', ply, ent)
else
DPP2.LogSpawn('message.dpp2.log.spawn.prop', ply, ent, ent\GetModel() or '<unknown>')
hook.Run('DPP_PlayerSpawn', ply, ent)
return true
PreventModelSpawn = (ply, model = ent and ent\GetModel() or 'wtf', ent = NULL, nonotify = false) ->
if ply\DPP2IsBanned()
value = ply\DPP2BanTimeLeft()
if value == math.huge
DPP2.NotifyError(ply, nil, 'message.dpp2.spawn.banned')
else
DPP2.NotifyError(ply, nil, 'message.dpp2.spawn.banned_for', DLib.I18n.FormatTimeForPlayer(ply, value\ceil()))
SafeRemoveEntity(ent)
return false
model = model\lower()
if DPP2.IsModelBlacklisted(IsValid(ent) and ent or model, ply)
DPP2.NotifyError(ply, nil, 'message.dpp2.blacklist.model_blocked', model) if not nonotify
SafeRemoveEntity(ent)
return false
if DPP2.IsModelRestricted(IsValid(ent) and ent or model, ply)
DPP2.NotifyError(ply, nil, 'message.dpp2.blacklist.model_restricted', model) if not nonotify
SafeRemoveEntity(ent)
return false
if DPP2.PerModelLimits.IS_INCLUSIVE\GetBool()
check = false
if entry = DPP2.PerModelLimits\Get(model, ply\GetUserGroup())
if entry.limit
count = 0
for ent2 in *ply\DPP2GetAllEnts()
if ent2\GetModel() == model
count += 1
break if entry.limit < count
check = entry.limit >= count
if not check
hook.Run('DPP_ModelLimitHit', ply, model, ent)
DPP2.NotifyError(ply, 5, 'message.dpp2.limit.spawn', model) if not nonotify
DPP2.LogSpawn('message.dpp2.log.spawn.tried_generic', ply, color_red, DPP2.textcolor, ent) if IsValid(ent)
SafeRemoveEntity(ent)
return false
else
if entry = DPP2.PerModelLimits\Get(model, ply\GetUserGroup())
if entry.limit
count = 0
for ent2 in *ply\DPP2GetAllEnts()
if ent2\GetModel() == model
count += 1
break if entry.limit < count
if entry.limit < count
hook.Run('DPP_ModelLimitHit', ply, model, ent)
DPP2.NotifyError(ply, 5, 'message.dpp2.limit.spawn', model) if not nonotify
DPP2.LogSpawn('message.dpp2.log.spawn.tried_generic', ply, color_red, DPP2.textcolor, ent) if IsValid(ent)
SafeRemoveEntity(ent)
return false
if DPP2.ENABLE_ANTISPAM\GetBool() and IsValid(ent)
hit = false
if DPP2.AUTO_BLACKLIST_BY_SIZE\GetBool()
volume1 = DPP2.AUTO_BLACKLIST_SIZE\GetFloat()
volume2 = 0
phys = ent\GetPhysicsObject()
volume2 = phys\GetVolume() if IsValid(phys)
if volume1 <= volume2 and not DPP2.ModelBlacklist\Has(model)
DPP2.ModelBlacklist\Add(model)
DPP2.Notify(true, nil, 'message.dpp2.autoblacklist.added_volume', model)
hit = true
if not DPP2.ModelBlacklist\Ask(model, ply)
DPP2.NotifyError(ply, nil, 'message.dpp2.blacklist.model_blocked', model) if not nonotify
SafeRemoveEntity(ent)
return false
if DPP2.AUTO_BLACKLIST_BY_AABB\GetBool() and not hit
volume1 = DPP2.AUTO_BLACKLIST_AABB_SIZE\GetFloat()
volume2 = DPP2._ComputeVolume2(ent)
if volume1 <= volume2 and not DPP2.ModelBlacklist\Has(model)
DPP2.ModelBlacklist\Add(model)
DPP2.Notify(true, nil, 'message.dpp2.autoblacklist.added_aabb', model)
if not DPP2.ModelBlacklist\Ask(model, ply)
DPP2.NotifyError(ply, nil, 'message.dpp2.blacklist.model_blocked', model) if not nonotify
SafeRemoveEntity(ent)
return false
return true
PlayerSpawnedEffect = (ply = NULL, model = 'models/error.mdl', ent = NULL) ->
return unless ply\IsValid()
return unless ent\IsValid()
DPP2.PlayerSpawnedSomething(ply, ent)
return false if not PreventModelSpawn(ply, model, ent)
PlayerSpawnedProp = (ply = NULL, model = 'models/error.mdl', ent = NULL) ->
return unless ply\IsValid()
return unless ent\IsValid()
return false if not PreventModelSpawn(ply, model, ent)
DPP2.PlayerSpawnedSomething(ply, ent)
return
PlayerSpawnedRagdoll = (ply = NULL, model = 'models/error.mdl', ent = NULL) ->
return unless ply\IsValid()
return unless ent\IsValid()
return false if not PreventModelSpawn(ply, model, ent)
DPP2.PlayerSpawnedSomething(ply, ent)
return
PlayerSpawnedNPC = (ply = NULL, ent = NULL) ->
return unless ply\IsValid()
return unless ent\IsValid()
return false if not PreventModelSpawn(ply, model, ent)
DPP2.PlayerSpawnedSomething(ply, ent)
return
PlayerSpawnedSENT = (ply = NULL, ent = NULL) ->
return unless ply\IsValid()
return unless ent\IsValid()
return false if not PreventModelSpawn(ply, model, ent)
DPP2.PlayerSpawnedSomething(ply, ent)
return
PlayerSpawnedSWEP = (ply = NULL, ent = NULL) ->
return unless ply\IsValid()
return unless ent\IsValid()
return false if not PreventModelSpawn(ply, model, ent)
DPP2.PlayerSpawnedSomething(ply, ent)
return
PlayerSpawnedVehicle = (ply = NULL, ent = NULL) ->
return unless ply\IsValid()
return unless ent\IsValid()
return false if not PreventModelSpawn(ply, model, ent)
DPP2.PlayerSpawnedSomething(ply, ent)
return
PlayerSpawnEffect = (ply = NULL, model = 'models/error.mdl') ->
return unless ply\IsValid()
return false if not PreventModelSpawn(ply, model)
return false if not DPP2.AntispamCheck(ply)
if not DPP2.SpawnRestrictions\Ask('prop_effect', ply)
DPP2.LogSpawn('message.dpp2.log.spawn.tried_plain', ply, color_red, DPP2.textcolor, 'prop_effect')
return false
PlayerSpawnProp = (ply = NULL, model = 'models/error.mdl') ->
return unless ply\IsValid()
return false if not PreventModelSpawn(ply, model)
return false if not DPP2.AntispamCheck(ply)
if not DPP2.SpawnRestrictions\Ask('prop_physics', ply)
DPP2.LogSpawn('message.dpp2.log.spawn.tried_plain', ply, color_red, DPP2.textcolor, 'prop_physics')
return false
PlayerSpawnRagdoll = (ply = NULL, model = 'models/error.mdl') ->
return unless ply\IsValid()
return false if not PreventModelSpawn(ply, model)
return false if not DPP2.AntispamCheck(ply)
if not DPP2.SpawnRestrictions\Ask('prop_ragdoll', ply)
DPP2.LogSpawn('message.dpp2.log.spawn.tried_plain', ply, color_red, DPP2.textcolor, 'prop_ragdoll')
return false
PlayerSpawnObject = (ply = NULL, model = 'models/error.mdl', skin = 0) ->
return unless ply\IsValid()
return false if not PreventModelSpawn(ply, model)
return false if not DPP2.AntispamCheck(ply)
PlayerSpawnVehicle = (ply = NULL, model = 'models/error.mdl', name = 'prop_vehicle_jeep', info = {}) ->
return unless ply\IsValid()
return false if not PreventModelSpawn(ply, model)
return false if not DPP2.AntispamCheck(ply)
if not DPP2.SpawnRestrictions\Ask(name, ply)
DPP2.LogSpawn('message.dpp2.log.spawn.tried_plain', ply, color_red, DPP2.textcolor, name)
return false
PlayerSpawnNPC = (ply = NULL, npcclassname = 'base_entity', weaponclass = 'base_entity') ->
return unless ply\IsValid()
return false if not DPP2.AntispamCheck(ply)
if not DPP2.SpawnRestrictions\Ask(npcclassname, ply)
DPP2.LogSpawn('message.dpp2.log.spawn.tried_plain', ply, color_red, DPP2.textcolor, npcclassname)
return false
PlayerSpawnSENT = (ply = NULL, classname = 'base_entity') ->
return unless ply\IsValid()
return false if not DPP2.AntispamCheck(ply)
if not DPP2.SpawnRestrictions\Ask(classname, ply)
DPP2.LogSpawn('message.dpp2.log.spawn.tried_plain', ply, color_red, DPP2.textcolor, classname)
return false
PlayerGiveSWEP = (ply = NULL, classname = 'base_entity', definition = {ClassName: 'base_entity', WorldModel: 'models/error.mdl', ViewModel: 'models/error.mdl'}) ->
return unless ply\IsValid()
return false if not PreventModelSpawn(ply, definition.WorldModel)
return false if not DPP2.AntispamCheck(ply)
if not DPP2.SpawnRestrictions\Ask(classname, ply)
DPP2.LogSpawn('message.dpp2.log.spawn.tried_plain', ply, color_red, DPP2.textcolor, classname)
return false
if not IsValid(ply\GetWeapon(classname))
timer.Simple 0, ->
return if not IsValid(ply)
wep = ply\GetWeapon(classname)
if IsValid(wep)
if DPP2.PlayerSpawnedSomething(ply, wep)
DPP2.LogSpawn('message.dpp2.log.spawn.giveswep_valid', ply, wep)
else
DPP2.LogSpawn('message.dpp2.log.spawn.giveswep', ply, color_white, classname)
return
PlayerSpawnSWEP = (ply = NULL, classname = 'base_entity', definition = {ClassName: 'base_entity', WorldModel: 'models/error.mdl', ViewModel: 'models/error.mdl'}) ->
return unless ply\IsValid()
return false if not PreventModelSpawn(ply, definition.WorldModel)
return false if not DPP2.AntispamCheck(ply)
if not DPP2.SpawnRestrictions\Ask(classname, ply)
DPP2.LogSpawn('message.dpp2.log.spawn.tried_plain', ply, color_red, DPP2.textcolor, classname)
return false
PlayerCanPickupItem = (ply = NULL, ent = NULL) ->
return if not IsValid(ply) or not IsValid(ent)
return false if not DPP2.PickupProtection.Blacklist\Ask(ent\GetClass(), ply)
return false if not DPP2.PickupProtection.RestrictionList\Ask(ent\GetClass(), ply)
hooksToReg = {
:PlayerSpawnedEffect, :PlayerSpawnedProp, :PlayerSpawnedRagdoll
:PlayerSpawnedNPC, :PlayerSpawnedSENT, :PlayerSpawnedSWEP
:PlayerSpawnedVehicle, :PlayerSpawnEffect, :PlayerSpawnProp
:PlayerSpawnRagdoll, :PlayerSpawnObject, :PlayerSpawnVehicle
:PlayerSpawnNPC, :PlayerSpawnSENT, :PlayerGiveSWEP, :PlayerSpawnSWEP
:PlayerCanPickupItem, PlayerCanPickupWeapon: PlayerCanPickupItem
}
hook.Add(name, 'DPP2.SpawnHooks', func, -4) for name, func in pairs(hooksToReg)
import CurTimeL, table, type from _G
CheckEntities = {}
DPP2._Spawn_CheckFrame = 0
DPP2.HookedEntityCreation = => table.qhasValue(CheckEntities, @) or @__dpp2_spawn_frame == CurTimeL()
local DiveTableCheck
local DiveEntityCheck
DiveTableCheck = (tab, owner, checkedEnts, checkedTables, found) =>
return if checkedTables[tab]
checkedTables[tab] = true
for key, value in pairs(tab)
vtype = type(value)
if vtype == 'table' and (type(key) ~= 'string' or not key\startsWith('__dpp2'))
DiveTableCheck(@, value, owner, checkedEnts, checkedTables, found)
elseif vtype == 'Entity' or vtype == 'NPC' or vtype == 'NextBot' or vtype == 'Vehicle' or vtype == 'Weapon'
DiveEntityCheck(value, owner, checkedEnts, checkedTables, found)
DiveEntityCheck = (owner, checkedEnts, checkedTables, found) =>
return found if checkedEnts[@]
return found if @__dpp2_check_frame == CurTimeL()
return found if not @GetTable()
checkedEnts[@] = true
@__dpp2_check_frame = CurTimeL()
table.insert(found, @) if @DPP2GetOwner() ~= owner and @__dpp2_spawn_frame == CurTimeL()
DiveTableCheck(@, @GetTable(), owner, checkedEnts, checkedTables, found)
DiveTableCheck(@, @GetSaveTable(), owner, checkedEnts, checkedTables, found)
return found
hook.Add 'Think', 'DPP2.CheckEntitiesOwnage', ->
return if DPP2._Spawn_CheckFrame >= CurTimeL()
return if #CheckEntities == 0
copy = CheckEntities
checkConstraints = {}
CheckEntities = {}
ctime = CurTimeL()
for ent in *copy
if ent\IsValid()
ent.__dpp2_spawn_frame = ctime
while #copy ~= 0
ent = table.remove(copy, 1)
if ent\IsValid()
if ent\IsConstraint()
table.insert(checkConstraints, ent)
elseif ent\DPP2OwnerIsValid()
ply = ent\DPP2GetOwner()
found = DiveEntityCheck(ent, ply, {}, {}, {})
if #found ~= 0
DPP2.UnqueueAntispam(ent)
local toremove
for ent2 in *found
DPP2.UnqueueAntispam(ent2)
DPP2.PlayerSpawnedSomething(ply, ent2, true)
for i, ent3 in ipairs(copy)
if ent2 == ent3
toremove = toremove or {}
table.insert(toremove, i)
break
table.removeValues(copy, toremove) if toremove
fail = not PreventModelSpawn(ply, nil, ent, true)
if not fail
for ent2 in *found
fail = not PreventModelSpawn(ply, nil, ent2, true)
break if fail
if not fail
should_queue = not ent.__dpp2_dupe_fix or engine.TickCount() >= ent.__dpp2_dupe_fix
if should_queue
for ent2 in *found
if ent.__dpp2_dupe_fix and engine.TickCount() < ent.__dpp2_dupe_fix
should_queue = false
break
DPP2.QueueAntispam(ply, ent, found) if should_queue
else
SafeRemoveEntity(ent)
SafeRemoveEntity(ent2) for ent2 in *found
DPP2.NotifyError(ply, nil, 'message.dpp2.blacklist.models_blocked', #found + 1)
for constraint in *checkConstraints
ent1, ent2 = constraint\GetConstrainedEntities()
if IsValid(ent1) and IsValid(ent2)
if ctime == ent1.__dpp2_spawn_frame and ctime == ent2.__dpp2_spawn_frame
if ent1\DPP2IsOwned() and ent1\DPP2OwnerIsValid()
DPP2.PlayerSpawnedSomething(ent1\DPP2GetOwner(), constraint, true)
elseif ent2\DPP2IsOwned() and ent2\DPP2OwnerIsValid()
DPP2.PlayerSpawnedSomething(ent2\DPP2GetOwner(), constraint, true)
else
DPP2.LMessageError('message.dpp2.error.empty_constraint', ' ', constraint, ' ', ent1, ' ', ent2)
hook.Add 'OnEntityCreated', 'DPP2.CheckEntitiesOwnage', =>
DPP2._Spawn_CheckFrame = CurTimeL()
table.insert(CheckEntities, @)
return
hook.Add 'OnEntityCopyTableFinish', 'DPP2.ClearFields', (data) =>
data.__dpp2_check_frame = nil
data.__dpp2_hit = nil
data.__dpp2_spawn_frame = nil
data.__dpp2_contraption = nil
data._dpp2_last_nick = nil
data.__dpp2_pushing = nil
data.__dpp2_unfreeze = nil
data.__dpp2_old_collisions_group = nil
data.__dpp2_old_movetype = nil
data.__dpp2_old_color = nil
data.__dpp2_ghost_callbacks = nil
data.__dpp2_old_rendermode = nil
| 35.829365 | 164 | 0.736793 |
a954c1a9ceadfd2e452c8d2914136f00991e2a62 | 354 | local *
package.path = "?.lua;?/init.lua;#{package.path}"
sss = assert require "sss"
s = with sss.socket!
\reuseaddr!
\bind
host: "*"
port: 32000
\listen!
while true
c, addr = s\accept!
addr = sss.toladdress addr
got = c\receive!
print "got #{got} from #{addr.host}:#{addr.port}"
c\send got
c\close!
| 18.631579 | 53 | 0.564972 |
8a6faa76b155a5b2e2a5180a32a5de0fab19a9d7 | 15,320 | -- Copyright 2014-2015 The Howl Developers
-- License: MIT (see LICENSE.md at the top-level directory of the distribution)
ffi = require 'ffi'
require 'ljglibs.cdefs.glib'
require 'ljglibs.cdefs.cairo'
ffi.cdef [[
/* PangoFontDescription */
typedef struct {} PangoFontDescription;
PangoFontDescription * pango_font_description_new (void);
void pango_font_description_free (PangoFontDescription *desc);
void pango_font_description_set_family (PangoFontDescription *desc, const char *family);
const char * pango_font_description_get_family (const PangoFontDescription *desc);
void pango_font_description_set_size (PangoFontDescription *desc, gint size);
gint pango_font_description_get_size (const PangoFontDescription *desc);
void pango_font_description_set_absolute_size (PangoFontDescription *desc, double size);
gboolean pango_font_description_get_size_is_absolute (const PangoFontDescription *desc);
PangoFontDescription * pango_font_description_from_string (const char *str);
char * pango_font_description_to_string (const PangoFontDescription *desc);
/* PangoTabArray */
typedef enum {
PANGO_TAB_LEFT
} PangoTabAlign;
typedef struct {} PangoTabArray;
PangoTabArray * pango_tab_array_new (gint initial_size, gboolean positions_in_pixels);
PangoTabArray * pango_tab_array_new_with_positions (gint size,
gboolean positions_in_pixels,
PangoTabAlign first_alignment,
gint first_position,
...);
void pango_tab_array_free (PangoTabArray *tab_array);
gint pango_tab_array_get_size (PangoTabArray *tab_array);
void pango_tab_array_set_tab (PangoTabArray *tab_array,
gint tab_index,
PangoTabAlign alignment,
gint location);
void pango_tab_array_get_tab (PangoTabArray *tab_array,
gint tab_index,
PangoTabAlign *alignment,
gint *location);
gboolean pango_tab_array_get_positions_in_pixels (PangoTabArray *tab_array);
/* PangoContext */
typedef struct {} PangoContext;
PangoFontDescription * pango_context_get_font_description (PangoContext *context);
void pango_context_set_font_description (PangoContext *context, const PangoFontDescription *desc);
typedef struct {
int x;
int y;
int width;
int height;
} PangoRectangle;
typedef enum {
PANGO_ALIGN_LEFT,
PANGO_ALIGN_CENTER,
PANGO_ALIGN_RIGHT
} PangoAlignment;
typedef enum {
PANGO_ELLIPSIZE_NONE,
PANGO_ELLIPSIZE_START,
PANGO_ELLIPSIZE_MIDDLE,
PANGO_ELLIPSIZE_END
} PangoEllipsizeMode;
/* PangoColor */
typedef struct {
guint16 red;
guint16 green;
guint16 blue;
} PangoColor;
gboolean pango_color_parse (PangoColor *color, const char *spec);
gchar * pango_color_to_string (const PangoColor *color);
/* Attributes */
typedef enum {
PANGO_ATTR_INDEX_FROM_TEXT_BEGINNING = 0,
PANGO_ATTR_INDEX_TO_TEXT_END = 4294967295 // fix me
} PangoAttributeConstants;
typedef enum
{
PANGO_ATTR_INVALID, /* 0 is an invalid attribute type */
PANGO_ATTR_LANGUAGE, /* PangoAttrLanguage */
PANGO_ATTR_FAMILY, /* PangoAttrString */
PANGO_ATTR_STYLE, /* PangoAttrInt */
PANGO_ATTR_WEIGHT, /* PangoAttrInt */
PANGO_ATTR_VARIANT, /* PangoAttrInt */
PANGO_ATTR_STRETCH, /* PangoAttrInt */
PANGO_ATTR_SIZE, /* PangoAttrSize */
PANGO_ATTR_FONT_DESC, /* PangoAttrFontDesc */
PANGO_ATTR_FOREGROUND, /* PangoAttrColor */
PANGO_ATTR_BACKGROUND, /* PangoAttrColor */
PANGO_ATTR_UNDERLINE, /* PangoAttrInt */
PANGO_ATTR_STRIKETHROUGH, /* PangoAttrInt */
PANGO_ATTR_RISE, /* PangoAttrInt */
PANGO_ATTR_SHAPE, /* PangoAttrShape */
PANGO_ATTR_SCALE, /* PangoAttrFloat */
PANGO_ATTR_FALLBACK, /* PangoAttrInt */
PANGO_ATTR_LETTER_SPACING, /* PangoAttrInt */
PANGO_ATTR_UNDERLINE_COLOR, /* PangoAttrColor */
PANGO_ATTR_STRIKETHROUGH_COLOR,/* PangoAttrColor */
PANGO_ATTR_ABSOLUTE_SIZE, /* PangoAttrSize */
PANGO_ATTR_GRAVITY, /* PangoAttrInt */
PANGO_ATTR_GRAVITY_HINT /* PangoAttrInt */
} PangoAttrType;
typedef enum {
PANGO_STYLE_NORMAL,
PANGO_STYLE_OBLIQUE,
PANGO_STYLE_ITALIC
} PangoStyle;
typedef enum {
PANGO_VARIANT_NORMAL,
PANGO_VARIANT_SMALL_CAPS
} PangoVariant;
typedef enum {
PANGO_WEIGHT_THIN = 100,
PANGO_WEIGHT_ULTRALIGHT = 200,
PANGO_WEIGHT_LIGHT = 300,
PANGO_WEIGHT_BOOK = 380,
PANGO_WEIGHT_NORMAL = 400,
PANGO_WEIGHT_MEDIUM = 500,
PANGO_WEIGHT_SEMIBOLD = 600,
PANGO_WEIGHT_BOLD = 700,
PANGO_WEIGHT_ULTRABOLD = 800,
PANGO_WEIGHT_HEAVY = 900,
PANGO_WEIGHT_ULTRAHEAVY = 1000
} PangoWeight;
typedef enum {
PANGO_STRETCH_ULTRA_CONDENSED,
PANGO_STRETCH_EXTRA_CONDENSED,
PANGO_STRETCH_CONDENSED,
PANGO_STRETCH_SEMI_CONDENSED,
PANGO_STRETCH_NORMAL,
PANGO_STRETCH_SEMI_EXPANDED,
PANGO_STRETCH_EXPANDED,
PANGO_STRETCH_EXTRA_EXPANDED,
PANGO_STRETCH_ULTRA_EXPANDED
} PangoStretch;
typedef enum {
PANGO_FONT_MASK_FAMILY = 1 << 0,
PANGO_FONT_MASK_STYLE = 1 << 1,
PANGO_FONT_MASK_VARIANT = 1 << 2,
PANGO_FONT_MASK_WEIGHT = 1 << 3,
PANGO_FONT_MASK_STRETCH = 1 << 4,
PANGO_FONT_MASK_SIZE = 1 << 5,
PANGO_FONT_MASK_GRAVITY = 1 << 6
} PangoFontMask;
typedef enum {
PANGO_UNDERLINE_NONE,
PANGO_UNDERLINE_SINGLE,
PANGO_UNDERLINE_DOUBLE,
PANGO_UNDERLINE_LOW,
PANGO_UNDERLINE_ERROR
} PangoUnderline;
typedef enum {
PANGO_GRAVITY_SOUTH,
PANGO_GRAVITY_EAST,
PANGO_GRAVITY_NORTH,
PANGO_GRAVITY_WEST,
PANGO_GRAVITY_AUTO
} PangoGravity;
typedef enum {
PANGO_GRAVITY_HINT_NATURAL,
PANGO_GRAVITY_HINT_STRONG,
PANGO_GRAVITY_HINT_LINE
} PangoGravityHint;
typedef struct {} _PangoAttribute;
typedef struct {
PangoAttrType type;
_PangoAttribute * (*copy) (const _PangoAttribute *attr);
void (*destroy) (_PangoAttribute *attr);
gboolean (*equal) (const _PangoAttribute *attr1, const _PangoAttribute *attr2);
} PangoAttrClass;
typedef struct {
const PangoAttrClass *klass;
guint start_index; /* in bytes */
guint end_index; /* in bytes. The character at this index is not included */
} PangoAttribute;
typedef struct {
PangoAttribute attr;
char *value;
} PangoAttrString;
typedef struct {
PangoAttribute attr;
int value;
} PangoAttrInt;
typedef struct {
PangoAttribute attr;
double value;
} PangoAttrFloat;
typedef struct {
PangoAttribute attr;
int size;
guint absolute : 1;
} PangoAttrSize;
typedef struct {
PangoAttribute attr;
PangoColor color;
} PangoAttrColor;
typedef struct {
PangoAttrColor color;
} PangoAttributeForeground;
typedef struct {
PangoAttribute attr;
PangoFontDescription *desc;
} PangoAttrFontDesc;
void pango_attribute_destroy (PangoAttribute *attr);
PangoAttribute * pango_attribute_copy (const PangoAttribute *attr);
PangoAttrColor * pango_attr_foreground_new (guint16 red,
guint16 green,
guint16 blue);
PangoAttrColor * pango_attr_background_new (guint16 red,
guint16 green,
guint16 blue);
PangoAttrString * pango_attr_family_new (const char *family);
PangoAttrInt * pango_attr_style_new (PangoStyle style);
PangoAttrInt * pango_attr_variant_new (PangoVariant variant);
PangoAttrInt * pango_attr_stretch_new (PangoStretch stretch);
PangoAttrInt * pango_attr_weight_new (PangoWeight weight);
PangoAttrInt * pango_attr_size_new (int size);
PangoAttrInt * pango_attr_size_new_absolute (int size);
PangoAttribute * pango_attr_font_desc_new (const PangoFontDescription *desc);
PangoAttrInt * pango_attr_strikethrough_new (gboolean strikethrough);
PangoAttrColor * pango_attr_strikethrough_color_new (guint16 red,
guint16 green,
guint16 blue);
PangoAttrInt * pango_attr_underline_new (PangoUnderline underline);
PangoAttrColor *pango_attr_underline_color_new (guint16 red,
guint16 green,
guint16 blue);
PangoAttrInt * pango_attr_rise_new (int rise);
PangoAttrFloat * pango_attr_scale_new (double scale_factor);
PangoAttrInt * pango_attr_fallback_new (gboolean enable_fallback);
PangoAttrInt * pango_attr_letter_spacing_new (int letter_spacing);
PangoAttribute * pango_attr_shape_new (const PangoRectangle *ink_rect,
const PangoRectangle *logical_rect);
PangoAttrInt * pango_attr_gravity_new (PangoGravity gravity);
PangoAttrInt * pango_attr_gravity_hint_new (PangoGravityHint hint);
typedef struct {} PangoAttrList;
PangoAttrList * pango_attr_list_new (void);
void pango_attr_list_unref (PangoAttrList *list);
void pango_attr_list_insert (PangoAttrList *list, PangoAttribute *attr);
void pango_attr_list_insert_before (PangoAttrList *list, PangoAttribute *attr);
void pango_attr_list_change (PangoAttrList *list, PangoAttribute *attr);
typedef struct {} PangoAttrIterator;
PangoAttrIterator * pango_attr_list_get_iterator (PangoAttrList *list);
void pango_attr_iterator_destroy (PangoAttrIterator *iterator);
gboolean pango_attr_iterator_next (PangoAttrIterator *iterator);
void pango_attr_iterator_range (PangoAttrIterator *iterator,
gint *start,
gint *end);
PangoAttribute * pango_attr_iterator_get (PangoAttrIterator *iterator,
PangoAttrType type);
/* PangoLayout */
typedef enum {
PANGO_WRAP_WORD,
PANGO_WRAP_CHAR,
PANGO_WRAP_WORD_CHAR
} PangoWrapMode;
typedef struct {} PangoLayout;
typedef struct {
PangoLayout *layout;
gint start_index; /* start of line as byte index into layout->text */
gint length; /* length of line in bytes */
void *runs;
//GSList *runs;
guint is_paragraph_start : 1; /* TRUE if this is the first line of the paragraph */
guint resolved_dir : 3; /* Resolved PangoDirection of line */
} PangoLayoutLine;
typedef struct {} PangoLayoutIter;
PangoLayout * pango_layout_new (PangoContext *context);
void pango_layout_set_text (PangoLayout *layout, const char *text, int length);
const char *pango_layout_get_text (PangoLayout *layout);
void pango_layout_get_pixel_size (PangoLayout *layout, int *width, int *height);
void pango_layout_set_alignment (PangoLayout *layout, PangoAlignment alignment);
PangoAlignment pango_layout_get_alignment (PangoLayout *layout);
void pango_layout_set_width (PangoLayout *layout, int width);
int pango_layout_get_width (PangoLayout *layout);
void pango_layout_set_height (PangoLayout *layout, int height);
int pango_layout_get_height (PangoLayout *layout);
void pango_layout_set_spacing (PangoLayout *layout, int spacing);
int pango_layout_get_spacing (PangoLayout *layout);
void pango_layout_set_attributes (PangoLayout *layout, PangoAttrList *attrs);
PangoAttrList * pango_layout_get_attributes (PangoLayout *layout);
void pango_layout_set_font_description (PangoLayout *layout, const PangoFontDescription *desc);
const PangoFontDescription * pango_layout_get_font_description (PangoLayout *layout);
int pango_layout_get_baseline (PangoLayout *layout);
gboolean pango_layout_is_wrapped (PangoLayout *layout);
PangoWrapMode pango_layout_get_wrap (PangoLayout *layout);
void pango_layout_set_wrap (PangoLayout *layout, PangoWrapMode wrap);
void pango_layout_index_to_pos (PangoLayout *layout, int index, PangoRectangle *pos);
int pango_layout_get_line_count (PangoLayout *layout);
void pango_layout_set_indent (PangoLayout *layout, int indent);
int pango_layout_get_indent (PangoLayout *layout);
gboolean pango_layout_xy_to_index (PangoLayout *layout,
int x,
int y,
int *index_,
int *trailing);
void pango_layout_index_to_line_x (PangoLayout *layout,
int index_,
gboolean trailing,
int *line,
int *x_pos);
void pango_layout_move_cursor_visually (PangoLayout *layout,
gboolean strong,
int old_index,
int old_trailing,
int direction,
int *new_index,
int *new_trailing);
/* PangoLayoutLine */
PangoLayoutLine * pango_layout_line_ref (PangoLayoutLine *line);
void pango_layout_line_unref (PangoLayoutLine *line);
PangoLayoutLine * pango_layout_get_line (PangoLayout *layout, int line);
PangoLayoutLine * pango_layout_get_line_readonly (PangoLayout *layout, int line);
void pango_layout_line_get_pixel_extents (PangoLayoutLine *layout_line,
PangoRectangle *ink_rect,
PangoRectangle *logical_rect);
void pango_layout_line_index_to_x (PangoLayoutLine *line,
int index_,
gboolean trailing,
int *x_pos);
gboolean pango_layout_line_x_to_index (PangoLayoutLine *line,
int x_pos,
int *index_,
int *trailing);
/* PangoLayoutIter */
PangoLayoutIter * pango_layout_get_iter (PangoLayout *layout);
void pango_layout_iter_free (PangoLayoutIter *iter);
gboolean pango_layout_iter_next_line (PangoLayoutIter *iter);
gboolean pango_layout_iter_at_last_line (PangoLayoutIter *iter);
int pango_layout_iter_get_baseline (PangoLayoutIter *iter);
PangoLayoutLine * pango_layout_iter_get_line (PangoLayoutIter *iter);
PangoLayoutLine * pango_layout_iter_get_line_readonly (PangoLayoutIter *iter);
void pango_layout_iter_get_line_yrange (PangoLayoutIter *iter,
int *y0_,
int *y1_);
void pango_layout_set_tabs (PangoLayout *layout, PangoTabArray *tabs);
PangoTabArray * pango_layout_get_tabs (PangoLayout *layout);
/* PangoCairo */
PangoContext * pango_cairo_create_context (cairo_t *cr);
PangoLayout * pango_cairo_create_layout (cairo_t *cr);
void pango_cairo_show_layout (cairo_t *cr, PangoLayout *layout);
]]
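-- Hedged sketch (not part of the original file): with the declarations above,
-- the functions can be reached through a library handle. The library name
-- passed to ffi.load is an assumption; Howl wires this up elsewhere.
pango = ffi.load 'pango-1.0'
desc = pango.pango_font_description_from_string 'Monospace 11'
print ffi.string pango.pango_font_description_to_string desc
pango.pango_font_description_free desc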
| 38.492462 | 100 | 0.670953 |
437d7a9ac9bcdcc2f4a679d0777fda5520c09111 | 462 | -- Copyright 2016 The Howl Developers
-- License: MIT (see LICENSE.md at the top-level directory of the distribution)
mode_reg =
name: 'tao'
aliases: 'tao'
extensions: 'tao'
create: -> bundle_load('tao_mode')
parent: 'curly_mode'
howl.mode.register mode_reg
unload = -> howl.mode.unregister 'tao'
return {
info:
author: 'Joshua Barretto <[email protected]>',
description: 'Tao language support',
license: 'MIT',
:unload
}
| 21 | 79 | 0.690476 |
39fc9e1298c50253eb5e07c3e1b9cf7574eaec06 | 4,454 |
-- Copyright (C) 2017-2020 DBotThePony
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
-- in the Software without restriction, including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-- of the Software, and to permit persons to whom the Software is furnished to do so,
-- subject to the following conditions:
-- The above copyright notice and this permission notice shall be included in all copies
-- or substantial portions of the Software.
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
-- INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-- PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
-- FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-- OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-- DEALINGS IN THE SOFTWARE.
class DLib.Freespace
new: (posStart = Vector(0, 0, 0), step = 25, radius = 10) =>
@pos = posStart
@mins = Vector(-4, -4, -4)
@maxs = Vector(4, 4, 4)
@step = step
@radius = radius
@addition = Vector(0, 0, 0)
@usehull = true
@filter = DLib.Set()
@mask = MASK_SOLID
@maskReachable = MASK_SOLID
@strict = false
@smins = Vector(-16, -16, 0)
@smaxs = Vector(16, 16, 0)
@sheight = 70
GetMins: => @mins
GetMaxs: => @maxs
SetMins: (val) => @mins = val
SetMaxs: (val) => @maxs = val
GetPos: => @pos
SetPos: (val) => @pos = val
GetAddition: => @addition
SetAddition: (val) => @addition = val
GetStrict: => @strict
GetStrictHeight: => @sheight
SetStrict: (val) => @strict = val
SetStrictHeight: (val) => @sheight = val
GetAABB: => @mins, @maxs
GetSAABB: => @smins, @smaxs
SetAABB: (val1, val2) => @mins, @maxs = val1, val2
SetSAABB: (val1, val2) => @smins, @smaxs = val1, val2
GetMask: => @mask
SetMask: (val) => @mask = val
GetMaskReachable: => @maskReachable
SetMaskReachable: (val) => @maskReachable = val
GetStep: => @step
GetRadius: => @radius
SetStep: (val) => @step = val
SetRadius: (val) => @radius = val
check: (target) =>
if @usehull
tr = util.TraceHull({
start: @pos
endpos: target + @addition
mins: @mins
maxs: @maxs
mask: @maskReachable
filter: @filter\getValues()
})
if @strict and not tr.Hit
tr2 = util.TraceHull({
start: target + @addition
endpos: target + @addition + Vector(0, 0, @sheight)
mins: @smins
maxs: @smaxs
mask: @mask
filter: @filter\getValues()
})
return not tr2.Hit, tr, tr2
return not tr.Hit, tr
else
tr = util.TraceLine({
start: @pos
endpos: target + @addition
mask: @maskReachable
filter: @filter\getValues()
})
if @strict and not tr.Hit
tr2 = util.TraceHull({
start: target + @addition
endpos: target + @addition + Vector(0, 0, @sheight)
mins: @smins
maxs: @smaxs
mask: @mask
filter: @filter\getValues()
})
return not tr2.Hit, tr, tr2
return not tr.Hit, tr
Search: =>
if @check(@pos)
return @pos
for radius = 1, @radius
for x = -radius, radius
pos = @pos + Vector(x * @step, radius * @step, 0)
return pos if @check(pos)
pos = @pos + Vector(x * @step, -radius * @step, 0)
return pos if @check(pos)
for y = -radius, radius
pos = @pos + Vector(radius * @step, y * @step, 0)
return pos if @check(pos)
pos = @pos + Vector(-radius * @step, y * @step, 0)
return pos if @check(pos)
return false
SearchOptimal: =>
validPositions = @SearchAll()
return false if #validPositions == 0
table.sort validPositions, (a, b) -> a\DistToSqr(@pos) < b\DistToSqr(@pos)
return validPositions[1]
SearchAll: =>
output = {}
table.insert(output, @pos) if @check(@pos)
for radius = 1, @radius
for x = -radius, radius
pos = @pos + Vector(x * @step, radius * @step, 0)
table.insert(output, pos) if @check(pos)
pos = @pos + Vector(x * @step, -radius * @step, 0)
table.insert(output, pos) if @check(pos)
for y = -radius, radius
pos = @pos + Vector(radius * @step, y * @step, 0)
table.insert(output, pos) if @check(pos)
pos = @pos + Vector(-radius * @step, y * @step, 0)
table.insert(output, pos) if @check(pos)
return output
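-- Hedged usage sketch (not part of the original file), assuming it runs inside
-- Garry's Mod where Vector(), players and entities exist; `ply` and `ent` are
-- hypothetical. Search! returns a clear position or false.
finder = DLib.Freespace ply\GetPos(), 25, 10
if pos = finder\Search!
  ent\SetPos pos
else
  print 'no free space found nearby'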
| 28.369427 | 92 | 0.63965 |
35815cae9c825a44da071e67bdba848bcfeb0cbf | 2,351 | import map, deepcpy from require'opeth.common.utils'
Debuginfo = require'opeth.opeth.cmd.debuginfo'
print_moddiffgen = (optfn, optname) -> (fnblock) ->
fnblock.optdebug\start_rec!
optfn fnblock
fnblock.optdebug\print_modified optname
opt_names = {
{
name:"unreachable blocks removal"
description: "remove all the blocks which are unreachable for the top"
}
{
name: "constant fold"
description: "evaluate some operations beforehand"
}
{
name: "constant propagation"
description: "replace `MOVE` instruction with the another"
}
{
name:"dead-code elimination"
description: "eliminate the instructions which aren't needed"
}
{
name:"function inlining"
description: "expand a funcion call with the function's instructions"
}
}
unreachable_remove = print_moddiffgen require'opeth.opeth.unreachable_remove', opt_names[1].name
cst_fold = print_moddiffgen require'opeth.opeth.cst_fold', opt_names[2].name
cst_prop = print_moddiffgen require'opeth.opeth.cst_prop', opt_names[3].name
dead_elim = print_moddiffgen require'opeth.opeth.dead_elim', opt_names[4].name
func_inline = print_moddiffgen require'opeth.opeth.func_inline', opt_names[5].name
unused_remove = print_moddiffgen require'opeth.opeth.unused_remove', "unused resources removal"
opt_tbl = {
unreachable_remove
(=> func_inline @ if #@prototype > 0)
cst_fold
cst_prop
dead_elim
mask: (mask) =>
newtbl = deepcpy @
newtbl[i] = (=>) for i in *mask
newtbl
}
optimizer = (fnblock, mask, verbose) ->
unless fnblock.optdebug
fnblock.optdebug = Debuginfo 0, 0, nil, verbose
else fnblock.optdebug\reset_modified!
map (=> @ fnblock), opt_tbl\mask mask
for pi = 1, #fnblock.prototype
debuginfo = Debuginfo fnblock.optdebug.level + 1, pi, fnblock.optdebug\fmt!, verbose
fnblock.prototype[pi].optdebug = debuginfo
optimizer fnblock.prototype[pi], mask, verbose
optimizer fnblock, mask if fnblock.optdebug.modified > 0
recursive_clean = (fnblock, verbose) ->
unused_remove fnblock
for pi = 1, #fnblock.prototype
debuginfo = Debuginfo fnblock.optdebug.level + 1, pi, fnblock.optdebug\fmt!, verbose
fnblock.prototype[pi].optdebug = debuginfo
recursive_clean fnblock.prototype[pi], verbose
setmetatable {:opt_names},
__call: (fnblock, mask = {}, verbose) =>
optimizer fnblock, mask, verbose
recursive_clean fnblock, verbose
fnblock
| 29.759494 | 96 | 0.753722 |
57ddd904286e6b267b4073d592b8e97d0f1cd159 | 3,381 | html = require "lapis.html"
tag_classes = (tags) ->
if tags and #tags > 0
table.concat(["tagged-" .. t for t in *tags], " ")
else
"tagged-none"
class Materials extends html.Widget
content: =>
if @nav
@content_for "header", ->
render "views.nav"
if @footer
@content_for "footer", ->
render "views.footer"
div { class: "parallax-heading", ["data-0"]: "background-position: 50% -30px",
["data-top-bottom"]: "background-position: 50% -150px" }, ->
h1 @m.heading
section id: "resource-filters", ->
div id: "resource-filters-content", ->
span id: "resource-filter-label", @m.filtering.label
div class: "resource-filter-tags", ->
for tag, c in pairs @m.tags
a href: "#", class: "tag resource-filter-tag active", ["data-tag"]: tag, c.title
a id: "resource-filter-reset", href: "#", @m.filtering['reset-button']
section class: "content-body", ->
div class: "resources", ->
for edition in *@m.content
edition_tags = {}
for section in *edition.sections
for lecture in *section.lectures
if not lecture.tags
continue
for tag in *lecture.tags
edition_tags[tag] = true
edition_class = tag_classes [t for t, _ in pairs edition_tags]
h1 class: edition_class, edition.edition, ->
div class: "edition-info-container", ->
if edition.date
span class: "edition-info edition-date", edition.date
if edition.youtube
a class: "edition-info edition-yt-link", href: edition.youtube, @m['yt-link-text']
for i, section in ipairs edition.sections
h2 section.name
ul ->
for lecture in *section.lectures
cls = tag_classes lecture.tags
li class: cls, ->
if lecture.url
a href: lecture.url, lecture.title
else
span lecture.title
if lecture.tags
span class: "tags", ->
for tag in *lecture.tags
if @m.tags[tag] == nil
print "Invalid tag: #{tag}"
continue
a href: "#", ['data-tag']: tag, class: "resource-tag tag", title: @m.tags[tag].title, @m.tags[tag].slug
if lecture.resources
for resource in *lecture.resources
span " | "
a href: resource.url, resource.name
| 45.689189 | 151 | 0.404614 |
d9a630c6771fee6b5fa453344db9f7862ea625ea | 1,195 | ---------------------------------------------------------------------------
-- Environment ------------------------------------------------------------
---------------------------------------------------------------------------
import ipairs from _G
{ sigcheck:T } = require 'typecheck'
import ieach, ieachr, merge from require 'fn.table'
---------------------------------------------------------------------------
-- Implementation ---------------------------------------------------------
---------------------------------------------------------------------------
self =
_submods: nil
init = (submods, options) ->
for _, modname in ipairs submods
mod = require 'watchers.' .. modname
mod.init options[modname]
@[modname] = mod
@_submods = submods
start = () -> ieach @_submods, (n) -> @[n].start! if @[n].start
stop = () -> ieachr @_submods, (n) -> @[n].stop! if @[n].stop
---------------------------------------------------------------------------
-- Interface --------------------------------------------------------------
---------------------------------------------------------------------------
merge self, {
init: T 'table, table', init
:start, :stop
}
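-- Hedged usage sketch (not part of the original file): the require path and
-- the submodule names ('battery', 'wifi') are invented; each would need to
-- exist as watchers.<name> exposing init/start/stop.
watchers = require 'watchers'   -- assumed module path
watchers.init { 'battery', 'wifi' }, {
  battery: { interval: 60 }
  wifi: { interval: 30 }
}
watchers.start!
-- ... later
watchers.stop!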
| 34.142857 | 75 | 0.291213 |
3ce1b4a36aa8d1d4c97b964ee130d3043cbbc6c5 | 573 | {
order: 'cgstuedfm'
kinds: {
c: { group: 'neotags_ClassTag' },
g: { group: 'neotags_EnumTypeTag' },
u: { group: 'neotags_UnionTag' },
e: { group: 'neotags_EnumTag' },
s: { group: 'neotags_StructTag' },
m: {
group: 'neotags_MemberTag',
prefix: [[\%(\%(\>\|\]\|)\)\%(\.\|->\)\)\@5<=]],
},
f: {
group: 'neotags_FunctionTag'
suffix: [[\>\%(\s*(\)\@=]]
},
d: { group: 'neotags_PreProcTag' },
t: { group: 'neotags_TypeTag' },
}
}
| 27.285714 | 60 | 0.417103 |
c0fe2c1a179482f8d198526486a6768ff39e3233 | 3,391 | -- Copyright 2018 The Howl Developers
-- License: MIT (see LICENSE.md at the top-level directory of the distribution)
ffi = require 'ffi'
const_char_p = ffi.typeof('const unsigned char *')
{:max} = math
ffi_copy, ffi_string = ffi.copy, ffi.string
SEQ_LENS = ffi.new 'const int[256]', {
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,-1,-1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
2,2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,4,4,4,4,4,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,
}
REPLACEMENT_CHARACTER = "\xEF\xBF\xBD"
REPLACEMENT_SIZE = #REPLACEMENT_CHARACTER
char_arr = ffi.typeof 'char [?]'
uint_arr = ffi.typeof 'unsigned int [?]'
get_warts = (s, len = #s) ->
src = const_char_p s
w_size = 0
warts = nil
w_idx = 0
conts = 0
seq_start = nil
i = 0
mark = ->
if w_idx >= w_size - 2
old = warts
w_size = max 8192, w_size * 2
warts = uint_arr w_size
if old
ffi_copy warts, old, w_idx * ffi.sizeof('unsigned int')
pos = seq_start or i
warts[w_idx] = pos
w_idx += 1
i = pos
seq_start = nil
conts = 0
while i < len
b = src[i]
if b >= 128 -- non-ascii
if b < 192 -- continuation byte
if conts > 0
conts -= 1 -- ok continuation
if conts == 0
seq_start = nil -- end of seq
else
mark! -- unexpected continuation byte
else
-- should be a sequence start
s_len = SEQ_LENS[b]
if s_len < 0
mark! -- no, an illegal value
else
if conts > 0
mark! -- in the middle of seq already
else
-- new seq starting
seq_start = i
conts = s_len - 1
else -- ascii
if conts > 0
mark! -- expected continuation byte instead of ascii
elseif b == 0
mark! -- zero byte
i += 1
if seq_start -- broken at end
mark!
size_delta = w_idx * (#REPLACEMENT_CHARACTER - 1)
nlen = len + size_delta + 1 -- additional size for \0
warts, w_idx, nlen
clean = (s, len = #s) ->
src = const_char_p s
warts, wart_count, nlen = get_warts s, len
if wart_count == 0
return src, len, 0
-- create new valid string
dest = char_arr nlen
src_idx = 0
dest_idx = 0
for i = 0, wart_count - 1
at = warts[i]
diff = at - src_idx
if diff > 0 -- copy any content up until the wart
ffi_copy dest + dest_idx, src + src_idx, diff
dest_idx += diff
-- the replacement character
ffi_copy dest + dest_idx, REPLACEMENT_CHARACTER, REPLACEMENT_SIZE
dest_idx += REPLACEMENT_SIZE
src_idx = at + 1
diff = len - src_idx
if diff > 0 -- copy any content up until the end
ffi_copy dest + dest_idx, src + src_idx, diff
dest, nlen - 1, wart_count
clean_string = (s, len = #s) ->
ptr, len, wart_count = clean s, len
return s, 0 unless wart_count != 0
ffi_string(ptr, len), wart_count
is_valid = (s, len = #s) ->
_, wart_count, _ = get_warts const_char_p(s), len
wart_count == 0
:clean, :clean_string, :is_valid
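-- Hedged usage sketch (not part of the original module): clean_string returns
-- a copy where each invalid UTF-8 sequence is collapsed into U+FFFD plus the
-- number of replacements; is_valid reports whether any would be needed.
s = "ok\255broken"
print is_valid s            --> false (one invalid byte)
fixed, count = clean_string s
print count                 --> 1; the bad byte becomes U+FFFD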
| 26.700787 | 79 | 0.574167 |
b6a1260c7d71577f6715384f5dfd8aafca3a6158 | 930 | serpent = require "serialization.serpent"
M = {}
chunk, errormsg = love.filesystem.load("config.lua")
M.t = not errormsg and chunk() or {}
recent = M.t.recent or {}
M.t.recent = [recent[i] for i = 1, math.min(#recent, 10)]
width, height = if window = M.t.window
window.w, window.h
else
800, 600
flags = {
resizable: true
}
love.window.setMode(width, height, flags)
love.window.setTitle("pathfun editor")
M.save = =>
data = "return " .. serpent.block(@t, {comment:false})
love.filesystem.write("config.lua", data)
M.add_filename = (filename) =>
for i, entry in ipairs(@t.recent)
if filename == entry
table.remove(@t.recent, i)
break
table.insert(@t.recent, 1, filename)
@t.recent[11] = nil
@save()
M.clear_history = =>
@t.recent = {}
@save()
M.resize = (w, h) =>
@t.window = {:w, :h, fullscreen:{love.window.getFullscreen()}}
@save()
M.font_size = (size) =>
@t.font_size = size
@save()
return M
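-- Hedged usage sketch (not part of the original file); the require path is an
-- assumption, since the module only names the config.lua file it writes to.
config = require "editor_config"   -- hypothetical module name
config\add_filename "maps/level1.lua"
config\font_size 14
for filename in *config.t.recent
  print filename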
| 18.979592 | 63 | 0.645161 |
ff6035511b69b47ee208e7d202b1fc4230a16e16 | 2,211 | #!/this/is/ignored
a = 1 + 2* 3 / 6
a, bunch, go, here = another, world
func arg1, arg2, another, arg3
here, we = () ->, yeah
the, different = () -> approach; yeah
dad()
dad(lord)
hello(one,two)()
(5 + 5)(world)
fun(a)(b)
fun(a) b
fun(a) b, bad hello
hello world what are you doing here
what(the)[3243] world, yeck heck
hairy[hands][are](gross) okay okay[world]
(get[something] + 5)[years]
i,x = 200, 300
yeah = (1 + 5) * 3
yeah = ((1+5)*3)/2
yeah = ((1+5)*3)/2 + i % 100
whoa = (1+2) * (3+4) * (4+5)
->
if something
return 1,2,4
print "hello"
->
if hello
"heloo", "world"
else
no, way
-> 1,2,34
return 5 + () -> 4 + 2
return 5 + (() -> 4) + 2
print 5 + () ->
34
good nads
something 'else', "ya"
something'else'
something"else"
here(we)"go"[12123]
-- this runs
something =
test: 12323
what: -> print "hello world"
print something.test
frick = hello: "world"
argon =
num: 100
world: (self) ->
print self.num
return {
something: -> print "hi from something"
}
somethin: (self, str) ->
print "string is", str
return world: (a,b) -> print "sum", a + b
something.what()
argon\world().something()
argon\somethin"200".world(1,2)
x = -434
x = -hello world one two
hi = -"herfef"
x = -[x for x in x]
print "hello" if cool
print "nutjob"
if hello then 343
print "what" if cool else whack
arg = {...}
x = (...) ->
dump {...}
x = not true
y = not(5+5)
y = #"hello"
x = #{#{},#{1},#{1,2}}
hello, world
something\hello(what) a,b
something\hello what
something.hello\world a,b
something.hello\world(1,2,3) a,b
x = 1232
x += 10 + 3
j -= "hello"
y *= 2
y /= 100
m %= 2
hello ..= "world"
x = 0
(if ntype(v) == "fndef" then x += 1) for v in *values
hello =
something: world
if: "hello"
else: 3434
function: "okay"
good: 230203
5 + what wack
what whack + 5
5 - what wack
what whack - 5
x = hello - world - something
((something = with what
\cool 100) ->
print something)!
if something
03589
-- okay what about this
else
3434
if something
yeah
elseif "ymmm"
print "cool"
else
okay
-- test names containing keywords
x = notsomething
y = ifsomething
z = x and b
z = x andb
| 11.280612 | 53 | 0.584351 |
b983e3608fb865434d9b6bbb7d97f5a676dae2b2 | 2,104 | import match, gsub from string
import format_error, tree from require "moonscript/compile"
import string from require "moonscript/parse"
import logFatal from gmodproj.require "novacbn/gmodproj/lib/logging"
import LuaAsset from "novacbn/gmodproj-plugin-builtin/assets/LuaAsset"
-- ::PATTERN_HAS_IMPORTS -> pattern
-- Represents a pattern for checking if the MoonScript has import declarations
PATTERN_HAS_IMPORTS = "import"
-- ::PATTERN_EXTRACT_IMPORTS -> pattern
-- Represents a pattern to extract imports from a MoonScript for transformation
PATTERN_EXTRACT_IMPORTS = "(import[%s]+[%w_,%s]+[%s]+from[%s]+)(['\"][%w/%-_]+['\"])"
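-- e.g. (illustrative module path): `import insert from "novacbn/example/table"` is captured here and
-- rewritten by transformImports below into `import insert from dependency("novacbn/example/table")`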
-- MoonAsset::MoonAsset()
-- Represents a MoonScript asset
-- export
export MoonAsset = LuaAsset\extend {
-- MoonAsset::transformImports(string contents) -> string
-- Transforms all 'import X from "Y"' statements into 'import X from dependency("Y")'
--
transformImports: (contents) =>
        -- If the MoonScript has import statements, convert them
if match(contents, PATTERN_HAS_IMPORTS)
return gsub(contents, PATTERN_EXTRACT_IMPORTS, (importStatement, assetName) ->
-- Append the new source of import to the statement
return importStatement.."dependency(#{assetName})"
)
return contents
-- MoonAsset::preTransform(string contents, boolean isProduction) -> string
-- Transforms a MoonScript asset into Lua before dependency collection
--
preTransform: (contents, isProduction) =>
-- Transform the MoonScript string import statements
contents = @transformImports(contents)
-- Parse the script into an abstract syntax tree and assert for errors
syntaxTree, err = string(contents)
logFatal("Failed to parse asset '#{@assetName}': #{err}") unless syntaxTree
-- Compile the syntax tree into valid Lua code and again assert for errors
luaCode, err, pos = tree(syntaxTree)
logFatal("Failed to compile asset '#{@assetName}': #{format_error(err, pos, contents)}") unless luaCode
return luaCode
} | 42.08 | 111 | 0.699144 |
2ed75f7bb4d0d84d1a467bfc033ac546a379a319 | 9,664 |
--
-- Copyright (C) 2017-2019 DBot
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
-- in the Software without restriction, including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-- of the Software, and to permit persons to whom the Software is furnished to do so,
-- subject to the following conditions:
-- The above copyright notice and this permission notice shall be included in all copies
-- or substantial portions of the Software.
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
-- INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-- PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
-- FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-- OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-- DEALINGS IN THE SOFTWARE.
PPM2.ALLOW_TO_MODIFY_SCALE = CreateConVar('ppm2_sv_allow_resize', '1', {FCVAR_NOTIFY, FCVAR_REPLICATED}, 'Allow to resize ponies. Disables resizing completely (visual; mechanical)')
player_manager.AddValidModel('pony', 'models/ppm/player_default_base_new.mdl')
list.Set('PlayerOptionsModel', 'pony', 'models/ppm/player_default_base_new.mdl')
player_manager.AddValidModel('ponynj', 'models/ppm/player_default_base_new_nj.mdl')
list.Set('PlayerOptionsModel', 'ponynj', 'models/ppm/player_default_base_new_nj.mdl')
player_manager.AddValidModel('ponynj_old', 'models/ppm/player_default_base_nj.mdl')
list.Set('PlayerOptionsModel', 'ponynj_old', 'models/ppm/player_default_base_nj.mdl')
player_manager.AddValidModel('pony_old', 'models/ppm/player_default_base.mdl')
list.Set('PlayerOptionsModel', 'pony_old', 'models/ppm/player_default_base.mdl')
player_manager.AddValidModel('pony_cppm', 'models/cppm/player_default_base.mdl')
list.Set('PlayerOptionsModel', 'pony_cppm', 'models/cppm/player_default_base.mdl')
player_manager.AddValidModel('ponynj_cppm', 'models/cppm/player_default_base_nj.mdl')
list.Set('PlayerOptionsModel', 'ponynj_cppm', 'models/cppm/player_default_base_nj.mdl')
player_manager.AddValidHands(model, 'models/cppm/pony_arms.mdl', 0, '') for _, model in ipairs {'pony', 'pony_cppm', 'ponynj', 'ponynj_cppm', 'pony_old'}
PPM2.MIN_WEIGHT = 0.7
PPM2.MAX_WEIGHT = 1.5
PPM2.MIN_SCALE = 0.5
PPM2.MAX_SCALE = 1.3
PPM2.PONY_HEIGHT_MODIFIER = 0.64
PPM2.PONY_HEIGHT_MODIFIER_DUCK = 1.12
PPM2.PONY_HEIGHT_MODIFIER_DUCK_HULL = 1
PPM2.MIN_NECK = 0.6
PPM2.MAX_NECK = 1.4
PPM2.MIN_LEGS = 0.6
PPM2.MAX_LEGS = 1.75
PPM2.MIN_SPINE = 0.8
PPM2.MAX_SPINE = 2
PPM2.PONY_JUMP_MODIFIER = 1.4
PPM2.PLAYER_VOFFSET = 64 * PPM2.PONY_HEIGHT_MODIFIER
PPM2.PLAYER_VOFFSET_DUCK = 28 * PPM2.PONY_HEIGHT_MODIFIER_DUCK
PPM2.PLAYER_VIEW_OFFSET = Vector(0, 0, PPM2.PLAYER_VOFFSET)
PPM2.PLAYER_VIEW_OFFSET_DUCK = Vector(0, 0, PPM2.PLAYER_VOFFSET_DUCK)
PPM2.PLAYER_VIEW_OFFSET_ORIGINAL = Vector(0, 0, 64)
PPM2.PLAYER_VIEW_OFFSET_DUCK_ORIGINAL = Vector(0, 0, 28)
PPM2.MIN_TAIL_SIZE = 0.6
PPM2.MAX_TAIL_SIZE = 1.7 -- i luv big tails
PPM2.MIN_IRIS = 0.4
PPM2.MAX_IRIS = 1.3
PPM2.MIN_HOLE = 0.1
PPM2.MAX_HOLE = .95
PPM2.MIN_HOLE_SHIFT = -0.5
PPM2.MAX_HOLE_SHIFT = 0.5
PPM2.MIN_PUPIL_SIZE = 0.2
PPM2.MAX_PUPIL_SIZE = 1
PPM2.MIN_EYE_ROTATION = -180
PPM2.MAX_EYE_ROTATION = 180
PPM2.AvaliableTails = {
'MAILCALL'
'FLOOFEH'
'ADVENTUROUS'
'SHOWBOAT'
'ASSERTIVE'
'BOLD'
'STUMPY'
'SPEEDSTER'
'EDGY'
'RADICAL'
'BOOKWORM'
'BUMPKIN'
'POOFEH'
'CURLY'
'NONE'
}
PPM2.AvaliableUpperManes = {
'MAILCALL', 'FLOOFEH', 'ADVENTUROUS', 'SHOWBOAT', 'ASSERTIVE'
'BOLD', 'STUMPY', 'SPEEDSTER', 'RADICAL', 'SPIKED'
'BOOKWORM', 'BUMPKIN', 'POOFEH', 'CURLY', 'INSTRUCTOR', 'NONE'
}
PPM2.AvaliableLowerManes = {
'MAILCALL', 'FLOOFEH', 'ADVENTUROUS', 'SHOWBOAT'
'ASSERTIVE', 'BOLD', 'STUMPY', 'HIPPIE', 'SPEEDSTER'
'BOOKWORM', 'BUMPKIN', 'CURLY', 'NONE'
}
PPM2.EyelashTypes = {
'Default', 'Double', 'Coy', 'Full', 'Mess', 'None'
}
PPM2.BodyDetails = {
'None', 'Leg gradient', 'Lines', 'Stripes', 'Head stripes'
'Freckles', 'Hooves big', 'Hooves small', 'Head layer'
'Hooves big rnd', 'Hooves small rnd', 'Spots 1', 'Robotic'
'DASH-E', 'Eye Scar', 'Eye Wound', 'Scars', 'MGS Socks'
'Sharp Hooves', 'Sharp Hooves 2', 'Muzzle', 'Eye Scar Left'
'Eye Scar Right'
}
PPM2.BodyDetailsEnum = {
'NONE', 'GRADIENT', 'LINES', 'STRIPES', 'HSTRIPES'
'FRECKLES', 'HOOF_BIG', 'HOOF_SMALL', 'LAYER'
'HOOF_BIG_ROUND', 'HOOF_SMALL_ROUND', 'SPOTS', 'ROBOTIC'
'DASH_E', 'EYE_SCAR', 'EYE_WOUND', 'SCARS', 'MGS_SOCKS'
'SHARP_HOOVES', 'SHARP_HOOVES_2', 'MUZZLE', 'EYE_SCAR_LEFT'
'EYE_SCAR_RIGHT'
}
PPM2.SocksTypes = {
'DEFAULT'
'GEOMETRIC1'
'GEOMETRIC2'
'GEOMETRIC3'
'GEOMETRIC4'
'GEOMETRIC5'
'GEOMETRIC6'
'GEOMETRIC7'
'GEOMETRIC8'
'DARK1'
'FLOWERS10'
'FLOWERS11'
'FLOWERS12'
'FLOWERS13'
'FLOWERS14'
'FLOWERS15'
'FLOWERS16'
'FLOWERS17'
'FLOWERS18'
'FLOWERS19'
'FLOWERS2'
'FLOWERS20'
'FLOWERS3'
'FLOWERS4'
'FLOWERS5'
'FLOWERS6'
'FLOWERS7'
'FLOWERS8'
'FLOWERS9'
'GREY1'
'GREY2'
'GREY3'
'HEARTS1'
'HEARTS2'
'SNOW1'
'WALLPAPER1'
'WALLPAPER2'
'WALLPAPER3'
}
PPM2.AvaliableLightwarps = {
'SFM_PONY'
'AIRBRUSH'
'HARD_LIGHT'
'PURPLE_SKY'
'SPAWN'
'TF2'
'TF2_CINEMATIC'
'TF2_CLASSIC'
'WELL_OILED'
}
PPM2.MAX_LIGHTWARP = #PPM2.AvaliableLightwarps - 1
PPM2.AvaliableLightwarpsPaths = ['models/ppm2/lightwarps/' .. mat\lower() for _, mat in ipairs PPM2.AvaliableLightwarps]
PPM2.DefaultCutiemarks = {
'8ball', 'dice', 'magichat',
'magichat02', 'record', 'microphone',
'bits', 'checkered', 'lumps',
'mirror', 'camera', 'magnifier',
'padlock', 'binaryfile', 'floppydisk',
'cube', 'bulb', 'battery',
'deskfan', 'flames', 'alarm',
'myon', 'beer', 'berryglass',
'roadsign', 'greentree', 'seasons',
'palette', 'palette02', 'palette03',
'lightningstone', 'partiallycloudy', 'thunderstorm',
'storm', 'stoppedwatch', 'twistedclock',
'surfboard', 'surfboard02', 'star',
'ussr', 'vault', 'anarchy',
'suit', 'deathscythe', 'shoop',
'smiley', 'dawsome', 'weegee'
'applej', 'applem', 'bon_bon', 'carrots', 'celestia', 'cloudy', 'custom01', 'custom02', 'derpy', 'firezap',
'fluttershy', 'fruits', 'island', 'lyra', 'mine', 'note', 'octavia', 'pankk', 'pinkie_pie', 'rainbow_dash',
'rarity', 'rosen', 'sflowers', 'storm', 'time', 'time2', 'trixie', 'twilight', 'waters', 'weer', 'zecora'
}
PPM2.AvaliableUpperManesNew = {
'MAILCALL', 'FLOOFEH', 'ADVENTUROUS', 'SHOWBOAT', 'ASSERTIVE'
'BOLD', 'STUMPY', 'SPEEDSTER', 'RADICAL', 'SPIKED'
'BOOKWORM', 'BUMPKIN', 'POOFEH', 'CURLY', 'INSTRUCTOR'
'TIMID', 'FILLY', 'MECHANIC', 'MOON', 'CLOUD'
'DRUNK', 'EMO'
'NONE'
}
PPM2.AvaliableLowerManesNew = {
'MAILCALL', 'FLOOFEH', 'ADVENTUROUS', 'SHOWBOAT'
'ASSERTIVE', 'BOLD', 'STUMPY', 'HIPPIE', 'SPEEDSTER'
'BOOKWORM', 'BUMPKIN', 'CURLY'
'TIMID', 'MOON', 'BUN', 'CLOUD', 'EMO'
'NONE'
}
PPM2.AvaliableTailsNew = {
'MAILCALL', 'FLOOFEH', 'ADVENTUROUS', 'SHOWBOAT'
'ASSERTIVE', 'BOLD', 'STUMPY', 'SPEEDSTER', 'EDGY'
'RADICAL', 'BOOKWORM', 'BUMPKIN', 'POOFEH', 'CURLY'
'NONE'
}
PPM2.AvaliableEyeTypes = {
'DEFAULT', 'APERTURE'
}
PPM2.AvaliableEyeReflections = {
'DEFAULT', 'CRYSTAL_FOAL', 'CRYSTAL'
'FOAL', 'MALE'
}
PPM2.AvaliablePonyWings = {'DEFAULT', 'BATPONY'}
PPM2.AvaliablePonySuits = {'NONE', 'ROYAL_GUARD', 'SHADOWBOLTS_FULL', 'SHADOWBOLTS_LIGHT', 'WONDERBOLTS_FULL', 'WONDERBOLTS_LIGHT', 'SPIDERMANE_LIGHT', 'SPIDERMANE_FULL'}
do
i = -1
for _, mark in ipairs PPM2.DefaultCutiemarks
i += 1
PPM2["CMARK_#{mark\upper()}"] = i
PPM2.MIN_EYELASHES = 0
PPM2.MAX_EYELASHES = #PPM2.EyelashTypes - 1
PPM2.EYELASHES_NONE = #PPM2.EyelashTypes - 1
PPM2.MIN_TAILS = 0
PPM2.MAX_TAILS = #PPM2.AvaliableTails - 1
PPM2.MIN_TAILS_NEW = 0
PPM2.MAX_TAILS_NEW = #PPM2.AvaliableTailsNew - 1
PPM2.MIN_UPPER_MANES = 0
PPM2.MAX_UPPER_MANES = #PPM2.AvaliableUpperManes - 1
PPM2.MIN_LOWER_MANES = 0
PPM2.MAX_LOWER_MANES = #PPM2.AvaliableLowerManes - 1
PPM2.MIN_UPPER_MANES_NEW = 0
PPM2.MAX_UPPER_MANES_NEW = #PPM2.AvaliableUpperManesNew - 1
PPM2.MIN_LOWER_MANES_NEW = 0
PPM2.MAX_LOWER_MANES_NEW = #PPM2.AvaliableLowerManesNew - 1
PPM2.MIN_EYE_TYPE = 0
PPM2.MAX_EYE_TYPE = #PPM2.AvaliableEyeTypes - 1
PPM2.MIN_DETAIL = 0
PPM2.MAX_DETAIL = #PPM2.BodyDetails - 1
PPM2.MIN_CMARK = 0
PPM2.MAX_CMARK = #PPM2.DefaultCutiemarks - 1
PPM2.MIN_SUIT = 0
PPM2.MAX_SUIT = #PPM2.AvaliablePonySuits - 1
PPM2.MIN_WINGS = 0
PPM2.MAX_WINGS = #PPM2.AvaliablePonyWings - 1
PPM2.MIN_SOCKS = 0
PPM2.MAX_SOCKS = #PPM2.SocksTypes - 1
PPM2.GENDER_FEMALE = 0
PPM2.GENDER_MALE = 1
PPM2.MAX_BODY_DETAILS = 8
PPM2.RACE_EARTH = 0
PPM2.RACE_PEGASUS = 1
PPM2.RACE_UNICORN = 2
PPM2.RACE_ALICORN = 3
PPM2.RACE_ENUMS = {'EARTH', 'PEGASUS', 'UNICORN', 'ALICORN'}
PPM2.RACE_HAS_HORN = 0x1
PPM2.RACE_HAS_WINGS = 0x2
PPM2.AGE_FILLY = 0
PPM2.AGE_ADULT = 1
PPM2.AGE_MATURE = 2
PPM2.AGE_ENUMS = {'FILLY', 'ADULT', 'MATURE'}
PPM2.MIN_DERP_STRENGTH = 0.1
PPM2.MAX_DERP_STRENGTH = 1.3
PPM2.MIN_MALE_BUFF = 0
PPM2.DEFAULT_MALE_BUFF = 1
PPM2.MAX_MALE_BUFF = 2
PPM2.MAX_TATTOOS = 25
PPM2.TATTOOS_REGISTRY = {
'NONE', 'ARROW', 'BLADES', 'CROSS', 'DIAMONDINNER', 'DIAMONDOUTER'
'DRACO', 'EVILHEART', 'HEARTWAVE', 'JUNCTION', 'NOTE', 'NOTE2'
'TATTOO1', 'TATTOO2', 'TATTOO3', 'TATTOO4', 'TATTOO5', 'TATTOO6', 'TATTOO7'
'WING', 'HEART'
}
PPM2.MIN_TATTOOS = 0
PPM2.MAX_TATTOOS = #PPM2.TATTOOS_REGISTRY - 1
PPM2.MIN_WING = 0.1
PPM2.MIN_WINGX = -10
PPM2.MIN_WINGY = -10
PPM2.MIN_WINGZ = -10
PPM2.MAX_WING = 2
PPM2.MAX_WINGX = 10
PPM2.MAX_WINGY = 10
PPM2.MAX_WINGZ = 10
| 27.376771 | 181 | 0.719888 |
b35fe1b353e1d2717a0be20f5364d6cc92f4c6b5 | 3,366 | import getenv from os
import arch, os from jit
import join from require "path"
import isAffirmative from "novacbn/gmodproj/lib/utilities/string"
-- ::userHome -> string
-- Represents the home folder for application data of the user
--
userHome = switch os
when "Windows" then getenv("APPDATA")
when "Linux" then getenv("HOME")
-- ::APPLICATION_CORE_VERSION -> table
-- Represents the current version of the application
-- export
export APPLICATION_CORE_VERSION = {0, 4, 0}
-- ::ENV_ALLOW_UNSAFE_SCRIPTING -> boolean
-- Represents an environment variable flag indicating whether gmodproj should allow unsafe scripting
-- export
export ENV_ALLOW_UNSAFE_SCRIPTING = isAffirmative(getenv("GMODPROJ_ALLOW_UNSAFE_SCRIPTING") or "y")
-- ::MAP_DEFAULT_PLUGINS -> table
-- Represents the default configuration of gmodproj plugins
-- export
export MAP_DEFAULT_PLUGINS = {
"gmodproj-plugin-builtin": {}
}
-- ::SYSTEM_OS_ARCH -> string
-- Represents the architecture of the operating system
-- export
export SYSTEM_OS_ARCH = arch
-- ::SYSTEM_OS_TYPE -> string
-- Represents the type of operating system currently running
-- export
export SYSTEM_OS_TYPE = os
-- ::SYSTEM_UNIX_LIKE -> string
-- Represents if the operating system is unix-like in its environment
-- export
export SYSTEM_UNIX_LIKE = os == "Linux" or os == "OSX"
-- ::PROJECT_PATH -> table
-- Represents a map of paths for stored project data
-- export
export PROJECT_PATH = with {}
-- PROJECT_PATH::home -> string
-- Represents the home directory of the current project
--
.home = process.cwd()
-- PROJECT_PATH::data -> string
-- Represents the home directory of gmodproj's project data
--
.data = join(.home, ".gmodproj")
-- PROJECT_PATH::bin -> string
-- Represents the directory of utility scripts shipped with the project directory
--
.bin = join(.home, "bin")
-- PROJECT_PATH::manifest -> string
-- Represents the project's metadata manifest
--
.manifest = join(.home, ".gmodmanifest")
-- PROJECT_PATH::packages -> string
-- Represents the project's package manifest
--
.packages = join(.home, ".gmodpackages")
-- PROJECT_PATH::cache -> string
    -- Represents the directory of previously compiled modules from the current project
--
.cache = join(.data, "cache")
-- PROJECT_PATH::logs -> string
-- Represents the directory of log files from actions previously taken for the current project
--
.logs = join(.data, "logs")
-- PROJECT_PATH::plugins -> string
-- Represents the directory of project installed plugin packages
--
.plugins = join(.data, "plugins")
-- ::USER_PATH -> table
-- Represents a map of paths for stored user data
-- export
export USER_PATH = with {}
-- USER_PATH::data -> string
-- Represents the home directory of gmodproj's user data
--
.data = join(userHome, ".gmodproj")
-- USER_PATH::applications -> string
-- Represents the globally installed command line applications
--
    .applications = join(.data, "applications")
-- USER_PATH::cache -> string
-- Represents the directory of previously downloaded packages
--
    .cache = join(.data, "cache")
-- USER_PATH::plugins -> string
-- Represents the directory of globally installed plugin packages
--
    .plugins = join(.data, "plugins") | 29.787611 | 99 | 0.692216 |
fec9ea4c8f3116abd5c73965aeaa6c3efe03e0b6 | 218 | config = require "lapis.config"
config { "development", "test", "production" }, ->
port 8080
num_workers 1
worker_connections 7
code_cache "off"
postgresql_url "postgres://lamutib:[email protected]/lamutib"
| 21.8 | 62 | 0.711009 |
86b5ebc8b33cd63c264169d0a4a88ce5521c13d5 | 609 | table = require "table"
math = require "math"
merge = (left, right, cmp) ->
result = {}
while (#left > 0) and (#right > 0)
if cmp(left[1], right[1])
table.insert result, table.remove left, 1
else
table.insert result, table.remove right, 1
while #left > 0 do table.insert result, table.remove left, 1
while #right > 0 do table.insert result, table.remove right, 1
result
merge_sort = (tbl, cmp) ->
return tbl if #tbl < 2
middle = math.ceil(#tbl / 2)
merge merge_sort([tbl[i] for i = 1, middle], cmp), merge_sort([tbl[i] for i = middle + 1, #tbl], cmp), cmp
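-- usage sketch (illustrative values):
-- merge_sort {5, 2, 8, 1}, (a, b) -> a < b
-- gives {1, 2, 5, 8}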
{:merge_sort}
| 29 | 108 | 0.632184 |
744b3b6cf36ec0d5ce11ab55e3f2343f5126be0b | 8,201 |
import with_query_fn from require "spec.helpers"
db = require "lapis.nginx.postgres"
schema = require "lapis.db.schema"
value_table = { hello: "world", age: 34 }
tests = {
-- lapis.nginx.postgres
{
-> db.escape_identifier "dad"
'"dad"'
}
{
-> db.escape_identifier "select"
'"select"'
}
{
-> db.escape_identifier 'love"fish'
'"love""fish"'
}
{
-> db.escape_identifier db.raw "hello(world)"
"hello(world)"
}
{
-> db.escape_literal 3434
"3434"
}
{
-> db.escape_literal "cat's soft fur"
"'cat''s soft fur'"
}
{
-> db.interpolate_query "select * from cool where hello = ?", "world"
"select * from cool where hello = 'world'"
}
{
-> db.encode_values(value_table)
[[("hello", "age") VALUES ('world', 34)]]
[[("age", "hello") VALUES (34, 'world')]]
}
{
-> db.encode_assigns(value_table)
[["hello" = 'world', "age" = 34]]
[["age" = 34, "hello" = 'world']]
}
{
-> db.encode_assigns thing: db.NULL
[["thing" = NULL]]
}
{
-> db.encode_clause thing: db.NULL
[["thing" IS NULL]]
}
{
-> db.interpolate_query "update x set x = ?", db.raw"y + 1"
"update x set x = y + 1"
}
{
-> db.select "* from things where id = ?", "cool days"
[[SELECT * from things where id = 'cool days']]
}
{
-> db.insert "cats", age: 123, name: "catter"
[[INSERT INTO "cats" ("name", "age") VALUES ('catter', 123)]]
[[INSERT INTO "cats" ("age", "name") VALUES (123, 'catter')]]
}
{
-> db.update "cats", { age: db.raw"age - 10" }, "name = ?", "catter"
[[UPDATE "cats" SET "age" = age - 10 WHERE name = 'catter']]
}
{
-> db.update "cats", { age: db.raw"age - 10" }, { name: db.NULL }
[[UPDATE "cats" SET "age" = age - 10 WHERE "name" IS NULL]]
}
{
-> db.update "cats", { age: db.NULL }, { name: db.NULL }
[[UPDATE "cats" SET "age" = NULL WHERE "name" IS NULL]]
}
{
-> db.update "cats", { color: "red" }, { weight: 1200, length: 392 }
[[UPDATE "cats" SET "color" = 'red' WHERE "weight" = 1200 AND "length" = 392]]
[[UPDATE "cats" SET "color" = 'red' WHERE "length" = 392 AND "weight" = 1200]]
}
{
-> db.delete "cats"
[[DELETE FROM "cats"]]
}
{
-> db.delete "cats", "name = ?", "rump"
[[DELETE FROM "cats" WHERE name = 'rump']]
}
{
-> db.delete "cats", name: "rump"
[[DELETE FROM "cats" WHERE "name" = 'rump']]
}
{
-> db.delete "cats", name: "rump", dad: "duck"
[[DELETE FROM "cats" WHERE "name" = 'rump' AND "dad" = 'duck']]
[[DELETE FROM "cats" WHERE "dad" = 'duck' AND "name" = 'rump']]
}
{
-> db.insert "cats", { hungry: true }
[[INSERT INTO "cats" ("hungry") VALUES (TRUE)]]
}
{
-> db.insert "cats", { age: 123, name: "catter" }, "age"
[[INSERT INTO "cats" ("name", "age") VALUES ('catter', 123) RETURNING "age"]]
[[INSERT INTO "cats" ("age", "name") VALUES (123, 'catter') RETURNING "age"]]
}
{
-> db.insert "cats", { age: 123, name: "catter" }, "age", "name"
[[INSERT INTO "cats" ("name", "age") VALUES ('catter', 123) RETURNING "age", "name"]]
[[INSERT INTO "cats" ("age", "name") VALUES (123, 'catter') RETURNING "age", "name"]]
}
-- lapis.db.schema
{
-> schema.add_column "hello", "dads", schema.types.integer
[[ALTER TABLE "hello" ADD COLUMN "dads" integer NOT NULL DEFAULT 0]]
}
{
-> schema.rename_column "hello", "dads", "cats"
[[ALTER TABLE "hello" RENAME COLUMN "dads" TO "cats"]]
}
{
-> schema.drop_column "hello", "cats"
[[ALTER TABLE "hello" DROP COLUMN "cats"]]
}
{
-> schema.rename_table "hello", "world"
[[ALTER TABLE "hello" RENAME TO "world"]]
}
{
-> tostring schema.types.integer
"integer NOT NULL DEFAULT 0"
}
{
-> tostring schema.types.integer null: true
"integer DEFAULT 0"
}
{
-> tostring schema.types.integer null: true, default: 100, unique: true
"integer DEFAULT 100 UNIQUE"
}
{
-> tostring schema.types.serial
"serial NOT NULL"
}
{
-> tostring schema.types.time
"timestamp NOT NULL"
}
{
-> tostring schema.types.time timezone: true
"timestamp with time zone NOT NULL"
}
{
->
import foreign_key, boolean, varchar, text from schema.types
schema.create_table "user_data", {
{"user_id", foreign_key}
{"email_verified", boolean}
{"password_reset_token", varchar null: true}
{"data", text}
"PRIMARY KEY (user_id)"
}
[[CREATE TABLE IF NOT EXISTS "user_data" (
"user_id" integer NOT NULL,
"email_verified" boolean NOT NULL DEFAULT FALSE,
"password_reset_token" character varying(255),
"data" text NOT NULL,
PRIMARY KEY (user_id)
);]]
}
{
-> schema.drop_table "user_data"
[[DROP TABLE IF EXISTS "user_data";]]
}
{
-> schema.drop_index "user_data", "one", "two", "three"
[[DROP INDEX IF EXISTS "user_data_one_two_three_idx"]]
}
{
-> db.parse_clause ""
{}
}
{
-> db.parse_clause "where something = TRUE"
{
where: "something = TRUE"
}
}
{
-> db.parse_clause "where something = TRUE order by things asc"
{
where: "something = TRUE "
order: "things asc"
}
}
{
-> db.parse_clause "where something = 'order by cool' having yeah order by \"limit\" asc"
{
having: "yeah "
where: "something = 'order by cool' "
order: '"limit" asc'
}
}
{
-> db.parse_clause "where not exists(select 1 from things limit 100)"
{
where: "not exists(select 1 from things limit 100)"
}
}
{
-> db.parse_clause "order by color asc"
{
order: "color asc"
}
}
{
-> db.parse_clause "ORDER BY color asc"
{
order: "color asc"
}
}
{
-> db.parse_clause "group BY height"
{
group: "height"
}
}
{
-> db.parse_clause "where x = limitx 100"
{
where: "x = limitx 100"
}
}
{
-> db.parse_clause "join dads on color = blue where hello limit 10"
{
limit: "10"
where: "hello "
join: {
{"join", " dads on color = blue "}
}
}
}
{
-> db.parse_clause "inner join dads on color = blue left outer join hello world where foo"
{
where: "foo"
join: {
{"inner join", " dads on color = blue "}
{"left outer join", " hello world "}
}
}
}
{
-> schema.gen_index_name "hello", "world"
"hello_world_idx"
}
{
-> schema.gen_index_name "yes", "please", db.raw "upper(dad)"
"yes_please_upper_dad_idx"
}
{
-> db.encode_case("x", { a: "b" })
[[CASE x
WHEN 'a' THEN 'b'
END]]
}
{
-> db.encode_case("x", { a: "b", foo: true })
[[CASE x
WHEN 'a' THEN 'b'
WHEN 'foo' THEN TRUE
END]]
[[CASE x
WHEN 'foo' THEN TRUE
WHEN 'a' THEN 'b'
END]]
}
{
-> db.encode_case("x", { a: "b" }, false)
[[CASE x
WHEN 'a' THEN 'b'
ELSE FALSE
END]]
}
}
local old_query_fn
describe "lapis.nginx.postgres", ->
setup ->
old_query_fn = db.set_backend "raw", (q) -> q
teardown ->
db.set_backend "raw", old_query_fn
for group in *tests
it "should match", ->
output = group[1]!
if #group > 2
assert.one_of output, { unpack group, 2 }
else
assert.same group[2], output
it "should create index", ->
old_select = db.select
db.select = -> { { c: 0 } }
input = schema.create_index "user_data", "one", "two"
assert.same input, [[CREATE INDEX "user_data_one_two_idx" ON "user_data" ("one", "two");]]
db.select = old_select
it "should create index with expression", ->
old_select = db.select
db.select = -> { { c: 0 } }
input = schema.create_index "user_data", db.raw("lower(name)"), "height"
assert.same input, [[CREATE INDEX "user_data_lower_name_height_idx" ON "user_data" (lower(name), "height");]]
db.select = old_select
  it "should not create duplicate index", ->
old_select = db.select
db.select = -> { { c: 1 } }
input = schema.create_index "user_data", "one", "two"
assert.same input, nil
db.select = old_select
| 20.974425 | 113 | 0.552737 |
5b1b989b83979ce38dcdb12a18a9dcc737bc8061 | 245 | GM.Version = "1.0"
GM.Name = "Space Wars"
GM.Author = "Emperor Penguin Protector"
DeriveGamemode "sandbox"
DEFINE_BASECLASS "gamemode_sandbox"
GM.Sandbox = BaseClass
AddCSLuaFile "cl_init.lua"
AddCSLuaFile "shared.lua"
include "shared.lua" | 18.846154 | 39 | 0.763265 |
290984f435b8c868bd0be9a2e6f4732807b15909 | 4,577 | -- Copyright 2012-2015 The Howl Developers
-- License: MIT (see LICENSE.md at the top-level directory of the distribution)
import app, Buffer, command, interact, mode from howl
import BufferPopup from howl.ui
command.register
name: 'buffer-search-forward',
description: 'Starts an interactive forward search'
input: ->
if interact.forward_search!
return true
app.editor.searcher\cancel!
handler: -> app.editor.searcher\commit!
command.register
name: 'buffer-search-backward',
description: 'Starts an interactive backward search'
input: ->
if interact.backward_search!
return true
app.editor.searcher\cancel!
handler: -> app.editor.searcher\commit!
command.register
name: 'buffer-search-line',
description: 'Starts an interactive line search'
input: ->
if interact.search_line!
return true
app.editor.searcher\cancel!
handler: -> app.editor.searcher\commit!
command.register
name: 'buffer-search-word-forward',
  description: 'Jumps to next occurrence of word at cursor'
input: ->
app.window.command_line\write_spillover app.editor.current_context.word.text
if interact.forward_search_word!
return true
app.editor.searcher\cancel!
handler: -> app.editor.searcher\commit!
command.register
name: 'buffer-search-word-backward',
  description: 'Jumps to previous occurrence of word at cursor'
input: ->
app.window.command_line\write_spillover app.editor.current_context.word.text
if interact.backward_search_word!
return true
app.editor.searcher\cancel!
handler: -> app.editor.searcher\commit!
command.register
name: 'buffer-repeat-search',
description: 'Repeats the last search'
handler: -> app.editor.searcher\repeat_last!
command.register
name: 'buffer-replace'
description: 'Replaces text (within selection or globally)'
input: ->
buffer = app.editor.buffer
chunk = app.editor.active_chunk
replacement = interact.get_replacement
title: 'Preview replacements for ' .. buffer.title
editor: app.editor
return replacement if replacement
log.info "Cancelled - buffer untouched"
handler: (replacement) ->
if replacement.text
buffer = app.editor.buffer
app.editor\with_position_restored ->
buffer\as_one_undo ->
buffer.text = replacement.text
log.info "Replaced #{replacement.num_replaced} instances"
if replacement.cursor_pos
app.editor.cursor.pos = replacement.cursor_pos
if replacement.line_at_top
app.editor.line_at_top = replacement.line_at_top
command.register
name: 'buffer-replace-regex',
description: 'Replaces text using regular expressions (within selection or globally)'
input: ->
buffer = app.editor.buffer
chunk = app.editor.active_chunk
replacement = interact.get_replacement_regex
title: 'Preview replacements for ' .. buffer.title
editor: app.editor
return replacement if replacement
log.info "Cancelled - buffer untouched"
handler: (replacement) ->
buffer = app.editor.buffer
if replacement.text
app.editor\with_position_restored ->
buffer\as_one_undo ->
buffer.text = replacement.text
log.info "Replaced #{replacement.num_replaced} instances"
if replacement.cursor_pos
app.editor.cursor.pos = replacement.cursor_pos
if replacement.line_at_top
app.editor.line_at_top = replacement.line_at_top
command.register
name: 'editor-paste..',
description: 'Pastes a selected clip from the clipboard at the current position'
input: interact.select_clipboard_item
handler: (clip) -> app.editor\paste :clip
command.register
name: 'show-doc-at-cursor',
description: 'Shows documentation for symbol at cursor, if available'
handler: ->
m = app.editor.buffer.mode
ctx = app.editor.current_context
if m.api and m.resolve_type
node = m.api
path, parts = m\resolve_type ctx
if path
node = node[k] for k in *parts when node
node = node[ctx.word.text] if node
if node and node.description
buf = Buffer mode.by_name('markdown')
buf.text = node.description
app.editor\show_popup BufferPopup buf
return
log.info "No documentation found for '#{ctx.word}'"
command.register
name: 'buffer-mode',
description: 'Sets a specified mode for the current buffer'
input: interact.select_mode
handler: (selected_mode) ->
buffer = app.editor.buffer
buffer.mode = selected_mode
log.info "Forced mode '#{selected_mode.name}' for buffer '#{buffer}'"
| 31.349315 | 87 | 0.715316 |
698e1d6601ad8a80db91a0f45509f53b30c9fee3 | 1,554 | export class Entity extends Rectangle
new: (x = 0, y = 0, width = 0, height = 0) =>
super x, y, width, height
@angle = 0
@offset = Rectangle!
@visible = true
@active = true
@solid = true
@exists = true
@center = Point @x + @width / 2, @y + @height / 2
@previous = Point @x, @y
@velocity = Vector!
@pvelocity = Vector!
@acceleration = Vector!
@max_velocity = Vector math.huge, math.huge, math.huge
@drag = Vector!
update: =>
@previous.x = @x
@previous.y = @y
@pvelocity.x = @velocity.x
@pvelocity.y = @velocity.y
if not @velocity\is_zero! or not @acceleration\is_zero!
@velocity.x = @calculate_velocity @velocity.x, @acceleration.x, @drag.x, @max_velocity.x
@velocity.y = @calculate_velocity @velocity.y, @acceleration.y, @drag.y, @max_velocity.y
@velocity.a = @calculate_velocity @velocity.a, @acceleration.a, @drag.a, @max_velocity.a
@x += (@velocity.x * axel.dt) + ((@pvelocity.x - @velocity.x) * axel.dt / 2)
@y += (@velocity.y * axel.dt) + ((@pvelocity.y - @velocity.y) * axel.dt / 2)
@angle += @velocity.a * axel.dt
@center.x = @x + @width / 2
@center.y = @y + @height / 2
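  -- Integrates a single velocity component: apply acceleration when present, otherwise decay
  -- the component toward zero by drag, then clamp the result to [-max, max]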
calculate_velocity: (velocity, acceleration, drag, max) =>
if acceleration != 0
velocity += acceleration * axel.dt
else
drag_effect = drag * axel.dt
if velocity - drag_effect > 0
velocity -= drag_effect
elseif velocity + drag_effect < 0
velocity += drag_effect
else
velocity = 0
if velocity > max
velocity = max
elseif velocity < -max
velocity = -max
velocity | 27.263158 | 91 | 0.633205 |
3ef92ee16b91d0941d1fdfd4541ba45ab33e3a83 | 2,328 | uv = require "uv"
{:capture, :execute, :execute_sync} = require "command"
blocks = {}
named_blocks = {}
data_block = (block) ->
color = block.color
label = block.label
name = block.name
separator_block_width = block.separator_block_width
full_text = "#{label}#{block.text}"
{:color, :label, :full_text, :name, :separator_block_width, short_text: block.text}
new_block = (name, setup) ->
block_conf = {label: "", color: "#FFFFFF", text: "", :name, separator_block_width: 15}
block = setmetatable block_conf, __call: (block, name, setup) ->
env = setmetatable {
:execute
:execute_sync
:capture
label: (lbl) ->
if lbl
block_conf.label = lbl
block_conf.label
color: (clr) ->
if clr
block_conf.color = clr
block_conf.color
text: (txt) ->
if txt
block_conf.text = txt
block_conf.text
separator_block_width: (sbw) ->
sbw = assert tonumber(sbw), "separator_block_width must be a number"
if sbw
block_conf.separator_block_width = sbw
block_conf.separator_block_width
}, __index: _G
event_handler = (f) ->
setfenv f, env
-> coroutine.wrap(f)!
block_conf.updater = -> nil
block_conf.left_click = -> nil
block_conf.middle_click = -> nil
block_conf.right_click = -> nil
block_conf.scroll_up = -> nil
block_conf.scroll_down = -> nil
block_conf._interval = nil
setup_env = setmetatable {
interval: (iv) -> block_conf._interval = iv,
on_update: (f) -> block_conf.updater = event_handler(f)
on_left_click: (f) -> block_conf.left_click = event_handler(f)
on_middle_click: (f) -> block_conf.middle_click = event_handler(f)
on_right_click: (f) -> block_conf.right_click = event_handler(f)
on_scroll_up: (f) -> block_conf.scroll_up = event_handler(f)
on_scroll_down: (f) -> block_conf.scroll_down = event_handler(f)
}, __index: env
setfenv setup, setup_env
setup!
if block_conf._interval
t = uv.new_timer!
uv.timer_start t, block_conf._interval, block_conf._interval, block.updater
block_conf.updater!
blocks[#blocks + 1] = block
named_blocks[name] = block
block name, setup
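-- usage sketch via the exported `block` helper (hypothetical clock block):
-- block "clock", ->
--   interval 1000
--   on_update ->
--     text os.date "%H:%M"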
block: new_block, :named_blocks, :blocks, :data_block
| 30.631579 | 88 | 0.643471 |
0f3bad221bb4d83638e73fa53041d8c0106629c5 | 231 | export modinfo = {
type: "command"
desc: "Night"
alias: {"night"}
func: (Msg,Speaker) ->
light = Service"Lighting"
light.TimeOfDay = "24:00:00"
Output2("Set time to night",{Colors.Green})
loggit("Set time to night")
} | 23.1 | 45 | 0.645022 |
f54cccfee1db8d1edf69bdf1c830935435116928 | 2,327 | -- Support LuaJIT 'bit' library
if bit
exports.arshift = bit.arshift
exports.band = bit.band
exports.bnot = bit.bnot
exports.bor = bit.bor
exports.bxor = bit.bxor
exports.lshift = bit.lshift
exports.rol = bit.rol
exports.ror = bit.ror
exports.rshift = bit.rshift
-- Support 'bit' Lua 5.2 standard library
elseif bit32
exports.arshift = bit32.arshift
exports.band = bit32.band
exports.bnot = bit32.bnot
exports.bor = bit32.bor
exports.bxor = bit32.bxor
exports.lshift = bit32.lshift
exports.rol = bit32.lrotate
exports.ror = bit32.rrotate
exports.rshift = bit32.rshift
else error("could not find 'bit' LuaJIT or 'bit32' Lua 5.2 libraries")
import
arshift, band, bor,
lshift, rshift from exports
-- ::byteFromInt8(number value) -> number
-- Packs the 8-bit integer into a single byte
-- export
export byteFromInt8 = (value) ->
return band(value, 255)
-- ::bytesFromInt16(number value) -> number, number
-- Packs the 16-bit integer into BigEndian-format two bytes
-- export
export bytesFromInt16 = (value) ->
return band(rshift(value, 8), 255), band(value, 255)
-- ::bytesFromInt32(number value) -> number, number, number, number
-- Packs the 32-bit integer into BigEndian-format four bytes
-- export
export bytesFromInt32 = (value) ->
return band(rshift(value, 24), 255), band(rshift(value, 16), 255), band(rshift(value, 8), 255), band(value, 255)
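-- round-trip sketch (illustrative value):
-- b1, b2, b3, b4 = bytesFromInt32 0x12345678
-- assert int32FromBytes(b1, b2, b3, b4) == 0x12345678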
-- int8FromByte(number byte) -> number
-- Unpacks a single byte into an 8-bit integer
-- export
export int8FromByte = (byte) ->
-- NOTE: this is here for the sake of completeness, nothing more
return byte
-- int16FromBytes(number byteOne, number byteTwo) -> number
-- Unpacks BigEndian-format two bytes into a 16-bit integer
-- export
export int16FromBytes = (byteOne, byteTwo) ->
return bor(
lshift(byteOne, 8),
byteTwo
)
-- int32FromBytes(number byteOne, number byteTwo, number byteThree, number byteFour) -> number
-- Unpacks BigEndian-format four bytes into a 32-bit integer
-- export
export int32FromBytes = (byteOne, byteTwo, byteThree, byteFour) ->
return bor(
lshift(byteOne, 24),
lshift(byteTwo, 16),
lshift(byteThree, 8),
byteFour
) | 31.445946 | 116 | 0.674259 |
0aa41d9fc1d503cb58c1bfea00e91e432d18a13f | 3,565 | buffet = require 'buffet'
import new from require 'buffet.resty'
describe 'is_closed(bf)', ->
it "should return true if buffet is closed with 'close' method", ->
bf = new 'deadbeef'
bf\close!
n, closed = nargs buffet.is_closed bf
assert.are.equal 1, n
assert.are.equal true, closed
it "should return true if buffet is closed with 'receive' method", ->
bf = new 'deadbeef'
bf\receive 1024
n, closed = nargs buffet.is_closed bf
assert.are.equal 1, n
assert.are.equal true, closed
it 'should return false if buffet is not closed', ->
bf = new 'deadbeef'
n, closed = nargs buffet.is_closed bf
assert.are.equal 1, n
assert.are.equal false, closed
it "should not check object type and return '_closed' field as is", ->
n, closed = nargs buffet.is_closed {_closed: 'foo'}
assert.are.equal 1, n
assert.are.equal 'foo', closed
describe 'get_iterator_error(bf)', ->
it 'should return nil if there was no error', ->
iterator = coroutine.wrap ->
coroutine.yield 'foo'
coroutine.yield 'bar'
coroutine.yield nil
coroutine.yield 'baz'
bf = new iterator
chunks = {}
while true
chunk, err = bf\receive 2
if err
assert.are.equal 'closed', err
break
table.insert chunks, chunk
assert.are.same {'fo', 'ob', 'ar'}, chunks
n, iter_err = nargs buffet.get_iterator_error bf
assert.are.equal 1, n
assert.is.nil iter_err
it 'should return error value if there was an error', ->
iterator = coroutine.wrap ->
coroutine.yield 'foo'
coroutine.yield 'bar'
coroutine.yield nil, 'some error'
coroutine.yield 'baz'
bf = new iterator
chunks = {}
while true
chunk, err = bf\receive 2
if err
assert.are.equal 'closed', err
break
table.insert chunks, chunk
assert.are.same {'fo', 'ob', 'ar'}, chunks
n, iter_err = nargs buffet.get_iterator_error bf
assert.are.equal 1, n
assert.are.equal 'some error', iter_err
it "should not check object type and return '_iterator_error' field as is", ->
n, closed = nargs buffet.get_iterator_error {_iterator_error: 'foo'}
assert.are.equal 1, n
assert.are.equal 'foo', closed
describe 'get_send_buffer(bf)', ->
it 'should return a reference to the send buffer table', ->
bf = new!
bf\send 'foo'
n, buffer = nargs buffet.get_send_buffer bf
assert.are.equal 1, n
assert.are.equal bf._send_buffer, buffer
it "should not check object type and return '_send_buffer' field as is", ->
n, buffer = nargs buffet.get_send_buffer {_send_buffer: 'foo'}
assert.are.equal 1, n
assert.are.equal 'foo', buffer
describe 'get_sent_data(bf)', ->
it 'should return concatenated data chunks from send buffer', ->
bf = new!
bf\send 'foo'
bf\send {'bar', 23, 'baz'}
n, data = nargs buffet.get_sent_data bf
assert.are.equal 1, n
assert.are.equal 'foobar23baz', data
it "should not check object type and return '_send_buffer' concatenated data", ->
n, buffer = nargs buffet.get_sent_data {_send_buffer: {'foo', 'bar'}}
assert.are.equal 1, n
assert.are.equal 'foobar', buffer
| 33.632075 | 85 | 0.58878 |
6c546f45c42cf7eb335bd618dcb229dcaa4f0b74 | 8,553 | http = require 'lapis.nginx.http'
stringy = require 'stringy'
sass = require 'sass'
import map, table_index from require 'lib.utils'
import from_json, to_json, trim from require 'lapis.util'
import aql, document_get, foxx_upgrade from require 'lib.arango'
--------------------------------------------------------------------------------
write_content = (filename, content)->
file = io.open(filename, 'w+')
io.output(file)
io.write(content)
io.close(file)
--------------------------------------------------------------------------------
read_file = (filename, mode='r')->
file = io.open(filename, mode)
io.input(file)
data = io.read('*all')
io.close(file)
data
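-- round-trip sketch (illustrative path):
-- write_content("/tmp/example.txt", "hello")
-- assert(read_file("/tmp/example.txt") == "hello")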
--------------------------------------------------------------------------------
install_service = (sub_domain, name)->
if name\match('^[%w_%-%d]+$') -- allow only [a-zA-Z0-9_-]+
path = "install_service/#{sub_domain}/#{name}"
os.execute("mkdir -p #{path}/APP/routes")
os.execute("mkdir #{path}/APP/scripts")
os.execute("mkdir #{path}/APP/tests")
os.execute("mkdir #{path}/APP/libs")
request = 'FOR api IN apis FILTER api.name == @name
LET routes = (FOR r IN api_routes FILTER r.api_id == api._key RETURN r)
LET scripts = (FOR s IN api_scripts FILTER s.api_id == api._key RETURN s)
LET tests = (FOR t IN api_tests FILTER t.api_id == api._key RETURN t)
LET libs = (FOR l IN api_libs FILTER l.api_id == api._key RETURN l)
RETURN { api, routes, scripts, tests, libs }'
api = aql("db_#{sub_domain}", request, { 'name': name })[1]
write_content("#{path}/APP/main.js", api.api.code)
write_content("#{path}/APP/package.json", api.api.package)
write_content("#{path}/APP/manifest.json", api.api.manifest)
for k, item in pairs api.routes
write_content("#{path}/APP/routes/#{item.name}.js", item.javascript)
for k, item in pairs api.tests
write_content("#{path}/APP/tests/#{item.name}.js", item.javascript)
for k, item in pairs api.libs
write_content("#{path}/APP/libs/#{item.name}.js", item.javascript)
for k, item in pairs api.scripts
write_content("#{path}/APP/scripts/#{item.name}.js", item.javascript)
os.execute("cd install_service/#{sub_domain}/#{name}/APP && export PATH='$PATH:/usr/local/bin' && yarn")
os.execute("cd install_service/#{sub_domain} && zip -rq #{name}.zip #{name}/")
os.execute("rm --recursive install_service/#{sub_domain}/#{name}")
foxx_upgrade(
"db_#{sub_domain}", name, read_file("install_service/#{sub_domain}/#{name}.zip")
)
--------------------------------------------------------------------------------
install_script = (sub_domain, name)->
if name\match('^[%w_%-%d]+$') -- allow only [a-zA-Z0-9_-]+
path = "scripts/#{sub_domain}/#{name}"
os.execute("mkdir -p #{path}")
request = 'FOR script IN scripts FILTER script.name == @name RETURN script'
script = aql("db_#{sub_domain}", request, { 'name': name })[1]
write_content("#{path}/package.json", script.package)
os.execute("export PATH='$PATH:/usr/local/bin' && cd #{path} && yarn")
write_content("#{path}/index.js", script.code)
--------------------------------------------------------------------------------
deploy_site = (sub_domain, settings)->
config = require('lapis.config').get!
db_config = require('lapis.config').get("db_#{config._name}")
path = "dump/#{sub_domain[1]}/"
home = from_json(settings.home)
deploy_to = stringy.split(settings.deploy_secret, '#')
request = 'FOR s IN settings LIMIT 1 RETURN s'
sub_domain_settings = aql(deploy_to[1], request)[1]
if deploy_to[2] == sub_domain_settings.secret
os.execute("mkdir -p #{path}")
command = "arangodump --collection layouts --collection partials --collection components --collection spas --collection redirections --collection datatypes --collection aqls --collection helpers --collection apis --collection api_libs --collection api_routes --collection api_scripts --collection api_tests --collection scripts --collection pages --collection folder_path --collection folders --collection scripts --server.database db_#{sub_domain} --server.username #{db_config.login} --server.password #{db_config.pass} --server.endpoint #{db_config.endpoint} --output-directory #{path} --overwrite true"
command ..= " --collection datasets" if home['deploy_datasets']
command ..= " --collection trads" if home['deploy_trads']
os.execute(command)
os.execute("arangorestore --server.database #{deploy_to[1]} --server.username #{db_config.login} --server.password #{db_config.pass} --server.endpoint #{db_config.endpoint} --input-directory #{path} --overwrite true")
os.execute("rm -Rf #{path}")
-- Restart scripts
-- scripts = aql(deploy_to[1], 'FOR script IN scripts RETURN script')
-- for k, item in pairs scripts
-- install_script(deploy_to[1], item.name)
-- Restart apis
apis = aql(deploy_to[1], 'FOR api IN apis RETURN api')
for k, item in pairs apis
install_service(deploy_to[1]\gsub('db_', ''), item.name)
--------------------------------------------------------------------------------
compile_riotjs = (sub_domain, name, id)->
if name\match('^[%w_%-%d]+$') -- allow only [a-zA-Z0-9_-]+
path = "compile_tag/#{sub_domain}/#{name}"
os.execute("mkdir -p #{path}")
tag = document_get("db_" .. sub_domain, id)
write_content("#{path}/#{name}.riot", tag.html)
command = "export PATH=\"$PATH;/usr/local/bin\" && riot --format umd #{path}/#{name}.riot --output #{path}/#{name}.js && terser --compress --mangle -o #{path}/#{name}.js #{path}/#{name}.js"
handle = io.popen(command)
result = handle\read('*a')
handle\close()
read_file("#{path}/#{name}.js")
--------------------------------------------------------------------------------
compile_tailwindcss = (sub_domain, layout_id, field)->
subdomain = 'db_' .. sub_domain
layout = document_get(subdomain, "layouts/" .. layout_id)
settings = aql(subdomain, 'FOR s IN settings LIMIT 1 RETURN s')[1]
home_settings = from_json(settings.home)
langs = stringy.split(settings.langs, ',')
path = "compile_tailwind/#{subdomain}/#{layout_id}"
os.execute("mkdir -p #{path}")
write_content("#{path}/#{layout_id}.css", sass.compile(layout[field], 'compressed'))
-- default config file
config_file = "module.exports = {
content: ['./*.html']
}"
-- check if we have defined a config file
if home_settings.tailwindcss_config
config_file = aql(
subdomain,
'FOR page IN partials FILTER page.slug == @slug RETURN page.html',
{ slug: home_settings.tailwindcss_config }
)[1]
write_content("#{path}/tailwind.config.js", config_file) if config_file
-- Layouts
layouts = aql(subdomain, 'FOR doc IN layouts RETURN { html: doc.html }')
for k, item in pairs layouts
write_content("#{path}/layout_#{k}.html", item.html)
-- Pages
pages = aql(subdomain, 'FOR doc IN pages RETURN { html: doc.html, raw_html: doc.raw_html }')
for k, item in pairs pages
for k2, lang in pairs langs
lang = stringy.strip(lang)
html = ""
if type(item['raw_html']) == 'table' and item['raw_html'][lang]
html = html .. item['raw_html'][lang]
if type(item['html']) == "table" and item['html'][lang] and item['html'][lang].html
html = html .. item['html'][lang].html
write_content("#{path}/page_#{k}_#{lang}.html", html)
-- Components
components = aql(subdomain, 'FOR doc IN components RETURN { html: doc.html }')
for k, item in pairs components
write_content("#{path}/component_#{k}.html", item.html)
-- Partials
partials = aql(subdomain, 'FOR doc IN partials RETURN { html: doc.html }')
for k, item in pairs partials
write_content("#{path}/partial_#{k}.html", item.html)
-- Widgets
partials = aql(subdomain, 'FOR doc IN widgets RETURN { html: doc.partial }')
for k, item in pairs partials
write_content("#{path}/widget_#{k}.html", item.html)
command = "cd #{path} && export PATH=\"$PATH;/usr/local/bin\" && NODE_ENV=production tailwindcss build -m -i #{layout_id}.css -o #{layout_id}_compiled.css"
handle = io.popen(command)
result = handle\read('*a')
handle\close()
data = read_file("#{path}/#{layout_id}_compiled.css")
os.execute("rm -Rf #{path}")
data
--------------------------------------------------------------------------------
-- expose methods
{ :install_service, :install_script, :deploy_site, :compile_riotjs,
:compile_tailwindcss, :write_content, :read_file } | 45.983871 | 610 | 0.614404 |
a4528721f9a3cdbc069764bb90882ffcf1b334ca | 9,097 | import style, theme, ActionBuffer from howl.ui
import Scintilla, Buffer, config from howl
describe 'style', ->
local sci, buffer
before_each ->
sci = Scintilla!
buffer = Buffer {}, sci
style.register_sci sci
it 'styles can be accessed using direct indexing', ->
t = styles: default: color: '#998877'
style.set_for_theme t
assert.equal style.default.color, t.styles.default.color
describe '.define(name, definition)', ->
it 'allows defining custom styles', ->
style.define 'custom', color: '#334455'
assert.equal style.custom.color, '#334455'
it 'automatically redefines the style in any existing sci', ->
style.define 'keyword', color: '#334455'
style.define 'custom', color: '#334455'
custom_number = style.number_for 'custom', buffer
keyword_number = style.number_for 'keyword', buffer
style.define 'keyword', color: '#665544'
style.define 'custom', color: '#776655'
keyword_fore = sci\style_get_fore keyword_number
assert.equal '#665544', keyword_fore
custom_fore = sci\style_get_fore custom_number
assert.equal '#776655', custom_fore
it 'allows specifying font size for a style as an offset spec from "font_size"', ->
style.define 'larger_style', font: size: 'larger'
style_number = style.number_for 'larger_style', buffer
font_size = sci\style_get_size style_number
assert.is_true font_size > config.font_size
it 'allows aliasing styles using a string as <definition>', ->
style.define 'target', color: '#beefed'
style.define 'alias', 'target'
style_number = style.number_for 'alias', buffer
assert.equal '#beefed', sci\style_get_fore style_number
it 'the actual style used is based upon the effective default style', ->
style.define 'default', background: '#112233'
style.define 'other_default', background: '#111111'
style.define 'custom', color: '#beefed'
sci2 = Scintilla!
buffer2 = Buffer {}, sci2
style.register_sci sci2, 'other_default'
style_number = style.number_for 'custom', buffer
assert.equal '#112233', sci\style_get_back style_number
style_number = style.number_for 'custom', buffer2
assert.equal '#111111', sci2\style_get_back style_number
it 'redefining a default style causes other styles to be rebased upon that', ->
style.define 'own_style', color: '#334455'
custom_number = style.number_for 'own_style', buffer
default_number = style.number_for 'default', buffer
style.define 'default', background: '#998877'
-- background should be changed..
custom_back = sci\style_get_back custom_number
assert.equal '#998877', custom_back
-- ..but custom color should still be intact
custom_fore = sci\style_get_fore custom_number
assert.equal '#334455', custom_fore
describe 'define_default(name, definition)', ->
it 'defines the style only if it is not already defined', ->
style.define_default 'preset', color: '#334455'
assert.equal style.preset.color, '#334455'
style.define_default 'preset', color: '#667788'
assert.equal style.preset.color, '#334455'
describe '.number_for(name, buffer [, base])', ->
it 'returns the assigned style number for name in sci', ->
assert.equal style.number_for('keyword'), 5 -- default keyword number
it 'automatically assigns a style number and defines the style in scis if necessary', ->
style.define 'my_style_a', color: '#334455'
style.define 'my_style_b', color: '#334455'
style_num = style.number_for 'my_style_a', buffer
set_fore = sci\style_get_fore style_num
assert.equal set_fore, '#334455'
assert.is_not.equal style.number_for('my_style_b', buffer), style_num
it 'remembers the style number used for a particular style', ->
style.define 'got_it', color: '#334455'
style_num = style.number_for 'got_it', buffer
style_num2 = style.number_for 'got_it', buffer
assert.equal style_num2, style_num
it 'raises an error if the number of styles are #exhausted', ->
for i = 1, 255 style.define 'my_style' .. i, color: '#334455'
assert.raises 'Out of style number', ->
for i = 1, 255 style.number_for 'my_style' .. i, buffer
it 'returns the default style number if the style is not defined', ->
assert.equal style.number_for('foo', {}), style.number_for('default', {})
it '.name_for(number, buffer, sci) returns the style name for number', ->
assert.equal style.name_for(5, {}), 'keyword' -- default keyword number
style.define 'whats_in_a_name', color: '#334455'
style_num = style.number_for 'whats_in_a_name', buffer
assert.equal style.name_for(style_num, buffer), 'whats_in_a_name'
describe '.register_sci(sci, default_style)', ->
it 'defines the default styles in the specified sci', ->
t = theme.current
t.styles.keyword = color: '#112233'
style.set_for_theme t
sci2 = Scintilla!
buffer2 = Buffer {}, sci2
number = style.number_for 'keyword', buffer2
old = sci2\style_get_fore number
style.register_sci sci2
new = sci2\style_get_fore number
assert.is_not.equal new, old
assert.equal new, t.styles.keyword.color
it 'allows specifying a different default style through <default_style>', ->
t = theme.current
t.styles.keyword = color: '#223344'
style.set_for_theme t
sci2 = Scintilla!
style.register_sci sci2, 'keyword'
def_fore = sci2\style_get_fore style.number_for 'default', {}
def_kfore = sci2\style_get_fore style.number_for 'keyword', {}
assert.equal t.styles.keyword.color, def_fore
it '.set_for_buffer(sci, buffer) initializes any previously used buffer styles', ->
sci2 = Scintilla!
style.register_sci sci2
style.define 'style_foo', color: '#334455'
prev_number = style.number_for 'style_foo', buffer
style.set_for_buffer sci2, buffer
defined_fore = sci2\style_get_fore prev_number
assert.equal defined_fore, '#334455'
new_number = style.number_for 'style_foo', buffer
assert.equal new_number, prev_number
it '.at_pos(buffer, pos) returns name and style definition at pos', ->
style.define 'stylish', color: '#101010'
buffer = ActionBuffer!
buffer\insert 'hƏllo', 1, 'keyword'
buffer\insert 'Bačon', 6, 'stylish'
name, def = style.at_pos(buffer, 5)
assert.equal name, 'keyword'
assert.same def, style.keyword
name, def = style.at_pos(buffer, 6)
assert.equal name, 'stylish'
assert.same def, style.stylish
context '(extended styles)', ->
before_each ->
style.define 'my_base', background: '#112233'
style.define 'my_style', color: '#334455'
describe '.number_for(name, buffer, base)', ->
context 'when base is specified', ->
it 'automatically defines an extended style based upon the base and specified style', ->
style_num = style.number_for 'my_style', buffer, 'my_base'
set_fore = sci\style_get_fore style_num
set_back = sci\style_get_back style_num
assert.equal set_fore, '#334455'
assert.equal set_back, '#112233'
assert.is_not_nil style['my_base:my_style']
it 'returns the base style if the specified style is not found', ->
style_num = style.number_for 'my_unknown_style', buffer, 'my_base'
assert.equal style.number_for('my_base', buffer), style_num
context 'when <name> itself specifies an extended style', ->
it 'extracts the base automatically', ->
style.define 'my_other_base', background: '#112244'
style_num = style.number_for 'my_other_base:my_style', buffer
set_fore = sci\style_get_fore style_num
set_back = sci\style_get_back style_num
assert.equal '#334455', set_fore
assert.equal '#112244', set_back
assert.is_not_nil style['my_other_base:my_style']
context 'when one of composing styles is redefined', ->
it 'updates the extended style definition', ->
style_num = style.number_for 'my_style', buffer, 'my_base'
style.define 'my_base', background: '#222222'
assert.equal '#222222', style['my_base:my_style'].background
style.define 'my_style', color: '#222222'
assert.equal '#222222', style['my_base:my_style'].color
set_fore = sci\style_get_fore style_num
set_back = sci\style_get_back style_num
assert.equal set_fore, '#222222'
assert.equal set_back, '#222222'
it 'redefining a default style also rebases extended styles', ->
style_num = style.number_for 'my_style', buffer, 'my_base'
assert.is_false sci\style_get_bold style_num
style.define 'default', font: bold: true
-- font should be bold now
assert.is_true sci\style_get_bold style_num
-- ..but custom color should still be intact
assert.equal '#112233', sci\style_get_back style_num
| 39.211207 | 96 | 0.676047 |
ae6d8fd5de96a52288dd0bdf4059e98ee28ca061 | 13,787 | socket = require "pgmoon.socket"
import insert from table
import rshift, lshift, band from require "bit"
unpack = table.unpack or unpack
VERSION = "1.10.0"
_len = (thing, t=type(thing)) ->
switch t
when "string"
#thing
when "table"
l = 0
for inner in *thing
inner_t = type inner
if inner_t == "string"
l += #inner
else
l += _len inner, inner_t
l
else
error "don't know how to calculate length of #{t}"
_debug_msg = (str) ->
require("moon").dump [p for p in str\gmatch "[^%z]+"]
flipped = (t) ->
keys = [k for k in pairs t]
for key in *keys
t[t[key]] = key
t
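-- e.g. flipped {status: "S"} yields a two-way map: {status: "S", S: "status"}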
MSG_TYPE = flipped {
status: "S"
auth: "R"
backend_key: "K"
ready_for_query: "Z"
query: "Q"
notice: "N"
notification: "A"
password: "p"
row_description: "T"
data_row: "D"
command_complete: "C"
error: "E"
}
ERROR_TYPES = flipped {
severity: "S"
code: "C"
message: "M"
position: "P"
detail: "D"
schema: "s"
table: "t"
constraint: "n"
}
PG_TYPES = {
[16]: "boolean"
[17]: "bytea"
[20]: "number" -- int8
[21]: "number" -- int2
[23]: "number" -- int4
[700]: "number" -- float4
[701]: "number" -- float8
[1700]: "number" -- numeric
[114]: "json" -- json
[3802]: "json" -- jsonb
-- arrays
[1000]: "array_boolean" -- bool array
[1005]: "array_number" -- int2 array
[1007]: "array_number" -- int4 array
[1016]: "array_number" -- int8 array
[1021]: "array_number" -- float4 array
[1022]: "array_number" -- float8 array
[1231]: "array_number" -- numeric array
[1009]: "array_string" -- text array
[1015]: "array_string" -- varchar array
[1002]: "array_string" -- char array
[1014]: "array_string" -- bpchar array
[2951]: "array_string" -- uuid array
[199]: "array_json" -- json array
[3807]: "array_json" -- jsonb array
}
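-- e.g. an int4 column (oid 23) is decoded to a Lua number, a jsonb column (oid 3802) to a decoded JSON value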
NULL = "\0"
tobool = (str) ->
str == "t"
class Postgres
convert_null: false
NULL: {"NULL"}
:PG_TYPES
user: "postgres"
host: "127.0.0.1"
port: "5432"
ssl: false
-- custom types supplementing PG_TYPES
type_deserializers: {
json: (val, name) =>
import decode_json from require "pgmoon.json"
decode_json val
bytea: (val, name) =>
@decode_bytea val
array_boolean: (val, name) =>
import decode_array from require "pgmoon.arrays"
decode_array val, tobool
array_number: (val, name) =>
import decode_array from require "pgmoon.arrays"
decode_array val, tonumber
array_string: (val, name) =>
import decode_array from require "pgmoon.arrays"
decode_array val
array_json: (val, name) =>
import decode_array from require "pgmoon.arrays"
import decode_json from require "pgmoon.json"
decode_array val, decode_json
hstore: (val, name) =>
import decode_hstore from require "pgmoon.hstore"
decode_hstore val
}
set_type_oid: (oid, name) =>
unless rawget(@, "PG_TYPES")
@PG_TYPES = {k,v for k,v in pairs @PG_TYPES}
@PG_TYPES[assert tonumber oid] = name
setup_hstore: =>
res = unpack @query "SELECT oid FROM pg_type WHERE typname = 'hstore'"
assert res, "hstore oid not found"
@set_type_oid tonumber(res.oid), "hstore"
new: (opts) =>
@sock, @sock_type = socket.new opts and opts.socket_type
if opts
@user = opts.user
@host = opts.host
@database = opts.database
@port = opts.port
@password = opts.password
@ssl = opts.ssl
@ssl_verify = opts.ssl_verify
@ssl_required = opts.ssl_required
@pool_name = opts.pool
@luasec_opts = {
key: opts.key
cert: opts.cert
cafile: opts.cafile
}
connect: =>
opts = if @sock_type == "nginx"
{
pool: @pool_name or "#{@host}:#{@port}:#{@database}:#{@user}"
}
ok, err = @sock\connect @host, @port, opts
return nil, err unless ok
if @sock\getreusedtimes! == 0
if @ssl
success, err = @send_ssl_message!
return nil, err unless success
success, err = @send_startup_message!
return nil, err unless success
success, err = @auth!
return nil, err unless success
success, err = @wait_until_ready!
return nil, err unless success
true
settimeout: (...) =>
@sock\settimeout ...
disconnect: =>
sock = @sock
@sock = nil
sock\close!
keepalive: (...) =>
sock = @sock
@sock = nil
sock\setkeepalive ...
auth: =>
t, msg = @receive_message!
return nil, msg unless t
unless MSG_TYPE.auth == t
@disconnect!
if MSG_TYPE.error == t
return nil, @parse_error msg
error "unexpected message during auth: #{t}"
auth_type = @decode_int msg, 4
switch auth_type
when 0 -- trust
true
when 3 -- cleartext password
@cleartext_auth msg
when 5 -- md5 password
@md5_auth msg
else
error "don't know how to auth: #{auth_type}"
cleartext_auth: (msg) =>
assert @password, "missing password, required for connect"
@send_message MSG_TYPE.password, {
@password
NULL
}
@check_auth!
md5_auth: (msg) =>
import md5 from require "pgmoon.crypto"
salt = msg\sub 5, 8
assert @password, "missing password, required for connect"
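    -- the password response is "md5" .. md5(md5(password .. user) .. salt), as the PostgreSQL protocol specifies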
@send_message MSG_TYPE.password, {
"md5"
md5 md5(@password .. @user) .. salt
NULL
}
@check_auth!
check_auth: =>
t, msg = @receive_message!
return nil, msg unless t
switch t
when MSG_TYPE.error
nil, @parse_error msg
when MSG_TYPE.auth
true
else
error "unknown response from auth"
query: (q) =>
if q\find NULL
return nil, "invalid null byte in query"
@post q
local row_desc, data_rows, command_complete, err_msg
local result, notifications
num_queries = 0
while true
t, msg = @receive_message!
return nil, msg unless t
switch t
when MSG_TYPE.data_row
data_rows or= {}
insert data_rows, msg
when MSG_TYPE.row_description
row_desc = msg
when MSG_TYPE.error
err_msg = msg
when MSG_TYPE.command_complete
command_complete = msg
next_result = @format_query_result row_desc, data_rows, command_complete
num_queries += 1
if num_queries == 1
result = next_result
elseif num_queries == 2
result = { result, next_result }
else
insert result, next_result
row_desc, data_rows, command_complete = nil
when MSG_TYPE.ready_for_query
break
when MSG_TYPE.notification
notifications = {} unless notifications
insert notifications, @parse_notification(msg)
-- when MSG_TYPE.notice
-- TODO: do something with notices
if err_msg
return nil, @parse_error(err_msg), result, num_queries, notifications
result, num_queries, notifications
post: (q) =>
@send_message MSG_TYPE.query, {q, NULL}
wait_for_notification: =>
while true
t, msg = @receive_message!
return nil, msg unless t
switch t
when MSG_TYPE.notification
return @parse_notification(msg)
format_query_result: (row_desc, data_rows, command_complete) =>
local command, affected_rows
if command_complete
command = command_complete\match "^%w+"
affected_rows = tonumber command_complete\match "%d+%z$"
if row_desc
return {} unless data_rows
fields = @parse_row_desc row_desc
num_rows = #data_rows
for i=1,num_rows
data_rows[i] = @parse_data_row data_rows[i], fields
if affected_rows and command != "SELECT"
data_rows.affected_rows = affected_rows
return data_rows
if affected_rows
{ :affected_rows }
else
true
parse_error: (err_msg) =>
local severity, message, detail, position
error_data = {}
offset = 1
while offset <= #err_msg
t = err_msg\sub offset, offset
str = err_msg\match "[^%z]+", offset + 1
break unless str
offset += 2 + #str
if field = ERROR_TYPES[t]
error_data[field] = str
switch t
when ERROR_TYPES.severity
severity = str
when ERROR_TYPES.message
message = str
when ERROR_TYPES.position
position = str
when ERROR_TYPES.detail
detail = str
msg = "#{severity}: #{message}"
if position
msg = "#{msg} (#{position})"
if detail
msg = "#{msg}\n#{detail}"
msg, error_data
parse_row_desc: (row_desc) =>
num_fields = @decode_int row_desc\sub(1,2)
offset = 3
fields = for i=1,num_fields
name = row_desc\match "[^%z]+", offset
offset += #name + 1
-- 4: object id of table
-- 2: attribute number of column (4)
-- 4: object id of data type (6)
data_type = @decode_int row_desc\sub offset + 6, offset + 6 + 3
data_type = @PG_TYPES[data_type] or "string"
-- 2: data type size (10)
-- 4: type modifier (12)
-- 2: format code (16)
-- we only know how to handle text
format = @decode_int row_desc\sub offset + 16, offset + 16 + 1
assert 0 == format, "don't know how to handle format"
offset += 18
{name, data_type}
fields
parse_data_row: (data_row, fields) =>
-- 2: number of values
num_fields = @decode_int data_row\sub(1,2)
out = {}
offset = 3
for i=1,num_fields
field = fields[i]
continue unless field
{field_name, field_type} = field
-- 4: length of value
len = @decode_int data_row\sub offset, offset + 3
offset += 4
if len < 0
out[field_name] = @NULL if @convert_null
continue
value = data_row\sub offset, offset + len - 1
offset += len
switch field_type
when "number"
value = tonumber value
when "boolean"
value = value == "t"
when "string"
nil
else
if fn = @type_deserializers[field_type]
value = fn @, value, field_type
out[field_name] = value
out
parse_notification: (msg) =>
pid = @decode_int msg\sub 1, 4
offset = 4
channel, payload = msg\match "^([^%z]+)%z([^%z]*)%z$", offset + 1
unless channel
error "parse_notification: failed to parse notification"
{
operation: "notification"
pid: pid
channel: channel
payload: payload
}
wait_until_ready: =>
while true
t, msg = @receive_message!
return nil, msg unless t
if MSG_TYPE.error == t
@disconnect!
return nil, @parse_error(msg)
break if MSG_TYPE.ready_for_query == t
true
receive_message: =>
t, err = @sock\receive 1
unless t
@disconnect!
return nil, "receive_message: failed to get type: #{err}"
len, err = @sock\receive 4
unless len
@disconnect!
return nil, "receive_message: failed to get len: #{err}"
len = @decode_int len
len -= 4
msg = @sock\receive len
t, msg
send_startup_message: =>
assert @user, "missing user for connect"
assert @database, "missing database for connect"
data = {
@encode_int 196608
"user", NULL
@user, NULL
"database", NULL
@database, NULL
"application_name", NULL
"pgmoon", NULL
NULL
}
@sock\send {
@encode_int _len(data) + 4
data
}
send_ssl_message: =>
success, err = @sock\send {
@encode_int 8,
@encode_int 80877103
}
return nil, err unless success
t, err = @sock\receive 1
return nil, err unless t
if t == MSG_TYPE.status
if @sock_type == "nginx"
@sock\sslhandshake false, nil, @ssl_verify
else
@sock\sslhandshake @ssl_verify, @luasec_opts
elseif t == MSG_TYPE.error or @ssl_required
@disconnect!
nil, "the server does not support SSL connections"
else
true -- no SSL support, but not required by client
send_message: (t, data, len) =>
len = _len data if len == nil
len += 4 -- includes the length of the length integer
@sock\send {
t
@encode_int len
data
}
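  -- decode a big-endian integer from a 2- or 4-byte binary string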
decode_int: (str, bytes=#str) =>
switch bytes
when 4
d, c, b, a = str\byte 1, 4
a + lshift(b, 8) + lshift(c, 16) + lshift(d, 24)
when 2
b, a = str\byte 1, 2
a + lshift(b, 8)
else
error "don't know how to decode #{bytes} byte(s)"
-- create big endian binary string of number
encode_int: (n, bytes=4) =>
switch bytes
when 4
a = band n, 0xff
b = band rshift(n, 8), 0xff
c = band rshift(n, 16), 0xff
d = band rshift(n, 24), 0xff
string.char d, c, b, a
else
error "don't know how to encode #{bytes} byte(s)"
decode_bytea: (str) =>
if str\sub(1, 2) == '\\x'
str\sub(3)\gsub '..', (hex) ->
string.char tonumber hex, 16
else
str\gsub '\\(%d%d%d)', (oct) ->
string.char tonumber oct, 8
encode_bytea: (str) =>
string.format "E'\\\\x%s'", str\gsub '.', (byte) ->
string.format '%02x', string.byte byte
escape_identifier: (ident) =>
'"' .. (tostring(ident)\gsub '"', '""') .. '"'
escape_literal: (val) =>
switch type val
when "number"
return tostring val
when "string"
return "'#{(val\gsub "'", "''")}'"
when "boolean"
return val and "TRUE" or "FALSE"
error "don't know how to escape value: #{val}"
__tostring: =>
"<Postgres socket: #{@sock}>"
{ :Postgres, new: Postgres, :VERSION }
| 22.826159 | 82 | 0.585044 |
f061a751a116ae611444491b769591a23326a8cd | 116 | TK = require "PackageToolkit"
parent = ...
members = {
"_map",
}
return TK.module.subfunctions parent, members
| 16.571429 | 45 | 0.681034 |
b80f8391c85ecb52fb412a150c4339875979a879 | 1,808 |
import insert from table
validate_functions = {
exists: (input) ->
input and input != "", "%s must be provided"
file_exists: (input) ->
type(input) == "table" and input.filename != "" and input.content != "", "Missing file"
min_length: (input, len) ->
#tostring(input or "") >= len, "%s must be at least #{len} chars"
max_length: (input, len) ->
#tostring(input or "") <= len, "%s must be at most #{len} chars"
is_integer: (input) ->
tostring(input)\match"^%d+$", "%s must be an integer"
is_color: do
    hex = "[a-fA-F0-9]"
three = "^##{hex\rep 3}$"
six = "^##{hex\rep 6}$"
(input) ->
input = tostring(input)
input\match(three) or input\match(six), "%s must be a color"
equals: (input, value) ->
input == value, "%s must match"
one_of: (input, ...) ->
choices = {...}
for choice in *choices
return true if input == choice
false, "%s must be one of #{table.concat choices, ", "}"
}
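-- run a single named validation function against an input, normalizing args to a list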
test_input = (input, func, args) ->
fn = assert validate_functions[func], "Missing validation function #{func}"
args = {args} if type(args) != "table"
fn input, unpack args
validate = (object, validations) ->
errors = {}
for v in *validations
key = v[1]
error_msg = v[2]
input = object[key]
if v.optional
continue unless validate_functions.exists input
v.optional = nil
for fn, args in pairs v
continue unless type(fn) == "string"
success, msg = test_input input, fn, args
unless success
insert errors, (error_msg or msg)\format key
break
next(errors) and errors
assert_valid = (object, validations) ->
errors = validate object, validations
coroutine.yield "error", errors if errors
{ :validate, :assert_valid, :test_input, :validate_functions }
| 25.828571 | 91 | 0.611173 |
4be2836ae9522b15af1af2b5f3f5f961cc0d9cd5 | 5,483 | import Completer, Buffer, completion from howl
import Editor from howl.ui
append = table.insert
describe 'Completer', ->
buffer = nil
before_each ->
buffer = Buffer {}
describe '.complete(pos [, limit])', ->
it 'instantiates completers once with (buffer, context)', ->
buffer.text = 'mr.cat'
factory = spy.new -> nil
append buffer.completers, factory
completer = Completer(buffer, 6)
completer\complete 6
assert.spy(factory).was.called_with buffer, buffer\context_at 6
completer\complete 6
assert.spy(factory).was.called(1)
it 'lookups completers in completion when they are specified as strings', ->
buffer.text = 'yowser'
factory = spy.new -> nil
completion.register name: 'comp-name', :factory
append buffer.completers, 'comp-name'
completer = Completer(buffer, 3)
assert.spy(factory).was.called
it 'returns completions for completers in buffer and mode', ->
mode = completers: { -> complete: -> { 'mode' } }
buffer.mode = mode
append buffer.completers, -> complete: -> { 'buffer' }
completions = Completer(buffer, 1)\complete 1
assert.same completions, { 'buffer', 'mode' }
it 'returns completions for mode even if buffer has no completers', ->
mode = completers: { -> complete: -> { 'mode' } }
buffer.mode = mode
assert.same Completer(buffer, 1)\complete(1), { 'mode' }
it 'returns the search string after the completions', ->
mode = completers: { -> complete: -> { 'prefix' } }
buffer.text = 'pre'
buffer.mode = mode
append buffer.completers, -> complete: -> { 'buffer' }
_, search = Completer(buffer, 4)\complete 4
assert.same search, 'pre'
it 'calls <completer.complete()> with (completer, context)', ->
buffer.text = 'mr.cat'
comp = complete: spy.new -> {}
append buffer.completers, -> comp
completer = Completer(buffer, 6)
completer\complete 6
assert.spy(comp.complete).was.called_with comp, buffer\context_at 6
completer\complete 7
assert.spy(comp.complete).was.called_with comp, buffer\context_at 7
it 'returns completions from just one completer if completions.authoritive is set', ->
append buffer.completers, -> complete: -> { 'one', authoritive: true }
append buffer.completers, -> complete: -> { 'two' }
completions = Completer(buffer, 1)\complete 1
assert.same { 'one' }, completions
it 'merges duplicate completions from different completers', ->
append buffer.completers, -> complete: -> { 'yes'}
append buffer.completers, -> complete: -> { 'yes' }
completions = Completer(buffer, 1)\complete 1
assert.same { 'yes' }, completions
it 'gives a final boost to case-matching completions, all else equal', ->
buffer.text = 'he'
append buffer.completers, -> complete: -> { 'Hello', 'hello' }
completions = Completer(buffer, 3)\complete 3
assert.same { 'hello', 'Hello' }, completions
buffer.text = 'He'
append buffer.completers, -> complete: -> { 'hello', 'Hello' }
completions = Completer(buffer, 3)\complete 3
assert.same { 'Hello', 'hello' }, completions
context 'limiting completions', ->
it 'returns at most `completion_max_shown` completions', ->
completions = ["cand-#{i}" for i = 1,15]
append buffer.completers, -> complete: -> completions
buffer.config.completion_max_shown = 3
actual = Completer(buffer, 1)\complete 1
assert.equal 3, #actual
it 'returns at most <limit> completions if specified', ->
completions = ["cand-#{i}" for i = 1,15]
append buffer.completers, -> complete: -> completions
actual = Completer(buffer, 1)\complete 1, 4
assert.equal 4, #actual
it '.start_pos holds the start position for completing', ->
buffer.text = 'oh cruel word'
assert.equal 4, Completer(buffer, 9).start_pos
describe 'accept(completion)', ->
context 'when hungry_completion is true', ->
it 'replaces the current word with <completion>', ->
buffer.text = 'hello there'
buffer.config.hungry_completion = true
completer = Completer(buffer, 3)
completer\accept 'hey', 3
assert.equal 'hey there', buffer.text
context 'when hungry_completion is false', ->
it 'inserts <completion> at the start position', ->
buffer.text = 'hello there'
buffer.config.hungry_completion = false
completer = Completer(buffer, 7)
completer\accept 'over', 7
assert.equal 'hello overthere', buffer.text
it 'returns the position after the accepted completion', ->
buffer.text = 'hello there'
assert.equal 5, Completer(buffer, 4)\accept 'hƏlp', 4
context "(interacting with mode's .on_completion_accepted)", ->
it "invokes it with (mode, completion, context) if present", ->
mode = on_completion_accepted: spy.new -> nil
buffer.mode = mode
buffer.text = 'hello there'
Completer(buffer, 4)\accept 'help', 4
assert.spy(mode.on_completion_accepted).was_called_with mode, 'help', buffer\context_at(5)
      it "uses its return value as the position returned if it's a number", ->
mode = on_completion_accepted: -> 6
buffer.mode = mode
buffer.text = 'hello there'
assert.equal 6, Completer(buffer, 4)\accept 'help', 4
| 40.021898 | 98 | 0.637972 |
b1d12a5d9b1a5a6d257dcf74a0a57c2e258c3e9d | 3,443 | -- Copyright 2014-2015 The Howl Developers
-- License: MIT (see LICENSE.md at the top-level directory of the distribution)
ffi = require 'ffi'
require 'ljglibs.cdefs.cairo'
core = require 'ljglibs.core'
require 'ljglibs.cairo.pattern'
C, gc = ffi.C, ffi.gc
cairo_gc_ptr = (o) ->
gc(o, C.cairo_destroy)
core.define 'cairo_t', {
properties: {
line_width: {
get: => tonumber C.cairo_get_line_width @
set: (width) => C.cairo_set_line_width @, width
}
clip_extents: =>
a = ffi.new 'double[4]'
C.cairo_clip_extents @, a, a + 1, a + 2, a + 3
{ x1: tonumber(a[0]), y1: tonumber(a[1]), x2: tonumber(a[2]), y2: tonumber(a[3]) }
fill_extents: =>
a = ffi.new 'double[4]'
C.cairo_fill_extents @, a, a + 1, a + 2, a + 3
{ x1: tonumber(a[0]), y1: tonumber(a[1]), x2: tonumber(a[2]), y2: tonumber(a[3]) }
status: => C.cairo_status @
operator: {
get: => C.cairo_get_operator @
set: (operator) => C.cairo_set_operator @, operator
}
line_join: {
get: => C.cairo_get_line_join @
set: (lj) => C.cairo_set_line_join @, lj
}
line_cap: {
get: => C.cairo_get_line_cap @
set: (lc) => C.cairo_set_line_cap @, lc
}
dash: {
get: => @get_dash!
set: (a) => @set_dash a
}
dash_count: =>
tonumber C.cairo_get_dash_count(@)
target: =>
C.cairo_get_target @
source: {
get: => @get_source!
set: (v) => @set_source v
}
has_current_point: => C.cairo_has_current_point(@) != 0
}
create: (surface) -> cairo_gc_ptr C.cairo_create surface
save: => C.cairo_save @
restore: => C.cairo_restore @
set_source: (source) => C.cairo_set_source @, source
set_source_rgb: (r, g, b) => C.cairo_set_source_rgb @, r, g, b
set_source_rgba: (r, g, b, a) => C.cairo_set_source_rgba @, r, g, b, a
set_source_surface: (surface, x, y) => C.cairo_set_source_surface @, surface, x, y
get_source: =>
src = C.cairo_get_source(@)
gc(C.cairo_pattern_reference(src), C.cairo_pattern_destroy)
set_dash: (dashes, offset = 1) =>
count = (#dashes - offset) + 1
a = ffi.new 'double[?]', count
for i = 1, count
a[i - 1] = dashes[offset + i - 1]
C.cairo_set_dash @, a, count, 0
get_dash: =>
count = @dash_count
return {} if count < 1
a = ffi.new 'double[?]', count
C.cairo_get_dash @, a, nil
dashes = {}
for i = 1, count
      dashes[i] = a[i - 1]
    dashes
stroke: => C.cairo_stroke @
stroke_preserve: => C.cairo_stroke_preserve @
fill: => C.cairo_fill @
fill_preserve: => C.cairo_fill_preserve @
paint_with_alpha: (alpha) => C.cairo_paint_with_alpha @, alpha
line_to: (x, y) => C.cairo_line_to @, x, y
rel_line_to: (dx, dy) => C.cairo_rel_line_to @, dx, dy
move_to: (x, y) => C.cairo_move_to @, x, y
rel_move_to: (x, y) => C.cairo_rel_move_to @, x, y
in_clip: (x, y) => C.cairo_in_clip(@, x, y) != 0
clip: => C.cairo_clip @
clip_preserve: => C.cairo_clip_preserve @
push_group: => C.cairo_push_group @
pop_group: => C.cairo_pop_group @
-- Path operations
rectangle: (x, y, width, height) => C.cairo_rectangle @, x, y, width, height
arc: (xc, yc, radius, angle1, angle2) => C.cairo_arc @, xc, yc, radius, angle1, angle2
close_path: => C.cairo_close_path @
new_path: => C.cairo_new_path @
-- Transformations
translate: (tx, ty) => C.cairo_translate @, tx, ty
}, (t, ...) -> t.create ...
| 27.544 | 88 | 0.602382 |
cf0bceebfa5f295f650794b2e3353ab9a64f2f3a | 1,041 | import app, signal, Project from howl
import File from howl.io
tstack = {}
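-- a .howl-proj file lists one path per line to open relative to the project file;
-- everything after a line consisting of '!:' is collected and run as a MoonScript script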
handler = (args) ->
tstack[#tstack+1] = args.file
if args.file.basename == '-'
app.window\remove_view! if #app.window.views > 1
fn = tstack[#tstack-1] or File '.howl-proj'
file = fn\open!
app\open_file File '.howl-proj' unless tstack[#tstack-1]
table.insert Project.roots, File '.'
script = ''
iscript = false
for line in file\lines!
continue if line\gsub("^%s*(.-)%s*$", "%1") == ''
if iscript
script ..= line .. '\n'
elseif line == '!:'
iscript = true
else
app\open_file fn.parent\join line
app.window\remove_view! if #app.window.views > 1
app\close_buffer args.buffer
if script != ''
f = require('moonscript').loadstring script
assert f
f!
signal.connect 'file-opened', handler
{
info:
author: 'Ryan Gonzalez'
description: 'An extended project system'
license: 'MIT'
unload: ->
signal.disconnect 'file-opened', handler
}
| 25.390244 | 60 | 0.604227 |
adccf4fb2225e55772f0789bc6015f118c3f1785 | 1,523 | ffi = require 'ffi'
core = require 'ljglibs.core'
require 'ljglibs.cdefs.gtk'
C = ffi.C
core.auto_loading 'gtk', {
constants: {
prefix: 'GTK_'
-- GtkStateFlags
'STATE_FLAG_NORMAL',
'STATE_FLAG_ACTIVE',
'STATE_FLAG_PRELIGHT',
'STATE_FLAG_SELECTED',
'STATE_FLAG_INSENSITIVE',
'STATE_FLAG_INCONSISTENT',
'STATE_FLAG_FOCUSED',
-- GtkPositionType
'POS_LEFT',
'POS_RIGHT',
'POS_TOP',
'POS_BOTTOM'
-- GtkOrientation
'ORIENTATION_HORIZONTAL',
'ORIENTATION_VERTICAL',
-- GtkPackType
'PACK_START',
'PACK_END'
-- GtkJustification
'JUSTIFY_LEFT'
'JUSTIFY_RIGHT'
'JUSTIFY_CENTER'
'JUSTIFY_FILL'
-- GtkWindowPosition;
'WIN_POS_NONE'
'WIN_POS_CENTER'
'WIN_POS_MOUSE'
'WIN_POS_CENTER_ALWAYS'
'WIN_POS_CENTER_ON_PARENT'
-- GtkAlign
'ALIGN_FILL',
'ALIGN_START',
'ALIGN_END',
'ALIGN_CENTER',
'ALIGN_BASELINE',
-- GtkTargetFlags
'TARGET_SAME_APP',
'TARGET_SAME_WIDGET',
'TARGET_OTHER_APP',
'TARGET_OTHER_WIDGET',
}
cairo_should_draw_window: (cr, window) ->
C.gtk_cairo_should_draw_window(cr, window) != 0
get_major_version: -> tonumber C.gtk_get_major_version!
get_minor_version: -> tonumber C.gtk_get_minor_version!
get_micro_version: -> tonumber C.gtk_get_micro_version!
check_version: (major, minor = 0, micro = 0) ->
result = C.gtk_check_version major, minor, micro
if result != ffi.NULL
ffi.string result
else
nil
}
| 20.581081 | 57 | 0.665135 |
4e3a7311a18e90cd2f18706b4f27004d2b96c105 | 1,997 | -- implement async or bulk logging
http = require "mooncrafts.http"
azt = require "mooncrafts.aztable"
util = require "mooncrafts.util"
log = require "mooncrafts.log"
import from_json, to_json, table_clone from util
local *
-- number of items when flush
-- currently set to 1 until we get azure bulk to work
BUFFER_COUNT = 1
-- time between flush
-- currently set to very low until we get azure bulk to work
FLUSH_INTERVAL = 0.01
myopts = {}
dolog = (rsp) =>
v = {}
req = rsp.req
logs = req.logs or {}
req.logs = nil
-- replace illegal forward slash char
rk = "#{req.host} #{req.path}"\gsub("/", "$")
time = os.time()
  btime = os.date("%Y%m%d%H%M%S",time)
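  -- reversed timestamp so the partition key sorts newest log entries first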
rtime = 99999999999999 - btime
  btime = os.date("%Y-%m-%d %H:%M:%S", time)
rand = math.random(10, 1000)
pk = "#{rtime}_#{btime} #{rand}"
btime = os.date("%Y%m", time)
table_name = "log#{btime}"
opts = azt.item_create({
tenant: "a",
table_name: table_name,
rk: rk,
pk: pk,
account_name: myopts.account_name,
account_key: myopts.account_key
})
v.RowKey = rk
v.PartitionKey = pk
v.host = req.host
v.path = req.path
v.time = req.end - req.start
v.req = to_json(req)
v.err = tostring(rsp.err)
v.code = rsp.code
v.status = rsp.status
v.headers = to_json(rsp.headers)
v.body = rsp.body
v.logs = to_json(logs) if (#logs > 0)
opts.body = to_json(v)
res = azt.request(opts, true)
res
class AsyncLogger
new: (opts={:account_name, :account_key}) =>
assert(opts.account_name, "opts.account_name parameter is required")
assert(opts.account_key, "opts.account_key parameter is required")
myopts = opts
dolog: dolog
log: (rsp) =>
if (ngx)
myrsp = table_clone(rsp)
delay = math.random(10, 100)
ok, err = ngx.timer.at(delay / 1000, dolog, self, myrsp)
@
AsyncLogger
| 24.654321 | 72 | 0.594392 |
7cde8c2f1b82f4b9b22fa5039a0bbe8d62738a16 | 3,974 | path = require "lapis.cmd.path"
import get_free_port from require "lapis.cmd.util"
class AttachedServer
new: (@runner) =>
start: (environment, env_overrides) =>
@existing_config = if path.exists @runner.compiled_config_path
path.read_file @runner.compiled_config_path
@port = get_free_port!
if type(environment) == "string"
environment = require("lapis.config").get environment
if env_overrides
assert not getmetatable(env_overrides), "env_overrides already has metatable, aborting"
environment = setmetatable env_overrides, __index: environment
env = require "lapis.environment"
env.push environment
@runner\write_config_for environment, @\process_config
pid = @runner\get_pid!
@fresh = not pid
if pid
@runner\send_hup!
else
assert @runner\start_nginx true
@wait_until_ready!
wait_until: (server_status="open") =>
socket = require "socket"
max_tries = 1000
while true
sock = socket.connect "127.0.0.1", @port
switch server_status
when "open"
if sock
sock\close!
break
when "close"
if sock
sock\close!
else
break
else
error "don't know how to wait for #{server_status}"
max_tries -= 1
if max_tries == 0
error "Timed out waiting for server to #{server_status}"
socket.sleep 0.001
wait_until_ready: => @wait_until "open"
wait_until_closed: => @wait_until "close"
detach: =>
if @existing_config
path.write_file @runner.compiled_config_path, @existing_config
if @fresh
@runner\send_term!
@wait_until_closed!
else
@runner\send_hup!
env = require "lapis.environment"
env.pop!
true
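  -- execute a chunk of Lua inside the running server via the injected /run_lua endpoint and return its printed output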
exec: (lua_code) =>
assert loadstring lua_code -- syntax check code
ltn12 = require "ltn12"
http = require "socket.http"
buffer = {}
_, status = http.request {
url: "http://127.0.0.1:#{@port}/run_lua"
sink: ltn12.sink.table buffer
source: ltn12.source.string lua_code
headers: {
"content-length": #lua_code
}
}
unless status == 200
error "Failed to exec code on server, got: #{status}"
table.concat buffer
-- this inserts a special server block in the config that gives remote access
-- to it over a special port/location.
process_config: (cfg) =>
assert @port, "attached server doesn't have a port to bind rpc to"
run_code_action = [[
ngx.req.read_body()
-- hijack print to write to buffer
local old_print = print
local buffer = {}
print = function(...)
local str = table.concat({...}, "\t")
io.stdout:write(str .. "\n")
table.insert(buffer, str)
end
local success, err = pcall(loadstring(ngx.var.request_body))
if not success then
ngx.status = 500
print(err)
end
ngx.print(table.concat(buffer, "\n"))
print = old_print
]]
-- escape for nginx config
run_code_action = run_code_action\gsub("\\", "\\\\")\gsub('"', '\\"')
test_server = [[
server {
allow 127.0.0.1;
deny all;
listen ]] .. @port .. [[;
location = /run_lua {
client_body_buffer_size 10m;
client_max_body_size 10m;
content_by_lua "
]] .. run_code_action .. [[
";
}
}
]]
-- inject the lua path
if @runner.base_path != ""
default_path = os.getenv "LUA_PATH"
default_cpath = os.getenv "LUA_CPATH"
server_path = path.join @runner.base_path, "?.lua"
server_cpath = path.join @runner.base_path, "?.so"
test_server = "
lua_package_path '#{server_path};#{default_path}';
lua_package_cpath '#{server_cpath};#{default_cpath}';
" .. test_server
cfg\gsub "%f[%a]http%s-{", "http {\n" .. test_server
{ :AttachedServer }
| 24.530864 | 93 | 0.602919 |
57bf929abfa80a25908866c5ec76ed7b484d599c | 3,053 | howl.util.lpeg_lexer ->
keyword = capture 'keyword', word {
'return', 'break', 'local', 'for', 'while', 'if', 'elseif', 'else', 'then',
'export', 'import', 'from', 'with', 'in', 'and', 'or', 'not', 'class',
'extends', 'super', 'do', 'using', 'switch', 'when', 'unless', 'continue'
}
comment = capture 'comment', P'--' * scan_until(eol)
hexadecimal_number = P'0' * S'xX' * xdigit^1 * (P'.' * xdigit^1)^0 * (S'pP' * S'-+'^0 * xdigit^1)^0
float = digit^0 * P'.' * digit^1
number = capture 'number', any({
hexadecimal_number * any('LL', 'll', 'ULL', 'ull')^-1,
digit^1 * any('LL', 'll', 'ULL', 'ull'),
(float + digit^1) * (S'eE' * P('-')^0 * digit^1)^0
})
operator = capture 'operator', any {
S'+-*!\\/%^#=<>;:,.(){}[]',
any { '~=', 'or=', 'and=' }
}
ident = (alpha + '_')^1 * (alpha + digit + '_')^0
identifier = capture 'identifier', ident
member = capture 'member', (P'@' + 'self.') * ident^0
special = capture 'special', word { 'true', 'false', 'nil' }
type_name = capture 'type', upper^1 * (alpha + digit + '_')^0
type_def = sequence {
capture('keyword', 'class'),
capture 'whitespace', blank^1,
capture 'type_def', upper^1 * (alpha + digit + '_')^0
}
lua_keywords = capture 'error', word { 'function', 'goto', 'end' }
sq_string = span("'", "'", '\\')
dq_string = span('"', '"', P'\\')
long_string = span('[[', ']]', '\\')
key = capture 'key', any {
P':' * ident,
ident * P':',
(sq_string + dq_string) * P':'
}
ws = capture 'whitespace', blank^0
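  -- the string argument to ffi.cdef is highlighted with the C sub-lexer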
cdef = sequence {
capture('identifier', 'ffi'),
capture('operator', '.'),
capture('identifier', 'cdef'),
ws,
any {
sequence {
capture('string', '[['),
sub_lex('c', ']]'),
capture('string', ']]')^-1,
},
sequence {
capture('string', '"'),
sub_lex('c', '"'),
capture('string', '"')^-1,
},
sequence {
capture('string', "'"),
sub_lex('c', "'"),
capture('string', "'")^-1,
}
}
}
P {
'all'
all: any {
number, key, V'string', comment, operator, special, type_def, keyword, member,
type_name, lua_keywords, V'fdecl', cdef, identifier
}
string: any {
capture 'string', any { sq_string, long_string }
V'dq_string'
}
interpolation: #P'#' * (-P'}' * (V'all' + 1))^1 * capture('operator', '}') * V'dq_string_chunk'
dq_string_chunk: capture('string', scan_to(P'"' + #P'#{', P'\\')) * V('interpolation')^0
dq_string: capture('string', '"') * (V'dq_string_chunk')
fdecl: sequence {
capture('fdecl', ident),
ws,
capture('operator', '='),
ws,
sequence({
capture('operator', '('),
any({
-#P')' * operator,
special,
identifier,
capture('whitespace', blank^1),
number,
V'string'
})^0,
capture('operator', ')'),
ws,
})^0,
capture('operator', any('->', '=>')),
}
}
| 28.268519 | 102 | 0.488045 |
01ede10d77b62d1de87f1effaecfaf4df633a0de | 4,822 |
-- Copyright (C) 2018-2020 DBotThePony
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
-- in the Software without restriction, including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-- of the Software, and to permit persons to whom the Software is furnished to do so,
-- subject to the following conditions:
-- The above copyright notice and this permission notice shall be included in all copies
-- or substantial portions of the Software.
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
-- INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-- PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
-- FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-- OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-- DEALINGS IN THE SOFTWARE.
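-- snapshot an entity's transform plus each physics object's velocity, position,
-- angles, sleep state and motion state so a later physgun undo can restore it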
savedata = (ent) ->
objects = [ent\GetPhysicsObjectNum(i) for i = 0, ent\GetPhysicsObjectCount() - 1]
{
ent, ent\GetPos(), ent\GetAngles()
[obj\GetVelocity() for obj in *objects]
[obj\GetPos() for obj in *objects]
[obj\GetAngles() for obj in *objects]
[obj\IsAsleep() for obj in *objects]
[obj\IsMotionEnabled() for obj in *objects]
}
loaddata = (data) ->
{ent, pos, angles, velt, post, angt, asleept, motiont} = data
return if not IsValid(ent)
objects = [ent\GetPhysicsObjectNum(i) for i = 0, ent\GetPhysicsObjectCount() - 1]
ent\SetPos(pos)
ent\SetAngles(angles)
for i, obj in ipairs(objects)
obj\SetVelocity(velt[i]) if velt[i]
obj\SetPos(post[i]) if post[i]
obj\SetAngles(angt[i]) if angt[i]
if asleept[i] == true
obj\Sleep()
elseif asleept[i] == false
obj\Wake()
obj\EnableMotion(motiont[i]) if motiont[i] ~= nil
snapshot = (ply, ent) ->
if ply\GetInfoBool('dpp2_cl_physgun_undo_custom', true)
ply.__dpp2_physgun_undo = ply.__dpp2_physgun_undo or {}
if contraption = ent\DPP2GetContraption()
table.insert(ply.__dpp2_physgun_undo, [savedata(ent) for ent in *contraption.ents when ent\IsValid()])
else
table.insert(ply.__dpp2_physgun_undo, {savedata(ent)})
else
if contraption = ent\DPP2GetContraption()
data2 = [savedata(ent) for ent in *contraption.ents when ent\IsValid()]
data = savedata(ent)
undo.Create('Physgun')
undo.SetPlayer(ply)
undo.AddFunction(-> loaddata(data) for data in *data2)
undo.Finish()
else
data = savedata(ent)
undo.Create('Physgun')
undo.SetPlayer(ply)
undo.AddFunction(-> loaddata(data))
undo.Finish()
PhysgunPickup = (ply = NULL, ent = NULL) ->
return if not DPP2.PHYSGUN_UNDO\GetBool()
return if not ply\GetInfoBool('dpp2_cl_physgun_undo', true)
return if ent\IsPlayer()
snapshot(ply, ent)
return
IsValid = FindMetaTable('Entity').IsValid
invalidate_history_ticks = 0
player_GetAll = player.GetAll
table_remove = table.remove
EntityRemoved = (ent) ->
return if invalidate_history_ticks >= 2
invalidate_history_ticks += 1
ProcessPhysgunInvalidate = ->
return if invalidate_history_ticks <= 0
invalidate_history_ticks -= 1
for ply in *player_GetAll()
if history = ply.__dpp2_physgun_undo
for histroy_index = #history, 1, -1
history_entry = history[histroy_index]
if #history_entry == 0
table_remove(history, histroy_index)
else
for entry_index = #history_entry, 1, -1
if not IsValid(history_entry[entry_index][1])
table_remove(history_entry, entry_index)
if #history_entry == 0
table_remove(history, histroy_index)
OnPhysgunReload = (physgun = NULL, ply = NULL) ->
return if not DPP2.PHYSGUN_UNDO\GetBool()
return if not ply\GetInfoBool('dpp2_cl_physgun_undo', true)
return if not IsValid(ply)
tr = ply\GetEyeTrace()
return if not IsValid(tr.Entity) or tr.Entity\IsPlayer()
ent = tr.Entity
snapshot(ply, ent)
return
hook.Add 'PhysgunPickup', 'DPP2.PhysgunHistory', PhysgunPickup, 3
hook.Add 'OnPhysgunReload', 'DPP2.PhysgunHistory', OnPhysgunReload, 3
hook.Add 'EntityRemoved', 'DPP2.PhysgunHistory', EntityRemoved
hook.Add 'Think', 'DPP2.ProcessPhysgunInvalidate', ProcessPhysgunInvalidate
DPP2.cmd.undo_physgun = (args = {}) =>
if not @__dpp2_physgun_undo
DPP2.LMessagePlayer(@, 'gui.dpp2.undo.physgun_nothing')
return
last = table.remove(@__dpp2_physgun_undo, #@__dpp2_physgun_undo)
hit = false
while last
for entry in *last
if IsValid(entry[1])
hit = true
loaddata(entry)
break if hit
last = table.remove(@__dpp2_physgun_undo, #@__dpp2_physgun_undo)
if hit
DPP2.NotifyUndo(@, nil, 'gui.dpp2.undo.physgun')
else
DPP2.LMessagePlayer(@, 'gui.dpp2.undo.physgun_nothing')
| 32.146667 | 105 | 0.730195 |