Personal Anthology
This page contains some of the small to medium-sized Goal programs that I've written, most of which depend on ari's extensions to Goal, shared here under the MIT license.
Many are incomplete, some certainly contain mistakes and non-idiomatic constructions, and, per the MIT license, I make no warranties. It's often helpful to see how other people did things, so I'm sharing how I have.
License
Copyright 2025 Daniel Gregoire
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
archive-stale.goal
import"sc-client/sc"
/ Dates
staledur:*/(time.Hour;24;30;15); dtfmt:{time.format[time.RFC3339;x]}
now:time.utc time.now""; stale:time.add[now;-staledur]
/ Stories
eng:cache.workflows[1488]
qtmp:{..["archived":-0w;"updated_at_end":dtfmt[stale];"workflow_state_id":x]}
sss:story.search'qtmp'(..id)'eng"states"
toarchive:,/sss; #toarchive
ids:"i"$(..id)'toarchive; names:(..name)'toarchive
grade:>ids; sids:ids[grade]; snms:names[grade]; sarc:toarchive[grade]
"toarchive.csv"print","csv(sids;snms)
/ Updates
prts:(-500)$sarc; #prts
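/ A negative left argument to $ cuts a list into batches of at most that
/ size (500 here, matching the bulk endpoint's limit). For instance,
/ (-3)$!7 yields three batches: two of three elements, one with the rest.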
arcem:{[batch]n:#batch; say"Archiving batch of $n stories..."; story.update.bulk[..["story_ids":"i"$(..id)'batch;"archived":0w]]}
arcem'prts
bugslo.goal
/ Goal tools
import "../go/goal/lib/fmt.goal"
/ Shortcut HTTP API
url:"https://api.app.shortcut.com/api/v3/";tv:"SHORTCUT_API_TOKEN";st:env tv;and["e"~@st;say "[ERR] Set the ${tv} env variable."]
urlrep:" ""%20"
urlenc:{[s]sub[urlrep] s}
gcmd:!"curl -s --fail-with-body -X GET -H 'Content-Type:application/json' -H 'Shortcut-Token:${st}' -L '${url}"
pcmd:("curl";"-s";"-X";"POST";"-d";"DATA";"-H";"'Content-Type: application/json'";"-H";"'Shortcut-Token: ${st}'";"-L";"${url}")
sc.get:{[p]ret:shell" "/@[gcmd;-1+#gcmd;+;p+"'"];?["e"~@ret;ret;json ret]}
sc.getq:{[p;querys]q:urlenc querys; ret:shell" "/@[gcmd;-1+#gcmd;+;p+"?$q'"];?["e"~@ret;ret;json ret]}
sc.pst:{[p;js]cmd:@[pcmd;&"DATA"=pcmd;:;"'${js}'"];ret:shell" "/@[cmd;-1+#cmd;+;p];?["e"~@ret;ret;json ret]}
sc.q:{[query]sc.pst["stories/search";""json query]}
true:0w; false:-0w; null:0n
ppd:fmt.dict[;"%.1f"]
/ Shortcut data needed to resolve IDs
say "Fetching workflows..."; wfs:sc.get"workflows"; cwfs:#wfs; say "Fetched ${cwfs} workflows"
dwf:!/+(..id,name)'wfs / DICTIONARY: Workflow ID to name
stidnames:{[wf]wfid:wf"id"; sts:wf"states"; stids:(..id)'sts; stnames:(..name)'sts; (stids;stnames)}'wfs
dwfst:,/!/'stidnames / DICTIONARY: Workflow State ID to name
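/ Both dictionaries use the same idiom: flip a list of (id;name) pairs into
/ (ids;names), then fold ! to zip keys with values. A tiny illustration:
!/+((1;"a");(2;"b")) / 1 2!("a";"b")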
/ Bug SLO
sq:..[archived:false;"story_type":"bug";"created_at_start":"2024-05-07T00:00:00Z"]
:ftsq:rq`type:bug !is:archived !is:done created:2024-05-07..*`
:sres:sc.getq["search/stories";"query=$ftsq"]
sres"total"
say "\n"/(..name)'sres"data"
say "Fetching bugs..."; b1bugs:sc.q[sq]; c:#b1bugs; say "Fetched $c bugs"
b1nd:sc.q[sq..["workflow_state_types":,"backlog""unstarted""started"]] / not done
b1dn:sc.q[sq..["workflow_state_types":,"done"]] / done
#b1dn
#b1bugs
sq
sq..["workflow_state_types":,"done"]
@[;$"workflow_state_id"]
(..x"workflow_state_id")'b1nd
(.."workflow_state_id")
(..name)
!*b1nd
#'(b1nd;b1dn;b1bugs)
#done
ppd@*done
#allbugs
22 % 63
"65% are done, 35% left to be squashed"
"%.1f"$100 * 1 - 17 % 68
68 % 17
csv-import-leads.goal
fld:"csv-import-leads/"; jfiles:"\n"\-shell"ls $fld"
cols:^?,/!json read fld+*jfiles
msgs:json read fld+jfiles
xs:"\n"\-(..text)'msgs[&{\x;"bot_message"~x["subtype"]}'msgs]
xs[!10]
csvplay.goal
a:{sql.q"select * from starwars.csv"}
b:{csv.tbl@csv 'read"starwars.csv"}
help"rt.time"
"%.2f"$math.avg 1_3{rt.time[a;0;1000]}\()
"%.2f"$math.avg 1_3{rt.time[b;0;1000]}\()
math.avg 1_3{rt.time[b;0;1000]}\()
deptree.goal
say ARGS
pd:{[d]ks:!d;mk::|/#'"c"$'!d;say"\n"/{" "+((-mk)!y)+" : "+$x[y]}[d;]'ks}
url:"https://api.app.shortcut.com/api/v3/";tv:"SHORTCUT_API_TOKEN";st:env tv;and["e"~@st;say "[ERR] Set the ${tv} env variable."]
diag:"" / final dependency diagram
idstr:{[d]and[~"d"=@d;:error"${o} expects dictionary"]; "%0.f"$d"id"} / goal deserializes JSON numbers as floats
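/ e.g. (id value assumed):
idstr ..["id":259457.0] / "259457"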
gcmd:!"curl -s --fail-with-body -X GET -H 'Content-Type:application/json' -H 'Shortcut-Token:${st}' -L '${url}"
sc.get:{[p]ret:shell" "/@[gcmd;-1+#gcmd;+;p+"'"];?["e"~@ret;ret;json ret]}
epid:259457 / TODO From ARGS
ss:sc.get"epics/${epid}/stories"
nodes:{[st]id:idstr[st];name:sub[rx/(\r\n|\r|\n)/;" "]st["name"];"${id}: ${name}"}'ss
say "\n"/nodes
:s:ss[2]
d:*s"story_links"
pd's"story_links"
pd[*s"story_links"]
:sl:st"story_links"
st"id"
{x"subject_id"}'sl
es:{[st]stlnks:st["story_links"];stlnks[{(st"id")=x["subject_id"]}]}'sts
(..type="subject")''es
:edges:{[st]stlnks:st["story_links"];subj:(~(..type="subject")'stlnks)^stlnks}'sts
stlnks:{x["story_links"]}'sts
sub[rx/(\r\n|\r|\n)/;" "]"foo\nbar"
"foo\nbar"sub
ids:(..id)'sts
dd:ids!(#ids)#()
dd
numlinks:#'stlnks
(numlinks;~numlinks)
+/~numdeps
{~~numdeps}^stlnks
{~~numdeps}[1]
@'stlnks
stlnks
:nodeps:(~numdeps)^stlks
whdeps
deps:stlks[whdeps]
*sts
(#!:)'sts
stlks[&#'stlks]
#'stlks
{#'stlks}#stlks
/
TODO Integrate d2 (written in Go) with cloned version of Goal,
so a single executable can make a local SVG of the deps.
\
/pcmd:("curl";"-s";"-X";"POST";"-d";"DATA";"-H";"'Content-Type: application/json'";"-H";"'Shortcut-Token: ${st}'";"-L";"${url}")
/sc.pst:{[p;js]cmd:@[pcmd;&"DATA"=pcmd;:;"'${js}'"];ret:shell" "/@[cmd;-1+#cmd;+;p];?["e"~@ret;ret;json ret]}
/say "Fetching all members and saving in md..."
/ms:sc.get"members"
/mids:@[;"id"]'ms
/mnms:{x["profile";"name"]}'ms
/md:mids!mnms
/!md
/stdep:500165911 / workflow state 'Deployed'
/strqa:500165915 / workflow state 'Ready for QA'
/stqa:500165916 / workflow state 'In QA'
/say "Searching for Bug Bash deployed stories...";ss:sc.pst["stories/search";qq#{"archived": false, "label_name": "bugbash_2024_moved_deployed"}#]
/ls:#ss;say "History of ${ls} deployed stories...";sc.hist:{[id]sc.get"stories/${id}/history"}
/his:{[s]sc.hist id[s]}
/win:{[h]
/ macts:@[;["member_id","actions"]]'h
/ acts:{x[1]}'macts
/ ups:macts[&0<{+/|"update"=x}'@[;"action"]''acts] / includes an update
/ who:{md[x]}'*''macts[&{or[x~stdep;x~stqa]}'''{and["d"=@x;x"new"]}''{and["d"=@x;x"workflow_state_id"]}''@[;"changes"]''acts]
/ who
/}
/say win his[*ss]
/h:sc.hist[id[*|ss]]
/macts:@[;!"member_id actions"]'h
/acts:@[;1]'macts
/mups:macts[&0<{+/|"update"=x}'@[;"action"]''acts]
/"--------"
/upacts:@[;1]'mups
/upact:upacts[11]
/!'upact
/*upact
/@[;"changes"]'upact
/acts[&0<{+/|"update"=x}'@[;"action"]''acts]
/say @[cmd;-1+#cmd;+;"member"]
/say @[cmd;&"GET"=cmd;:;"PUT"]
/d:("archived")!("v"$"false")
/say cmd,(" "json d)
/say sub["TMP";qq# "wow": true,\n "awe": "some",#] jtmp
/say "Fetching workflows...";wfs:sc.get"workflows"
detach-eng-workflow.goal
/ To detach the deprecated Engineering workflow, this script:
/ - Identifies stories assigned to teams in the Engineering workflow
/ - Labels those stories with `team-<mention-name-of-team>`
/ - Removes the team association
/
/ At that point, the Engineering workflow can be unassigned in
/ Shortcut settings.
/ gr.diamond gr.teamgold
""import"sc-client/sc"
/ Search
wf:cache.workflows[1488]
wfsts:(..id)'wf"states"
dsss:{story.search[..["group_id":x;"workflow_state_id":y]]}[gr.diamond"id"]'wfsts
gsss:{story.search[..["group_id":x;"workflow_state_id":y]]}[gr.teamgold"id"]'wfsts
/ Label
lblem:{[lblname;batch]n:#batch; say"Labeling batch of $n stories..."; story.update.bulk[..["story_ids":"i"$(..id)'batch;"labels_add":,..[name:lblname]]]}
lblem["team-diamond"]'(-500)$,/dsss
lblem["team-gold"]'(-500)$,/gsss
/ Remove team ownership
disownem:{[batch]n:#batch; say"Removing group/team from batch of $n stories..."; story.update.bulk[..["story_ids":"i"$(..id)'batch;"group_id":0n]]}
disownem'(-500)$,/dsss
disownem'(-500)$,/gsss
dos-story-arrival.goal
""import"sc-client/sc"
doswf:(.cache.workflows)[2]
triage:doswf["states";0]
triage"name";"i"$triage"id" / 500179017
tss:story.search[..[archived:-0w;"workflow_state_id":"i"$triage"id"]]
s2:283816
h2:story.history[s2]
wchanges:{[story]
acts:,/(..actions)'story.history[story]
actions:(..action)'acts
entypes:(..x["entity_type"])'acts
wfstids:(..x["workflow_state_id"])'acts
/(!"a e w")!(actions;entypes;wfstids)
(actions;entypes;wfstids)
}
/ get value at key path y from dict x; if the lookup fails, return fallback z
getin:{.[{(x).(y)}[x];y;{y}[;z]]}
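/ A sketch of getin, mirroring its use below (shapes assumed):
getin[..["changes":..["workflow_state_id":42]];"changes";"none"] / the inner "changes" dict
getin[..["action":"create"];"changes";"none"] / "none" when the lookup fails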
/Debug
:acts:,/(..actions)'h2
{say" "json x}acts
{say" "json x}'getin[;!"changes workflow_state_id";"none"]'acts
{say" "json x}'getin[;"changes";"none"]'acts
{say" "json x}'getin[;"workflow_state_id";()]'getin[;"changes";"none"]'acts
:changes:getin[;"workflow_state_id";()]'getin[;"changes";"none"]'acts
changes[&~{0=#x}'changes]
chwf:{changes:..x["changes"]; "d"~@changes}
@..[a:2]
chwf'acts
.[chwf;*acts;]
{.[(..x["changes";"workflow_state_id"]);z;:]}'acts
help rx/try/
help rx/error/
..changes
actions:(..action)'acts
entypes:(..x["entity_type"])'acts
wfstids:(..x["workflow_state_id"])'acts
fmt.pp t:(!"a e w")!(actions;entypes;wfstids)
fmt.pp (1;..a="create";..e="story")#t
fmt.pp (1;..a="update";..e="story")#t
{say" "json x}'acts
fmt.pp +thss
epic-comments-terse.goal
/ CSV of Epic Comments
(~2=#ARGS)and:error"USAGE: goal epic-comments.goal <epic-id>";(p;epicid):ARGS / epicid:170742
url:"https://api.app.shortcut.com/api/v3/";'st:env "SHORTCUT_API_TOKEN"
gcmd:!"curl -s --fail-with-body -X GET -H 'Content-Type:application/json' -H 'Shortcut-Token:${st}' -L '${url}"
scget:{[p]'ret:shell" "/@[gcmd;-1+#gcmd;+;p+"'"];?["e"~@ret;:ret;json ret]}
pid:"parent_id";kcs:"comments";cks:!"id author_id text ${pid}";ixpid:cks?pid
ac:{[cm;pcid]vs:cm[cks];vs[ixpid]:pcid;acc::cks!(.acc),'vs;o[;cm"id"]'(cm[kcs]);acc}
acs:{[epicid]cs:scget"epics/${epicid}/${kcs}";acc::cks!(#cks)#(());ac[;0n]'cs;acc}
t:acs[epicid];sid:"%0.f"$;t:@[t;!"id";sid];t:@[t;,pid;{{?[nan x;"";sid x]}'x}]
now:time "unixmilli";fn:"${kcs}-epic-${epicid}-ts-${now}.csv";say "Writing to ${fn}"
csvdata:","csv t;'csvfile:"w"open fn;csvfile print csvdata;close csvfile;say "ok"
epic-comments.goal
/ CSV of Epic Comments
(~2=#ARGS)and:error"USAGE: goal epic-comments.goal <epic-id>";(p;epicid):ARGS / epicid:170742
url:"https://api.app.shortcut.com/api/v3/";'st:env "SHORTCUT_API_TOKEN"
gcmd:!"curl -s --fail-with-body -X GET -H 'Content-Type:application/json' -H 'Shortcut-Token:${st}' -L '${url}"
scget:{[p]'ret:shell" "/@[gcmd;-1+#gcmd;+;p+"'"];?["e"~@ret;:ret;json ret]}
pid:"parent_id";kcs:"comments";cks:!"id author_id text ${pid}";ixpid:cks?pid
/ Helper fn for acs; expects acc to be initialized
ac:{[cm;pcid]
vs:cm[cks];vs[ixpid]:pcid / get values, set parent comment id
acc::cks!(.acc),'vs / append values as row
o[;cm"id"]'(cm[kcs]) / recur: mutate acc for all nested comments
acc}
/ All comments for epicid
acs:{[epicid]
acc::cks!(#cks)#(()) / initialize acc table
cs:scget"epics/${epicid}/${kcs}" / fetch Shortcut's "List Epic Comments"
ac[;0n]'cs / process comments; top-level have parent_id of 0n
acc}
/ Helper fn, format float as int string
sid:"%0.f"$
/ Flat table of nested comment data
t:acs[epicid]
t:@[t;!"id";sid] / string ids
t:@[t;,pid;{{?[nan x;"";sid x]}'x}] / string ids, empty string for 0n
/ Write CSV as file with epic id and timestamp in file name
now:time "unixmilli";fn:"${kcs}-epic-${epicid}-ts-${now}.csv";say "Writing to ${fn}"
csvdata:","csv t;'csvfile:"w"open fn;csvfile print csvdata;close csvfile;say "ok"
epics-on-roadmap.goal
""import"sc-client/sc"
eps:epic""
t:json.tbl eps
where.match:{[tbl;col;val] (1;val~'(..x[p.col])tbl)#tbl}
where.in:{[tbl;col;val] (1;\¿[;val]'(..x[p.col])tbl)#tbl}
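/ Both filters lean on the (1;mask)#tbl idiom: # with a (depth;mask) left
/ argument keeps only the rows where mask is true. where.match tests rows
/ for an exact match with ~, where.in tests membership with ¿.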
where.gr:where.match[t;"group_id"]
deps:where.gr[gr.diamond"id"]
/ All but Backlog; see bottom of file
/ states:500167369 500159124 500155207 500155209 500105052 500155208 500042484 500155210 500130130 500155211 500155212 500159479 500165183 500042485 500105047
states:500042484 500155211 500155212 500159479 500165183
dactive:where.in[deps;"epic_state_id";states]
fmt.pp dactive
+dactive[!"name app_url"]
/
(500159124 "Prioritized"
500155207 "Planning"
500155209 "In Design"
500105052 "Ready For Dev"
500155208 "To Refine"
500042484 "In Development"
500155210 "⚠️ Blocked"
500130130 "⏸️ On Hold"
500155211 "Internal Release"
500155212 "% Release"
500159479 "Learning"
500165183 "GA Release"
500042485 "Done"
500105047 "🚫 Canceled")
\
groups.goal
/ # Fetch
server:- 'shell rq`aws-vault exec developer -- peer-server production | grep 'production analytics-server' | cut -d' ' -f3`
namewss:"active-workspaces"
namegroups:"groups-without-perms"
nameperms:"perms-in-groups"
nameepicswithgroups:"epics-with-groups"
nameepicswithoutgroups:"epics-without-groups"
presto:{"presto --server $server --catalog analytics --schema production_build -f ${x}.sql --output-format CSV_HEADER_UNQUOTED > ${x}.csv"}
say "Running query $namewss ..."; 'shell presto[namewss]
say "Running query $namegroups ..."; 'shell presto[namegroups]
say "Running query $nameperms ..."; 'shell presto[nameperms]
say "Running query $nameepicswithgroups ..."; 'shell presto[nameepicswithgroups]
say "Running query $nameepicswithoutgroups ..."; 'shell presto[nameepicswithoutgroups]
/ # Local DB
pp:fmt.tbl[;20;8;"%.1f"]; cleanplan:sub[":product.tier/" ""]
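/ e.g. (plan name assumed): cleanplan ":product.tier/standard" gives "standard"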
/ ## Teams/Groups
empgrp:{(*'x)!1_'x}@csv 'read"${namegroups}.csv"
empgrp:@[empgrp;!"plan";cleanplan]; empgrp:@[empgrp;!"nb np";"i"$]
fllgrp:{(*'x)!1_'x}@csv 'read"${nameperms}.csv" / row per permission
fllgrp:@[fllgrp;!"plan";cleanplan]; fllgrp:@[fllgrp;!"nb";"i"$]
freqs:k.freq[fllgrp"grp"]
idx::0; prog:{and[~1000!idx; print"$idx "]} / progress
catnp:{[grp] prog"";idx+:1; d:*'(1;..grp=p.grp)#fllgrp; d:(,"perm")^d; (.d),freqs[grp]}
nps:catnp'!freqs
allgrp:empgrp / start with empgrp, which has correct shape
allgrp:allgrp,'+nps / add non-empty groups
wss:{(*'x)!1_'x}@csv 'read"${namewss}.csv" / row per workspace
wss:@[wss;!"plan";cleanplan]; wss:@[wss;!"nb";"i"$]
ew:{(*'x)!1_'x}@csv 'read"${nameepicswithgroups}.csv"
ew:@[ew;!"plan";cleanplan]; ew:@[ew;!"nb";"i"$]
ewstarted:(1;..estate="in progress")#ew
ewfreqs:k.freq[ewstarted"ws"] / count of epics in each workspace, which is the rows in wss
wss0teamepicsks:(!ewfreqs)^wss"ws"
wss0teamepicsvs:(#wss0teamepicsks)#0
wss0teamepics:wss0teamepicsks!wss0teamepicsvs
w:wss0teamepics,ewfreqs
(!^w)~((wss@<wss..ws)"ws") / demonstrate ordering of ws ids
wss:(wss@<wss..ws),(,"epicswithteams")!,(.^w)
"_groups.csv"print ","csv allgrp
shell rq`duckdb_cli -c "DROP TABLE IF EXISTS grps;" groups.duckdb`
shell rq`duckdb_cli -c "CREATE TABLE grps AS SELECT * FROM '_groups.csv';" groups.duckdb`
"_wss.csv"print ","csv wss
shell rq`duckdb_cli -c "DROP TABLE IF EXISTS wss;" groups.duckdb`
shell rq`duckdb_cli -c "CREATE TABLE wss AS SELECT * FROM '_wss.csv';" groups.duckdb`
hex.goal
numerals:,/!10,"c"$'97+!6; hex:{sub[" ";"0"]2!""/"s"$'numerals@16\x}; rtoh:{[rgb] "#"+""/hex'rgb}
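/ Worked example: each channel is decomposed base 16 (16\255 is 15 15),
/ mapped through numerals, joined, and zero-padded to two digits:
rtoh 255 128 0 / "#ff8000"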
ics.goal
files:glob"*.ics"
f:"daniel.gregoire@shortcut.com.ics"; s: 'read f
ls:=s
begs:&"BEGIN:VEVENT"=ls
ends:&"END:VEVENT"=ls
evts:ls[begs+!'1+ends-begs]
e:*evts
say'e
say'pevt e
pevt:{[ls]
ctd:&(.." "~x[0;1])'ls
dbg::ls[ctd]
dbg.ctd::ctd
\joined:(ctd-1)(..p.ls[x]+1_'p.ls[y])'ctd
dbg.joined::joined
/ TODO START HERE vvv
rem:ctd; is:!#ls
ls@:&(#rem)=rem?is; ls[ctd-1]:joined}
pevt e
dbg
ctg:(«dbg.ctd)-dbg.ctd
ctg[-1+#ctg]:-/dbg.ctd[-1 -2]
ctg
help rx/rot/
dbg.joined
1 rotate dbg.ctd
-1 rotate dbg.ctd
say'e
a:7 9 11 14 15 16 17 18 8 10 12 15 16 17 18 19
b:0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26
{a}^b
help"v"
b@&((#a)=a?b)
a:8 10 12 15 16 17 18 19
(«a)-a
a-»a
help rx/shift/
-/a[-1 -2]
a:8 10 12 15 16 17 18 19
a
a-0i»a
{y-x}/2^a
-\1 -1_`a
2-/':a
ideas-channel.goal
/ #server-errors channel; messages consist of attachments, first is workspace, rest are per log :tag
url:"https://slack.com/api/"; aj:"application/json"; tk: 'env"SLACK_USER_TOKEN"; hd:..[Accept:aj;"Content-Type":aj;"Authorization":"Bearer $tk"]
hc:http.client[..[Header:hd]]; hp:{[f]{[httpf;path]r: 'httpf[hc;url+path]; 'json r"bodystring"}[f]}
hpp:{[f]{[httpf;path;reqopts]r: 'httpf[hc;url+path;reqopts]; 'json r"bodystring"}[f]}
get:hp[http.get]; getq:hpp[http.get]; post:hpp[http.post]; jp:{say " "json x}
dt:{time.unix["i"$_"n"$x]}; int:"i"$_"n"$
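/ Slack ts values are strings like "1712345678.001200"; dt floors to the
/ epoch second and converts (example value assumed):
dt["1712345678.001200"] / the time at Unix second 1712345678 (2024-04-05)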
/ convos:get"conversations.list"
/ chans:convos"channels"; ^(..name,id)'chans
/ channel:"C057UQVFA3S" / #ideas
channel:"C08HKS47HCH" / # csv-import-leads
link:{[ts]getq["chat.getPermalink";..[QueryParam:..[channel;"message_ts":ts]]]}
Day:time.Hour * 24; unow:time.utc time.now@0
:oldest:time.unix[time.add[unow;-3000 * Day]]
:latest:time.unix[unow]
msgs:{[acc;channel;oldest;latest] \latest / print progress
hist:post["conversations.history";..[Body:""json..[channel;oldest;latest]]]
ms:hist"messages"; newlatest:(..ts)@*|ms; acc,:ms
?[hist"has_more"
o[acc;channel;oldest;newlatest]
acc]}
allmsgs:msgs[();channel;oldest;latest]; "ok"
time.unix'"i"$"v"$(..ts)'allmsgs
members-without-teams.goal
/import"sc-client/scapi"
mbs:member.list""
gps:group.list""
(#mbs;#gps)
ac"cache.*"
agps:gps[&-0w=(..archived)'gps]
ambs:mbs[&-0w=(..disabled)'mbs]
rp:,/'{(x["profile";!"email_address mention_name name"];#x["group_ids"])}'ambs
t:(!"email mentionname name numteams")!+rp
f:"user-teams.csv"
f print ","csv (..x@<numteams)(..x@<email)t
prs-without-stories.goal
/ PRs without Shortcut Stories
ghurl:"https://api.github.com/"; ghtok: 'env"GITHUB_API_TOKEN"
year:2025;qtrs:("$year-01-01..$year-03-31";"$year-04-01..$year-06-30";"$year-07-01..$year-09-30";"$year-10-01..$year-12-31")
prsearch:{[repo;mergerange;page]
q:"%20"/!"repo:useshortcut/$repo is:pull-request is:merged merged:$mergerange"
"per_page=100&page=$page&q=$q"}
bot:"shortcut-integration[bot]"
nostory:{[fname]cs: 'json 'read fname;users:(..user..login)'cs;?[0=#cs;1;0=|/bot=users]}
clean:{f:sub["https://api.";"https://"];g:sub["github.com/repos/";"github.com/"];g f x}
curl:{[url]qq#curl -sS -H "Authorization: Bearer $ghtok" -H "Accept: application/vnd.github+json" "$url"#}
query:{[repo;qtr;page]qp:prsearch[repo;qtrs[qtr];page]
url:"https://api.github.com/search/issues?$qp"
finalurl:curl[url]
json shell curl[url]}
commentfiles:-1_"\n"\shell "ls *-comments.json"
nostoryprs:commentfiles[&nostory'commentfiles]
prid:{[fname]m:rx/pr-(\d+)-comments.json/fname;m[1]}
nostoryprids:prid'nostoryprs
openpr:{[fname]m:rx/pr-(\d+)-comments.json/fname;id:m[1];j:json read "pr-${id}.json";url:clean j"url"; shell "open $url"}
/ openpr'sample
pr:{[prid]json read "pr-${prid}.json"}
prs:pr'nostoryprids
/ Only those in the nostoryprids
ghpeeps:..[] / REDACTED
dos:cache.workflows[(!cache.workflows)@2]
doscompleted:500179018
lbl:..[name:"audit-2025"]
storyforpr:{[prid]fname:"pr-${prid}.json";j:json read fname;(name;description;completedat;prurl):j@!"title body closed_at url";description:?[nan description;"";description];owner:ghpeeps[j["user";"login"]];..[name;description;"workflow_state_id":doscompleted;"owner_ids":,owner"id";"completed_at_override":completedat;labels:,lbl;"external_links":,prurl]}
ss:storyforpr'nostoryprids
batches:-100$ss
#'batches
curlp:{[url;body]qq#curl -L -XPOST -H "Authorization: Bearer $ghtok" -H "Accept: application/vnd.github+json" -d'{"body":"$body"}' "$url"#}
linktogh:{prurl:x["external_links";0];csurl:"$prurl/comments";storyid:"i"$x["id"];cstext:"[sc-$storyid]"; curlp[csurl;cstext]}
sss:story.search[..["label_name":"audit-2025";archived:-0w]]
fullsss:story.get'sss
/ Korey
addkorey:{[st]ids:(st"owner_ids"),(mb.korey"id");story.update[st"id";..["owner_ids":ids]]}
addkorey'sss
addkorey story.get[294843]
story.update.bulk[..["story_ids":"i"$(..id)'sss;"owner_ids_add":,(mb.korey)"id"]]
koreyproceed:{[st]cs:story.comments[st"id"];koreycs:(mb.korey"id")=@[;"author_id"]'cs;rootcs:nan'@[;"parent_id"]'cs;c:*cs[&koreycs&rootcs];pid:c"id";story.comments.create[story.get[st"id"];..[text:"Proceed.";"parent_id":pid]]}
haskoreycomment:{[st]cs:st"comments";koreycs:(mb.korey"id")=@[;"author_id"]'cs;(..x["created_at"])'cs[&koreycs]}
#'haskoreycomment'fullsss
haskoreycomment'fullsss
koreyproceed[story.get[294843]]
/ resp21@!"total_count incomplete_results"
/ items:,/@[;"items"]'(resp01;resp11;resp21)
/ {[i]fname:"pr-"+($"i"$i"id")+".json";fname print ""json i;}'items
/ {[i]id:$"i"$i"id";\id;fname:"pr-$id-comments.json";cs:json shell curl[i"comments_url"];fname print ""json cs;}'items
/ [sc-new-story] as GH PR comment
/ curlp:{[url]qq#curl -L -XPOST -H "Authorization: Bearer $ghtok" -H "Accept: application/vnd.github+json" -d'{"body":"[sc-new-story]"}' "$url"#}
/ shell curlp (json read"pr-2793685003.json")"comments_url"
/ {url:clean x"url";shell "open $url"}'sample / Open stories in browser
/ TODO Delete archived audit-2025 stories
/ {shell linktogh[x]}'1_b0
/ sample:-10?b0
/ shell'"open " + (..x["app_url"])'sample
/ [DONE] b0:story.create.bulk[..["stories":batches[0]]]
/ [DONE] b1:story.create.bulk[..["stories":batches[1]]]
/ [DONE] b2:story.create.bulk[..["stories":batches[2]]]
/ [DONE] b3:story.create.bulk[..["stories":batches[3]]]
/ {shell linktogh[x]}'b3
/ story.update.bulk[..["story_ids":"i"$(..id)'b0;"archived":0w]]
/ fullstories:story.get'sss
/ {shell "open "+x["app_url"]}@*fullstories[&~#'(..x["pull_requests"])'fullstories]
/ story.get[(*sss)]["pull_requests"]
1
sc.goal
/ Shortcut API
prompt:"sc> "; ari.prompt:prompt
hoste:env "SHORTCUT_API_HOST";host:?["e"~@hoste;"api.app.shortcut.com";hoste]
url:"https://$host/api/v3/"; tok: 'env "SHORTCUT_API_TOKEN"; sc.debug:0
aj:"application/json"; hd:..["Shortcut-Token":tok;Accept:aj;"Content-Type":aj]; hc:http.client[..[Header:hd]]
hp:{[f]{[httpf;path]hc:?[sc.debug;..[Header:hd;Debug:1];hc]; r: 'httpf[hc;url+path]; 'json r"bodystring"}[f]}
hpp:{[f]{[httpf;path;reqopts]hc:?[sc.debug;..[Header:hd;Debug:1];hc]; r: 'httpf[hc;url+path;reqopts]; 'json r"bodystring"}[f]}
get:hp[http.get]; post:hpp[http.post]; put:hpp[http.put]; browse:{[ent]u:ent"app_url"; shell "open $u"}
sg:{or[..["stories":"story"]x;-1_x]}; sgmg:{[s]munge@sg s}; infi:1=sign; / singular, munge + singular, infinity-to-int
/ Strings
etk:"entity_type"; sc.ents:!"custom-fields epic-workflows epics groups iterations key-results labels members milestones objectives stories workflows"
stsfields:!"archived completed_at_end completed_at_start created_at_end created_at_start deadline_end deadline_start epic_id epic_ids estimate external_id group_id group_ids includes_description iteration_id iteration_ids label_ids label_name owner_id owner_ids project_id project_ids requested_by_id story_type updated_at_end updated_at_start workflow_state_id workflow_state_types"
idfs:(!"d s n")!({id:x"id";?["s"=@id;id;$"i"$id]};{x};{$"i"$x}); id:{tp:@x; ?[|/tp=!idfs;idfs[tp]x;$x]} / fmt ids
et:{x[etk]}
entn:{n:x"name"; ?[n="";x["profile";"name"];n]}
typeid:{et:x[etk]; type:munge[et]; i:id[x]; (type;i)}
epr:{(type;i):typeid x; :qq/$type["$i"]/}
/ TODO Brittle.
/ entprstr:{(type;id):typeid x; n:entn x; "$name[$i] / $n"}
/ ari.print:{and[ist x;:fmt.tbl[x;3;3;"%.1f"]]; and["d"=@x;:(?[|/etk=!x;(et:x[etk];say entprstr[x]);say x])]; and["e"=@x;:(say x; say"")]; say $x}
cd:{ari.c::x; ?[x;ari.prompt::epr[x]+"> ";ari.prompt::prompt]; 1}; jj:say" "json
san:{sub[rx/[^a-zA-Z0-9]/;""]x}; jp:{say " "json x}; / sanitize Goal name / print JSON (of ent)
fetcher:{[path;arg]u:?[""~arg;path;path+"/"+id[arg]];get u}
poster:{[path;body]b:""json body; post[path;..[Body:b]]}
putter:{[path;arg;body]u:path+"/"+id[arg]; b:""json body; put[u;..[Body:b]]}
/ NB: DELETE endpoints intentionally unsupported. Archive via PUT as needed.
{name:sgmg x; ::["${name}.get";fetcher[x]]; ::[name;fetcher[x]]}'sc.ents
{name:sgmg x; ::["${name}.create";poster[x]]}'sc.ents
{name:sgmg x; ::["${name}.update";putter[x]]}'sc.ents
/ TODO Define member and group fetchers to accept either id or mention name
{name:sgmg x; ::["${name}.list";..q.get[p.x]]}'sc.ents[0 1 2 3 5 6 7 8 9] / Listable endpoints
epicworkflow:{get"epic-workflow"}; epicworkflow.get:{get"epic-workflow"} / Singleton: workflow that epics go through
member.current:{get"member"} / Singleton: token's member + workspace info
workspace.slug:{(member.current"")["workspace2";"url_slug"]} / Synthetic singleton: workspace slug
story.history:{[st]i:id[st]; get"stories/$i/history"}
/ TODO Make sc.q* functions that pluck out IDs from entity args
stsglobals:{[fld]k:"sts."+san[fld]; ::[k;fld]}; stsglobals'stsfields / DEF story search fields
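/ Since san strips non-alphanumerics, underscores disappear from the global
/ names: sts.ownerid holds "owner_id", sts.workflowstateid holds "workflow_state_id".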
story.search:{[d]body:""json d; post["stories/search";..[Body:body]]} / API story search
story.create.bulk:{[d]body:""json d; post["stories/bulk";..[Body:body]]}
story.update.bulk:{[d]body:""json d; put["stories/bulk";..[Body:body]]}
/ Containers
story.tasks:{[st]i:id st; st:story i; st"tasks"}
story.comments:{[st]i:id st; get"stories/$i/comments"}
story.comments.create:{[st;body]i:id st; b:""json body; post["stories/$i/comments";..[Body:b]]}
epic.stories:{[ep]i:id ep; get"epics/$i/stories"}
iteration.stories:{[it]i:id it; get"iterations/$i/stories"}
label.stories:{[lb]i:id lb; get"labels/$i/stories"}; label.epics:{[lb]i:id lb; get"labels/$i/epics"}
member.stories:{[mb]i:id mb; story.search@..["owner_id":i]}
milestone.epics:{[mi]i:id mi; get"milestones/$i/epics"}
objective.epics:{[ob]i:id ob; get"objectives/$i/epics"}
group.members:{[gr]i:id gr; gr:group i; ids:gr"member_ids"; member'ids}
workflow.states:{[wf]i:id wf; wf:workflow i; wf"states"}
entity.get:{ / IDs are unique within workspace, so we can fetch an id without knowing its type
hc:?[sc.debug;..[Header:hd;Debug:1];hc]
ents:sc.ents[9 2 4 3 7 6 8 11 5 10 0 1]; or[(#?ents)=#sc.ents;:error"entity.get doesn't try all entities"]
entity.get.cont::1
f:{r: 'http.get[hc;"${url}${y}/$x"]; entity.get.cont::~r"ok"; res:and[r"ok";json r"bodystring"]}
fid:f["s"$x;]; (..p.fid[y])/[{entity.get.cont};0n;ents]
}
search:{[qp]res:http.get[hc;url+"search";..[QueryParam:qp]];json res"bodystring"}
search.stories:{[qp]res:http.get[hc;url+"search/stories";..[QueryParam:qp]];json res"bodystring"}
search.epics:{[qp]res:http.get[hc;url+"search/epics";..[QueryParam:qp]];json res"bodystring"}
search.iterations:{[qp]res:http.get[hc;url+"search/iterations";..[QueryParam:qp]];json res"bodystring"}
search.objectives:{[qp]res:http.get[hc;url+"search/objectives";..[QueryParam:qp]];json res"bodystring"}
member.mention:{[mem]mem["profile";"mention_name"]}
current:{base:..[archived:-0w;"workflow_state_types":!"backlog unstarted started"]; story.search@base["owner_id"]:id[x]}
/ All epics a member's stories were a part of, as well as all stories not assigned to an epic.
mbworkstreams:{[yearmonthS;yearmonthE;member] / [yearmonthS,yearmonthE) eg "2024-01"
q:..[archived:-0w;"workflow_state_types":,"done";"owner_id":q.id p.member
"completed_at_start":"$yearmonthS-01T00:00:00Z";"completed_at_end":"$yearmonthE-01T00:00:00Z"]
ss:story.search[q]; sids:(..x["epic_id"])'ss; nil:(..0n~'p.sids); inepicids:^?nil^sids; outepics:^?nil#ss
/epicnames:(..x["name"][0;30]+"...")'epics
epics:epic.get'inepicids; epicnames:(..name)'epics
epicurls:(..x["app_url"])'epics; etype:(#inepicids)#"epic"
storynames:(..name)'ss; storyurls:(..x["app_url"])'ss; stype:(#outepics)#"story"
(!"type id name url")!(etype,stype;inepicids,(..id)'outepics;epicnames,storynames;epicurls,storyurls)}
ls:{[e]t:et e
?["epic"~t; epic.stories e; "iteration"~t; iteration.stories e
"group"~t; group.members e; "label"~t; label.stories e
"member"~t; member.stories e; "milestone"~t; milestone.epics e
"objective"~t; objective.epics e; "story"~t; story.tasks e
"workflow"~t; workflow.states e; :error"Can't list children of type '$t'"]}
whoami:{member.current""}
/ Data
/ DATA FETCH for specific workspace2
cache.refresh:{
cache.members::get"members"; cache.groups::get"groups"; wfs::get"workflows"; cache.workflows::(@[;"id"]'wfs)!(wfs)
cache.workflowstates::()!()
{[wf]{[wf;state]state["_workflow_id"]:wf"id";cache.workflowstates[state"id"]:state}[wf]'wf"states"}'cache.workflows
cache.epicworkflow::get"epic-workflow"; cache.epicworkflowstates::(@[;"id"]'epsts)!epsts:cache.epicworkflow["epic_states"]
1}
cache.refresh""
/ >> DEF mb.* (members) >>
mnames:(..profile["mention_name"])'cache.members; mnamed:mnames!cache.members
mnameglobals:{[mname]k:"mb."+san[mname]; v:mnamed[mname]; n:mnamed[mname;"profile";"name"]; help[k;"Member: $n"]; ::[k;v]}
mnameglobals'mnames
cache.membersbyid:((..id)'cache.members)!cache.members
/ << DEF mb.* (members) <<
/ >> DEF gr.* (groups) >>
gnames:(..x["mention_name"])'cache.groups; gnamed:gnames!cache.groups; gnameglobals:{[gname]k:"gr."+san[gname]; v:gnamed[gname]; n:gnamed[gname;"name"]; help[k;"Group/Team: $n"]; ::[k;v]}
ign:gnameglobals'gnames / DEF gr. (groups)
cache.groupsbyid:((..id)'cache.groups)!cache.groups
/ << DEF gr.* (groups)
/ Story workflow states
story.isworkflowstatetype:{[type;st] wfst:st"workflow_state_id";type=cache.workflowstates[wfst]"type"}
(story.isbacklog;story.isdone;story.isstarted;story.isunstarted):story.isworkflowstatetype'!"backlog done started unstarted"
/ Epic workflow states
epic.isworkflowstatetype:{[type;ep] id:ep"epic_state_id"; type=epsts[id]"type"}
(epic.isdone;epic.isstarted;epic.isunstarted):epic.isworkflowstatetype'!"done started unstarted"
/ Help
/ TODO Add help for fetchers, putters, posters
"story.search"help rq#{ "archived": true, "completed_at_end": "2016-12-31T12:30:00Z", "completed_at_start": "2016-12-31T12:30:00Z", "created_at_end": "2016-12-31T12:30:00Z", "created_at_start": "2016-12-31T12:30:00Z",
"deadline_end": "2016-12-31T12:30:00Z", "deadline_start": "2016-12-31T12:30:00Z", "epic_id": 123, "epic_ids": [123], "estimate": 123, "external_id": null,
"group_id": "12345678-9012-3456-7890-123456789012", "group_ids": ["12345678-9012-3456-7890-123456789012"], "includes_description": true,
"iteration_id": 123, "iteration_ids": [123], "label_ids": [123], "label_name": "foo", "owner_id": "12345678-9012-3456-7890-123456789012", "owner_ids": ["12345678-9012-3456-7890-123456789012"],
"project_id": 123, "project_ids": [123], "requested_by_id": "12345678-9012-3456-7890-123456789012", "story_type": "bug", "updated_at_end": "2016-12-31T12:30:00Z", "updated_at_start": "2016-12-31T12:30:00Z",
"workflow_state_id": 123, "workflow_state_types": ["backlog"] }
workflow_state_types:(backlog|done|started|unstarted)#
/ CLI Niceties
entpal:..["backlog":"#8c8d8c";"custom-field":"#df8632";"done":"#71b259";"epic-workflow":"#97c6e8";"epic":"#71b259";"group":"#d96e7b";"iteration":"#3895c9";"key-result":"#71b259";"label":"#ad9865";"member":"#8bb771";"objective":"#df8632";"story":"#ad87f3";"workflow":"#97c6e8";"bug":"#e75569";"chore":"#3895c9";"error":"#e75569";"feature":"#e6d566";"info":"#71b259";"prompt":"#58b1e4";"role.admin":"#e6d566";"role.member":"#3a95c9";"role.observer":"#8c8d8c";"role.owner":"#e75569";"started":"#ad87f3";"task":"#3a95c9";"title":"#97c6e8";"subdued":"#575858";"unstarted":"#e75569";"warn":"#e6d566";"_fallback":"#8c8d8c"]
fg:{[colorstr;s]tc:tui.color colorstr; ty:tui.style ..[Foreground:p.tc]; tui.render[ty;s]};"fg"help"Render s with Foreground color colorstr"
colorent:{s: epr x; t:et x; c:entpal@et x; fg[c;s]}
sc.width::60
trunc:{?[(sc.width-1)<""#x;x[0;sc.width]+"...";x]}
member.pstr:{p:colorent x; m:member.mention x; "$p / $m"}
currentmember.pstr:{i:id x; m:x["mention_name"]; qq#member["$i"] / $m#}
storytypes:..[feature:"Ftr";bug:"Bug";chore:"Chr"]
renderar:{[archived] ?[archived;fg[entpal"error";"[ARCHIVED] "];""]}
renderwf:{[wfst]
(wname;wtype):wfst[!"name type"]; wtypeabb:uc *""\wtype
fg[entpal[_wtype];"[$wtypeabb:$wname]"]}
(..name)'cache.epicworkflowstates
story.pstr:{
form:colorent x; (a;e;n;t;wfsid):x[!"archived estimate name story_type workflow_state_id"]
archived:renderar[a]
type:storytypes[t]; type:fg[entpal t;"[$type]"]; name:trunc n
e:?[nan e;"_";"%0.f"$e]; est:"[$e]"; est:fg[entpal"done";est]
wfstate:renderwf[cache.workflowstates wfsid]
"$form / ${archived}${type}${est}${wfstate} $name"}
epic.pstr:{
form:colorent x; (a;n;s;wfsid):x[!"archived name state epic_state_id"]
archived:renderar[a]; name:trunc n
wfstate:renderwf[cache.epicworkflowstates wfsid]
"$form / ${archived}${wfstate} $name"}
default.pstr:{p:colorent x; n:x"name"; "$p / $n"}
tcurrentmember:{[e]*/(#e)>(!e)?!"mention_name workspace2"}
pstr:{[e]t:et e
?["member"~t;member.pstr e
""~t;?[tcurrentmember e;currentmember.pstr e;[s:$e; :error"Don't know how to print entity $s"]]
"story"~t;story.pstr e
"epic"~t;epic.pstr e
default.pstr e]}
pp:{?["d"~@x;say pstr x;x]} / TODO Accept array of ents / Tacit pp:say pstr@
ll:{pp'ls x} / Tacit pp'ls@
colorprompt:{ari.prompt::colorent[x]+"> "}
cd:{ari.c::x; ?[x;colorprompt x;ari.prompt::prompt]; 1};
1
search-as-export.goal
base:"https://api.app.shortcut.com"
prev::"START"
counter::0
searchepics:{[url]
counter::counter+1
say "Run $counter $url"
resp:http.get[hc;url]
body:resp"bodystring"
j:json body
and["e"~@j;:body]
d:j"data"
n:j"next"
?[n;o[base+n];:j]
}
searchepics["https://api.app.shortcut.com/api/v3/search/epics?query=is:epic&detail=slim&page_size=250"]
sentry.goal
/ Sentry API for Error Bash
url:"https://sentry.io/api/0/"; aj:"application/json"; tk: 'env"SENTRY_API_TOKEN"; hd:..[Accept:aj;"Content-Type":aj;"Authorization":"Bearer $tk"]
hc:http.client[..[Header:hd]]; hp:{[f]{[httpf;path]r: 'httpf[hc;url+path]; 'json r"bodystring"}[f]}
hpp:{[f]{[httpf;path;reqopts]r: 'httpf[hc;url+path;reqopts]; 'json r"bodystring"}[f]}
get:hp[http.get]; post:hpp[http.post]; jp:{say " "json x}
oslug:"useshortcut"; pslug:"REDACTED"
issues:{[acc;nexturl;hasmore]
\nexturl
?[hasmore
[res:http.get[hc;nexturl]
nextlink:*1_("; "\)'","\*res["headers";"Link"]
nexturl:@[;1]rx/<([^>]+)>/@nextlink[0]
o[acc,json res"bodystring";nexturl;|/rq`results="true"`=nextlink]]
[say"DONE";acc]]}
params:url.encode ..[query:"environment:production is:unresolved"]
iurl:url+"projects/$oslug/$pslug/issues/?$params"
ai:issues[();iurl;1]
#ai
withstory:+/(0<#:)'@[;"annotations"]'ai
100*withstory%#ai
aiu:ai@(>(..userCount)'ai)
nostories:{(0=#:)'(..annotations)'aiu}#aiu
nostorieshi:{(50<)'(..userCount)'nostories}#nostories
say'^(..lastSeen)'nostorieshi
tocsv:+(,!"title userCount sentryLink"),(..title,userCount,permalink)'nostorieshi
ts:$time.unix[time.utc[time.now 0]]
"/Users/dlg/data/sentry-high-priority-no-stories-${ts}.csv"print","csv tocsv
sctk:shell"cred shortcut production token"; scurl:"https://api.app.shortcut.com/api/v3/"
schd:..[Accept:aj;"Content-Type":aj;"Shortcut-Token":q.sctk]
schc:http.client[..[Header:schd]]; schp:{[f]{[httpf;path]r: 'httpf[schc;scurl+path]; 'json r"bodystring"}[f]}
schpp:{[f]{[httpf;path;reqopts]r: 'httpf[schc;scurl+path;reqopts]; 'json r"bodystring"}[f]}
scget:schp[http.get]; post:schpp[http.post]
storyurls:{x["annotations";0;"url"]}'{(0<#:)'(..annotations)'ai}#ai
storyids:@[;1]'{rx#/story/(\d+)#x}'storyurls
stories:{scget"stories/$x"}'storyids
stories
(..x["workflow_state_id"])'stories
"done"
@[;"annotations"]'ai
withstory:+/(0<#:)'@[;"annotations"]'ai
/
|/(..[key:"environment";value:"production"]~)'(..tags)@*ies
\
server.goal
http.serve["localhost:8089";]
shortcut-solitaire.goal
/ invoke with ari -l $HOME/dev/goal/sc-client-goal/sc.goal
/ Need 10 workflow states to play a game of Klondike solitaire:
/ Stock, discard, home, and 7 columns for main playing area
(..name)'(.cache.workflows)[&10=#'(..states)'.cache.workflows]
taskwf:500179166
cache.workflows[taskwf;"states"]
/ Klondike Solitaire
:vals:!"A 2 3 4 5 6 7 8 9 T J Q K"
:suits:!"♤ ♧ ♡ ♢"
:deck:,/+vals+'`suits
:deal:-52?52
:cards:deck[deal]
:stock:!0; discard:!0; spades:!0; clubs:!0; hearts:!0; diamonds:!0
:cols:{l:*|x;(l+1)_y}\!'+\-1*!-7
:board:(stock;discard;spades;clubs;hearts;diamonds),cols
:viz:{a:(#x)#0; a[0]:1}'cols
:playable:viz{y[&x]}'cols
takestock:{[stock]\c:*stock;stock::1_stock;discard::(,c),discard}
:stock:(+/#'cols)_deal
discard:!0
takestock stock
stock
discard
snake.goal
rs:{((*/x)#y){(-y)$x}/|1_x}; reset:{end::0; l::15; dir::"s"; newdir::""; brd::rs[(l;l);0]; walls::((3 0);(3 8);(1 4);(8 4);(6 7);(4 1)); allpos::,/(!l),'`(!l); snk::(2 0;1 0;0 0); frt::5 5}; reset""
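/ rs reshapes: rs[dims;fill] builds a dims-shaped array of fill, e.g.:
rs[(2;3);0] / ((0 0 0);(0 0 0))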
dirs:!"n e s w"; kp:(!"Up Right Down Left")!dirs; odirs:dirs!2 rotate dirs; dirfns::dirs!({(x-1;y)};{(x;y+1)};{(x+1;y)};{(x;y-1)})
newfrt:{[snk]*1?allpos[&~allpos¿walls,snk]}
plot:{brd::rs[(l;l);"🌴"]; {brd::.[brd;x;{"💣"}]}'walls; ?[end;rs[(l;l);"💥"];[{brd::.[brd;x;{"🩷"}]}'1_snk; brd::.[brd;*snk;{"🐲"}]; brd::.[brd;frt;{"🌿"}]; brd]]}
coll:{[hd;snk]ouch:walls,1_snk; end::|/(|/ouch¿,hd;-1=&/sign hd;l=|/hd); end}
move:{[keyPress]ndir::kp keyPress; dir::?[ndir~odirs[dir];dir;ndir];dirfn:dirfns[dir];{brd::.[brd;x;{0}]}'snk;hd:dirfn.*snk;?[coll[hd;snk];:1;0];eats:hd~frt;tl:?[eats;snk;-1_snk];snk::(,hd),tl;frt::?[eats;newfrt[snk];frt]}
update:{[dt]?[end;:error"Game over";0]; ?[game.KeyPress;move game.KeyPress;0]}
draw:{say shell"clear";say'plot[brd];say "\n🐍 " + $#snk}
/ Play
reset"";draw""
game.KeyPress::"Down";update"";draw""
story-backlog-position-history.goal
""import"sc-client/sc"
sh:story.history[296476]
s:story.get[296476]; s"position"
(..actions)'sh
!sh
(..version)'sh
(..references)'sh
/ 3862917120.0
/ 3854626816.0
shell "open "+s"app_url"
sub-task-demo.goal
st:229689 / Big User Problem, and its Solution
sub1:231241 / Sub-task Alpha
sub2:231242 / Sub-task Beta
sub3:229688 / Sub-task Gamma
setsubs:{[subs]..["sub_tasks":{..["story_id":x]}'subs]}
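/ setsubs builds the payload shape the endpoint expects, e.g.:
setsubs (sub1;sub2) / ..["sub_tasks":(..["story_id":231241];..["story_id":231242])]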
story.update[st;setsubs (sub1;sub2;sub3)]
story.update[st;setsubs (sub2;sub3;sub1)]
story.update[st;setsubs ()]
team-usage.goal
pp:fmt.tbl[;5;8;"%.1f"]
db:sql.open"groups.duckdb"
qActiveOrgs:rq`
SELECT o.plan, COUNT(DISTINCT o.org)
FROM wss o
GROUP BY o.plan
ORDER BY o.plan;
`
activeOrgs:db sql.q qActiveOrgs
qPaidActiveWss:rq`SELECT COUNT(DISTINCT(ws)) FROM WSS WHERE plan <> 'free-limited';`
pp db sql.q qPaidActiveWss
pp db sql.q "select distinct(plan) from wss;"
pp db sql.q "describe wss;"
qOrgsEpicsWithTeams:rq`
SELECT w.plan, COUNT(DISTINCT w.org)
FROM wss w
WHERE w.epicswithteams > 0
GROUP BY w.plan
ORDER BY w.plan;
`
orgsEpicsWithTeams:db sql.q qOrgsEpicsWithTeams
qUsesTeams:rq`
WITH uses_teams AS
(SELECT g.org AS org, COUNT(g.grp) AS numgrps
FROM grps g
WHERE g.np > 0
GROUP BY g.org)
SELECT o.plan, COUNT(DISTINCT o.org) AS ct
FROM wss o
WHERE o.org IN (SELECT org FROM uses_teams WHERE numgrps > 1)
GROUP BY o.plan
ORDER BY o.plan;
`
usesTeams:db sql.q qUsesTeams
qActiveOrgsNb:rq`select sum(nb) as totalnb from (select distinct(org), nb, plan from wss) where plan <> 'free-limited';`
:activeOrgsNb:db sql.q qActiveOrgsNb
qUsesTeamsNb:rq`
WITH uses_teams AS
(SELECT g.org AS org, COUNT(g.grp) AS numgrps
FROM grps g
WHERE g.np > 0
GROUP BY g.org)
SELECT SUM(o.nb) AS totalnb
FROM wss o
WHERE o.org IN (SELECT org FROM uses_teams WHERE numgrps > 1)
AND o.plan <> 'free-limited';
`
:usesTeamsNb:db sql.q qUsesTeamsNb
100*usesTeamsNb%activeOrgsNb
qMaybeTeams:qq`
WITH maybe_teams AS
(SELECT g.org AS org, COUNT(g.grp) AS numgrps
FROM grps g
WHERE g.np > 1
GROUP BY g.org)
SELECT o.plan, COUNT(DISTINCT o.org) AS ct
FROM wss o
WHERE o.org IN (SELECT org FROM maybe_teams WHERE numgrps = 1)
GROUP BY o.plan
ORDER BY o.plan;
`
maybeTeams:db sql.q qMaybeTeams
qMaybeTeamsNb:qq`
WITH maybe_teams AS
(SELECT g.org AS org, COUNT(g.grp) AS numgrps
FROM grps g
WHERE g.np > 1
GROUP BY g.org)
SELECT SUM(o.nb) AS totalnb
FROM wss o
WHERE o.org IN (SELECT org FROM maybe_teams WHERE numgrps = 1)
AND o.plan <> 'free-limited';
`
:maybeTeamsNb:db sql.q qMaybeTeamsNb
100*maybeTeamsNb%activeOrgsNb
qDontTeams1:qq`
SELECT o.plan, COUNT(DISTINCT(o.org)) AS ct
FROM wss o
WHERE o.org NOT IN (SELECT DISTINCT(g.org) FROM grps g)
GROUP BY plan
ORDER BY plan;
`
dontTeams1:db sql.q qDontTeams1
qDontTeams1Nb:qq`
SELECT SUM(nb) as totalnb FROM (
SELECT DISTINCT(o.org), nb
FROM wss o
WHERE o.org NOT IN (SELECT DISTINCT(g.org) FROM grps g)
AND o.plan <> 'free-limited');
`
:dontTeams1Nb:db sql.q qDontTeams1Nb
100*dontTeams1Nb%activeOrgsNb
qDontTeams2:qq`
WITH zero AS (
SELECT g.org AS org FROM grps g WHERE g.np = 0 GROUP BY g.org
),
nonzero AS (
SELECT g.org AS org FROM grps g WHERE g.np > 0 GROUP BY g.org
),
onlyzero AS (
SELECT org FROM zero WHERE org NOT IN (SELECT DISTINCT(org) FROM nonzero)
)
SELECT o.plan, COUNT(DISTINCT o.org) AS ct
FROM wss o
WHERE o.org IN (SELECT DISTINCT(org) FROM onlyzero)
GROUP BY o.plan
ORDER BY o.plan;
`
dontTeams2:db sql.q qDontTeams2
dActiveOrgs:!/.activeOrgs
dOrgsEpicsWithTeams:!/.orgsEpicsWithTeams
dUsesTeams:!/.usesTeams
dMaybeTeams:!/.maybeTeams
dDontTeams1:!/.dontTeams1
dDontTeams2:!/.dontTeams2
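/ Each query above returns a two-column (plan;count) table; !/. zips its
/ columns into a plan!count dictionary. A tiny illustration:
!/(("free";"standard");3 5) / ("free";"standard")!3 5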
dDontTeams:dDontTeams1+dDontTeams2
dPercentUsesTeams:100*dUsesTeams%dActiveOrgs
dPercentMaybeTeams:100*dMaybeTeams%dActiveOrgs
dPercentDontTeams1:100*dDontTeams1%dActiveOrgs
dPercentDontTeams2:100*dDontTeams2%dActiveOrgs
dPercentDontTeams:100*dDontTeams%dActiveOrgs
dPercentEpicsWithTeams:100*dOrgsEpicsWithTeams%dActiveOrgs
dPercentEpicsWithoutTeams:100-dPercentEpicsWithTeams
mdf:{[d]ks:!d; hd:"|plan|percent|\n|---|---|\n"; tmp:{"|"+($y)+"|"+("%.2f"$x[y])+"|"}; rs:"\n"/tmp[d;]'ks; hd+rs}
mds:{[d]ks:!d; hd:"|plan|count|\n|---|---|\n"; tmp:{"|"+($y)+"|"+($x[y])+"|"}; rs:"\n"/tmp[d;]'ks; hd+rs}
say "Active Orgs"
say mds dActiveOrgs
say "Uses Teams"
say mdf dPercentUsesTeams
say "Maybe Uses Teams"
say mdf dPercentMaybeTeams
say "Doesn't Use Teams"
say mdf dPercentDontTeams
say mdf dPercentDontTeams1
say mdf dPercentDontTeams2
say "Epics with Teams"
say mdf dPercentEpicsWithTeams
say "No Epics with Teams"
say mdf dPercentEpicsWithoutTeams
/ say "Check on Percentages:"
/ say +/(dPercentUsesTeams;dPercentMaybeTeams;dPercentDontTeams)
/ sql.close db
/
Definitions:
Uses Teams active org: Multiple teams with 1 or more members
Maybe Uses Teams active org: Orgs with 1 team that has 2+ members
Doesn't Use Teams active org: Either no teams, or all teams have 0 members
Questions:
1. What % of active orgs use teams?
1a) By plan?
2. What % of active orgs maybe use teams?
2a) By plan?
3. What % of active orgs don't use teams?
3a) By plan?
\
time-to-deploy.goal
/ PR Data, pre-fetched using Jupyter notebook
say"Processing PR JSON..."
jsonl:-1_=shell"ls *.jsonl" / Locate *.jsonl files
lines:,/json 'read jsonl / Deserialize
t:json.tbl lines / Format as table
prstamps:^t"closed_at" / Get & sort all closed_at timestamps
/ Deployments Data
say"Processing Git deployment tags..."
tags:=-1_shell"cd \$SHORTCUT_HOME/REDACTED && git tag" / All REDACTED tags
prodtags:^tags[&"production-2024-*"%tags] / Filter & sort production-2024-* tags
deptimes:-{shell qq#cd \$SHORTCUT_HOME/REDACTED && git for-each-ref --format="%(creatordate)" refs/tags/$x#}'prodtags
dpstamps:time.RFC3339 time.format time.utc "Mon Jan _2 15:04:05 2006 -0700" time.parse deptimes / Format datetimes as RFC3339
/ Time from first PR after last deploy until next deploy
say"Calculating wait times..."
prtimes:^time.unixmilli time.RFC3339 time.parse prstamps / PR merge times in millis
dptimes:^time.unixmilli time.RFC3339 time.parse dpstamps / Deploy times in millis
diffs:dptimes-`prtimes / Time from PR merge to deploy for every deploy in 2024
postdeployprs:(*&~0<)'diffs / First non-negative time represents first PR closed after last deploy
fdps:1_dptimes / Ignore first deploy time, since it marks our baseline
fprs:(-1)_prtimes[postdeployprs] / Ignore last PR merge, since no corresponding deployment
fdiffs:fdps-fprs / Wait time we care about
fdiffs:"i"$fdiffs%1000 / Finest granularity in the data is seconds, so convert from millis to secs
/ Write uniform data and calculated wait times to CSV files
say"Writing CSV files..."
"prstamps.csv" print csv +prstamps
"dpstamps.csv" print csv +dpstamps
"waits_sec.csv"print csv +fdiffs
user-counts-analysis.goal
d19:csv.tbl@csv read"2024-12-19.csv"
d20:csv.tbl@csv read"2024-12-20.csv"
d21:csv.tbl@csv read"2024-12-21.csv"
(#*d20;#*d21)
=/'(
(#d19"public-id";#?d19"public-id")
(#d20"public-id";#?d20"public-id")
(#d21"public-id";#?d21"public-id")
)
"public-id" in ^!d19
d19["public-id";10?#*d19]
#*t