diff --git a/resources/public/css/style.css b/resources/public/css/style.css index 2c18993..44d9e46 100644 --- a/resources/public/css/style.css +++ b/resources/public/css/style.css @@ -170,12 +170,12 @@ a { ul { list-style-type: disc; list-style-position: inside; - padding: 0.5rem 0; + padding-bottom: 1rem; } ol { list-style-position: inside; - padding: 0.5rem 0; + padding-bottom: 1rem; } li { @@ -378,7 +378,7 @@ header .hidden { } header img:hover, -svg:hover, +header svg:hover, a:hover { animation: var(--pulse-animation); } @@ -929,10 +929,6 @@ section .post-body .info { } } -.post-body:hover img { - animation: var(--pulse-animation); -} - section button { margin: 0.5rem; } diff --git a/resources/public/main.js b/resources/public/main.js index 66fabe3..d76375c 100644 --- a/resources/public/main.js +++ b/resources/public/main.js @@ -2374,114 +2374,114 @@ function sl(a){var b=r.j(a);if("_DOT__DOT_"===b)var c="..";else a:{c=new RegExp( if("undefined"===typeof Hb||"undefined"===typeof Ib||"undefined"===typeof tl)var tl=null;function ul(a){return a instanceof nb}"undefined"!==typeof console&&(Nb=!1,Lb=function(){var a=arguments;return console.log.apply(console,Sa.call(null,a))},Mb=function(){var a=arguments;return console.error.apply(console,Sa.call(null,a))});if("undefined"===typeof Hb||"undefined"===typeof Ib||"undefined"===typeof vl)var vl=function(){throw Error("cljs.core/*eval* not bound");};var wl=new x(null,"args","args",1315556576),xl=new $d("time","instant","time/instant",-2013236992,null),yl=new x("cljs.spec.alpha","failure","cljs.spec.alpha/failure",188258592),zl=new x(null,"rel","rel",1378823488),Al=new $d("cljs.spec.alpha","*","cljs.spec.alpha/*",-1238084288,null),Bl=new x(null,"unnamed","unnamed",-26044928),Cl=new $d(null,"uuid","uuid",-504564192,null),Dl=new x(null,"path","path",-188191168),El=new x(null,"keywordize?","keywordize?",-234306912),Fl=new x(null,"add-event","add-event", 938429088),Gl=new x(null,"conflicting","conflicting",2003828416),Hl=new x(null,"req-un","req-un",1074571008),Il=new x(null,"terminators","terminators",-1448255712),Jl=new x("markdown-to-hiccup.core","markdown","markdown-to-hiccup.core/markdown",1289845536),Kl=new x(null,"opt-un","opt-un",883442496),Ll=new x(null,"ex-kind","ex-kind",1581199296),Ml=new x("fx.app","toggle-css-class","fx.app/toggle-css-class",-664295456),Nl=new x(null,"next-line","next-line",-1187000287),Ol=new x(null,"yield","yield", 177875009),Pl=new x(null,"codeblock","codeblock",-851153855),Ql=new x(null,"indented-code","indented-code",-904930175),Rl=new x(null,"div.simple-link","div.simple-link",1867158849),Sl=new x("post","tags","post/tags",1766643137),Tl=new x("image","alt","image/alt",97098305),Ul=new $d(null,"\x26?","\x26?",-461365599,null),Vl=new x(null,"reader-error","reader-error",1610253121),Wl=new x(null,"paused","paused",-1710376127),Xl=new x("app","theme","app/theme",-1248046239),Yl=new $d("cljs.spec.alpha","+", -"cljs.spec.alpha/+",2101263265,null),Zl=new x("evt.nav","close-navbar","evt.nav/close-navbar",1826365377),$l=new x(null,"p2","p2",905500641),am=new x(null,"min","min",444991522),bm=new x(null,"routes","routes",457900162),cm=new x(null,"on-set","on-set",-140953470),dm=new x(null,"ret","ret",-468222814),em=new x("cljs.spec.alpha","unknown","cljs.spec.alpha/unknown",651034818),fm=new $d("cljs.core","sequential?","cljs.core/sequential?",1777854658,null),gm=new x("cljs.spec.alpha","value","cljs.spec.alpha/value", -1974786274),hm=new x(null,"div.tag","div.tag",1088822530),im=new x(null,"gfn","gfn",791517474),jm=new 
x(null,"format","format",-1306924766),km=new x("cljs.spec.alpha","recursion-limit","cljs.spec.alpha/recursion-limit",601284994),lm=new x("cljs.spec.alpha","name","cljs.spec.alpha/name",205233570),mm=new x(null,"children","children",-940561982),nm=new x("sg.flybot.pullable.core","not-found","sg.flybot.pullable.core/not-found",625068482),om=new x(null,"blockquote-paragraph","blockquote-paragraph",1931783682), -pm=new x("about","post","about/post",446064130),qm=new x(null,"svg.menu-top","svg.menu-top",415216194),rm=new $d("clojure.test.check.generators","hash-map","clojure.test.check.generators/hash-map",1961346626,null),sm=new $d("clojure.test.check.generators","keyword","clojure.test.check.generators/keyword",24530530,null),tm=new x(null,"page-name","page-name",974981762),um=new x("markdown-to-hiccup.decode","string","markdown-to-hiccup.decode/string",-2048172350),vm=new x(null,"div.info","div.info",-1023482078), -wm=new x("sg.flybot.pullable.core.option","watch","sg.flybot.pullable.core.option/watch",872342338),xm=new x("post","employer","post/employer",752984930),ym=new x("post","date","post/date",-1460888702),zm=new x("markdown-to-hiccup.decode","vector","markdown-to-hiccup.decode/vector",1979814818),Am=new x("reitit.core","router","reitit.core/router",1293076450),Bm=new x(null,"-\x3e","-\x3e",514830339),Cm=new x(null,"pred-exprs","pred-exprs",1792271395),Dm=new $d(null,"?view","?view",-1050183581,null), -Em=new x(null,"home","home",-74557309),Fm=new x(null,"keys-pred","keys-pred",858984739),Gm=new x(null,"hr","hr",1377740067),Hm=new x(null,"button.nav-btn.hidden","button.nav-btn.hidden",1883500835),Im=new x(null,"last-fragment","last-fragment",-919830173),Jm=new x(null,"cljsLegacyRender","cljsLegacyRender",-1527295613),Km=new x(null,"srcdark","srcdark",-1640621501),Lm=new x(null,"div.post-links","div.post-links",1079291587),Mm=new x(null,"original-event","original-event",2121330403),Lk=new x(null, -"ready","ready",1086465795),Nm=new $d("time","day-of-week","time/day-of-week",-1018890461,null),Om=new x(null,"idle","idle",-2007156861),Pm=new x(null,"footnotes","footnotes",-1842778205),Qm=new x(null,"temp","temp",1791541284),Rm=new x(null,"shrunk","shrunk",-2041664412),Sm=new x(null,"fn","fn",-1175266204),Tm=new $d("cljs.spec.alpha","alt","cljs.spec.alpha/alt",-2130750332,null),Um=new x(null,"div.vignette","div.vignette",-2024675196),Vm=new $d(null,"*runtime-asserts*","*runtime-asserts*",1632801956, -null),Wm=new x(null,"lists","lists",-884730684),Xm=new x(null,"buf","buf",-213913340),Ym=new $d("cljs.core","vector?","cljs.core/vector?",-1550392028,null),Zm=new x(null,"blockquote-end","blockquote-end",1122544964),$m=new x(null,"namespaced-map","namespaced-map",1235665380),an=new x(null,"rep+","rep+",-281382396),bn=new $d("clojure.test.check.generators","fmap","clojure.test.check.generators/fmap",1957997092,null),cn=new x(null,"group","group",582596132),Tb=new x(null,"meta","meta",1499536964),dn= -new x("fx.app","set-theme-local-store","fx.app/set-theme-local-store",-1685133628),en=new x(null,"interceptor","interceptor",1127739076),fn=new x(null,"unprocessed","unprocessed",766771972),gn=new x(null,"table","table",-564943036),hn=new x("reitit.impl","accumulator","reitit.impl/accumulator",1345422212),jn=new x(null,"opt-keys","opt-keys",1262688261),kn=new x(null,"ul","ul",-1349521403),ln=new x(null,"event-handler","event-handler",-487718843),mn=new x("sg.flybot.pullable.core","context","sg.flybot.pullable.core/context", -994024549),nn=new x(null,"validate","validate",-201300827),on=new 
$d("clojure.test.check","quick-check","clojure.test.check/quick-check",-810344251,null),Ub=new x(null,"dup","dup",556298533),pn=new x(null,"div.menu-right","div.menu-right",-1140629147),qn=new x(null,"vec","vec",-657847931),rn=new x("cljs.spec.alpha","rep","cljs.spec.alpha/rep",1483217317),sn=new x(null,"pred","pred",1927423397),tn=new x(null,"clojurescript","clojurescript",-299769403),un=new x("reitit.trie","multiple-terminators", -"reitit.trie/multiple-terminators",-116295163),vn=new x(null,"key","key",-1516042587),wn=new x(null,"in-table-body?","in-table-body?",-136773915),xn=new x(null,"fsm-state","fsm-state",1656310533),yn=new x(null,"top-displace","top-displace",-2094589019),zn=new x(null,"splice","splice",449588165),An=new x("evt.nav","toggle","evt.nav/toggle",2052424806),Bn=new x("cljs.spec.alpha","accept","cljs.spec.alpha/accept",370988198),Cn=new x("cofx.app","local-store-theme","cofx.app/local-store-theme",-1877277338), -Dn=new x(null,"reader-exception","reader-exception",-1938323098),En=new x(null,"private","private",-558947994),Fn=new x(null,"inline-heading","inline-heading",-158773818),Gn=new x(null,"router","router",1091916230),Hn=new x(null,"when","when",-576417306),In=new x(null,"lookup","lookup",1225356838),Jn=new x(null,"gen","gen",142575302),Kn=new x(null,"nav.mobile","nav.mobile",1429685958),Ln=new x(null,"decode-type","decode-type",-1943601402),Mn=new x(null,"\x3c-","\x3c-",760412998),Nn=new x(null,"div.tags", -"div.tags",-1073137818),On=new x(null,"replace","replace",-786587770),Pn=new x(null,"ks","ks",1900203942),Qn=new x(null,"alt","alt",-3214426),Rn=new x(null,"div.post-body","div.post-body",210532294),Sn=new x(null,"meta-merge","meta-merge",638856199),Tn=new $d("time","month","time/month",-324062169,null),Un=new x(null,"script","script",-1304443801),Vn=new $d(null,"p1__26347#","p1__26347#",1868569799,null),Wn=new x("post","md-content","post/md-content",-1777572601),Xn=new x(null,"childContextTypes", -"childContextTypes",578717991),Yn=new x(null,"last-line-empty?","last-line-empty?",1279111527),Zn=new x(null,"db","db",993250759),$n=new x(null,"fx-handler","fx-handler",-549783097),ao=new x(null,"sub","sub",-2093760025),bo=new $d(null,"?theme","?theme",2088351303,null),co=new $d("cljs.spec.alpha","cat","cljs.spec.alpha/cat",-1471398329,null),eo=new x("re-frame.std-interceptors","not-found","re-frame.std-interceptors/not-found",-1614827865),fo=new $d("time","year","time/year",1979222727,null),go= -new x(null,"queue","queue",1455835879),ho=new x(null,"displayName","displayName",-809144601),io=new x(null,"_","_",1453416199),jo=new x(null,"validator","validator",-1966190681),ko=new x(null,"div.links","div.links",440246312),lo=new x(null,"fragment","fragment",826775688),mo=new x(null,"maybe","maybe",-314397560),no=new x("fx.app","highlight-code","fx.app/highlight-code",1994810536),fl=new x(null,"default","default",-1987822328),oo=new x("reitit.trie","unclosed-brackets","reitit.trie/unclosed-brackets", -1599327560),po=new x(null,"via","via",-1904457336),qo=new x(null,"sequential","sequential",-1082983960),ro=new $d(null,"?fragment","?fragment",1211696808,null),so=new x(null,"conflicts","conflicts",-1219561816),to=new x(null,"post-id","post-id",1618659080),uo=new $d("clojure.test.check.generators","choose","clojure.test.check.generators/choose",909997832,null),vo=new x(null,"ns","ns",441598760),wo=new x(null,"frozen-strings","frozen-strings",-1410661560),xo=new x(null,"found-token","found-token", -113525576),yo=new x(null,"symbol","symbol",-1038572696),zo=new 
x(null,"warn","warn",-436710552),Ao=new x(null,"prepend","prepend",342616040),Bo=new x(null,"name","name",1843675177),Co=new x(null,"div.post.error","div.post.error",-1659704279),Do=new $d(null,"NaN","NaN",666918153,null),Eo=new $d("clojure.test.check.generators","generate","clojure.test.check.generators/generate",-690390711,null),Mk=new x(null,"pending","pending",-220036727),Fo=new $d("cljs.core","string?","cljs.core/string?",-2072921719, -null),Go=new x(null,"update-paths","update-paths",-813404599),Ho=new x(null,"deco","deco",769202793),Io=new x(null,"fill","fill",883462889),Jo=new $d("clojure.test.check.generators","set","clojure.test.check.generators/set",-1027639543,null),Ko=new $d("time","month-day","time/month-day",61138729,null),Lo=new x(null,"req-specs","req-specs",553962313),Mo=new $d("clojure.test.check.generators","one-of","clojure.test.check.generators/one-of",-183339191,null),No=new x(null,"value","value",305978217),Oo= -new x("cljs.spec.alpha","gfn","cljs.spec.alpha/gfn",-593120375),Po=new x(null,"post-route","post-route",323113865),Qo=new x(null,"div.menu-left","div.menu-left",-1967651927),Ro=new x("post","articles","post/articles",-458677271),So=new x(null,"parse-tag","parse-tag",1427313738),To=new x(null,"contextTypes","contextTypes",-2023853910),Uo=new x(null,"alignment","alignment",1040093386),Vo=new x(null,"file","file",-1269645878),Wo=new $d(null,"v","v",1661996586,null),Xo=new x("image","src-dark","image/src-dark", -1452555818),Yo=new x("cljs.spec.alpha","spec","cljs.spec.alpha/spec",1947137578),Zo=new x(null,"h5.info","h5.info",932286058),$o=new x("image","src","image/src",-1820578166),ap=new x(null,"div.menu","div.menu",-175336694),bp=new $d(null,"js","js",-886355190,null),cp=new x(null,"readers","readers",-2118263030),dp=new x(null,"do-fx","do-fx",1194163050),ep=new x(null,"footer#footer-contact.container","footer#footer-contact.container",-1355543670),fp=new x(null,"end-column","end-column",1425389514),gp= -new x(null,"query-id","query-id",1474128842),hp=new x(null,"db-page-name","db-page-name",-1373879285),ip=new x(null,"div.link","div.link",-340346549),jp=new x(null,"footnotes?","footnotes?",-1590157845),kp=new x(null,"params","params",710516235),lp=new $d("cljs.spec.alpha","fspec","cljs.spec.alpha/fspec",-1289128341,null),mp=new $d(null,"fn","fn",465265323,null),np=new x(null,"amp","amp",271690571),op=new x(null,"section.container","section.container",-1656920213),pp=new x(null,"component-did-update", -"component-did-update",-1468549173),qp=new x(null,"div.title","div.title",-1929547732),Gk=new x(null,"val","val",128701612),rp=new x("cljs.spec.alpha","op","cljs.spec.alpha/op",-1269055252),sp=new x(null,"dispatch-n","dispatch-n",-504469236),tp=new $d(null,"inst","inst",-2008473268,null),up=new x(null,"type","type",1174270348),vp=new x("cljs.spec.alpha","v","cljs.spec.alpha/v",552625740),wp=new x(null,"div.image","div.image",923573900),xp=new $d("clojure.test.check.generators","map","clojure.test.check.generators/map", -45738796,null),yp=new x(null,"template","template",-702405684),zp=new x(null,"debug","debug",-1608172596),Ap=new x(null,"src","src",-1651076051),Bp=new x("markdown-to-hiccup.decode","pass","markdown-to-hiccup.decode/pass",1096198285),Cp=new x("post","repos","post/repos",645359853),Dp=new $d(null,"p1__26348#","p1__26348#",1825593645,null),Ep=new x("reitit.core","path-conflicting","reitit.core/path-conflicting",617644429),Fp=new x(null,"getDerivedStateFromProps","getDerivedStateFromProps",-991834739), -Gp=new 
x(null,"getDerivedStateFromError","getDerivedStateFromError",166658477),wk=new x(null,"fallback-impl","fallback-impl",-1501286995),Hp=new x(null,"encode?","encode?",-640109139),Ip=new $d("clojure.test.check.properties","for-all*","clojure.test.check.properties/for-all*",67088845,null),Jp=new x(null,"references","references",882562509),Rk=new x(null,"keyword-fn","keyword-fn",-64566675),Kp=new $d(null,"Inf","Inf",647172781,null),Lp=new $d("cljs.core","map?","cljs.core/map?",-1390345523,null), -Mp=new x("post","md-content-short","post/md-content-short",-861891763),Rb=new x(null,"flush-on-newline","flush-on-newline",-151457939),Np=new x("post","page","post/page",848001005),Op=new x(null,"componentWillUnmount","componentWillUnmount",1573788814),Pp=new x(null,"path-parts","path-parts",945822894),Qp=new x(null,"displace","displace",-1153355602),Rp=new x(null,"string","string",-1989541586),Sp=new $d(null,"queue","queue",-1198599890,null),Tp=new x(null,"p1","p1",-936759954),Up=new $d("clojure.test.check.generators", -"bind","clojure.test.check.generators/bind",-361313906,null),Vp=new $d("clojure.test.check.generators","symbol-ns","clojure.test.check.generators/symbol-ns",-862629490,null),Wp=new x(null,"vector","vector",1902966158),Xp=new x(null,"body-params","body-params",-369749490),Yp=new $d("cljs.core","zipmap","cljs.core/zipmap",-1902130674,null),Zp=new x(null,"illegal-argument","illegal-argument",-1845493170),$p=new x(null,"code-style","code-style",-2144009586),aq=new x(null,"deregister-event-handler","deregister-event-handler", --1096518994),bq=new x(null,"header","header",119441134),cq=new x("cljs.spec.alpha","problems","cljs.spec.alpha/problems",447400814),dq=new x(null,"div.resources","div.resources",-1425738834),eq=new $d(null,"%","%",-950237169,null),fq=new x(null,"alignment-seq","alignment-seq",1587946543),gq=new x(null,"path-conflicts","path-conflicts",-1238675313),hq=new $d("cljs.core","map","cljs.core/map",-338988913,null),iq=new x(null,"on-click","on-click",1632826543),jq=new x(null,"strable","strable",1877668047), -kq=new x(null,"className","className",-1983287057),Wk=new x(null,"descendants","descendants",1824886031),lq=new x("cljs.spec.alpha","kvs-\x3emap","cljs.spec.alpha/kvs-\x3emap",579713455),mq=new x(null,"title","title",636505583),nq=new x(null,"running","running",1554969103),oq=new x(null,"md-str","md-str",-1726634417),pq=new x("post","title","post/title",628880975),qq=new x("markdown-to-hiccup.core","hiccup","markdown-to-hiccup.core/hiccup",1147350639),rq=new x(null,"column","column",2078222095),sq= -new x(null,"dark","dark",1818973999),tq=new x(null,"headers","headers",-835030129),uq=new x(null,"colon","colon",-965200945),vq=new x("reitit.impl","merge-data","reitit.impl/merge-data",-588218417),wq=new x(null,"center","center",-748944368),xq=new x(null,"batch","batch",-662921200),yq=new x(null,"shouldComponentUpdate","shouldComponentUpdate",1795750960),zq=new x(null,"global-interceptors","global-interceptors",-1995759472),Xk=new x(null,"ancestors","ancestors",-776045424),Aq=new x(null,"div.contact-icons", -"div.contact-icons",-1044574E3),Bq=new x(null,"flush-dom","flush-dom",-933676816),Cq=new x(null,"style","style",-496642736),Dq=new x(null,"theme","theme",-1247880880),Eq=new $d(null,"or","or",1876275696,null),Fq=new x("muuntaja","request","muuntaja/request",-1616403792),Gq=new x(null,"div","div",1057191632),Hq=new $d("cljs.spec.alpha","keys","cljs.spec.alpha/keys",1109346032,null),Iq=new x(null,"trim-v","trim-v",-1274938640),Sb=new 
x(null,"readably","readably",1129599760),Jq=new x(null,"codeblock-end", -"codeblock-end",1507794736),Kq=new x(null,"mobile?","mobile?",1358664528),mk=new x(null,"more-marker","more-marker",-14717935),Lq=new x(null,"dispatch","dispatch",1319337009),Mq=new x(null,"div.menu-center","div.menu-center",-1277310703),Nq=new x("nav.main","open?","nav.main/open?",-1008469743),Oq=new x(null,"g","g",1738089905),Pq=new x(null,"reagentRender","reagentRender",-358306383),Qq=new $d("time","year-month","time/year-month",-1840595535,null),Rq=new x(null,"reason","reason",-2070751759),Sq= -new x("cljs.spec.alpha","invalid","cljs.spec.alpha/invalid",-1220295119),Tq=new x(null,"req","req",-326448303),Uq=new x(null,"svg.diamond","svg.diamond",-1103138895),Vq=new x("markdown-to-hiccup.decode","map","markdown-to-hiccup.decode/map",1547761649),Wq=new x(null,"\x3c\x3e","\x3c\x3e",1280186386),Xq=new x("proc","val","proc/val",123303954),Yq=new $d("clojure.test.check.generators","double","clojure.test.check.generators/double",668331090,null),Zq=new x("markdown-to-hiccup.core","encode?","markdown-to-hiccup.core/encode?", --1516433262),$q=new x(null,"no-cache","no-cache",1588056370),ar=new x(null,"render","render",-1408033454),br=new x(null,"db-handler","db-handler",579530098),cr=new x(null,"filter","filter",-948537934),dr=new x("evt.app","toggle-theme","evt.app/toggle-theme",1265100274),er=new x(null,"event","event",301435442),fr=new x(null,"after","after",594996914),gr=new x(null,"div.post","div.post",-381003886),hr=new $d("clojure.test.check.generators","list","clojure.test.check.generators/list",506971058,null), -ir=new x("proc","type","proc/type",1170508786),jr=new $d("clojure.test.check.generators","large-integer*","clojure.test.check.generators/large-integer*",-437830670,null),kr=new x(null,"ol","ol",932524051),lr=new x(null,"reagent-render","reagent-render",-985383853),mr=new x(null,"function-components","function-components",1492814963),nr=new x(null,"form-params","form-params",1884296467),or=new x(null,"dynamic","dynamic",704819571),pr=new x(null,"line","line",212345235),qr=new x(null,"assertion-failed", -"assertion-failed",-970534477),rr=new x(null,"list","list",765357683),sr=new $d(null,"fn*","fn*",-752876845,null),tr=new x(null,"svg.nav-arrow","svg.nav-arrow",447254227),ur=new x(null,"on-write","on-write",31519475),vr=new x(null,"keyword","keyword",811389747),Kk=new x(null,"status","status",-1997798413),wr=new x(null,"result","result",1415092211),xr=new x(null,"div.vignette-container","div.vignette-container",-943167501),yr=new x(null,"not-found","not-found",-629079980),zr=new x(null,"with","with", --1536296876),Ar=new x(null,"next-fn-id","next-fn-id",738579636),Vb=new x(null,"print-length","print-length",1931866356),Br=new x(null,"max","max",61366548),Cr=new x(null,"syntax","syntax",-1637761676),Dr=new x("post","id","post/id",-1375514188),Er=new x("cljs.spec.alpha","amp","cljs.spec.alpha/amp",831147508),Fr=new x(null,"col","col",-1959363084),Gr=new x(null,"with-reitit?","with-reitit?",2145638964),Hr=new x(null,"id","id",-1388402092),Ir=new x(null,"name-conflicts","name-conflicts",-2016386444), -Jr=new x(null,"class","class",-2030961996),Kr=new x(null,"effects","effects",-282369292),Lr=new x("subs","pattern","subs/pattern",244885332),Mr=new $d("clojure.test.check.generators","such-that","clojure.test.check.generators/such-that",-1754178732,null),Nr=new $d("time","date-time","time/date-time",1814680468,null),Or=new x(null,"getInitialState","getInitialState",1541760916),Pr=new 
x(null,"div.txt","div.txt",-2029500395),Qr=new x(null,"coercion","coercion",904067157),Rr=new x(null,"open?","open?", -1238443125),Sr=new x(null,"nil","nil",99600501),Tr=new $d("time","period","time/period",1291634901,null),Ur=new x(null,"smallest","smallest",-152623883),Vr=new x(null,"constructor","constructor",-1953928811),Wr=new x(null,"auto-run","auto-run",1958400437),Xr=new x(null,"bracket","bracket",-600276523),Yr=new x(null,"comment","comment",532206069),Zr=new x(null,"event-v","event-v",1378681365),Vk=new x(null,"parents","parents",-2027538891),$r=new x("blog","post","blog/post",264335925),as=new x(null,"div.contribute", -"div.contribute",1287452309),bs=new $d(null,"/","/",-1371932971,null),cs=new $d("cljs.core","nil?","cljs.core/nil?",945071861,null),ds=new x(null,"local-store-theme","local-store-theme",512970517),es=new x(null,"compiler","compiler",-267926731),fs=new x(null,"run-queue","run-queue",-1701798027),gs=new x(null,"req-keys","req-keys",514319221),hs=new x(null,"watch","watch",380988277),is=new $d("clojure.test.check.generators","-\x3eGenerator","clojure.test.check.generators/-\x3eGenerator",-1179475051, -null),js=new x(null,"skip-next-line?","skip-next-line?",1683617749),ks=new $d(null,"k","k",-505765866,null),ls=new x(null,"component-will-unmount","component-will-unmount",-2058314698),ms=new x("cljs.spec.alpha","k","cljs.spec.alpha/k",-1602615178),ns=new x(null,"div.text","div.text",645060726),os=new x(null,"div.menu-mid","div.menu-mid",51272950),ps=new $d(null,"?","?",-62633706,null),qs=new $d("cljs.core","fn","cljs.core/fn",-1065745098,null),rs=new $d("markdown-to-hiccup.core","md-\x3ehiccup", -"markdown-to-hiccup.core/md-\x3ehiccup",1623858518,null),ss=new x(null,"code","code",1586293142),ts=new $d(null,"?x","?x",-555096650,null),us=new x(null,"stack","stack",-793405930),vs=new x(null,"listen-key","listen-key",51973686),ws=new x(null,"portfolio","portfolio",957568598),xs=new x(null,"query-params","query-params",900640534),ys=new x(null,"opt-specs","opt-specs",-384905450),zs=new $d("clojure.test.check.generators","return","clojure.test.check.generators/return",1744522038,null),As=new $d("clojure.test.check.generators", -"simple-type-printable","clojure.test.check.generators/simple-type-printable",-58489962,null),Bs=new x(null,"wilds","wilds",132271223),Cs=new x(null,"end-line","end-line",1837326455),Ds=new x(null,"ignore-anchor-click?","ignore-anchor-click?",-186007337),Es=new x("re-frame.interceptor","original-exception?","re-frame.interceptor/original-exception?",-527923945),Fs=new x(null,"inject-global-interceptors","inject-global-interceptors",-2144129737),Gs=new x(null,"blockquote-start","blockquote-start", -276074935),Hs=new x(null,"nav.browser","nav.browser",-894435913),Is=new x(null,"display-name","display-name",694513143),Js=new x(null,"right","right",-452581833),Ks=new x(null,"scheduled","scheduled",553898551),Ls=new $d("clojure.test.check.generators","symbol","clojure.test.check.generators/symbol",-1305461065,null),Ms=new $d(null,"-Inf","-Inf",-2123243689,null),Ns=new x("evt.app","highlight-code","evt.app/highlight-code",1954082775),Os=new x(null,"coerce","coerce",1917884504),Ps=new x("reitit.coercion", -"serialize-failed-result","reitit.coercion/serialize-failed-result",786287704),Qs=new $d(null,"ifn?","ifn?",-2106461064,null),Rs=new $d("clojure.test.check.generators","uuid","clojure.test.check.generators/uuid",1589373144,null),Ss=new x(null,"pred-forms","pred-forms",172611832),Ts=new x(null,"on-dispose","on-dispose",2105306360),Us=new 
x("reitit.trie","following-parameters","reitit.trie/following-parameters",-1072685800),Vs=new x(null,"d","d",1972142424),Ws=new x(null,"f","f",-1597136552),Zs=new x("post", -"image","post/image",-61565512),$s=new x(null,"pause","pause",-2095325672),at=new x(null,"error","error",-978969032),bt=new x(null,"h2","h2",-372662728),ct=new x(null,"processed","processed",800622264),dt=new x(null,"popstate-listener","popstate-listener",806944472),et=new x(null,"componentFunction","componentFunction",825866104),ft=new x("fx.app","update-html-class","fx.app/update-html-class",-1360578664),gt=new x(null,"exception","exception",-335277064),ht=new x(null,"coeffects","coeffects",497912985), -it=new x(null,"named","named",-422393479),jt=new $d("clojure.test.check.generators","any-printable","clojure.test.check.generators/any-printable",-1570493991,null),kt=new x("fx.app","scroll-to","fx.app/scroll-to",-1993679399),lt=new x(null,"form","form",-1624062471),mt=new x(null,"missing","missing",362507769),nt=new x(null,"tag","tag",-1290361223),ot=new x("reitit.coercion","extract-request-format","reitit.coercion/extract-request-format",-1687953607),pt=new x(null,"seq","seq",-1817803783),qt=new x(null, -"target","target",253001721),rt=new x(null,"heading-anchors","heading-anchors",1713527866),st=new $d("time","date","time/date",179823674,null),tt=new x("post","css-class","post/css-class",-915881798),ut=new x("sg.flybot.pullable.core","invalid","sg.flybot.pullable.core/invalid",-1512548070),vt=new x(null,"parse-meta?","parse-meta?",-1938948742),wt=new x(null,"set","set",304602554),xt=new x(null,"unwrap","unwrap",-1399175462),yt=new x(null,"data-reitit-handle-click","data-reitit-handle-click",2084811610), -zt=new x(null,"h1","h1",-1896887462),At=new x(null,"arglists","arglists",1661989754),Bt=new x(null,"loaded?","loaded?",-1108015206),Ct=new x(null,"catch-all","catch-all",1184112570),Dt=new x(null,"query","query",-1288509510),Et=new x(null,"groupEnd","groupEnd",-337721382),Ft=new x(null,"ctx-handler","ctx-handler",-1777672230),Gt=new x(null,"atom","atom",-397043653),Ht=new $d("time","zoned-date-time","time/zoned-date-time",-2125640645,null),It=new x(null,"is-prev-header?","is-prev-header?",-1637281701), -Jt=new x(null,"header.container","header.container",591808603),Kt=new x("app","posts","app/posts",760396923),Lt=new x(null,"trigger","trigger",103466139),Mt=new x(null,"viewBox","viewBox",-469489477),Nt=new x(null,"eof","eof",-489063237),Ot=new $d("clojure.test.check.generators","boolean","clojure.test.check.generators/boolean",1586992347,null),Pt=new x(null,"light","light",1918998747),Qt=new x(null,"hierarchy","hierarchy",-1053470341),Rt=new x("reitit.trie","trie-compiler","reitit.trie/trie-compiler", -2125029755),St=new $d("clojure.test.check.generators","string-alphanumeric","clojure.test.check.generators/string-alphanumeric",836374939,null),Tt=new x(null,"click-listen-key","click-listen-key",642240955),Ut=new x(null,"cofx","cofx",2013202907),Vt=new x(null,"path-params","path-params",-48130597),Wt=new x(null,"h3","h3",2067611163),Xt=new x(null,"body","body",-2049205669),Yt=new x(null,"reference-links?","reference-links?",-2003778981),Zt=new $d("clojure.test.check.generators","tuple","clojure.test.check.generators/tuple", --143711557,null),$t=new x("evt.app","initialize","evt.app/initialize",-141172005),vk=new x(null,"alt-impl","alt-impl",670969595),au=new x(null,"resume","resume",-118572261),bu=new x(null,"div.top","div.top",154731355),cu=new x(null,"ms","ms",-1152709733),du=new 
x(null,"doc","doc",1913296891),eu=new $d(null,"cljs.spec.alpha","cljs.spec.alpha",505122844,null),fu=new x(null,"fx","fx",-1237829572),gu=new x(null,"div.name","div.name",1027675228),hu=new x("reitit.coercion","parameter-coercion","reitit.coercion/parameter-coercion", --1825124100),iu=new x(null,"before","before",-1633692388),ju=new x(null,"blog","blog",-302707364),ku=new $d("time","duration","time/duration",-1273941668,null),lu=new x(null,"on-navigate","on-navigate",-297227908),mu=new x("markdown-to-hiccup.core","options","markdown-to-hiccup.core/options",439665020),nu=new x(null,"callback","callback",-705136228),ou=new x(null,"parameters","parameters",-1229919748),pu=new $d(null,"apply","apply",-1334050276,null),qu=new x(null,"handler","handler",-195596612),ru= -new x(null,"contact","contact",609093372),su=new x(null,"log","log",-1595516004),tu=new x(null,"expand","expand",595248157),uu=new x(null,"compile","compile",608186429),vu=new x(null,"p","p",151049309),wu=new x(null,"div.menu-bottom","div.menu-bottom",-42043299),xu=new x("reitit.trie","parameters","reitit.trie/parameters",-1304786787),yu=new $d("clojure.test.check.generators","vector","clojure.test.check.generators/vector",1081775325,null),zu=new x(null,"character","character",380652989),Au=new x(null, -"metadata","metadata",1799301597),Bu=new $d("clojure.test.check.generators","char","clojure.test.check.generators/char",-1426343459,null),Cu=new x(null,"map","map",1371690461),Du=new x("subs.post","posts","subs.post/posts",1100023325),Eu=new $d("time","zone","time/zone",-580695523,null),Fu=new x(null,"finish-run","finish-run",753148477),Gu=new x("cljs.spec.alpha","alt","cljs.spec.alpha/alt",523685437),Hu=new $d("cljs.spec.alpha","*runtime-asserts*","cljs.spec.alpha/*runtime-asserts*",-1060443587, -null),Iu=new x(null,"contextType","contextType",1033066077),Ju=new x(null,"paragraph","paragraph",296707709),Ku=new $d("cljs.core","or","cljs.core/or",1201033885,null),Lu=new x(null,"div.img","div.img",2113685181),Mu=new x(null,"test","test",577538877),Nu=new x(null,"on-coercion-error","on-coercion-error",-970787),Ou=new x(null,"direction","direction",-633359395),Pu=new $d("time","time","time/time",-1246984162,null),Qu=new $d(null,"?name","?name",2050703390,null),Ru=new $d("time","offset-date-time", -"time/offset-date-time",-1254592482,null),Su=new x(null,"href","href",-793805698),Tu=new x(null,"blockquote","blockquote",372264190),Uu=new x(null,"required","required",1807647006),Vu=new x(null,"img","img",1442687358),Wu=new x(null,"forms","forms",2045992350),Xu=new x("evt.page","set-current-view","evt.page/set-current-view",2103831998),Yu=new $d("clojure.test.check.generators","elements","clojure.test.check.generators/elements",438991326,null),Zu=new x(null,"use-fragment","use-fragment",-1617737154), -$u=new $d(null,"and","and",668631710,null),av=new x("reitit.coercion","skip","reitit.coercion/skip",-2123160930),bv=new x(null,"custom-transformers","custom-transformers",1440601790),cv=new x("cljs.spec.alpha","nil","cljs.spec.alpha/nil",1733813950),dv=new $d("time","offset-time","time/offset-time",-1026521346,null),ev=new x(null,"inhibit-separator","inhibit-separator",1268116222),fv=new x(null,"dispatch-later","dispatch-later",291951390),gv=new x(null,"a","a",-2123407586),hv=new x("reitit.exception", -"cause","reitit.exception/cause",2130128734),iv=new x(null,"ps","ps",292358046),jv=new x(null,"join","join",-758861890),kv=new x(null,"view","view",1247994814),lv=new 
$d("clojure.test.check.generators","large-integer","clojure.test.check.generators/large-integer",-865967138,null),mv=new $d("clojure.test.check.generators","keyword-ns","clojure.test.check.generators/keyword-ns",-1492628482,null),nv=new x(null,"about","about",1423892543),ov=new x(null,"div.menu-top","div.menu-top",-1071763361),pv=new $d("clojure.test.check.generators", -"shuffle","clojure.test.check.generators/shuffle",1032401055,null),qv=new x(null,"heading","heading",-1312171873),rv=new x("app","current-view","app/current-view",-1037443937),sv=new x(null,"hashchange-listener","hashchange-listener",-1234988801),tv=new $d("cljs.spec.alpha","conformer","cljs.spec.alpha/conformer",2140085535,null),uv=new x(null,"in","in",-1531184865),vv=new x(null,"replacement-transformers","replacement-transformers",-2028552897),wv=new x(null,"left","left",-399115937),xv=new x(null, -"pattern","pattern",242135423),yv=new x(null,"html","html",-998796897),Ij=new x("cljs.core","not-found","cljs.core/not-found",-1572889185),zv=new x(null,"accept","accept",1874130431),Av=new $d("cljs.spec.alpha","\x26","cljs.spec.alpha/\x26",1635809823,null),Bv=new x(null,"opt","opt",-794706369),Cv=new x(null,"text","text",-1790561697),Dv=new x("cljs.spec.alpha","pcat","cljs.spec.alpha/pcat",26406623),Ev=new x(null,"data","data",-232669377),Fv=new x("portfolio","post","portfolio/post",-781301953), -Gv=new $d("cljs.spec.alpha","?","cljs.spec.alpha/?",1605136319,null);var Hv=new S(null,25,5,T,[ij([xm,ym,Wn,Ro,Cp,Mp,Np,pq,Dr,Zs,tt],["My journey so far",new S(null,1,5,T,["2024-01-06"],null),"\n## Work Experiences\n\n### 2024-now: Staff Software Engineer | [Flybot Pte Ltd](https://www.flybot.sg/), Singapore\n\n- Lead the Flybot's engineering team into meeting our client's expectations.\n- Report to the CEO directly to gather client's needs and plan accordingly\n- Design software architecture and delegate project responsibilities to the team's engineers. \n\n### 2023: Senior Software Engineer | [Flybot Pte Ltd](https://www.flybot.sg/), Singapore\n- Designed a challenge recommender that suggests personal challenges to Golden Island's players. The recommender is a Clojure application deployed in a POD in AWS EKS that consume events from kafka topics and produces personalized challenges to a dedicated kafka topic. It uses Datomic as storage solution within the EKS cluster | *Clojure, AWS EKS, k8s, datomic, kafka*\n- Developed the company blog mobile app with React Native framework. The mobile frontend and web frontend share most of the re-frame (state management) logic | *ClojureScript, ReactNative* (open-source)\n- Conducted technical interviews for junior developers and onboarding of new employees\n\n### 2020-2023: Software Engineer | [Flybot Pte Ltd](https://www.flybot.sg/), Singapore\n\n- Developed the company full-stack web app. The website has a blog. Oauth2 is used for authentication. The website is deployed in a docker container on AWS. 
It showcases some of Flybot's open-source libs for dependency injections and data pulling | *Clojure, ClojureScript, React* (open-source)\n- Developed a basic Monte Carlo Tree Search bot for our card games | *Clojure*\n- Ported our Clojure backend libraries to Unity so the Unity frontend developers can use the Clojure logic in Unity | *Clojure, C#*\n- Improved the Nostrand project management to ease the compilation with the Magic compiler (compile Clojure file to .NET assemblies) | *Clojure, C#* (open-source)\n- Developed a library called `MetaGame` to compose card games (play multiple rounds, make it a tournament). An entire tournament can be sent up using pure Clojure data | *Clojure*\n- Developed online Chinese card games (Pǎo Dé Kuài (跑得快) and Big two (锄大地) ) backend | *Clojure*\n\n### 2019: End of study project | [Bosch SEA Pte Ltd](https://www.bosch.com.sg/our-company/bosch-in-singapore/), Singapore\n- Modeled and provisioned infrastructure using AWS CloudFormation for a project that consists in facilitating the diagnosis of damaged automobile pieces via trend detection\n- Deployed and maintained AWS resources with Jenkins\n- Cohered Agile Software Development using Jira Kanban and Scrum as frameworks, Git for version-control system and Atlassian software *| Bitbucket, Jira and SourceTree*\n\n### 2017-2018: One-year internship | [Electriduct Inc](https://www.electriduct.com/), Fort Lauderdale, Florida, USA\n- Improved Web Design and responsivity | *HTML, CSS, JS, 3dcart templates*\n- Optimized online ad campaigns | *Google AdWords/Shopping/Analytics*\n- Developed an inventory management program using UPC barcode reading | *PHP, SQL, HTML, CSS, JS*\n- Developed a customized barcode generator for either sheet printers or thermal printer | *C#, SQL*\n\n## Education\n\n### 2015-2019: Master’s Degree | [CPE](https://www.cpe.fr/en/) Lyon, France\n\n- **Specialization**: Software Engineering\n- **Major Project**: Full-stack JS web app and Mobile App development allowing users to find new friends to go to common interest nearby events together *| Node.js, ReactJS, React Native*\n- **Secondary Projects**: Android Chat App *(Java)*, Big Data hackathon *(Hadoop, Tableau)*, Chess Game *(Java)*, Siam Game *(C)*, UX design *(Balsamiq)*\n\n### 2014-2015: Undergraduate in Engineering Sciences | [CPE](https://www.cpe.fr/en/) Lyon, France\n- **Major**: mathematics and physics\n- **Minor**: computer sciences and automatism\n\n## Skills\n\n### ICTS \n- **Back-End**: Clojure, Python, Java, PHP, Node.js, C, C++, C#\n- **HTTP**: Clojure Ring, Clojure Aleph\n- **Front-End**: ClojureScript, HTML, CSS, JS, C#, Re-frame/Reagent (React), figwheel-main\n- **Database**: MySQL, PostgreSQL, Datomic, Datalevin, Cassandra\n- **Mobile**: React Native, figwheel-main\n- **Cloud**: AWS, Vercel, Netlify\n- **Containers**: Docker, k8s, EKS\n- **Event Streaming**: Kafka\n- **Proj Management**: GitHub, Gitlab, Bitbucket, Trello, Jira, Slack, Jenkins\n\n### Certifications\n- **AWS**: Solutions Architect - Associate\n", -new S(null,1,5,T,[new S(null,2,5,T,["My Tech Blog","../blog"],null)],null),new S(null,1,5,T,[new S(null,2,5,T,["My GitHub","https://github.com/skydread1"],null)],null),"\n",nv,"About Me","about-me",new n(null,3,[$o,"/assets/loic-logo.png",Xo,"/assets/loic-logo.png",Tl,"Loic Logo"],null),"about-me"]),ij([Sl,ym,Wn,Cp,Mp,Np,pq,Dr,Zs,tt],[new S(null,4,5,T,["Clojure","System","Component","Deps Injection"],null),new S(null,1,5,T,["2023-05-08"],null),'\n## Prerequisites\n\nIf you are not familiar with 
[fun-map](https://github.com/robertluo/fun-map), please refer to the doc [Fun-Map: a solution to deps injection in Clojure](https://www.loicblanchard.me/blog/fun-map).\n\n## Goal\n\nIn this document, I will show you how we leverage `fun-map` to create different systems in the website [flybot.sg](https://www.flybot.sg/): `prod-system`, `dev-system`, `test-system` and `figwheel-system`.\n\n## Prod System\n\nIn our backend, we use `life-cycle-map` to manage the life cycle of all our stateful components.\n\n### Describe the system\n\nHere is the system we currently have for production:\n\n```clojure\n(defn system\n [{:keys [http-port db-uri google-creds oauth2-callback client-root-path]\n :or {client-root-path "/"}}]\n (life-cycle-map\n {:db-uri db-uri\n :db-conn (fnk [db-uri]\n (let [conn (d/get-conn db-uri db/initial-datalevin-schema)]\n (load-initial-data conn data/init-data)\n (closeable\n {:conn conn}\n #(d/close conn))))\n :oauth2-config (let [{:keys [client-id client-secret]} google-creds]\n (-\x3e config/oauth2-default-config\n (assoc-in [:google :client-id] client-id)\n (assoc-in [:google :client-secret] client-secret)\n (assoc-in [:google :redirect-uri] oauth2-callback)\n (assoc-in [:google :client-root-path] client-root-path)))\n :session-store (memory-store)\n :injectors (fnk [db-conn]\n [(fn [] {:db (d/db (:conn db-conn))})])\n :executors (fnk [db-conn]\n [(handler/mk-executors (:conn db-conn))])\n :saturn-handler handler/saturn-handler\n :ring-handler (fnk [injectors saturn-handler executors]\n (handler/mk-ring-handler injectors saturn-handler executors))\n :reitit-router (fnk [ring-handler oauth2-config session-store]\n (handler/app-routes ring-handler oauth2-config session-store))\n :http-server (fnk [http-port reitit-router]\n (let [svr (http/start-server\n reitit-router\n {:port http-port})]\n (closeable\n svr\n #(.close svr))))}))\n\n(def prod-system\n "The prod system starts a server on port 8123.\n It does not load any init-data on touch and it does not delete any data on halt!.\n You can use it in your local environment as well."\n (let [prod-cfg (config/system-config :prod)]\n (system prod-cfg)))\n```\n\nAt a glance, we can easily understand the dependency injections flow of the app.\n\nIf we were to represent these deps as a simple graph, we could have:\n\n```bash\nlife-cycle-map\n├── :db-conn (closeable)\n├── :oauth2-config\n├── :session-store\n├── :injectors\n│ └── :db-conn\n├── :executors\n│ └── :db-conn\n├── :saturn-handler\n├── :ring-handler\n│ ├── :injectors\n│ ├── :executors\n│ ├── :saturn-handler\n├── :reitit-router\n│ ├── :ring-handler\n│ ├── :oauth2-config\n│ └── :session-store\n└── :http-server (closeable)\n ├── :http-port\n ├── :reitit-router\n```\n\nThe function `prod-system` just fetches some env variables with the necessary configs to start the system.\n\n### Run the system\n\nWe can then easily start the system via the fun-map function `touch` :\n\n```clojure\nclj꞉clj.flybot.core꞉\x3e \n(touch prod-system)\n{:ring-handler #function[clj.flybot.handler/mk-ring-handler/fn--37646],\n :executors [#function[clj.flybot.handler/mk-executors/fn--37616]],\n :injectors [#function[clj.flybot.core/system/fn--38015/fn--38016]],\n :http-server\n #object[aleph.netty$start_server$reify__11448 0x389add75 "AlephServer[channel:[id: 0xd98ed2db, L:/0.0.0.0:8123], transport::nio]"],\n :reitit-router #function[clojure.lang.AFunction/1],\n :http-port 8123,\n :db-uri "datalevin/prod/flybotdb",\n :oauth2-config\n {:google\n {:scopes 
["https://www.googleapis.com/auth/userinfo.email" "https://www.googleapis.com/auth/userinfo.profile"],\n :redirect-uri "https://v2.fybot.sg/oauth/google/callback",\n :client-id "client-id",\n :access-token-uri "https://oauth2.googleapis.com/token",\n :authorize-uri "https://accounts.google.com/o/oauth2/auth",\n :launch-uri "/oauth/google/login",\n :client-secret "client-secret",\n :project-id "flybot-website",\n :landing-uri "/oauth/google/success"}},\n :session-store\n #object[ring.middleware.session.memory.MemoryStore 0x1afb7eac "ring.middleware.session.memory.MemoryStore@1afb7eac"],\n :saturn-handler #function[clj.flybot.handler/saturn-handler],\n :db-conn\n {:conn\n #\x3cAtom@1ada44a1: \n {:store #object[datalevin.storage.Store 0x4578bf30 "datalevin.storage.Store@4578bf30"],\n :eavt #{},\n :avet #{},\n :veat #{},\n :max-eid 73,\n :max-tx 5,\n :hash nil}\x3e}}\n```\n\n## Dev System\n\nThe `system` described above can easily be adapted to be used for development purposes.\n\nActually, the only differences between the prod and dev systems are the following:\n\n- The configs (db uri, oauth2 callback)\n- How to shutdown the db system (`dev` clears the db, `prod` retains db data)\n\nThus, we just have to assoc a new db component to the `system` and read some dev configs instead of getting prod env variables:\n\n```clojure\n(defn db-conn-system\n "On touch: empty the db and get conn.\n On halt!: close conn and empty the db."\n [init-data]\n (fnk [db-uri]\n (let [conn (d/get-conn db-uri)\n _ (d/clear conn)\n conn (d/get-conn db-uri db/initial-datalevin-schema)]\n (load-initial-data conn init-data)\n (closeable\n {:conn conn}\n #(d/clear conn)))))\n\n(def dev-system\n "The dev system starts a server on port 8123.\n It loads some real data sample. The data is deleted when the system halt!.\n It is convenient if you want to see your backend changes in action in the UI."\n (-\x3e (system (config/system-config :dev))\n (assoc :db-conn (db-conn-system data/init-data))))\n```\n\nThe important thing to remember is that all the modifications to the system must be done before starting the system (via `touch`). If some modifications need to be made to the running system:\n\n1. Shutdown the system (via `halt!`)\n2. Update the system logic\n3. Start the newly modified system (via `touch`)\n\n## Test system\n\nNaturally, the fun-map system also plays well with testing.\n\nSame process as for dev and prod, we just need to adapt the system a bit to run our tests.\n\nThe tests requirement are:\n\n- Dedicated db uri and specific data sample to work with\n- Ignore Oauth2.0.\n\nSo same as for dev, we just read dedicated test configs and assoc a test db system to the default system:\n\n```clojure\n(defn test-system\n []\n (-\x3e (config/system-config :test)\n sys/system\n (dissoc :oauth2-config)\n (assoc :db-conn (sys/db-conn-system test-data))))\n```\n\nThis works well with the clojure.test fixtures:\n\n```clojure\n;; atom required to re-evalualte (test-system) because of fixture `:each`\n(def a-test-system (atom nil))\n\n(defn system-fixture [f]\n (reset! a-test-system (test-system))\n (touch @a-test-system)\n (f)\n (halt! 
@a-test-system))\n\n(use-fixtures :each system-fixture)\n```\n\n## Figwheel system\n\nIt is possible to [provide a ring-handler](https://figwheel.org/docs/ring-handler.html) to figwheel configs which will be passed to a server figwheel starts for us.\n\nWe just need to specify a ring-handler in `figwheel-main.edn` like so:\n\n```clojure\n{:ring-handler flybot.server.systems/figwheel-handler\n :auto-testing true}\n```\n\nOur system does have a ring-handler we can supply to figwheel, it is called `reitit-router` in our system (it returns a ring-handler).\n\nSince figwheel starts the server, we do not need the aleph server dependency in our system anymore, se we can dissoc it from the system.\n\nSo here is the `figwheel-system` :\n\n```clojure\n(def figwheel-system\n "Figwheel automatically touches the system via the figwheel-main.edn on port 9500.\n Figwheel just needs a handler and starts its own server hence we dissoc the http-server.\n If some changes are made in one of the backend component (such as handler for instance),\n you can halt!, reload ns and touch again the system."\n (-\x3e (config/system-config :figwheel)\n system\n (assoc :db-conn (db-conn-system data/init-data))\n (dissoc :http-port :http-server)))\n\n(def figwheel-handler\n "Provided to figwheel-main.edn.\n Figwheel uses this handler to starts a server on port 9500.\n Since the system is touched on namespace load, you need to have\n the flag :figwheel? set to true in the config."\n (when (:figwheel? CONFIG)\n (-\x3e figwheel-system\n touch\n :reitit-router)))\n```\n\nThe `figheel-handler` is the value of the key `:reitit-router` of our running system.\n\nSo the system is started first via `touch` and its handler is provided to the servers figwheel starts that will be running while we work on our frontend.\n', -new S(null,1,5,T,[new S(null,2,5,T,["Flybot","https://github.com/skydread1/flybot.sg"],null)],null),"\n",ju,"Fun-Map applied to flybot.sg","fun-map-applied-to-flybot",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-fun-map-flybot"]),ij([Sl,ym,Wn,Mp,Np,pq,Dr,Zs,tt],[new S(null,4,5,T,["Clojure","Datomic","Cassandra","Docker"],null),new S(null,1,5,T,["2022-12-02"],null),'\n## Introduction\n\nWhile working on [flybot.sg](http://flybot.sg) , I experimented with `datomic-free`, datomic `starter-pro` with Cassandra and datomic starter-pro with embedded storage.\n\n## Rational\n\nYou can read the rationale of Datomic from their [on-prem documentation](https://docs.datomic.com/on-prem/getting-started/brief-overview.html)\n\nStuart Sierra explained very well how datomic works in the video [Intro to Datomic](https://www.youtube.com/watch?v\x3dR6ObrDWTlYA\x26t\x3d2776s).\n\nBasically, Datomic works as a layer on top of your underlying storage (in this case, we will use Cassandra db).\n\nYour `application` and a Datomic `transactor` are contained in a `peer`. \n\nThe transactor is the process that controls inbounds, and coordinates persistence to the storage services.\n\nThe process acts as a single authority for inbound transactions. 
A single transactor process allows the to be ACID compliant and fully consistent.\n\nThe peer is the process that will query the persisted data.\n\nSince Datomic leverages existing storage services, you can change persistent storage fairly easily.\n\n## Datomic Starter Pro with Cassandra\n\n### Datomic pro starter version\n\nDatomic is closed-source and commercial.\n\nYou can see the different pricing models in the page [Get Datomic On-Prem](https://www.datomic.com/get-datomic.html).\n\nThere are a few way to get started for free. The first one being to use the [datomic-free](https://blog.datomic.com/2012/07/datomic-free-edition.html) version which comes with in-mem database storage and local-storage transactor. You don’t need any license to use it so it is a good choice to get familiar with the datomic Clojure API.\n\nThen, there is `datomic pro starter` renamed `datomic starter` which is free and maintained for 1 year. After the one year threshold, you won’t benefit from support and you won’t get new versions of Datomic. You need to register to Datomic to get the license key.\n\n### Cassandra, Java and Python version caveats\n\nDatomic only support Cassandra up to version 3.x.x\n\nDatomic start pro version of Cassandra at the time of writting: 3.7.1\n\nClosest stable version of Cassandra: 3.11.10\n\n**Problem 1: Datomic does not support java 11 so we have to have a java 8 version on the machine**\n\nSolution: use [jenv](https://github.com/jenv/jenv) to manage multiple java version\n\n```bash\n# jenv to manage java version\nbrew install jenv\necho \'export PATH\x3d"$HOME/.jenv/bin:$PATH"\' \x3e\x3e ~/.bash_profile\necho \'eval "$(jenv init -)"\' \x3e\x3e ~/.bash_profile\n# add cask version\nbrew tap homebrew/cask-versions\n# install java 8 cask\nbrew install --cask adoptopenjdk8\n# add java 11 (current java version) to jenv\njenv add "$(/usr/libexec/java_home)"\n# add java 8 to jenv\njenv add /Library/Java/JavaVirtualMachines/adoptopenjdk-8.jdk/Contents/Home\n# update the ${JAVA_HOME} everytim we change version\njenv enable-plugin export\n#swith to java 8\njenv global 1.8\n```\n\n**Problem 2: cqlsh does not work with python3 with Cassandra running on java8**\n\nSolution: download the python2 pkg directly from [python.org](https://www.python.org/downloads/release/python-2718/)\n\n**Problem 3: `brew install cassandra@3` triggers an execution error hard to debug**\n\nSolution: download the tar.gz directly on [apache.org](https://www.apache.org/dyn/closer.lua/cassandra/3.11.14/apache-cassandra-3.11.14-bin.tar.gz)\n\n### Setup Cassandra locally and run start the transactor\n\nTo test Cassandra and datomic locally, we can use the Test Cluster of Cassandra which comes up with only one node.\n\nDatomic instruction for Cassandra [here](https://docs.datomic.com/on-prem/overview/storage.html#cassandra)\n\n```bash\n# Check if all the versions are ok\njava -version\nopenjdk version "1.8.0_292"\nOpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_292-b10)\nOpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.292-b10, mixed mode)\npython2 -V\nPython 2.7.18\ncqlsh\nConnected to Test Cluster at 127.0.0.1:9042.\n[cqlsh 5.0.1 | Cassandra 3.11.14 | CQL spec 3.4.4 | Native protocol v4]\nUse HELP for help.\n\n# Start cassandra\ncassandra -f\n\n# \x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\n# in other terminal\n\n# Only 
setup replica to 1 for the test cluster locally\n# add datomic keyspace and table\ncqlsh\nCREATE KEYSPACE IF NOT EXISTS datomic WITH replication \x3d {\'class\': \'SimpleStrategy\', \'replication_factor\' : 1};\nCREATE TABLE IF NOT EXISTS datomic.datomic\n(\n id text PRIMARY KEY,\n rev bigint,\n map text,\n val blob\n);\n\n# \x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\n# in other terminal\n\n# start datomic transactor\n# A sample of the cassandra transactor properties is provided in the datomic distribution samples.\n# the documentation of datomic mentioned we should have a msg of the shape:\n# System starter URI but I do not have URI but it seems to work nonetheless\ncd datomic-pro-1.0.6527/\nbin/transactor ~/workspaces/myproj/config/cassandra-transactor.properties\nLaunching with Java options -server -Xms1g -Xmx1g -XX:+UseG1GC -XX:MaxGCPauseMillis\x3d50\nSystem started\n\n# \x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\n# in other terminal\n\n# Test if the peer works properly on our localhost single node\nbin/shell\nDatomic Java Shell\nType Shell.help(); for help.\ndatomic % uri \x3d "datomic:cass://localhost:9042/datomic.datomic/myproj";\n\x3cdatomic:cass://localhost:9042/datomic.datomic/myproj\x3e\ndatomic % Peer.createDatabase(uri);\n\x3ctrue\x3e\ndatomic % conn \x3d Peer.connect(uri);\n\x3c{:unsent-updates-queue 0, :pending-txes 0, :next-t 1000, :basis-t 66, :index-rev 0, :db-id "myproj-some-id-here"}\x3e\n```\n\nIt’s important to note that we do not add `ssl` in the database URI so we don’t have to deal with the [KeyStore and TrustStore](https://docs.datomic.com/on-prem/overview/storage.html#troubleshooting) (for local use only)\n\n### Use Clojure API to create db and perform transactions\n\nSince the peer works using the datomic shell, we can confidently use the Clojure API from our code now.\n\nWe just need to add the datomic and Cassandra deps in the `deps.edn`:\n\n```clojure\n;; deps.edn : versions are provided upon subscription to datomic-pro\ncom.datomic/datomic-pro {:mvn/version "1.0.6527"}\ncom.datastax.cassandra/cassandra-driver-core {:mvn/version "3.1.0"}\n```\n\n## Datomic Starter Pro with embedded storage\n\nIn case of embedded DB, we only need to start a transactor and that’s it.\n\nThe URI to connect to the peer is of the shape:\n\n```clojure\n"datomic:dev://localhost:4334/myproj-db?password\x3dmy-secret"\n;; the password is the `storage-datomic-password` setup in the transactor properties.\n```\n\n## Datomic in docker container\n\nIn case we want to run datomic in a container (and maybe having our app in another container), we can do the following:\n\n- create DockerFile for our app\n- create DockerFile for Datomic Starter Pro (you could do the same with datomic-free)\n- create docker-compose file to run both the containers\n- update the transactors properties to be sure the app and transactor can communicate.\n\n### DockerFiles\n\nWe assume that the app has its own DockerFile and run on port 8123 in this example.\n\nHere is a DockerFile example to have Datomic running in a container:\n\n```docker\nFROM clojure:lein-2.6.1-alpine\n\nENV DATOMIC_VERSION 1.0.6527\nENV DATOMIC_HOME 
/opt/datomic-pro-$DATOMIC_VERSION\nENV DATOMIC_DATA $DATOMIC_HOME/data\n\nRUN apk add --no-cache unzip curl\n\n# Datomic Pro Starter as easy as 1-2-3\n# 1. Create a .credentials file containing user:pass\n# for downloading from my.datomic.com\nADD .credentials /tmp/.credentials\n\n# 2. Make sure to have a config/ folder in the same folder as your\n# Dockerfile containing the transactor property file you wish to use\nRUN curl -u $(cat /tmp/.credentials) -SL https://my.datomic.com/repo/com/datomic/datomic-pro/$DATOMIC_VERSION/datomic-pro-$DATOMIC_VERSION.zip -o /tmp/datomic.zip \\\n \x26\x26 unzip /tmp/datomic.zip -d /opt \\\n \x26\x26 rm -f /tmp/datomic.zip\n\nADD config $DATOMIC_HOME/config\n\nWORKDIR $DATOMIC_HOME\nRUN echo DATOMIC HOME: $DATOMIC_HOME\n\n# 3. Provide a CMD argument with the relative path to the transactor.properties\nVOLUME $DATOMIC_DATA\n\nEXPOSE 4334 4335 4336\n\nCMD bin/transactor -Ddatomic.printConnectionInfo\x3dtrue config/dev-transactor.properties\n```\n\n### Docker Compose\n\nHere is a `docker-compose.yml` we could use describing our app and datomic transactor containers\n\n```yaml\nversion: \'3.0\'\nservices:\n datomicdb:\n image: datomic-img\n hostname: datomicdb\n ports:\n - "4336:4336"\n - "4335:4335"\n - "4334:4334"\n volumes:\n - "/data"\n myprojapp:\n image: myproj-img\n ports:\n - "8123:8123"\n depends_on:\n - datomicdb\n```\n\nHere are the commands to create the images and run 2 containers.\n\n```docker\n# Create datomic transactor image\ndocker build -t datomic-img .\n\n# Create app image\ndocker build -t myproj-img .\n\n# run the 2 images in containers\ndocker-compose up\n```\n\nHowever, this will not work right away as we need to add a few configurations to the datomic transactor properties to make sure the app can communicate with the transactor.\n\n### Transactors Properties\n\nRegarding the transactor properties (datomic provides a template for a transactor with Cassandra storage), when we use docker, we need to pay attention to 3 properties:\n\n- The `localhost` is now 0.0.0.0\n- `alt-host` must be added with the container name (or IP) or the container running the app.\n- `storage-access` must be set to `remote`\n\nHere are the difference between containerized and not containerized properties for a `dev-transactor`: \n\n```yaml\n# If datomic not in container\nprotocol\x3ddev\nhost\x3dlocalhost\nport\x3d4334\n\n# If datomic in container\nprotocol\x3ddev\nhost\x3d0.0.0.0\nport\x3d4334\nalt-host\x3ddatomicdb\nstorage-access\x3dremote\n```\n\nAfter updating the transactor properties, you should be able to see the app running on port 8123 and be able to perform transactions as expected.\n', -"\n",ju,"Datomic Setup examples: embedded, cassandra, docker.","datomic-setup-examples",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-datomic-setup"]),ij([Sl,ym,Wn,Mp,Np,pq,Dr,Zs,tt],[new S(null,4,5,T,["Clojure","Java 8 time","Tick","Duration/Period"],null),new S(null,1,5,T,["2024-04-20"],null),'\n## Introduction\n\nIt is always very confusing to deal with time in programming. In fact there are so many time representations, for legacy reasons, that sticking to one is not possible as our dependencies, databases or even programming languages might use different ways of representing time!\n\nYou might have asked yourself the following questions:\n- Why so many time formats? 
`timestamp`, `date-time`, `offset-date-time`, `zoned-date-time`, `instant`, `inst`?\n- What is `UTC`, `DST`?\n- why use Java `Instant` instead of Java `Date`?\n- Why not only deal with `timestamp`?\n- How to go from one time representation to the other without getting lost?\n- What is the difference between a `duration` and a `period`?\n\nThis article will answer these questions and will illustrate the answers with Clojure code snippets using the `juxt/tick` library.\n\n## What is `Tick`?\n\n[juxt/tick](https://github.com/juxt/tick) is an excellent open-source **Clojure** library to deal with `date` and `time` as values. The [documentation](https://juxt.github.io/tick/) is of very good quality as well.\n\n## Time since epoch (timestamp)\n\nThe `time since epoch`, or `timestamp`, is a way of measuring time by counting the number of time units that have elapsed since a specific point in time, called the **epoch**. It is often represented in either milliseconds or seconds, depending on the level of precision required for a particular application.\n\nSo basically, it is just an `int` such as `1705752000000`\n\nThe obvious advantage is the universal simplicity of representing time. The disadvantage is the human readability. So we need to find a more human-friendly representation of time.\n\n## Local time\n\n*Alice is having some fish and chips for her lunch in the UK. She checks her clock on the wall and it shows 12pm. She checks her calendar and it shows the day is January the 20th.*\n\nThe local time is the time in a specific time zone, usually represented using a date and time-of-day without any time zone information. In java it is called `java.time.LocalDateTime`. However, `tick` mentioned that when you asked someone the time, it is always going to be "local", so they prefer to call it `date-time` as the local part is implicit.\n\nSo if we ask Alice for the time and date, she will reply:\n```clojure\n(-\x3e (t/time "12:00")\n (t/on "2024-01-20"))\n;\x3d\x3e #time/date-time "2024-01-20T12:00"\n```\n\n*At the same time and date Alice is having lunch in London, Bob is having some fish soup for dinner in his Singapore\'s nearby food court. He checked the clock on the wall and reads 8pm.*\n\nSo if we ask Bob for the time, he will reply that it is 8pm. So we can see that the local time is indeed local as Bob and Alice have different times.\n\nThe question is: how to have a common time representation for Bob and Alice?\n\n## offset-date-time\n\nOne of the difference between Bob and Alice times is due to the Coordinated Universal Time (**UTC**). The UTC offset is the difference between the local time and the UTC time, and it is usually represented using a plus or minus sign followed by the number of hours ahead or behind UTC\n\nThe United Kingdom is located on the prime meridian, which is the reference line for measuring longitude and the basis for the UTC time standard. Therefore, the local time in the UK is always the same as UTC time, and the time zone offset is `UTC+0` (also called `Z`). Alice is on the prime meridian, therefore the time she sees is the UTC time, the universal time reference.\n\nAs you go east, the difference with UTC increase. For example, Singapore is located at approximately 103.8 degrees east longitude, which means that it is eight hours ahead of UTC, and its time zone offset is `UTC+8`. That is why Bob is 8 hours ahead of Alice (8 hours in the "future")\n\nAs you go west, the difference with UTC decrease. 
For example, New York City is located at approximately 74 degrees west longitude, which means that it is four hours behind UTC during standard time, and its time zone offset is `UTC-4` (4 hours behind - 4 hours in the "past").\n\nSo, going back to our example, Bob is 8 hours ahead (in the "future") of Alice as we can see via the `UTC+8`:\n\n```clojure\n;; Alice time\n(-\x3e (t/time "12:00")\n (t/on "2024-01-20")\n (t/offset-by 0))\n;\x3d\x3e #time/offset-date-time "2024-01-20T12:00Z"\n\n;; Bob time\n(-\x3e (t/time "12:00")\n (t/on "2024-01-20")\n (t/offset-by 8))\n;\x3d\x3e #time/offset-date-time "2024-01-20T12:00+08:00"\n```\n\nWe added the offset to our time representation, note the tick name for that representation: `offset-date-time`. In java, it is called `java.time.OffsetDateTime`. We can see for Bob\'s time a `+08:00`. This represents The Coordinated Universal Time (**UTC**) offset.\n\nSo we could assume that the UTC offset remains the same within the same **zone** (country or region), but it is not the case. Let\'s see why in the next section.\n\n## zoned-date-time\n\nSo far we have the following components to define a time:\n- date\n- time\n- UTC offset\n\nHowever, counter-intuitively, the UTC offset for Alice is not the same all year long. Sometimes it is `UTC+0` (`Z`) in winter (as we saw earlier) but sometimes it is `UTC+1` in summer.\n\nLet me prove it to you:\n```clojure\n;; time for Alice in winter\n(-\x3e (t/time "12:00")\n (t/on "2024-01-20") ;; January - a winter month\n (t/in "Europe/London")\n (t/offset-date-time))\n;\x3d\x3e #time/offset-date-time "2024-01-20T12:00Z"\n\n;; time for Alice in summer\n(-\x3e (t/time "12:00")\n (t/on "2024-08-20") ;; August - a summer month\n (t/in "Europe/London")\n (t/offset-date-time))\n;\x3d\x3e #time/offset-date-time "2024-08-20T12:00+01:00"\n```\n\nThis UTC offset difference is due to the Daylight Saving Time (**DST**).\n\nDaylight Saving Time (DST) is a system of adjusting the clock in order to make better use of daylight during the summer months by setting the clock forward by one hour in the spring and setting it back by one hour in the fall. This way, Alice can enjoy more of the sunlight in summer since the days are "longer" (more sunlight duration) while keeping her same working hours!\n\nIt is important to note that not all countries implement DSL. Some countries do not use DSL because they don\'t need. That is the case of Singapore. In Singapore, the sunset/sunrise is almost happening at the same time everyday so technically, there is no Winter/Summer. Some country chose not to use it. That\'s the case of Japan for instance. Japan could benefit from the DSL but chose not to implement it for diverse reasons.\n\nSo we can conclude that a UTC offset is not representative of a Zone because some country might implement DST and other not. Also, for the country implementing DST, their UTC is therefore not fix throughout the year. Thus, we need another parameter to fully define a time: the **Zone**:\n\n```clojure\n(-\x3e (t/time "12:00")\n (t/on "2024-01-20") ;; January - a winter month\n (t/in "Europe/London"))\n;\x3d\x3e #time/zoned-date-time "2024-01-20T12:00Z[Europe/London]"\n```\n\nYou can notice that it is the same code as before but I remove the conversion to an `offset-date-time`. 
Indeed, Adding the zone like in `(t/in "Europe/London")` is already considering the **Zone** obviously (and therefore the **UTC**) thus creating a `zoned-date-time`.\n\nA `#time/zoned-date-time` in Java is called a `java.time.ZonedDateTime`.\n\nSo we now have a complete way to describe the time:\n- a date\n- a time\n- a zone (that includes the location and the UTC encapsulating the DST)\n\nSo the time for Bob is:\n```clojure\n(-\x3e (t/time "12:00")\n (t/on "2024-01-20")\n (t/in "Asia/Singapore"))\n;\x3d\x3e #time/zoned-date-time "2024-01-20T12:00+08:00[Asia/Singapore]"\n```\n\nSo to recap:\n- the **Zone** `Asia/Singapore` always has the same **UTC** all year long because no **DST**\n- the **Zone** `Europe/London` has a different **UTC** in summer and winter\n- thus Bob is ahead of Alice by 8 hours during winter and Bob is ahead of Alice by 7 hours during summer.\n- This is due by the fact that the UK implements **DST** which makes its own **UTC** throughout the year.\n\nSo a **Zone** encapsulates the notion of **UTC** and **DST**.\n\n## instant\n\nYou might thought we were done here but actually the recommended time representation would be an `instant`. In java, it is called `java.time.Instant`. Why do we want to use instant is actually to avoid confusion. When you store a time in your DB, or when you want to add 10 days to this time, you actually don\'t want to deal with time zone. In programming, we always want to have a solution as simple as possible. Remember the very first time representation I mentioned? The **time since epoch**. The `epoch` in the prime meridian (`UTC+0`) is the same for everybody. So the time since epoch (to current UTC+0 time) in ms is a universal way of representing the time.\n\n```clojure\n;; instant time for Alice\n(-\x3e (t/time "12:00")\n (t/on "2024-01-20")\n (t/in "Europe/London")\n (t/instant))\n;\x3d\x3e #time/instant "2024-01-20T12:00:00Z"\n\n;; instant time for Bob\n(-\x3e (t/time "20:00")\n (t/on "2024-01-20")\n (t/in "Asia/Singapore")\n (t/instant))\n;\x3d\x3e #time/instant "2024-01-20T12:00:00Z"\n```\n\nWe can see in the example above, that since Singapore is 8 hours ahead of London, 12pm in London and 8pm in Singapore are indeed the same `instant`.\n\nThe `instant` is the human-friendly time representation of the timestamp (time since epoch). You can then store that format in your DB or do operation on it such as adding/substituting duration or period to it (more on this later).\n\nThe `epoch` in time-since-epoch is equivalent to #time/instant "1970-01-01T00:00:00Z":\n```clojure\n(t/epoch)\n;\x3d\x3e #time/instant "1970-01-01T00:00:00Z"\n```\n\n## Alice and Bob don\'t care about instants\n\nThat is correct, if we have a web page, we want Alice to see the time in London time and Bob the time in Singapore time. This is easy to do. we can derive the `zoned-date-time` from an `instant` since we know the zone of Bob and Alice:\n\n```clojure\n;; in Alice\'s browser\n(t/format (t/formatter "yyyy-MM-dd HH:mm:ss")\n (t/in #time/instant "2024-01-20T12:00:00Z" "Europe/London"))\n"2024-01-20 12:00:00"\n\n;; in Bob\'s browser\n(t/format (t/formatter "yyyy-MM-dd HH:mm:ss")\n (t/in #time/instant "2024-01-20T12:00:00Z" "Asia/Singapore"))\n"2024-01-20 20:00:00"\n```\n\n## inst\n\nLast time format I promise. As a clojure developer, you might often see `inst`. It is **different** from `instant`. In java `inst` is called `java.util.Date`. 
The `java.util.Date` class is an old and flawed class that was replaced by the Java 8 time API, and it should be avoided when possible.\n\nHowever, some libraries might require you to pass `inst` instead of `instant` still, and it is easy to convert between the two using the Tick library:\n\n```clojure\n(t/inst #time/instant "2024-01-20T04:00:00Z")\n;\x3d\x3e #inst "2024-01-20T04:00:00.000-00:00"\n```\n\nWhat about the other way around?\n\n```clojure\n(t/instant #inst "2024-01-20T04:00:00.000-00:00")\n;\x3d\x3e #time/instant "2024-01-20T04:00:00Z"\n```\n\n## All theses time formats are confusing\n\nJust remember these key points:\n- to store or do operations on time, use `instant` (java.time.Instant)\n- to represent time locally for users, convert your instant to `zoned-date-time` (java.time.ZonedDateTime)\n- to have a human readable format aka browser, parse your `zoned-date-time` using string formatter\n- if a third party lib needs other format, use tick intuitive conversion functions (t/inst, t/instant etc)\n\n## Duration vs Period\n\nWe now know that we need to use `instant` to perform operations on time. However, sometimes we use `duration` and sometimes we use `period`:\n\n```clojure\n(t/new-duration 10 :seconds)\n;\x3d\x3e #time/duration "PT10S"\n\n(t/new-period 10 :weeks)\n;\x3d\x3e #time/period "P70D"\n```\n\nThey are not interchangeable:\n```clojure\n(t/new-period 10 :seconds)\n; Execution error (IllegalArgumentException) at tick.core/new-period (core.cljc:649).\n; No matching clause: :seconds\n```\n\nSo what is the difference? I will give you a clue:\n- all units from `nanosecond` to `day` (included) are `durations`\n- all units from `day` such as a `week` for instance are a `period`.\n\nThere is one unit that can be both a `duration` and a `period`: a `day`:\n\n```clojure\n;; day as duration\n(t/new-duration 10 :days)\n#time/duration "PT240H"\n\n;; day as period\n(t/new-period 10 :days)\n#time/period "P10D"\n```\n\nTherefore, a simple definition could be:\n- a `duration` measures an amount of time using time-based values (seconds, nanoseconds). \n- a `period` uses date-based (we can also calendar-based) values (years, months, days)\n- a `day` can be both `duration` and `period`: a duration of one day is exactly 24 hours long but a period of one day, when considering the calendar, may vary.\n\nFirst, here is how you would add a day as duration or as a period to the proper format:\n\n```clojure\n;; time-based so use duration\n(-\x3e (t/time "10:00")\n (t/\x3e\x3e (t/new-duration 4 :hours)))\n;\x3d\x3e #time/time "14:00"\n\n;; date-based so use period\n(-\x3e (t/date "2024-04-01")\n (t/\x3e\x3e (t/new-period 1 :days)))\n;\x3d\x3e #time/date "2024-04-02"\n```\n\nNow, let me prove to you that we need to be careful to chose the right format for a day. In London, at 1am on the last Sunday of March, the clocks go forward 1 hour (DST increase by one because we enter summer months). 
So in 2024, at 1am, on March 31st, clocks go forward 1 hour.\n\n```clojure\n;; we add a period of 1 day\n(-\x3e (t/time "08:00")\n (t/on "2024-03-30")\n (t/in "Europe/London")\n (t/\x3e\x3e (t/new-period 1 :days)))\n#time/zoned-date-time "2024-03-31T08:00+01:00[Europe/London]"\n\n;; we add a duration of 1 day\n(-\x3e (t/time "08:00")\n (t/on "2024-03-30")\n (t/in "Europe/London")\n (t/\x3e\x3e (t/new-duration 1 :days)))\n#time/zoned-date-time "2024-03-31T09:00+01:00[Europe/London]"\n```\n\nWe can see that since in this specific DST update to summer month, the day 03/31 "gained" an hour so it has a `duration` of 25 hours, therefore our new time is `09:00`. However, the `period` taking into consideration the date in a calendar system, does not see a day as 24 hours (time-base) but as calendar unit (date-based) and therefore the new time is still `08:00`.\n\n## Conclusion\n\nA **Zone** encapsulates the notion of **UTC** and **DST**.\n\nThe **time since epoch** is the universal *computer-friendly* of representing time whereas the **Instant** is the universal *human-friendly* of representing time.\n\nA `duration` measures an amount of time using time-based values whereas a `period` uses date-based (calendar) values.\n\nFinally, for Clojure developers, I highly recommend using `juxt/tick` as it allows us to handle time efficiently (conversion, operations) and elegantly (readable, as values) and I use it in several of my projects. It is also of course possible to do interop with the `java.time.Instant` class directly if you prefer.\n', -"\n",ju,"Time as a value with Tick","tick",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"juxt-tick"]),ij([Sl,ym,Wn,Cp,Mp,Np,pq,Dr,Zs,tt],[new S(null,2,5,T,["Clojure","Pull Pattern"],null),new S(null,1,5,T,["2023-04-08"],null),'\n## Prerequisites\n\nIf you are not familiar with [lasagna-pull](https://github.com/flybot-sg/lasagna-pull), please refer to the doc [Lasagna Pull: Precisely select from deep nested data](https://www.loicblanchard.me/blog/lasagna-pull)\n\n## Goal\n\nIn this document, I will show you how we leverage `lasagna-pull` in the [flybot app](https://github.com/skydread1/flybot.sg) to define a pure data API.\n\n## Defines API as pure data\n\nA good use case of the pattern is as parameter in a post request.\n\nIn our backend, we have a structure representing all our endpoints:\n\n```clojure\n;; BACKEND data structure\n(defn pullable-data\n "Path to be pulled with the pull-pattern.\n The pull-pattern `:with` option will provide the params to execute the function\n before pulling it."\n [db session]\n {:posts {:all (fn [] (get-all-posts db))\n :post (fn [post-id] (get-post db post-id))\n :new-post (with-role session :editor\n (fn [post] (add-post db post)))\n :removed-post (with-role session :editor\n (fn [post-id user-id] (delete-post db post-id user-id)))}\n :users {:all (with-role session :owner\n (fn [] (get-all-users db)))\n :user (fn [id] (get-user db id))\n :removed-user (with-role session :owner\n (fn [id] (delete-user db id)))\n :auth {:registered (fn [id email name picture] (register-user db id email name picture))\n :logged (fn [] (login-user db (:user-id session)))}\n :new-role {:admin (with-role session :owner\n (fn [email] (grant-admin-role db email)))\n :owner (with-role session :owner\n (fn [email] (grant-owner-role db email)))}\n :revoked-role {:admin (with-role session :owner\n (fn [email] (revoke-admin-role db email)))}}})\n```\n\nThis resembles a REST API 
structure.\n\nSince the API “route” information is contained within the pattern keys themselves, all the http requests with a pattern as params can hit the same backend URI.\n\nSo we have a single route for all pattern http request:\n\n```clojure\n(into (auth/auth-routes oauth2-config)\n [["/pattern" {:post ring-handler}] ;; all requests with pull pattern go here\n ["/users/logout" {:get (auth/logout-handler client-root-path)}]\n ["/oauth/google/success" {:get ring-handler :middleware [[auth/authentification-middleware client-root-path]]}]\n ["/*" {:get {:handler index-handler}}]])\n```\n\nTherefore the pull pattern:\n\n- Describes the API routes\n- Provides the data expected by the server in its `:with` option for the concerned endpoints\n- Describes what is asked by the client to only return relevant data\n- Can easily perform authorization\n\n## Example: pull a post\n\nFor instance, getting a specific post, meaning with the “route”: `:posts :post`, can be done this way:\n\n```clojure\n((pull/qfn\n {:posts\n {(list :post :with [s/post-1-id]) ;; provide required params to pullable-data :post function\n {:post/id \'?\n :post/page \'?\n :post/css-class \'?\n :post/creation-date \'?\n :post/last-edit-date \'?\n :post/author {:user/id \'?\n :user/email \'?\n :user/name \'?\n :user/picture \'?\n :user/roles [{:role/name \'?\n :role/date-granted \'?}]}\n :post/last-editor {:user/id \'?\n :user/email \'?\n :user/name \'?\n :user/picture \'?\n :user/roles [{:role/name \'?\n :role/date-granted \'?}]}\n :post/md-content \'?\n :post/image-beside {:image/src \'?\n :image/src-dark \'?\n :image/alt \'?}\n :post/default-order \'?}}}\n \'\x26? ;; bind the whole data\n ))\n; \x3d\x3e \n{:posts\n {:post\n #:post{:id #uuid "64cda032-b4e4-431e-bd85-0dbe34a8feeb" ;; s/post-1-id\n :page :home\n :css-class "post-1"\n :creation-date #inst "2023-01-04T00:00:00.000-00:00"\n :last-edit-date #inst "2023-01-05T00:00:00.000-00:00"\n :author #:user{:id "alice-id"\n :email "alice@basecity.com"\n :name "Alice"\n :picture "alice-pic"\n :roles [#:role{:name :editor\n :date-granted\n #inst "2023-01-02T00:00:00.000-00:00"}]}\n :last-editor #:user{:id "bob-id"\n :email "bob@basecity.com"\n :name "Bob"\n :picture "bob-pic"\n :roles [#:role{:name :editor\n :date-granted\n #inst "2023-01-01T00:00:00.000-00:00"}\n #:role{:name :admin\n :date-granted\n #inst "2023-01-01T00:00:00.000-00:00"}]}\n :md-content "#Some content 1"\n :image-beside #:image{:src "https://some-image.svg"\n :src-dark "https://some-image-dark-mode.svg"\n :alt "something"}\n :default-order 0}}}\n```\n\nIt is important to understand that the param `s/post-1-id` in `(list :post :with [#uuid s/post-1-id])` was passed to `(fn [post-id] (get-post db post-id))` in `pullable-data`. 
\n\nThe function returned the post fetched from the db.\n\nWe decided to fetch all the information of the post in our pattern but we could have just fetch some of the keys only:\n\n```clojure\n((pull/qfn\n {:posts\n {(list :post :with [s/post-1-id]) ;; only fetch id and page even though all the other keys have been returned here\n {:post/id \'?\n :post/page \'?}}}\n \'\x26?))\n\x3d\x3e {:posts\n {:post\n {:post/id #uuid "64cda032-b4e4-431e-bd85-0dbe34a8feeb"\n :post/page :home}}}\n```\n\nThe function `(fn [post-id] (get-post db post-id))` returned **all** the post keys but we only select the `post/id` and `post/page`.\n\nSo we provided the required param `s/post-1-id` to the endpoint `:post` and we also specified what information we want to pull: `:post/id` and `:post/page`.\n\nYou can start to see how convenient that is as a frontend request to the backend. our post request body can just be a `pull-pattern`! (more on this further down in the doc).\n\n## Post data validation\n\nIt is common to use [malli](https://github.com/metosin/malli) schema to validate data.\n\nHere is the malli schema for the post data structure we used above:\n\n```clojure\n(def post-schema\n [:map {:closed true}\n [:post/id :uuid]\n [:post/page :keyword]\n [:post/css-class {:optional true} [:string {:min 3}]]\n [:post/creation-date inst?]\n [:post/last-edit-date {:optional true} inst?]\n [:post/author user-schema]\n [:post/last-editor {:optional true} user-schema]\n [:post/md-content [:and\n [:string {:min 10}]\n [:fn\n {:error/message "Level 1 Heading `#` missing in markdown."}\n md/has-valid-h1-title?]]]\n [:post/image-beside\n {:optional true}\n [:map\n [:image/src [:string {:min 10}]]\n [:image/src-dark [:string {:min 10}]]\n [:image/alt [:string {:min 5}]]]]\n [:post/default-order {:optional true} nat-int?]])\n```\n\n## Pattern data validation\n\n`lasagna-pull` also allows us to provide schema alongside the pattern to validate 2 things:\n\n- the pattern format is correct\n- the pattern content respects a malli schema\n\nThis is very good because we can have a malli schema for the entire `pullable-data` structure like so:\n\n```clojure\n(def api-schema\n "All keys are optional because it is just a data query schema.\n maps with a property :preserve-required set to true have their keys remaining unchanged."\n (all-keys-optional\n [:map\n {:closed true}\n [:posts\n [:map\n [:post [:\x3d\x3e [:cat :uuid] post-schema]] ;; route from our get post example \n [:all [:\x3d\x3e [:cat] [:vector post-schema]]]\n [:new-post [:\x3d\x3e [:cat post-schema-create] post-schema]]\n [:removed-post [:\x3d\x3e [:cat :uuid :string] post-schema]]]]\n [:users\n [:map\n [:user [:\x3d\x3e [:cat :string] user-schema]]\n [:all [:\x3d\x3e [:cat] [:vector user-schema]]]\n [:removed-user [:\x3d\x3e [:cat :string] user-schema]]\n [:auth [:map\n [:registered [:\x3d\x3e [:cat :string user-email-schema :string :string] user-schema]]\n [:logged [:\x3d\x3e [:cat] user-schema]]]]\n [:new-role [:map\n [:admin [:\x3d\x3e [:cat user-email-schema] user-schema]]\n [:owner [:\x3d\x3e [:cat user-email-schema] user-schema]]]]\n [:revoked-role [:map\n [:admin [:\x3d\x3e [:cat user-email-schema] user-schema]]]]]]]))\n```\n\nIf we go back to the scenario where we want to fetch a specific post from the DB, we can see that we are indeed having a function as params of the key `:post` that expects one param: a uuid:\n\n```clojure\n[:post [:\x3d\x3e [:cat :uuid] post-schema]] \n```\n\nIt corresponds to the pattern part:\n\n```clojure\n(list :post :with 
[s/post-1-id])\n```\n\nAnd `lasagna-pull` provides validation of the function’s params which is very good to be sure the proper data is sent to the server!\n\nPlus, in case the params given to one of the routes are not valid, the function won’t even be executed.\n\nSo now we have a way to do post request to our backend providing a pull-pattern as the request body and our server can validate this pattern format and content as the data is being pulled.\n\n## Pattern query context\n\n### How it works\n\nEarlier, I asked you to assume that the function from `pullable-data` was returning a post data structure.\n\nIn reality, it is a bit more complex than this because what is returned by the different functions (endpoints) in `pullable-data` is a map. For instance:\n\n```clojure\n;; returned by get-post\n{:response (db/get-post db post-id)} ;; note the response key here\n\n;; returned by register-user\n{:response user\n :effects {:db {:payload [user]}} ;; the db transaction description to be made\n :session {:user-id user-id} ;; the user info to be added to the session\n}\n```\n\nThis is actually a problem because our pattern for a post is:\n\n```clojure\n{:posts\n {(list :post :with [s/post-1-id])\n {:post/id \'?}}}\n```\n\nand with what is returned by `(fn [post-id] (get-post db post-id))`, we should have:\n\n```clojure\n{:posts\n {(list :post :with [s/post-1-id])\n {:response ;; note the response here\n \t {:post/id \'?}}}}\n```\n\nAlso, in case of a user registration for instance, you saw that we have other useful information such as\n- effects: the db transaction to add the user to the db\n- session: some user info to add to the session. \n\nHowever we do not want to pull the `effects` and `session`. We just want a way to accumulate them somewhere.\n\nWe could perform the transaction directly and return the post, but we don\'t want that.\n\nWe prefer to accumulate side effects descriptions and execute them all at once in a dedicated `executor`.\n\nThe `response` needs to be added to the pulled data, but the `effects` and `session` need to be stored elsewhere and executed later on.\n\nThis is possible via a `modifier` and a `finalizer` context in the `pull/query` API.\n\nIn our case, we have a `mk-query` function that uses a `modifier` and `finalizer` to achieve what I described above:\n\n```clojure\n(defn mk-query\n "Given the pattern, make an advance query using a context:\n modifier: gather all the effects description in a coll\n finalizer: assoc all effects descriptions in the second value of pattern."\n [pattern]\n (let [effects-acc (transient [])\n session-map (transient {})]\n (pull/query\n pattern\n (pull/context-of\n (fn [_ [k {:keys [response effects session error] :as v}]]\n (when error\n (throw (ex-info "executor-error" error)))\n (when session ;; assoc session to the map session\n (reduce\n (fn [res [k v]] (assoc! res k v))\n session-map\n session))\n (when effects ;; conj the db transaction description to effects vector\n (conj! effects-acc effects))\n (if response\n [k response]\n [k v]))\n #(assoc % ;; returned the whole pulled data and assoc the effects and session to it\n :context/effects (persistent! effects-acc)\n :context/sessions (persistent! session-map))))))\n```\n\n### Example of post creation\n\nLet’s have a look at an example:\n\nWe want to add a new post. 
When we make a request for a new post, if everything works fine, the pullable-data function at the route `:new-post` returns a map such as:\n\n```clojure\n{:response full-post ;; the pullable data to return to the client\n :effects {:db {:payload posts}} ;; the new posts to be added to the db\n}\n```\n\nThe pull pattern for such request can be like this:\n\n```clojure\n{:posts\n {(list :new-post :with [post-in]) ;; post-in is a full post to be added with all required keys\n {:post/id \'?\n :post/page \'?\n :post/default-order \'?}}}\n```\n\nThe `post-in` is provided to the pullable-data function of the key `:new-post`.\n\nThe function of `add-post` actually determine all the new `:post/default-order` of the posts given the new post. That is why we see in the side effects that several `posts` are returned because we need to have their order updated in db.\n\nRunning this pattern with the pattern **context** above returns:\n\n```clojure\n{\x26? {:posts {:new-post {:post/id #uuid "64cda032-3dae-4845-b7b2-e4a6f9009cbd"\n :post/page :home\n :post/creation-date #inst "2023-01-07T00:00:00.000-00:00"\n :post/default-order 2}}}\n :context/effects [{:db {:payload [{:post/id #uuid "64cda032-3dae-4845-b7b2-e4a6f9009cbd"\n :post/page :home\n :post/md-content "#Some content 3"\n :post/creation-date #inst "2023-01-07T00:00:00.000-00:00"\n :post/author {:user/id "bob-id"}\n :post/default-order 2}]}}]\n :context/sessions {}}\n```\n\n- the response has been returned from the :with function to the pattern in the ‘\x26? key\n- the effects have been accumulated and assoc in `:context/effects`\n- there was no data to be added to the session\n\nThen, in the ring response, we can just return the value of `\x26?`\n\nAlso, the effects can be executed in a dedicated executor functions all at once.\n\nThis allows us to deal with pure data until the very last moment when we run all the side effects (db transaction and session) in one place only we call `executor`.\n\n## Saturn handler\n\nIn our system, we have a component called the `saturn-handler`. The component `ring-handler` depends on it.\n\nIn order to isolate the side effects as much as we can, our endpoints from our `pullable-data`, highlighted previously, do not perform side effects but return **descriptions** in pure data of the side effects to be done. These side effects are the ones we gather in `:context/effects` and `:context/sessions` using the pull-pattern\'s query context.\n\nThe saturn-handler returns a map with the `response` (data pulled and requested in the client pattern) to be sent to the client, the `effect-desc` to be perform (in our case, just db transactions) and the `session` update to be done:\n\n```clojure\n(defn saturn-handler\n "A saturn handler takes a ring request enhanced with additional keys form the injectors.\n The saturn handler is purely functional.\n The description of the side effects to be performed are returned and they will be executed later on in the executors."\n [{:keys [params body-params session db]}]\n (let [pattern (if (seq params) params body-params)\n data (op/pullable-data db session)\n {:context/keys [effects sessions] :as resp}\n (pull/with-data-schema v/api-schema ((mk-query pattern) data))]\n {:response (\'\x26? resp)\n :effects-desc effects\n :session (merge session sessions)}))\n```\n\nYou can also notice that the data is being validated via `pull/with-data-schema`. 
In case of validation error, since we do not have any side effects done during the pulling, an error will be thrown and no mutations will be done.\n\nHaving no side-effects at all makes it way easier to tests and debug and it is more predictable.\n\nFinally, the `ring-handler` will be the component responsible to **execute** all the side effects at once. \n\nSo the `saturn-handler` purpose was to be sure the data is being pulled properly, validated using malli, and that the side effects descriptions are gathered in one place to be executed later on.\n', -new S(null,1,5,T,[new S(null,2,5,T,["Flybot","https://github.com/skydread1/flybot.sg"],null)],null),"\n",ju,"Lasagna-pull applied to flybot.sg","lasagna-pull-applied-to-flybot",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-lasagna-pull-flybot"]),ij([Sl,ym,Wn,Mp,Np,pq,Dr,Zs,tt],[new S(null,4,5,T,["Clojure","Reagent","Figwheel","Mono Repo"],null),new S(null,1,5,T,["2023-02-16"],null),'\n## Context\n\nOur app [skydread1/flybot.sg](https://github.com/skydread1/flybot.sg) is a full-stack Clojure **web** and **mobile** app.\n\nWe opted for a mono-repo to host:\n- the `server`: Clojure app\n- the `web` client: Reagent (React) app using Re-Frame\n- the `mobile` client: Reagent Native (React Native) app using Re-Frame\n\nNote that the web app does not use NPM at all. However, the React Native mobile app does use NPM and the `node_modules` need to be generated.\n\nBy using only one `deps.edn`, we can easily starts the different parts of the app.\n\n## Goal\n\nThe goal of this document is to highlight the mono-repo structure and how to run the different parts (dev, test, build etc).\n\n## Repo structure\n\n```\n├── client\n│ ├── common\n│ │ ├── src\n│ │ │ └── flybot.client.common\n│ │ └── test\n│ │ └── flybot.client.common\n│ ├── mobile\n│ │ ├── src\n│ │ │ └── flybot.client.mobile\n│ │ └── test\n│ │ └── flybot.client.mobile\n│ └── web\n│ ├── src\n│ │ └── flybot.client.web\n│ └── test\n│ └── flybot.client.web\n├── common\n│ ├── src\n│ │ └── flybot.common\n│ └── test\n│ └── flybot.common\n├── server\n│ ├── src\n│ │ └── flybot.server\n│ └── test\n│ └── flybot.server\n```\n\n- `server` dir contains then `.clj` files\n- `common` dir the `.cljc` files\n- `clients` dir the `.cljs` files.\n\n## Deps Management\n\nYou can have a look at the [deps.edn](https://github.com/skydread1/flybot.sg/blob/master/deps.edn).\n\nWe can use namespaced aliases in `deps.edn` to make the process clearer.\n\nI will go through the different aliases and explain their purposes and how to I used them to develop the app.\n\n## Common libraries\n\n### clj and cljc deps\n\nFirst, the root deps of the deps.edn, inherited by all aliases:\n\n#### Both frontend and backend\n- org.clojure/clojure\n- metosin/malli\n- metosin/reitit\n- metosin/muuntaja\n- sg.flybot/lasagna-pull\n\n#### Backend\n- ring/ring-defaults \n- aleph/aleph\n- robertluo/fun-map\n- datalevin/datalevin\n- skydread1/reitit-oauth2\n \nThe deps above are used in both `server/src` and `common/src` (clj and cljc files).\n\nSo every time you start a `deps` REPL or a `deps+figwheel` REPL, these deps will be loaded.\n\n### Sample data\n\nIn the [common/test/flybot/common/test_sample_data.cljc](https://github.com/skydread1/flybot.sg/blob/master/common/test/flybot/common/test_sample_data.cljc) namespace, we have sample data that can be loaded in both backend dev system of frontend dev systems.\n\nThis is made possible by reader conditionals 
clj/cljs.\n\n### IDE integration\n\nI use the `calva` extension in VSCode to jack-in deps and figwheel REPLs but you can use Emacs if you prefer for instance.\n\nWhat is important to remember is that, when you work on the backend only, you just need a `deps` REPL. There is no need for figwheel since we do not modify the cljs content.\nSo in this scenario, the frontend is fixed (the main.js is generated and not being reloaded) but the backend changes (the `clj` files and `cljc` files).\n\nHowever, when you work on the frontend, you need to load the backend deps to have your server running but you also need to recompile the js when a cljs file is saved. Therefore your need both `deps+figwheel` REPL. So in this scenario, the backend is fixed and running but the frontend changes (the `cljs` files and `cljc` files)\n\nYou can see that the **common** `cljc` files are being watched in both scenarios which makes sense since they "become" clj or cljs code depending on what REPL type you are currently working in.\n\n## Server aliases\n\nFollowing are the aliases used for the server:\n\n- `:jvm-base`: JVM options to make datalevin work with java version \x3e java8\n- `:server/dev`: clj paths for the backend systems and tests\n- `:server/test`: Run clj tests\n\n## Client common aliases\n\nFollowing is the alias used for both web and mobile clients:\n\n- `:client`: deps for frontend libraries common to web and react native.\n\nThe extra-paths contains the `cljs` files.\n\nWe can note the `client/common/src` path that contains most of the `re-frame` logic because most subscriptions and events work on both web and react native right away!\n\nThe main differences between the re-frame logic for Reagent and Reagent Native have to do with how to deal with Navigation and oauth2 redirection. That is the reason we have most of the logic in a **common** dir in `client`.\n\n## Mobile Client\n\nFollowing are the aliases used for the **mobile** client:\n\n- `:mobile/rn`: contains the cljs deps only used for react native. 
They are added on top of the client deps.\n- `:mobile/ios`: starts the figwheel REPL to work on iOS.\n\n## Web Client\n\nFollowing are the aliases used for the **web** client:\n\n- `:web/dev`: starts the dev REPL\n- `:web/prod`: generates the optimized js bundle main.js\n- `:web/test`: runs the cljs tests\n- `:web/test-headless`: runs the headless cljs tests (fot GitHub CI)\n\n## CI/CD aliases\n\n### build.clj\n\nFollowing is the alias used to build the js bundle or a uberjar:\n\n- `:build`: [clojure/tools.build](https://github.com/clojure/tools.build) is used to build the main.js and also an uber jar for local testing, we use .\n\nThe build.clj contains the different build functions:\n\n- Build frontend js bundle: `clj -T:build js-bundle`\n- Build backend uberjar: `clj -T:build uber`\n- Build both js and jar: `clj -T:build uber+js`\n\n### Jibbit\n\nFollowing is the alias used to build an image and push it to local docker or AWS ECR:\n\n- `:jib`: build image and push to image repo\n\n## Antq\n\nFollowing is the alias used to points out outdated dependencies\n\n- `:outdated`: prints the outdated deps and their last available version\n\n\n## Notes on Mobile CD\n\nWe have not released the mobile app yet, that is why there is no aliases related to CD for react native yet.\n\n## Conclusion\n\nThis is one solution to handle server and clients in the same repo.\n\nFeel free to consult the complete [deps.edn](https://github.com/skydread1/flybot.sg/blob/master/deps.edn) content.\n\nIt is important to have a clear directory structure to only load required namespaces and avoid errors.\n\nUsing `:extra-paths` and `:extra-deps` in deps.edn is important because it prevent deploying unnecessary namespaces and libraries on the server and client.\n\nAdding namespace to the aliases make the distinction between backend, common and client (web and mobile) clearer.\n\nUsing `deps` jack-in for server only work and `deps+figwheel` for frontend work is made easy using `calva` in VSCode (work in other editors as well).\n', -"\n",ju,"Clojure Mono Repo example : server + 2 clients","clojure-mono-repo",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-mono-repo"]),ij([Sl,ym,Wn,Mp,Np,pq,Dr,Zs,tt],[new S(null,3,5,T,["Clojure","Pull Pattern","Malli"],null),new S(null,1,5,T,["2022-04-12"],null),"\n## Context\n\n[flybot-sg/lasagna-pull](https://github.com/flybot-sg/lasagna-pull) by [@robertluo](https://github.com/robertluo) aims at precisely select from deep data structure in Clojure.\n\n## Goal\n\nIn this document, I will show you the benefit of `pull-pattern` in pulling nested data.\n\n## Rational\n\nIn Clojure, it is very common to have to precisely select data in nested maps. the Clojure core `select-keys` and `get-in` functions do not allow to easily select in deeper levels of the maps with custom filters or parameters.\n\nOne of the libraries of the `lasagna-stack` is [flybot-sg/lasagna-pull](https://github.com/flybot-sg/lasagna-pull). It takes inspiration from the [datomic pull API](https://docs.datomic.com/on-prem/query/pull.html) and the library [redplanetlabs/specter](https://github.com/redplanetlabs/specter).\n\n`lasagna-pull` aims at providing a clearer pattern than the datomic pull API.\n\nIt also allows the user to add options on the selected keys (filtering, providing params to values which are functions etc). 
It supports less features than the `specter` library but the syntax is more intuitive and covers all major use cases you might need to select the data you want.\n\nFinally, a [metosin/malli](https://github.com/metosin/malli) schema can be provided to perform data validation directly using the provided pattern. This allows the client to prevent unnecessary pulling if the pattern does not match the expected shape (such as not providing the right params to a function, querying the wrong type etc).\n\n## A query language to select deep nested structure\n\nSelecting data in nested structure is made intuitive via a pattern that describes the data to be pulled following the shape of the data.\n\n### Simple query cases\n\nHere are some simple cases to showcase the syntax:\n\n- query a map\n\n```clojure\n(require '[sg.flybot.pullable :as pull])\n\n((pull/query '{:a ? :b {:b1 ?}})\n {:a 1 :b {:b1 2 :b2 3}})\n;\x3d\x3e {\x26? {:a 1, :b {:b1 2}}}\n```\n\n- query a sequence of maps\n\n```clojure\n((pull/query '[{:a ? :b {:b1 ?}}])\n [{:a 1 :b {:b1 2 :b2 3}}\n {:a 2 :b {:b1 2 :b2 4}}])\n;\x3d\x3e {\x26? [{:a 1, :b {:b1 2}} {:a 2, :b {:b1 2}}]}\n```\n\n- query nested sequences and maps\n\n```clojure\n((pull/query '[{:a ?\n :b [{:c ?}]}])\n [{:a 1 :b [{:c 2}]}\n {:a 11 :b [{:c 22}]}])\n;\x3d\x3e {\x26? [{:a 1, :b [{:c 2}]} {:a 11, :b [{:c 22}]}]}\n```\n\nLet’s compare datomic pull and lasagna pull query with a simple example:\n\n- datomic pull\n\n```clojure\n(def sample-data\n [{:a 1 :b {:b1 2 :b2 3}}\n {:a 2 :b {:b1 2 :b2 4}}])\n\n(pull ?db\n [:a {:b [:b1]}]\n sample-data)\n```\n\n- Lasagna pull\n```clojure\n((pull/query '[{:a ? :b {:b1 ?}}])\n sample-data)\n;\x3d\x3e {\x26? [{:a 1, :b {:b1 2}} {:a 2, :b {:b1 2}}]}\n```\n\nA few things to note\n\n- lasagna-pull uses a map to query a map and surround it with a vector to query a sequence which is very intuitive to use.\n- `?` is just a placeholder on where the value will be after the pull.\n- lasagna-pull returns a map with your pulled data in a key `\x26?`.\n\n### Query specific keys\n\nYou might not want to fetch the whole path down to a leaf key, you might want to query that key and store it in a dedicated var. It is possible to do this by providing a var name after the placeholder `?` such as `?a` for instance. The key `?a` will then be added to the result map along side the `\x26?` that contains the whole data structure.\n\nLet’s have a look at an example.\n\nLet’s say we want to fetch specific keys in addition to the whole data structure:\n\n```clojure\n((pull/query '{:a ?a\n :b {:b1 ?b1 :b2 ?}})\n {:a 1 :b {:b1 2 :b2 3}})\n; \x3d\x3e {?\x26 {:a 1 :b {:b1 2 :b2 3}} ;; all nested data structure\n; ?a 1 ;; var a\n; ?b1 2 ;; var b1\n }\n```\n\nThe results now contain the logical variable we selected via `?a` and `?b1`. Note that the `:b2` key has just a `?` placeholder so it does not appear in the results map keys.\n\nIt works also for sequences:\n\n```clojure\n;; logical variable for a sequence\n((pull/query '{:a [{:b1 ?} ?b1]})\n {:a [{:b1 1 :b2 2} {:b1 2} {}]})\n;\x3d\x3e {?b1 [{:b1 1} {:b1 2} {}]\n; \x26? {:a [{:b1 1} {:b1 2} {}]}}\n```\n\nNote that `'{:a [{:b1 ?b1}]}` does not work because the logical value cannot be the same for all the `b1` keys:\n\n```clojure\n((pull/query '{:a [{:b1 ?b1}]})\n {:a [{:b1 1 :b2 2} {:b1 2} {}]})\n;\x3d\x3e {\x26? {:a [{:b1 1} nil nil]}} ;; not your expected result\n```\n\n## A query language to select structure with params and filters\n\nMost of the time, just selecting nested keys is not enough. 
We might want to select the key if certain conditions are met, or even pass a parameter if the value of the key is a function so we can run the function and get the value.\n\nWith library like [redplanetlabs/specter](https://github.com/redplanetlabs/specter), you have different possible transformations using diverse [macros](https://github.com/redplanetlabs/specter/wiki/List-of-Macros) which is an efficient way to select/transform data. The downside is that it introduces yet another syntax to get familiar with.\n\n`lasagna-pull` supports most of the features at a key level.\n\nInstead of just providing the key you want to pull in the pattern, you can provide a list with the key as first argument and the options as the rest of the list.\n\nThe transformation is done at the same time as the selection, the pattern can be enhanced with options:\n\n- not found\n\n```clojure\n((pull/query '{(:a :not-found ::not-found) ?}) {:b 5})\n;\x3d\x3e {\x26? {:a :user/not-found}}\n```\n\n- when\n\n```clojure\n((pull/query {(:a :when even?) '?}) {:a 5})\n;\x3d\x3e {\x26? {}} ;; empty because the value of :a is not even\n```\n\n- with\n\nIf the value of a query is a function, using `:with` option can invoke it and returns the result instead:\n\n```clojure\n((pull/query '{(:a :with [5]) ?}) {:a #(* % 2)})\n;\x3d\x3e {\x26? {:a 10}} ;; the arg 5 was given to #(* % 2) and the result returned\n```\n\n- batch\n\nBatched version of :with option:\n\n```clojure\n((pull/query '{(:a :batch [[5] [7]]) ?}) {:a #(* % 2)})\n;\x3d\x3e {\x26? {:a (10 14)}}\n```\n\n- seq\n\nApply to sequence value of a query, useful for pagination:\n\n```clojure\n((pull/query '[{:a ? :b ?} ? :seq [2 3]]) [{:a 0} {:a 1} {:a 2} {:a 3} {:a 4}])\n;\x3d\x3e {\x26? ({:a 2} {:a 3} {:a 4})}\n```\n\nAs you can see with the different options above, the transformations are specified within the selected keys. Unlike specter however, we do not have a way to apply transformation to all the keys for instance.\n\n## Pattern validation with Malli schema\n\nWe can optionally provide a [metosin/malli](https://github.com/metosin/malli) schema to specify the shape of the data to be pulled.\n\nThe client malli schema provided is actually internally \"merged\" to a internal schema that checks the pattern shape so both the pattern syntax and the pattern shape are validated.\n\n## Context\n\nYou can provide a context to the query. 
You can provide a `modifier` and a `finalizer`.\n\nThis context can help you gathering information from the query and apply a function on the results.\n\n## Lasagna Pull applied to flybot.sg\n\nTo see Lasagna Pull in action, refer to the doc [Lasagna Pull applied to flybot.sg](https://www.loicblanchard.me/blog/lasagna-pull-applied-to-flybot).\n", -"\n",ju,"Lasagna Pull: Precisely select from deep nested data","lasagna-pull",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-lasagna-pull"]),ij([Sl,ym,Wn,Cp,Mp,Np,pq,Dr,Zs,tt],[new S(null,4,5,T,["Clojure","React Native","Reagent","Figwheel"],null),new S(null,1,5,T,["2023-02-03"],null),'\n## Prerequisites\n\nThis project is stored alongside the backend and the web frontend in the mono-repo: [skydread1/flybot.sg](https://github.com/skydread1/flybot.sg)\n\nThe codebase is a full-stack **Clojure(Script)** app.\nThe backend is written in **Clojure** and the web and mobile clients are written in **ClojureScript**.\n\nFor the web app, we use [reagent](https://github.com/reagent-project/reagent), a ClojureScript interface for `React`.\n\nFor the mobile app, we use [reagent-react-native](https://github.com/vouch-opensource/reagent-react-native), a ClojureScript interface for `React Native`.\n\nThe mono-repo structure is as followed:\n\n```\n├── client\n│   ├── common\n│   │   ├── src\n│   │   │   └── flybot.client.common\n│   │   └── test\n│   │   └── flybot.client.common\n│   ├── mobile\n│   │   ├── src\n│   │   │   └── flybot.client.mobile\n│   │   └── test\n│   │   └── flybot.client.mobile\n│   └── web\n│   ├── src\n│   │   └── flybot.client.web\n│   └── test\n│   └── flybot.client.web\n├── common\n│   ├── src\n│   │   └── flybot.common\n│   └── test\n│   └── flybot.common\n├── server\n│   ├── src\n│   │   └── flybot.server\n│   └── test\n│   └── flybot.server\n```\n\nSo far, the RN app has only been tested on iOS locally.\n\n## Rational\n\nThe goal was to have a mobile app targeting both iOS and Android, written in `ClojureScript`, which can reuse most of our web frontend logic.\n\nTo do so, I used `React Native` for the following reasons:\n\n- Integrate very well with [figwheel-main](https://github.com/bhauman/figwheel-main) and [re-frame](https://github.com/day8/re-frame)\n- Target both iOS and Android\n- Does not necessitate too much configuration to get it running\n- React Native has an overall good documentation\n\n## Setup\n\nTo get React Native working, you need to follow a few steps.\n\nThe setup steps are well described in the [Figwheel doc](https://figwheel.org/docs/react-native.html).\n\n### npm\n\nThe Figwheel doc has a [dedicated section](https://figwheel.org/docs/npm.html) to install and setup NPM in a project. The best way to install npm is to use [nvm](https://github.com/nvm-sh/nvm).\n\n### React Native\n\nTo do mobile dev, some tools need to be installed and the react native [doc](https://reactnative.dev/docs/next/environment-setup) has the instructions on how to prepare the environment.\n\n### Ruby\n\nThe default Ruby version installed on MacOS is not enough to work with React Native. Actually, React Native needs a specific version of Ruby hence the use of a ruby version manager. 
I used [rbenv](https://github.com/rbenv/rbenv).\n\n```bash\n~:brew install rbenv ruby-build\n\n~:rbenv -v\nrbenv 1.2.0\n```\n\nReact Native uses [this version](https://github.com/facebook/react-native/blob/main/template/_ruby-version) of ruby so we need to download it.\n\n```bash\n# install proper ruby version\n~:rbenv install 2.7.6\n\n# set ruby version as default\n~:rbenv global 2.7.6\n```\n\nWe also need to add these 2 lines to the .zshrc\n\n```bash\nexport PATH\x3d"$HOME/.rbenv/bin:$PATH"\neval "$(rbenv init -)"\n```\n\nFinally we make sure we have the correct version:\n\n```bash\n~:ruby -v\nruby 2.7.6p219 (2022-04-12 revision c9c2245c0a) [arm64-darwin22]\n```\n\n### Ruby\'s Bundler\n\nFrom the doc:\n\nRuby\'s [Bundler](https://bundler.io/) is a Ruby gem that helps managing the Ruby dependencies of your project. We need Ruby to install Cocoapods and using Bundler will make sure that all the dependencies are aligned and that the project works properly.\n\n```bash\n# install the bundler\n~:gem install bundler\nFetching bundler-2.4.5.gem\nSuccessfully installed bundler-2.4.5\n...\n\n# Check the location where gems are being installed\n~:gem env home\n/Users/loicblanchard/.rbenv/versions/2.7.6/lib/ruby/gems/2.7.0\n```\n\n### Xcode\n\nFrom the doc:\n\n\x3e The easiest way to install `Xcode` is via the [Mac App Store](https://itunes.apple.com/us/app/xcode/id497799835?mt\x3d12)\n. Installing Xcode will also install the iOS Simulator and all the necessary tools to build your iOS app.\n\nI downloaded it from the apple store.\n\n`Xcode command line` tools also needs to be installed. It can be chosen in `Xcode→Settings→Locations`\n\n```bash\n~:xcode-select -p\n/Library/Developer/CommandLineTools\n```\n\n### Installing an iOS Simulator in Xcode\n\nIt should be already installed.\n\n### React Native Command Line Interface\n\nWe can use `npx` directly because it was shipped with `npm`.\n\n### CocoaPods\n\n[CocoaPods](https://github.com/CocoaPods/CocoaPods) is required to use the Ruby’s Bundler and we can install it using [rubygems](https://github.com/rubygems/rubygems):\n\n```bash\nsudo gem install cocoapods\n\n# check version\n~:gem which cocoapods\n/Users/loicblanchard/.rbenv/versions/2.7.6/lib/ruby/gems/2.7.0/gems/cocoapods-1.11.3/lib/cocoapods.rb\n```\n\n### Troubleshooting\n\nIn case of the error [Multiple Profiles](https://github.com/CocoaPods/CocoaPods/issues/11641), we need to switch to the Xcode cli manually like so:\n\n```bash\nsudo xcode-select --switch /Applications/Xcode.app\n```\n\n## Create Project\n\nWe now should have all the tools installed to start a React Native project on Mac targeting iOS.\n\n```bash\n# setup project\nnpx react-native init MyAwesomeProject\n```\n\n### Running the project\n\n```bash\nnpx react-native run-ios\n```\n\nThis should open a simulator with the welcome React Native display.\n\n## Integrate RN with Clojure and Figwheel\n\nAdd an alias to the deps.edn:\n\n```clojure\n:cljs/ios {:main-opts ["--main" "figwheel.main"\n "--build" "ios"\n "--repl"]}\n```\n\nNote: We need to use cljs version `1.10.773` because the latest version causes this [error](https://github.com/log4js-node/log4js-node/issues/1171) which is hard to debug.\n\nAlso, we need to add the figwheel config for `ios` in `ios.cljs.edn` :\n\n```clojure\n^{:react-native :cli\n :watch-dirs ["client/mobile/src" "client/common/src"]}\n{:main flybot.client.mobile.core\n :closure-defines {flybot.client.common.db.event/BASE-URI "http://localhost:9500"}}\n```\n\nAnd then we add the source files in the src 
folder like explained in the [figwheel doc](https://figwheel.org/docs/react-native.html).\n\nTo run the project, we start a REPLs (clj and cljs) with the proper aliases and in another terminal, we can run `run npm ios` to start the Xcode simulator.\n\nFor more details regarding the aliases: have a look at the [README](https://github.com/skydread1/flybot.sg)\n\n## Deps management\n\nIf we want to add a npm package, we need 2 steps:\n\n```bash\nnpm i my-npm-package\ncd ios\npod install\ncd ..\n```\n\n## Troubleshooting\n\nIn case of the error [RNSScreenStackHeaderConfig](https://stackoverflow.com/questions/73268848/i-am-trying-to-work-with-react-navigation-library-but-this-issue-keeps-coming), we need to:\n\n```bash\nnpm i react-native-gesture-handler\ncd ios\npod install\ncd ..\n\n# We restart the similutor and the error should be gone\n```\n\n## APP architecture and features\n\n### HTTP\n\nRegarding the http request made by the re-frame fx `http-xhrio`, it should work right away, same as for the web, but we just need to manually pass the cookie to the header as RN do not manage cookie for us like the web does.\n\nPassing the cookie in the request was quite straight forward, I just added `:headers {:cookie my-cookie}` to the `:http-xhrio` fx for all the requests that require a session for the mobile app.\n\n### Markdown to Native components\n\nI use [react-native-markdown-package](https://github.com/andangrd/react-native-markdown-package)\n\n```bash\nnpm i react-native-markdown-package --save\n```\n\n### Font\n\nOn iOS, I had to add the fonts in the `info.plist` like so:\n\n```xml\n\x3ckey\x3eUIAppFonts\x3c/key\x3e\n\t\x3carray\x3e\n\t \x3cstring\x3eAntDesign.ttf\x3c/string\x3e\n\t \x3cstring\x3eEntypo.ttf\x3c/string\x3e\n\t \x3cstring\x3eEvilIcons.ttf\x3c/string\x3e\n\t \x3cstring\x3eFeather.ttf\x3c/string\x3e\n\t \x3cstring\x3eFontAwesome.ttf\x3c/string\x3e\n\t \x3cstring\x3eFontAwesome5_Brands.ttf\x3c/string\x3e\n\t \x3cstring\x3eFontAwesome5_Regular.ttf\x3c/string\x3e\n\t \x3cstring\x3eFontAwesome5_Solid.ttf\x3c/string\x3e\n\t \x3cstring\x3eFoundation.ttf\x3c/string\x3e\n\t \x3cstring\x3eIonicons.ttf\x3c/string\x3e\n\t \x3cstring\x3eMaterialIcons.ttf\x3c/string\x3e\n\t \x3cstring\x3eMaterialCommunityIcons.ttf\x3c/string\x3e\n\t \x3cstring\x3eSimpleLineIcons.ttf\x3c/string\x3e\n\t \x3cstring\x3eOcticons.ttf\x3c/string\x3e\n\t \x3cstring\x3eZocial.ttf\x3c/string\x3e\n\t\x3c/array\x3e\n```\n\n## Navigation\n\n### Navigators\n\nAs for now we have 2 Navigators:\n\n[Tab Navigator](https://reactnavigation.org/docs/tab-based-navigation/)\n\n- `login` screen\n- `blog` screen: [Stack Navigator](https://reactnavigation.org/docs/stack-navigator/)\n\n[Stack Navigator](https://reactnavigation.org/docs/stack-navigator/)\n\n- `post-lists` screen\n- `post-read` screen\n- `post-edit` screen\n- `preview` screen\n\nSo the Stack Navigator is inside the Tab Navigator blog screen.\n\n#### How to navigate\n\nFor the navigation, we can use `re-frame` dispatch to change the navigation object ref to the new route.\n\nSince we are using re-frame, we might not be able to access `props.navigation.navigate`.\n\nHowever, we could store a reference to the navigation object in our re-frame DB so we can [Navigate without the navigation prop](https://reactnavigation.org/docs/navigating-without-navigation-prop/).\n\nTherefore, just using `re-frame/dispatch` to store the navigation ref to the `re-frame/db` and use `re-frame/subscribe` to get the ref (and so the nav params) is enough to handle navigation in our case. 
Thus, we do not use the props at all.\n\nRegarding the hot reloading, the only way I found is to store the js state and navigation objects in atoms via `defonce` so we can remain on the same screen with same params as before the reload.\n\nNote: Maybe I could use the AsyncStorage instead of the atoms even though it is only for dev purposes.\n\n## Env variables\n\nOne of the env variables we need to define is for the `uri`. For the web app, we can use relative path such as `/posts/all` but on mobile, there is no such thing as path and we would need to pass an absolute path such as `http://localhost:9500/posts/all` for instance in our case.\n\nTherefore, we need to have some config to pass to the cljs build. It is possible to do so via the compiler option [:closure-defines](https://clojurescript.org/reference/compiler-options#closure-defines).\n\n`:closure-defines` is a ClojureScript compiler option that allows you to specify a list of key-value pairs to be passed as JavaScript defines to the Google Closure Compiler. These defines can be used to conditionally compile code based on the value of the defined key. For example, you can define `:foo true` as a closure define and then use `#?(:foo some-code)` in your ClojureScript code to include `some-code` only when `:foo` is true.\n\nLuckily, figwheel allows us to [setup the closures-define in the config files](https://figwheel.org/docs/compile_config.html).\n\n## OAuth2.0\n\nI redirect the request back to an intermediate end point that will directly fetch the user info and create a ring-session that contains the google tokens, the user-name and user-permissions. Then ring encrypts that for us and put that `ring-session` in a cookie that is sent to the client.\n\nThus, my clients only receive this ring-session id that will be passed to every request made (automatic for browser, manually added to request for mobile).\n\nWhen the user logout, ring still passes a `ring-session` but it will be nil once decrypted by the server.\n\n### How to redirect back to the mobile app\n\nTo go back to the app after OAuth2.0 success, I had to add the scheme following to the `info.plist` for iOS:\n\n```xml\n\x3ckey\x3eCFBundleURLTypes\x3c/key\x3e\n\t\x3carray\x3e\n\t\x3cdict\x3e\n\t\t\x3ckey\x3eCFBundleURLSchemes\x3c/key\x3e\n\t\t\x3carray\x3e\n\t\t\x3cstring\x3eflybot-app\x3c/string\x3e\n\t\t\x3c/array\x3e\n\t\x3c/dict\x3e\n```\n\nAlso, in `ios/AppDelegate.mm`, I added:\n\n```jsx\n#import \x3cReact/RCTLinkingManager.h\x3e\n\n/// listen to incoming app links during your app\'s execution\n- (BOOL)application:(UIApplication *)application\n openURL:(NSURL *)url\n options:(NSDictionary\x3cUIApplicationOpenURLOptionsKey,id\x3e *)options\n{\n return [RCTLinkingManager application:application openURL:url options:options];\n}\n```\n\n## Cookie management\n\nI store the cookie in async-storage for this because it is enough for our simple use case.\n\n```jsx\nnpm install @react-native-async-storage/async-storage\n```\n\n### AsyncStorage with re-frame\n\nOnce the `ring-session` cookie is received from the server, a re-frame dispatch is triggered to set a cookie name `ring-session` in the device AsyncStorage. This event also updates the re-frame db value of `:user/cookie`.\n\nOne of the issues with AsyncStorage is that it returns a `Promise`. Therefore, we cannot access the value directly but only do something in the `.then` method. 
\n\n### Ensure order of events\n\nWe want to be sure the cookie is pulled from AsyncStorage before the db is initialised and before the posts and the user are fetched. However, we cannot just dispatch the event that pulls the cookie from AsyncStorage (it returns a Promise that will then dispatch another event to update the re-frame db) and then dispatch the event that gets all the posts from the server, because there is no guarantee the cookie will be set before the request is made.\n\nThe solution is to dispatch the initialisation event inside the event triggered by the Promise, like so:\n\n```clojure\n;; Set up all the db params and perform the request to get posts, pages and user using the cookie\n(rf/reg-event-fx\n :evt.app/initialize\n (fn [{:keys [db]} _] \n {:db (assoc db ...)\n :http-xhrio {:method :post\n :uri (base-uri "/pages/all")\n :headers {:cookie (:user/cookie db)}\n :params ...\n :format (edn-request-format {:keywords? true})\n :response-format (edn-response-format {:keywords? true})\n :on-success [:fx.http/all-success]\n :on-failure [:fx.http/failure]}}))\n\n;; Impure fx to fetch the cookie from storage and dispatch a new event to update the db\n(rf/reg-fx ;; 2)\n :fx.app/get-cookie-async-store\n (fn [k]\n (-\x3e (async-storage/get-item k) ;; Promise\n (.then #(rf/dispatch [:evt.cookie/get %])))))\n\n;; Pure event triggered at the start of the app\n(rf/reg-event-fx ;; 1)\n :evt.app/initialize-with-cookie\n (fn [_ [_ cookie-name]]\n {:fx [[:fx.app/get-cookie-async-store cookie-name]]}))\n\n;; Pure event triggered by :fx.app/get-cookie-async-store\n(rf/reg-event-fx ;; 3)\n :evt.cookie/get\n (fn [{:keys [db]} [_ cookie-value]]\n {:db (assoc db :user/cookie cookie-value)\n :fx [[:dispatch [:evt.app/initialize]]]}))\n```\n\n## Styling\n\nAs of now, the styling is done directly in the `:style` keys of the RN components’ hiccups. Some of the more complex components take their styling through functions, sometimes outside the `:style` keyword.\n\n## Conclusion\n\nI hope that this unusual mobile app stack made you want to consider `ClojureScript` as a good alternative for building mobile apps.\n\nIt is important to note that about 90% of the state management logic (re-frame) is shared between the web app and the mobile app, which is very convenient.\n\nFinally, the web app is deployed but the mobile app is not. All the codebase is open-source so feel free to take inspiration.\n', 
-new S(null,1,5,T,[new S(null,2,5,T,["Flybot","https://github.com/skydread1/flybot.sg"],null)],null),"\n",ju,"Reagent React Native Mobile App","reagent-native-app",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-reagent-native"]),ij([Sl,ym,Wn,Cp,Mp,Np,pq,Dr,Zs,tt],[new S(null,4,5,T,["Clojure","Compiler","CLR","Unity"],null),new S(null,1,5,T,["2022-04-08"],null),'\nIn this article, I will show you:\n\n1. how to handle CLR interop to prepare your Clojure code for the CLR\n2. how to use type hints to have your code more performant on the CLR\n3. how to manage dependencies\n4. how to compile to the CLR using Nostrand\n5. 
how to test in the CLR using Nostrand\n\nNote: the steps for packing the code into a NuGet package, pushing it to a remote GitHub repository and fetching it in Unity are highlighted in another article.\n\n## Rationale\n\n### What is the Magic Compiler\n\nMagic is a bootstrapped compiler written in Clojure that takes Clojure code as input and produces dotnet assemblies (.dll) as output.\n\nCompiler Bootstrapping is the technique for producing a self-compiling compiler, that is, a compiler written in the same language it intends to compile. In our case, MAGIC is a **Clojure** compiler that compiles **Clojure** code to .**NET** assemblies (.dll and .exe files).\n\nThis means we need the old dlls of MAGIC to generate the new dlls of the MAGIC compiler. We repeat this process until the compiler is good enough.\n\nThe very first MAGIC dlls were generated with the [clojure/clojure-clr](https://github.com/clojure/clojure-clr) project, which is also a Clojure compiler targeting the CLR, but written in **C#** and with limitations on the generated dlls (the problem MAGIC is intended to solve).\n\n### Why the Magic Compiler\n\nThere is already an existing clojure-\x3eclr compiler: [clojure/clojure-clr](https://github.com/clojure/clojure-clr). However, clojure-clr uses a technology called the DLR (dynamic language runtime) to optimize dynamic call sites, and it emits self-modifying code, which makes the assemblies unusable on mobile devices (IL2CPP in Unity). So we needed a compiler that emits assemblies that can target both desktop and mobile (IL2CPP), hence the Magic compiler.\n\n## Step 1: Interop\n\n### Reader conditionals\n\nWe don’t want separate branches for the JVM and the CLR, so we use reader conditionals.\n\nYou can find how to use reader conditionals in this [guide](https://clojure.org/guides/reader_conditionals).\n\nYou will mainly need them for the `require` and `import` forms as well as for function parameters.\n\nDon’t forget to change the extension of your file from `.clj` to `.cljc`.\n\n### Clj-kondo Linter supporting reader conditionals\n\nIn `Emacs` (with the `spacemacs` distribution), you might encounter some lint issues when using reader conditionals, and some configuration might be needed.\n\nThe Clojure linter library [clj-kondo/clj-kondo](https://github.com/clj-kondo/clj-kondo) supports reader conditionals.\n\nAll the instructions on how to integrate it with your preferred editor are [here](https://github.com/clj-kondo/clj-kondo/blob/master/doc/editor-integration.md).\n\nTo use [clj-kondo](https://github.com/clj-kondo/clj-kondo) with [syl20bnr/spacemacs](https://github.com/syl20bnr/spacemacs), you need the layer [borkdude/flycheck-clj-kondo](https://github.com/borkdude/flycheck-clj-kondo).\n\nHowever, there is no way to add this configuration in the `.spacemacs` config file.\n\nThe problem is that we need to set `:clj` as the default language to be checked.\n\nIn `VSCode`, I did not need any config to make it work.\n\n### Setting up the default reader conditional of the Clj-kondo linter\n\nThis has nothing to do with the `:default` reader conditional key, such as in:\n\n```clojure\n#?(:clj (Clojure expression)\n :cljs (ClojureScript expression)\n :cljr (Clojure CLR expression)\n :default (fallthrough expression))\n```\n\nIn the code above, the `:default` reader is used if none of the other readers matches the platform the code is run on. 
There is no need to add the `:default` tag everywhere, as the code will only be run on 2 potential environments: `:clj` and `:cljr`.\n\nFor our linter, in your Clojure environment (in my case Emacs with the [syl20bnr/spacemacs](https://github.com/syl20bnr/spacemacs) distribution), you can highlight the code for the `:clj` reader only.\n\nThe `:cljr` code will be displayed as comments.\n\nTo add the default `:clj` reader, we need to add it in the config file `~/.config/clj-kondo/config.edn` (to affect all our repos). It is also possible to add config at the project level, as stated [here](https://cljdoc.org/d/clj-kondo/clj-kondo/2020.09.09/doc/configuration).\n\nHere is the config to set up `:clj` as the default reader:\n\n```clojure\n{:cljc {:features #{:clj}}}\n```\n\nIf you don’t specify a default reader, `clj-kondo` will trigger lots of errors when you don’t provide the `:default` reader, because it assumes that you might run the code on a platform that doesn’t match any of the provided readers.\n\n## Step 2 (optional): Add type hints\n\nMagic supports the same shorthands as Clojure: [Magic type shorthands](https://github.com/nasser/magic/blob/master/src/magic/analyzer/types.clj#L37).\n\n### Value Type hints\n\nWe want to add Magic type hints in our Clojure code to prevent slow argument boxing at run time.\n\nThe main place we want to add type hints is the function arguments, such as in:\n\n```clojure\n(defn straights-n\n "Returns all possible straights with given length of cards."\n #?(:clj [n cards wheel?]\n :cljr [^int n cards ^Boolean wheel?])\n (...))\n```\n\nNote the reader conditionals here, so as not to affect our Clojure code and the tests that run on the JVM.\n\nI did not remove the reader conditionals here (the shorthands being the same in both Clojure and Magic, the hinted code would also run on the JVM), because we don’t want our Clojure tests to be affected and we want to keep the dynamic idiom of Clojure. Also, `wheel?` could very likely be given the value `nil` by one of the tests, which is in fact not a boolean.\n\nSo we want to keep our type hints in the `:cljr` reader to prevent Magic from doing slow reflection, but we don’t want to affect our `:clj` reader, which must remain dynamic and type-free so as not to alter our tests.\n\n### Ref Type hints\n\nOne of the biggest benefits of type hinting for Magic is to type hint records and their fields.\n\nHere is an example of record field type hinting:\n\n```clojure\n(defrecord GameState #?(:clj [players next-pos game-over?]\n :cljr [players ^long next-pos ^boolean game-over?])\n(...))\n```\n\nAs you can see, not all fields are type hinted because, for some, we don’t have a way to do so.\n\nThere is no way to type hint a collection parameter in Magic.\n\n`players` is a vector of `Player` records. We don’t have a way to type hint such a type; actually, we don’t have a way to type hint any collection in Magic. 
In Clojure (Java), we can type hint a collection of a known type such as:\n\n```clojure\n;; Clojure file\nuser\x3e (defn f\n "`poker-cards` is a vector of `PokerCard`."\n [^"[Lmyproj.PokerCard;" poker-cards]\n (map :num poker-cards))\n;\x3d\x3e #\'myproj.combination/f\n\n;; Clojure REPL\nuser\x3e (f [(-\x3ePokerCard :d :3) (-\x3ePokerCard :c :4)])\n;\x3d\x3e (:3 :4)\n```\n\nHowever, in Magic, such a thing is not possible.\n\nParameters which are `maps` do not benefit much from type hinting because a map could be a `PersistentArrayMap`, a `PersistentHashMap` or even a `PersistentTreeMap`, so we would need to use `^clojure.lang.APersistentMap` just to be generic, which is not really relevant.\n\nTo type hint a record as a parameter, it is advised to `import` it first to avoid having to write the fully qualified namespace:\n\n```clojure\n;; Import the Combination class so we can use the type hint format ^Combination\n#?(:cljr (:import [myproj.combination Combination]))\n```\n\nThen we can conveniently type hint a parameter which is a record, such as:\n\n```clojure\n(defn pass?\n "Returns true if the combi is a pass."\n #?(:clj [combi]\n :cljr [^Combination combi])\n (combi/empty-combi? combi))\n```\n\nA record field can also be a known record type, such as:\n\n```clojure\n(defrecord Player #?(:clj [combi penalty?]\n :cljr [^Combination combi\n ^boolean penalty?]))\n```\n\n### Type hints and testing\n\nSince in Clojure we tend to use simplified parameters in our functions to isolate the logic being tested (a map instead of a record, nil instead of false, a namespaced keyword instead of a map, etc.), naturally lots of tests will fail in the CLR because of the type hints.\n\nWe don’t want to rewrite our test suite with domain types, so we can just add reader conditionals to the tests affected by the type hints in the CLR.\n\n### Interop common cases\n\n#### Normal case\n\nFor interop, you can use reader conditionals such as in:\n\n```clojure\n(defn round-perc\n "Rounds the given `number`."\n [number]\n #?(:clj (-\x3e number double Math/round)\n :cljr (-\x3e number double Math/Round long)))\n```\n\n#### Deftype equals methods override\n\nFor `deftype` to work in the CLR, we need to override different equality methods than the Java ones. In Java we override `hashCode` and `equals`, but on the CLR we implement `hasheq` and `equiv`.\n\nHere is an example of how to override such methods:\n\n```clojure\n(deftype MyRecord [f-conj m rm]\n ;; Override equals method to compare two MyRecord.\n #?@(:clj\n [Object\n (hashCode [_] (.hashCode m))\n (equals [_ other]\n (and (instance? MyRecord other) (\x3d m (.m other))))]\n :cljr\n [clojure.lang.IHashEq\n (hasheq [_] (hash m))\n clojure.lang.IPersistentCollection\n (equiv [_ other]\n (and (instance? 
MyRecord other) (\x3d m (.m other))))]))\n```\n\n#### Defrecord empty method override for IL2CPP\n\nFor `defrecord` to work when we target **IL2CPP** (all our apps), we need to override the default implementation of the `empty` method, such as:\n\n```clojure\n(defrecord PokerCard [^clojure.lang.Keyword suit ^clojure.lang.Keyword num]\n #?@(:cljr\n [clojure.lang.IPersistentCollection\n (empty [_] nil)]))\n```\n\nNote the vector required with the **splicing** reader conditional `#?@`.\n\n## Step 3: Manage dependencies\n\nSince Magic was created before `tools.deps` or `leiningen`, it has its own deps management system, and the dedicated file for it is `project.edn`.\n\nHere is an example of a `project.edn`:\n```clojure\n{:name "My project"\n :source-paths ["src" "test"]\n :dependencies [[:github skydread1/clr.test.check "magic"\n :sha "a23fe55e8b51f574a63d6b904e1f1299700153ed"\n :paths ["src"]]\n [:gitlab my-private-lib1 "master"\n :paths ["src"]\n :sha "791ef67978796aadb9f7aa62fe24180a23480625"\n :token "r7TM52xnByEbL6mfXx2x"\n :domain "my.domain.sg"\n :project-id "777"]]}\n```\n\nRefer to the Nostrand [README](https://github.com/nasser/nostrand/blob/master/README.md) for more details.\n\nSo you need to add a `project.edn` at the root of your project directory listing the libraries you depend on.\n\n## Step 4: Compile to the CLR\n\n### Nostrand\n\n[nasser/nostrand](https://github.com/nasser/nostrand) is for Magic what [tools.deps](https://github.com/clojure/tools.deps.alpha) or [leiningen](https://github.com/technomancy/leiningen) are for a regular Clojure project. Magic has its own dependency manager and does not use tools.deps or leiningen because it was implemented before these deps managers came out!\n\nYou can find all the information you need to build and test your libraries in dotnet in the [README](https://github.com/nasser/nostrand/blob/master/README.md).\n\nIn short, you need to clone nostrand and create a dedicated Clojure namespace at the root of your project to run functions with Nostrand.\n\n### Build your Clojure project to .NET\n\nIn my case, I named my nostrand namespace `dotnet.clj`.\n\nYou can have a look at [clr.test.check/dotnet.clj](https://github.com/skydread1/clr.test.check/blob/magic/dotnet.clj); it is a port of clojure/test.check that compiles on both the JVM and the CLR.\n\nWe have the following require:\n```clojure\n(:require [clojure.test :refer [run-all-tests]]\n [magic.flags :as mflags])\n```\n\nDon’t forget to set the 2 Magic flags to true:\n\n```clojure\n(defn build\n "Compiles the project to dlls.\n This function is used by `nostrand` and is called from the terminal in the root folder as:\n nos dotnet/build"\n []\n (binding [*compile-path* "build"\n *unchecked-math* *warn-on-reflection*\n mflags/*strongly-typed-invokes* true\n mflags/*direct-linking* true\n mflags/*elide-meta* false]\n (println "Compile into DLL To : " *compile-path*)\n (doseq [ns prod-namespaces]\n (println (str "Compiling " ns))\n (compile ns))))\n```\n\nTo build to the `*compile-path*` folder, just run the `nos` command at the root of your project:\n\n```clojure\nnos dotnet/build\n```\n\n## Step 5: Test your Clojure project on .NET\n\nThe same remarks as for the build section apply:\n\n```clojure\n(defn run-tests\n "Run all the tests on the CLR.\n This function is used by `nostrand` and is called from the terminal in the root folder as:\n nos dotnet/run-tests"\n []\n (binding [*unchecked-math* *warn-on-reflection*\n mflags/*strongly-typed-invokes* true\n mflags/*direct-linking* true\n mflags/*elide-meta* false]\n (doseq [ns (concat 
prod-namespaces test-namespaces)]\n (require ns))\n (run-all-tests)))\n```\n\nTo run the tests, just run the `nos` command at the root of your project:\n\n```clojure\nnos dotnet/run-tests\n```\n\n## Example of a Clojure library ported to Magic\n\nAn example of a Clojure library that has been ported to Magic is [skydread1/clr.test.check](https://github.com/skydread1/clr.test.check/tree/magic), a fork of clojure/clr.test.check.\nMy fork uses reader conditionals so it can be run and tested in both JVM and CLR.\n\n## Learn more\n\nNow that your library is compiled to dotnet, you can learn how to package it to nuget, push it in to your host repo and import in Unity in this article:\n- [Pack, Push and Import Clojure to Unity](https://www.loicblanchard.me/blog/clojure-in-unity).\n', -new S(null,2,5,T,[new S(null,2,5,T,["Magic","https://github.com/nasser/magic"],null),new S(null,2,5,T,["Nostrand","https://github.com/nasser/nostrand"],null)],null),"\n",ju,"Port your Clojure lib to the CLR with MAGIC","port-clj-lib-to-clr",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-port-clj-lib"]),ij([Sl,ym,Wn,Mp,Np,pq,Dr,Zs,tt],[new S(null,6,5,T,"Clojure;Kaocha;Malli;Rich Comment Tests;Instrumentation;Data validation/generation".split(";"), +"cljs.spec.alpha/+",2101263265,null),Zl=new x(null,"p2","p2",905500641),$l=new x(null,"min","min",444991522),am=new x(null,"routes","routes",457900162),bm=new x(null,"on-set","on-set",-140953470),cm=new x(null,"ret","ret",-468222814),dm=new x("cljs.spec.alpha","unknown","cljs.spec.alpha/unknown",651034818),em=new $d("cljs.core","sequential?","cljs.core/sequential?",1777854658,null),fm=new x("cljs.spec.alpha","value","cljs.spec.alpha/value",1974786274),gm=new x(null,"div.tag","div.tag",1088822530), +hm=new x(null,"gfn","gfn",791517474),im=new x(null,"format","format",-1306924766),jm=new x("cljs.spec.alpha","recursion-limit","cljs.spec.alpha/recursion-limit",601284994),km=new x("cljs.spec.alpha","name","cljs.spec.alpha/name",205233570),lm=new x(null,"children","children",-940561982),mm=new x("sg.flybot.pullable.core","not-found","sg.flybot.pullable.core/not-found",625068482),nm=new x(null,"blockquote-paragraph","blockquote-paragraph",1931783682),om=new x("about","post","about/post",446064130), +pm=new x(null,"svg.menu-top","svg.menu-top",415216194),qm=new $d("clojure.test.check.generators","hash-map","clojure.test.check.generators/hash-map",1961346626,null),rm=new $d("clojure.test.check.generators","keyword","clojure.test.check.generators/keyword",24530530,null),sm=new x(null,"page-name","page-name",974981762),tm=new x("markdown-to-hiccup.decode","string","markdown-to-hiccup.decode/string",-2048172350),um=new x(null,"div.info","div.info",-1023482078),vm=new x("sg.flybot.pullable.core.option", +"watch","sg.flybot.pullable.core.option/watch",872342338),wm=new x("post","employer","post/employer",752984930),xm=new x("post","date","post/date",-1460888702),ym=new x("markdown-to-hiccup.decode","vector","markdown-to-hiccup.decode/vector",1979814818),zm=new x("reitit.core","router","reitit.core/router",1293076450),Am=new x(null,"-\x3e","-\x3e",514830339),Bm=new x(null,"pred-exprs","pred-exprs",1792271395),Cm=new $d(null,"?view","?view",-1050183581,null),Dm=new x(null,"home","home",-74557309),Em= +new x(null,"keys-pred","keys-pred",858984739),Fm=new x(null,"hr","hr",1377740067),Gm=new x(null,"button.nav-btn.hidden","button.nav-btn.hidden",1883500835),Hm=new 
x(null,"last-fragment","last-fragment",-919830173),Im=new x(null,"cljsLegacyRender","cljsLegacyRender",-1527295613),Jm=new x(null,"srcdark","srcdark",-1640621501),Km=new x(null,"div.post-links","div.post-links",1079291587),Lm=new x(null,"original-event","original-event",2121330403),Lk=new x(null,"ready","ready",1086465795),Mm=new $d("time", +"day-of-week","time/day-of-week",-1018890461,null),Nm=new x(null,"idle","idle",-2007156861),Om=new x(null,"footnotes","footnotes",-1842778205),Pm=new x(null,"temp","temp",1791541284),Qm=new x(null,"shrunk","shrunk",-2041664412),Rm=new x(null,"fn","fn",-1175266204),Sm=new $d("cljs.spec.alpha","alt","cljs.spec.alpha/alt",-2130750332,null),Tm=new x(null,"div.vignette","div.vignette",-2024675196),Um=new $d(null,"*runtime-asserts*","*runtime-asserts*",1632801956,null),Vm=new x(null,"lists","lists",-884730684), +Wm=new x(null,"buf","buf",-213913340),Xm=new $d("cljs.core","vector?","cljs.core/vector?",-1550392028,null),Ym=new x(null,"blockquote-end","blockquote-end",1122544964),Zm=new x(null,"namespaced-map","namespaced-map",1235665380),$m=new x(null,"rep+","rep+",-281382396),an=new $d("clojure.test.check.generators","fmap","clojure.test.check.generators/fmap",1957997092,null),bn=new x(null,"group","group",582596132),Tb=new x(null,"meta","meta",1499536964),cn=new x("fx.app","set-theme-local-store","fx.app/set-theme-local-store", +-1685133628),dn=new x(null,"interceptor","interceptor",1127739076),en=new x(null,"unprocessed","unprocessed",766771972),fn=new x(null,"table","table",-564943036),gn=new x("reitit.impl","accumulator","reitit.impl/accumulator",1345422212),hn=new x(null,"opt-keys","opt-keys",1262688261),jn=new x(null,"ul","ul",-1349521403),kn=new x(null,"event-handler","event-handler",-487718843),ln=new x("sg.flybot.pullable.core","context","sg.flybot.pullable.core/context",994024549),mn=new x(null,"validate","validate", +-201300827),nn=new $d("clojure.test.check","quick-check","clojure.test.check/quick-check",-810344251,null),Ub=new x(null,"dup","dup",556298533),on=new x(null,"div.menu-right","div.menu-right",-1140629147),pn=new x(null,"vec","vec",-657847931),qn=new x("cljs.spec.alpha","rep","cljs.spec.alpha/rep",1483217317),rn=new x(null,"pred","pred",1927423397),sn=new x(null,"clojurescript","clojurescript",-299769403),tn=new x("reitit.trie","multiple-terminators","reitit.trie/multiple-terminators",-116295163), +un=new x(null,"key","key",-1516042587),vn=new x(null,"in-table-body?","in-table-body?",-136773915),wn=new x(null,"fsm-state","fsm-state",1656310533),xn=new x(null,"top-displace","top-displace",-2094589019),yn=new x(null,"splice","splice",449588165),zn=new x("evt.nav","toggle","evt.nav/toggle",2052424806),An=new x("cljs.spec.alpha","accept","cljs.spec.alpha/accept",370988198),Bn=new x("cofx.app","local-store-theme","cofx.app/local-store-theme",-1877277338),Cn=new x(null,"reader-exception","reader-exception", +-1938323098),Dn=new x(null,"private","private",-558947994),En=new x(null,"inline-heading","inline-heading",-158773818),Fn=new x(null,"router","router",1091916230),Gn=new x(null,"when","when",-576417306),Hn=new x(null,"lookup","lookup",1225356838),In=new x(null,"gen","gen",142575302),Jn=new x(null,"nav.mobile","nav.mobile",1429685958),Kn=new x(null,"decode-type","decode-type",-1943601402),Ln=new x(null,"\x3c-","\x3c-",760412998),Mn=new x(null,"div.tags","div.tags",-1073137818),Nn=new x(null,"replace", +"replace",-786587770),On=new x(null,"ks","ks",1900203942),Pn=new x(null,"alt","alt",-3214426),Qn=new 
x(null,"div.post-body","div.post-body",210532294),Rn=new x(null,"meta-merge","meta-merge",638856199),Sn=new $d("time","month","time/month",-324062169,null),Tn=new x(null,"script","script",-1304443801),Un=new $d(null,"p1__26347#","p1__26347#",1868569799,null),Vn=new x("post","md-content","post/md-content",-1777572601),Wn=new x(null,"childContextTypes","childContextTypes",578717991),Xn=new x(null,"last-line-empty?", +"last-line-empty?",1279111527),Yn=new x(null,"db","db",993250759),Zn=new x(null,"fx-handler","fx-handler",-549783097),$n=new x(null,"sub","sub",-2093760025),ao=new $d(null,"?theme","?theme",2088351303,null),bo=new $d("cljs.spec.alpha","cat","cljs.spec.alpha/cat",-1471398329,null),co=new x("re-frame.std-interceptors","not-found","re-frame.std-interceptors/not-found",-1614827865),eo=new $d("time","year","time/year",1979222727,null),fo=new x(null,"queue","queue",1455835879),go=new x(null,"displayName", +"displayName",-809144601),ho=new x(null,"_","_",1453416199),io=new x(null,"validator","validator",-1966190681),jo=new x(null,"div.links","div.links",440246312),ko=new x(null,"fragment","fragment",826775688),lo=new x(null,"maybe","maybe",-314397560),mo=new x("fx.app","highlight-code","fx.app/highlight-code",1994810536),fl=new x(null,"default","default",-1987822328),no=new x("reitit.trie","unclosed-brackets","reitit.trie/unclosed-brackets",1599327560),oo=new x(null,"via","via",-1904457336),po=new x(null, +"sequential","sequential",-1082983960),qo=new $d(null,"?fragment","?fragment",1211696808,null),ro=new x(null,"conflicts","conflicts",-1219561816),so=new x(null,"post-id","post-id",1618659080),to=new $d("clojure.test.check.generators","choose","clojure.test.check.generators/choose",909997832,null),uo=new x(null,"ns","ns",441598760),vo=new x(null,"frozen-strings","frozen-strings",-1410661560),wo=new x(null,"found-token","found-token",113525576),xo=new x(null,"symbol","symbol",-1038572696),yo=new x(null, +"warn","warn",-436710552),zo=new x(null,"prepend","prepend",342616040),Ao=new x(null,"name","name",1843675177),Bo=new x(null,"div.post.error","div.post.error",-1659704279),Co=new $d(null,"NaN","NaN",666918153,null),Do=new $d("clojure.test.check.generators","generate","clojure.test.check.generators/generate",-690390711,null),Mk=new x(null,"pending","pending",-220036727),Eo=new $d("cljs.core","string?","cljs.core/string?",-2072921719,null),Fo=new x(null,"update-paths","update-paths",-813404599),Go= +new x(null,"deco","deco",769202793),Ho=new x(null,"fill","fill",883462889),Io=new $d("clojure.test.check.generators","set","clojure.test.check.generators/set",-1027639543,null),Jo=new $d("time","month-day","time/month-day",61138729,null),Ko=new x(null,"req-specs","req-specs",553962313),Lo=new $d("clojure.test.check.generators","one-of","clojure.test.check.generators/one-of",-183339191,null),Mo=new x(null,"value","value",305978217),No=new x("cljs.spec.alpha","gfn","cljs.spec.alpha/gfn",-593120375), +Oo=new x(null,"post-route","post-route",323113865),Po=new x(null,"div.menu-left","div.menu-left",-1967651927),Qo=new x("post","articles","post/articles",-458677271),Ro=new x(null,"parse-tag","parse-tag",1427313738),So=new x(null,"contextTypes","contextTypes",-2023853910),To=new x(null,"alignment","alignment",1040093386),Uo=new x(null,"file","file",-1269645878),Vo=new $d(null,"v","v",1661996586,null),Wo=new x("image","src-dark","image/src-dark",1452555818),Xo=new x("cljs.spec.alpha","spec","cljs.spec.alpha/spec", +1947137578),Yo=new x(null,"h5.info","h5.info",932286058),Zo=new 
x("image","src","image/src",-1820578166),$o=new x(null,"div.menu","div.menu",-175336694),ap=new $d(null,"js","js",-886355190,null),bp=new x(null,"readers","readers",-2118263030),cp=new x(null,"do-fx","do-fx",1194163050),dp=new x(null,"footer#footer-contact.container","footer#footer-contact.container",-1355543670),ep=new x(null,"end-column","end-column",1425389514),fp=new x(null,"query-id","query-id",1474128842),gp=new x(null,"db-page-name", +"db-page-name",-1373879285),hp=new x(null,"div.link","div.link",-340346549),ip=new x(null,"footnotes?","footnotes?",-1590157845),jp=new x(null,"params","params",710516235),kp=new $d("cljs.spec.alpha","fspec","cljs.spec.alpha/fspec",-1289128341,null),lp=new $d(null,"fn","fn",465265323,null),mp=new x(null,"amp","amp",271690571),np=new x(null,"section.container","section.container",-1656920213),op=new x(null,"component-did-update","component-did-update",-1468549173),pp=new x(null,"div.title","div.title", +-1929547732),Gk=new x(null,"val","val",128701612),qp=new x("cljs.spec.alpha","op","cljs.spec.alpha/op",-1269055252),rp=new x(null,"dispatch-n","dispatch-n",-504469236),sp=new $d(null,"inst","inst",-2008473268,null),tp=new x(null,"type","type",1174270348),up=new x("cljs.spec.alpha","v","cljs.spec.alpha/v",552625740),vp=new x(null,"div.image","div.image",923573900),wp=new $d("clojure.test.check.generators","map","clojure.test.check.generators/map",45738796,null),xp=new x(null,"template","template", +-702405684),yp=new x(null,"debug","debug",-1608172596),zp=new x(null,"src","src",-1651076051),Ap=new x("markdown-to-hiccup.decode","pass","markdown-to-hiccup.decode/pass",1096198285),Bp=new x("post","repos","post/repos",645359853),Cp=new $d(null,"p1__26348#","p1__26348#",1825593645,null),Dp=new x("reitit.core","path-conflicting","reitit.core/path-conflicting",617644429),Ep=new x(null,"getDerivedStateFromProps","getDerivedStateFromProps",-991834739),Fp=new x(null,"getDerivedStateFromError","getDerivedStateFromError", +166658477),wk=new x(null,"fallback-impl","fallback-impl",-1501286995),Gp=new x(null,"encode?","encode?",-640109139),Hp=new $d("clojure.test.check.properties","for-all*","clojure.test.check.properties/for-all*",67088845,null),Ip=new x(null,"references","references",882562509),Rk=new x(null,"keyword-fn","keyword-fn",-64566675),Jp=new $d(null,"Inf","Inf",647172781,null),Kp=new $d("cljs.core","map?","cljs.core/map?",-1390345523,null),Lp=new x("post","md-content-short","post/md-content-short",-861891763), +Rb=new x(null,"flush-on-newline","flush-on-newline",-151457939),Mp=new x("post","page","post/page",848001005),Np=new x(null,"componentWillUnmount","componentWillUnmount",1573788814),Op=new x(null,"path-parts","path-parts",945822894),Pp=new x(null,"displace","displace",-1153355602),Qp=new x(null,"string","string",-1989541586),Rp=new $d(null,"queue","queue",-1198599890,null),Sp=new x(null,"p1","p1",-936759954),Tp=new $d("clojure.test.check.generators","bind","clojure.test.check.generators/bind",-361313906, +null),Up=new $d("clojure.test.check.generators","symbol-ns","clojure.test.check.generators/symbol-ns",-862629490,null),Vp=new x(null,"vector","vector",1902966158),Wp=new x(null,"body-params","body-params",-369749490),Xp=new $d("cljs.core","zipmap","cljs.core/zipmap",-1902130674,null),Yp=new x(null,"illegal-argument","illegal-argument",-1845493170),Zp=new x(null,"code-style","code-style",-2144009586),$p=new x(null,"deregister-event-handler","deregister-event-handler",-1096518994),aq=new x(null,"header", +"header",119441134),bq=new 
x("cljs.spec.alpha","problems","cljs.spec.alpha/problems",447400814),cq=new x(null,"div.resources","div.resources",-1425738834),dq=new $d(null,"%","%",-950237169,null),eq=new x(null,"alignment-seq","alignment-seq",1587946543),fq=new x(null,"path-conflicts","path-conflicts",-1238675313),gq=new $d("cljs.core","map","cljs.core/map",-338988913,null),hq=new x(null,"on-click","on-click",1632826543),iq=new x(null,"strable","strable",1877668047),jq=new x(null,"className","className", +-1983287057),Wk=new x(null,"descendants","descendants",1824886031),kq=new x("cljs.spec.alpha","kvs-\x3emap","cljs.spec.alpha/kvs-\x3emap",579713455),lq=new x(null,"title","title",636505583),mq=new x(null,"running","running",1554969103),nq=new x(null,"md-str","md-str",-1726634417),oq=new x("post","title","post/title",628880975),pq=new x("markdown-to-hiccup.core","hiccup","markdown-to-hiccup.core/hiccup",1147350639),qq=new x(null,"column","column",2078222095),rq=new x(null,"dark","dark",1818973999), +sq=new x(null,"headers","headers",-835030129),tq=new x(null,"colon","colon",-965200945),uq=new x("reitit.impl","merge-data","reitit.impl/merge-data",-588218417),vq=new x(null,"center","center",-748944368),wq=new x(null,"batch","batch",-662921200),xq=new x(null,"shouldComponentUpdate","shouldComponentUpdate",1795750960),yq=new x(null,"global-interceptors","global-interceptors",-1995759472),Xk=new x(null,"ancestors","ancestors",-776045424),zq=new x(null,"div.contact-icons","div.contact-icons",-1044574E3), +Aq=new x(null,"flush-dom","flush-dom",-933676816),Bq=new x(null,"style","style",-496642736),Cq=new x(null,"theme","theme",-1247880880),Dq=new $d(null,"or","or",1876275696,null),Eq=new x("muuntaja","request","muuntaja/request",-1616403792),Fq=new x(null,"div","div",1057191632),Gq=new $d("cljs.spec.alpha","keys","cljs.spec.alpha/keys",1109346032,null),Hq=new x(null,"trim-v","trim-v",-1274938640),Sb=new x(null,"readably","readably",1129599760),Iq=new x(null,"codeblock-end","codeblock-end",1507794736), +Jq=new x(null,"mobile?","mobile?",1358664528),mk=new x(null,"more-marker","more-marker",-14717935),Kq=new x(null,"dispatch","dispatch",1319337009),Lq=new x(null,"div.menu-center","div.menu-center",-1277310703),Mq=new x("nav.main","open?","nav.main/open?",-1008469743),Nq=new x(null,"g","g",1738089905),Oq=new x(null,"reagentRender","reagentRender",-358306383),Pq=new $d("time","year-month","time/year-month",-1840595535,null),Qq=new x(null,"reason","reason",-2070751759),Rq=new x("cljs.spec.alpha","invalid", +"cljs.spec.alpha/invalid",-1220295119),Sq=new x(null,"req","req",-326448303),Tq=new x(null,"svg.diamond","svg.diamond",-1103138895),Uq=new x("markdown-to-hiccup.decode","map","markdown-to-hiccup.decode/map",1547761649),Vq=new x(null,"\x3c\x3e","\x3c\x3e",1280186386),Wq=new x("proc","val","proc/val",123303954),Xq=new $d("clojure.test.check.generators","double","clojure.test.check.generators/double",668331090,null),Yq=new x("markdown-to-hiccup.core","encode?","markdown-to-hiccup.core/encode?",-1516433262), +Zq=new x(null,"no-cache","no-cache",1588056370),$q=new x(null,"render","render",-1408033454),ar=new x(null,"db-handler","db-handler",579530098),br=new x(null,"filter","filter",-948537934),cr=new x("evt.app","toggle-theme","evt.app/toggle-theme",1265100274),dr=new x(null,"event","event",301435442),er=new x(null,"after","after",594996914),fr=new x(null,"div.post","div.post",-381003886),gr=new $d("clojure.test.check.generators","list","clojure.test.check.generators/list",506971058,null),hr=new x("proc", 
+"type","proc/type",1170508786),ir=new $d("clojure.test.check.generators","large-integer*","clojure.test.check.generators/large-integer*",-437830670,null),jr=new x(null,"ol","ol",932524051),kr=new x(null,"reagent-render","reagent-render",-985383853),lr=new x(null,"function-components","function-components",1492814963),mr=new x(null,"form-params","form-params",1884296467),nr=new x(null,"dynamic","dynamic",704819571),or=new x(null,"line","line",212345235),pr=new x(null,"assertion-failed","assertion-failed", +-970534477),qr=new x(null,"list","list",765357683),rr=new $d(null,"fn*","fn*",-752876845,null),sr=new x(null,"svg.nav-arrow","svg.nav-arrow",447254227),tr=new x(null,"on-write","on-write",31519475),ur=new x(null,"keyword","keyword",811389747),Kk=new x(null,"status","status",-1997798413),vr=new x(null,"result","result",1415092211),wr=new x(null,"div.vignette-container","div.vignette-container",-943167501),xr=new x(null,"not-found","not-found",-629079980),yr=new x(null,"with","with",-1536296876),zr= +new x(null,"next-fn-id","next-fn-id",738579636),Vb=new x(null,"print-length","print-length",1931866356),Ar=new x(null,"max","max",61366548),Br=new x(null,"syntax","syntax",-1637761676),Cr=new x("post","id","post/id",-1375514188),Dr=new x("cljs.spec.alpha","amp","cljs.spec.alpha/amp",831147508),Er=new x(null,"col","col",-1959363084),Fr=new x(null,"with-reitit?","with-reitit?",2145638964),Gr=new x(null,"id","id",-1388402092),Hr=new x(null,"name-conflicts","name-conflicts",-2016386444),Ir=new x(null, +"class","class",-2030961996),Jr=new x(null,"effects","effects",-282369292),Kr=new x("subs","pattern","subs/pattern",244885332),Lr=new $d("clojure.test.check.generators","such-that","clojure.test.check.generators/such-that",-1754178732,null),Mr=new $d("time","date-time","time/date-time",1814680468,null),Nr=new x(null,"getInitialState","getInitialState",1541760916),Or=new x(null,"div.txt","div.txt",-2029500395),Pr=new x(null,"coercion","coercion",904067157),Qr=new x(null,"open?","open?",1238443125), +Rr=new x(null,"nil","nil",99600501),Sr=new $d("time","period","time/period",1291634901,null),Tr=new x(null,"smallest","smallest",-152623883),Ur=new x(null,"constructor","constructor",-1953928811),Vr=new x(null,"auto-run","auto-run",1958400437),Wr=new x(null,"bracket","bracket",-600276523),Xr=new x(null,"comment","comment",532206069),Yr=new x(null,"event-v","event-v",1378681365),Vk=new x(null,"parents","parents",-2027538891),Zr=new x("blog","post","blog/post",264335925),$r=new x(null,"div.contribute", +"div.contribute",1287452309),as=new $d(null,"/","/",-1371932971,null),bs=new $d("cljs.core","nil?","cljs.core/nil?",945071861,null),cs=new x(null,"local-store-theme","local-store-theme",512970517),ds=new x(null,"compiler","compiler",-267926731),es=new x(null,"run-queue","run-queue",-1701798027),fs=new x(null,"req-keys","req-keys",514319221),gs=new x(null,"watch","watch",380988277),hs=new $d("clojure.test.check.generators","-\x3eGenerator","clojure.test.check.generators/-\x3eGenerator",-1179475051, +null),is=new x(null,"skip-next-line?","skip-next-line?",1683617749),js=new $d(null,"k","k",-505765866,null),ks=new x(null,"component-will-unmount","component-will-unmount",-2058314698),ls=new x("cljs.spec.alpha","k","cljs.spec.alpha/k",-1602615178),ms=new x(null,"div.text","div.text",645060726),ns=new x(null,"div.menu-mid","div.menu-mid",51272950),os=new $d(null,"?","?",-62633706,null),ps=new $d("cljs.core","fn","cljs.core/fn",-1065745098,null),qs=new $d("markdown-to-hiccup.core","md-\x3ehiccup", 
+"markdown-to-hiccup.core/md-\x3ehiccup",1623858518,null),rs=new x(null,"code","code",1586293142),ss=new $d(null,"?x","?x",-555096650,null),ts=new x(null,"stack","stack",-793405930),us=new x(null,"listen-key","listen-key",51973686),vs=new x(null,"portfolio","portfolio",957568598),ws=new x(null,"query-params","query-params",900640534),xs=new x(null,"opt-specs","opt-specs",-384905450),ys=new $d("clojure.test.check.generators","return","clojure.test.check.generators/return",1744522038,null),zs=new $d("clojure.test.check.generators", +"simple-type-printable","clojure.test.check.generators/simple-type-printable",-58489962,null),As=new x(null,"wilds","wilds",132271223),Bs=new x(null,"end-line","end-line",1837326455),Cs=new x(null,"ignore-anchor-click?","ignore-anchor-click?",-186007337),Ds=new x("re-frame.interceptor","original-exception?","re-frame.interceptor/original-exception?",-527923945),Es=new x(null,"inject-global-interceptors","inject-global-interceptors",-2144129737),Fs=new x(null,"blockquote-start","blockquote-start", +276074935),Gs=new x(null,"nav.browser","nav.browser",-894435913),Hs=new x(null,"display-name","display-name",694513143),Is=new x(null,"right","right",-452581833),Js=new x(null,"scheduled","scheduled",553898551),Ks=new $d("clojure.test.check.generators","symbol","clojure.test.check.generators/symbol",-1305461065,null),Ls=new $d(null,"-Inf","-Inf",-2123243689,null),Ms=new x("evt.app","highlight-code","evt.app/highlight-code",1954082775),Ns=new x(null,"coerce","coerce",1917884504),Os=new x("reitit.coercion", +"serialize-failed-result","reitit.coercion/serialize-failed-result",786287704),Ps=new $d(null,"ifn?","ifn?",-2106461064,null),Qs=new $d("clojure.test.check.generators","uuid","clojure.test.check.generators/uuid",1589373144,null),Rs=new x(null,"pred-forms","pred-forms",172611832),Ss=new x(null,"on-dispose","on-dispose",2105306360),Ts=new x("reitit.trie","following-parameters","reitit.trie/following-parameters",-1072685800),Us=new x(null,"d","d",1972142424),Vs=new x(null,"f","f",-1597136552),Ws=new x("post", +"image","post/image",-61565512),Xs=new x(null,"pause","pause",-2095325672),$s=new x(null,"error","error",-978969032),at=new x(null,"h2","h2",-372662728),bt=new x(null,"processed","processed",800622264),ct=new x(null,"popstate-listener","popstate-listener",806944472),dt=new x(null,"componentFunction","componentFunction",825866104),et=new x("fx.app","update-html-class","fx.app/update-html-class",-1360578664),ft=new x(null,"exception","exception",-335277064),gt=new x(null,"coeffects","coeffects",497912985), +ht=new x(null,"named","named",-422393479),it=new $d("clojure.test.check.generators","any-printable","clojure.test.check.generators/any-printable",-1570493991,null),jt=new x("fx.app","scroll-to","fx.app/scroll-to",-1993679399),kt=new x(null,"form","form",-1624062471),lt=new x(null,"missing","missing",362507769),mt=new x(null,"tag","tag",-1290361223),nt=new x("reitit.coercion","extract-request-format","reitit.coercion/extract-request-format",-1687953607),ot=new x(null,"seq","seq",-1817803783),pt=new x(null, +"target","target",253001721),qt=new x(null,"heading-anchors","heading-anchors",1713527866),rt=new $d("time","date","time/date",179823674,null),st=new x("post","css-class","post/css-class",-915881798),tt=new x("sg.flybot.pullable.core","invalid","sg.flybot.pullable.core/invalid",-1512548070),ut=new x(null,"parse-meta?","parse-meta?",-1938948742),vt=new x(null,"set","set",304602554),wt=new x(null,"unwrap","unwrap",-1399175462),xt=new 
x(null,"data-reitit-handle-click","data-reitit-handle-click",2084811610), +yt=new x(null,"h1","h1",-1896887462),zt=new x(null,"arglists","arglists",1661989754),At=new x(null,"loaded?","loaded?",-1108015206),Bt=new x(null,"catch-all","catch-all",1184112570),Ct=new x(null,"query","query",-1288509510),Dt=new x(null,"groupEnd","groupEnd",-337721382),Et=new x(null,"ctx-handler","ctx-handler",-1777672230),Ft=new x(null,"atom","atom",-397043653),Gt=new $d("time","zoned-date-time","time/zoned-date-time",-2125640645,null),Ht=new x(null,"is-prev-header?","is-prev-header?",-1637281701), +It=new x(null,"header.container","header.container",591808603),Jt=new x("app","posts","app/posts",760396923),Kt=new x(null,"trigger","trigger",103466139),Lt=new x(null,"viewBox","viewBox",-469489477),Mt=new x(null,"eof","eof",-489063237),Nt=new $d("clojure.test.check.generators","boolean","clojure.test.check.generators/boolean",1586992347,null),Ot=new x(null,"light","light",1918998747),Pt=new x(null,"hierarchy","hierarchy",-1053470341),Qt=new x("reitit.trie","trie-compiler","reitit.trie/trie-compiler", +2125029755),Rt=new $d("clojure.test.check.generators","string-alphanumeric","clojure.test.check.generators/string-alphanumeric",836374939,null),St=new x(null,"click-listen-key","click-listen-key",642240955),Tt=new x(null,"cofx","cofx",2013202907),Ut=new x(null,"path-params","path-params",-48130597),Vt=new x(null,"h3","h3",2067611163),Wt=new x(null,"body","body",-2049205669),Xt=new x(null,"reference-links?","reference-links?",-2003778981),Yt=new $d("clojure.test.check.generators","tuple","clojure.test.check.generators/tuple", +-143711557,null),Zt=new x("evt.app","initialize","evt.app/initialize",-141172005),vk=new x(null,"alt-impl","alt-impl",670969595),$t=new x(null,"resume","resume",-118572261),au=new x(null,"div.top","div.top",154731355),bu=new x(null,"ms","ms",-1152709733),cu=new x(null,"doc","doc",1913296891),du=new $d(null,"cljs.spec.alpha","cljs.spec.alpha",505122844,null),eu=new x(null,"fx","fx",-1237829572),fu=new x(null,"div.name","div.name",1027675228),gu=new x("reitit.coercion","parameter-coercion","reitit.coercion/parameter-coercion", +-1825124100),hu=new x(null,"before","before",-1633692388),iu=new x(null,"blog","blog",-302707364),ju=new $d("time","duration","time/duration",-1273941668,null),ku=new x(null,"on-navigate","on-navigate",-297227908),lu=new x("markdown-to-hiccup.core","options","markdown-to-hiccup.core/options",439665020),mu=new x(null,"callback","callback",-705136228),nu=new x(null,"parameters","parameters",-1229919748),ou=new $d(null,"apply","apply",-1334050276,null),pu=new x(null,"handler","handler",-195596612),qu= +new x(null,"contact","contact",609093372),ru=new x(null,"log","log",-1595516004),su=new x(null,"expand","expand",595248157),tu=new x(null,"compile","compile",608186429),uu=new x(null,"p","p",151049309),vu=new x(null,"div.menu-bottom","div.menu-bottom",-42043299),wu=new x("reitit.trie","parameters","reitit.trie/parameters",-1304786787),xu=new $d("clojure.test.check.generators","vector","clojure.test.check.generators/vector",1081775325,null),yu=new x(null,"character","character",380652989),zu=new x(null, +"metadata","metadata",1799301597),Au=new $d("clojure.test.check.generators","char","clojure.test.check.generators/char",-1426343459,null),Bu=new x(null,"map","map",1371690461),Cu=new x("subs.post","posts","subs.post/posts",1100023325),Du=new $d("time","zone","time/zone",-580695523,null),Eu=new x(null,"finish-run","finish-run",753148477),Fu=new 
x("cljs.spec.alpha","alt","cljs.spec.alpha/alt",523685437),Gu=new $d("cljs.spec.alpha","*runtime-asserts*","cljs.spec.alpha/*runtime-asserts*",-1060443587, +null),Hu=new x(null,"contextType","contextType",1033066077),Iu=new x(null,"paragraph","paragraph",296707709),Ju=new $d("cljs.core","or","cljs.core/or",1201033885,null),Ku=new x(null,"div.img","div.img",2113685181),Lu=new x(null,"test","test",577538877),Mu=new x(null,"on-coercion-error","on-coercion-error",-970787),Nu=new x(null,"direction","direction",-633359395),Ou=new $d("time","time","time/time",-1246984162,null),Pu=new $d(null,"?name","?name",2050703390,null),Qu=new $d("time","offset-date-time", +"time/offset-date-time",-1254592482,null),Ru=new x(null,"href","href",-793805698),Su=new x(null,"blockquote","blockquote",372264190),Tu=new x(null,"required","required",1807647006),Uu=new x(null,"img","img",1442687358),Vu=new x(null,"forms","forms",2045992350),Wu=new x("evt.page","set-current-view","evt.page/set-current-view",2103831998),Xu=new $d("clojure.test.check.generators","elements","clojure.test.check.generators/elements",438991326,null),Yu=new x(null,"use-fragment","use-fragment",-1617737154), +Zu=new $d(null,"and","and",668631710,null),$u=new x("reitit.coercion","skip","reitit.coercion/skip",-2123160930),av=new x(null,"custom-transformers","custom-transformers",1440601790),bv=new x("cljs.spec.alpha","nil","cljs.spec.alpha/nil",1733813950),cv=new $d("time","offset-time","time/offset-time",-1026521346,null),dv=new x(null,"inhibit-separator","inhibit-separator",1268116222),ev=new x(null,"dispatch-later","dispatch-later",291951390),fv=new x(null,"a","a",-2123407586),gv=new x("reitit.exception", +"cause","reitit.exception/cause",2130128734),hv=new x(null,"ps","ps",292358046),iv=new x(null,"join","join",-758861890),jv=new x(null,"view","view",1247994814),kv=new $d("clojure.test.check.generators","large-integer","clojure.test.check.generators/large-integer",-865967138,null),lv=new $d("clojure.test.check.generators","keyword-ns","clojure.test.check.generators/keyword-ns",-1492628482,null),mv=new x("evt.nav","set-navbar","evt.nav/set-navbar",-1471919041),nv=new x(null,"about","about",1423892543), +ov=new x(null,"div.menu-top","div.menu-top",-1071763361),pv=new $d("clojure.test.check.generators","shuffle","clojure.test.check.generators/shuffle",1032401055,null),qv=new x(null,"heading","heading",-1312171873),rv=new x("app","current-view","app/current-view",-1037443937),sv=new x(null,"hashchange-listener","hashchange-listener",-1234988801),tv=new $d("cljs.spec.alpha","conformer","cljs.spec.alpha/conformer",2140085535,null),uv=new x(null,"in","in",-1531184865),vv=new x(null,"replacement-transformers", +"replacement-transformers",-2028552897),wv=new x(null,"left","left",-399115937),xv=new x(null,"pattern","pattern",242135423),yv=new x(null,"html","html",-998796897),Ij=new x("cljs.core","not-found","cljs.core/not-found",-1572889185),zv=new x(null,"accept","accept",1874130431),Av=new $d("cljs.spec.alpha","\x26","cljs.spec.alpha/\x26",1635809823,null),Bv=new x(null,"opt","opt",-794706369),Cv=new x(null,"text","text",-1790561697),Dv=new x("cljs.spec.alpha","pcat","cljs.spec.alpha/pcat",26406623),Ev= +new x(null,"data","data",-232669377),Fv=new x("portfolio","post","portfolio/post",-781301953),Gv=new $d("cljs.spec.alpha","?","cljs.spec.alpha/?",1605136319,null);var Hv=new S(null,25,5,T,[ij([wm,xm,Vn,Qo,Bp,Lp,Mp,oq,Cr,Ws,st],["My journey so far",new S(null,1,5,T,["2024-01-06"],null),"\n## Work Experiences\n\n### 2024-now: Staff Software Engineer | 
[Flybot Pte Ltd](https://www.flybot.sg/), Singapore\n\n- Lead the Flybot's engineering team into meeting our client's expectations.\n- Report to the CEO directly to gather client's needs and plan accordingly\n- Design software architecture and delegate project responsibilities to the team's engineers. \n\n### 2023: Senior Software Engineer | [Flybot Pte Ltd](https://www.flybot.sg/), Singapore\n- Designed a challenge recommender that suggests personal challenges to Golden Island's players. The recommender is a Clojure application deployed in a POD in AWS EKS that consume events from kafka topics and produces personalized challenges to a dedicated kafka topic. It uses Datomic as storage solution within the EKS cluster | *Clojure, AWS EKS, k8s, datomic, kafka*\n- Developed the company blog mobile app with React Native framework. The mobile frontend and web frontend share most of the re-frame (state management) logic | *ClojureScript, ReactNative* (open-source)\n- Conducted technical interviews for junior developers and onboarding of new employees\n\n### 2020-2023: Software Engineer | [Flybot Pte Ltd](https://www.flybot.sg/), Singapore\n\n- Developed the company full-stack web app. The website has a blog. Oauth2 is used for authentication. The website is deployed in a docker container on AWS. It showcases some of Flybot's open-source libs for dependency injections and data pulling | *Clojure, ClojureScript, React* (open-source)\n- Developed a basic Monte Carlo Tree Search bot for our card games | *Clojure*\n- Ported our Clojure backend libraries to Unity so the Unity frontend developers can use the Clojure logic in Unity | *Clojure, C#*\n- Improved the Nostrand project management to ease the compilation with the Magic compiler (compile Clojure file to .NET assemblies) | *Clojure, C#* (open-source)\n- Developed a library called `MetaGame` to compose card games (play multiple rounds, make it a tournament). 
An entire tournament can be sent up using pure Clojure data | *Clojure*\n- Developed online Chinese card games (Pǎo Dé Kuài (跑得快) and Big two (锄大地) ) backend | *Clojure*\n\n### 2019: End of study project | [Bosch SEA Pte Ltd](https://www.bosch.com.sg/our-company/bosch-in-singapore/), Singapore\n- Modeled and provisioned infrastructure using AWS CloudFormation for a project that consists in facilitating the diagnosis of damaged automobile pieces via trend detection\n- Deployed and maintained AWS resources with Jenkins\n- Cohered Agile Software Development using Jira Kanban and Scrum as frameworks, Git for version-control system and Atlassian software *| Bitbucket, Jira and SourceTree*\n\n### 2017-2018: One-year internship | [Electriduct Inc](https://www.electriduct.com/), Fort Lauderdale, Florida, USA\n- Improved Web Design and responsivity | *HTML, CSS, JS, 3dcart templates*\n- Optimized online ad campaigns | *Google AdWords/Shopping/Analytics*\n- Developed an inventory management program using UPC barcode reading | *PHP, SQL, HTML, CSS, JS*\n- Developed a customized barcode generator for either sheet printers or thermal printer | *C#, SQL*\n\n## Education\n\n### 2015-2019: Master’s Degree | [CPE](https://www.cpe.fr/en/) Lyon, France\n\n- **Specialization**: Software Engineering\n- **Major Project**: Full-stack JS web app and Mobile App development allowing users to find new friends to go to common interest nearby events together *| Node.js, ReactJS, React Native*\n- **Secondary Projects**: Android Chat App *(Java)*, Big Data hackathon *(Hadoop, Tableau)*, Chess Game *(Java)*, Siam Game *(C)*, UX design *(Balsamiq)*\n\n### 2014-2015: Undergraduate in Engineering Sciences | [CPE](https://www.cpe.fr/en/) Lyon, France\n- **Major**: mathematics and physics\n- **Minor**: computer sciences and automatism\n\n## Skills\n\n### ICTS \n- **Back-End**: Clojure, Python, Java, PHP, Node.js, C, C++, C#\n- **HTTP**: Clojure Ring, Clojure Aleph\n- **Front-End**: ClojureScript, HTML, CSS, JS, C#, Reagent (React), Re-frame, figwheel-main\n- **Database**: MySQL, PostgreSQL, Datomic, Datalevin, Cassandra\n- **Mobile**: Reagent React Native, figwheel-main\n- **Cloud**: AWS, Vercel, Netlify\n- **Containers**: Docker, k8s, AWS EKS\n- **Event Streaming**: Kafka\n- **Proj Management**: GitHub, Gitlab, Bitbucket, Trello, Jira, Slack, Jenkins\n\n### Certifications\n- **AWS**: Solutions Architect - Associate\n", +new S(null,1,5,T,[new S(null,2,5,T,["My Tech Blog","../blog"],null)],null),new S(null,1,5,T,[new S(null,2,5,T,["My GitHub","https://github.com/skydread1"],null)],null),"\n",nv,"About Me","about-me",new n(null,3,[Zo,"/assets/loic-logo.png",Wo,"/assets/loic-logo.png",Tl,"Loic Logo"],null),"about-me"]),ij([Sl,xm,Vn,Bp,Lp,Mp,oq,Cr,Ws,st],[new S(null,4,5,T,["Clojure","System","Component","Deps Injection"],null),new S(null,1,5,T,["2023-05-08"],null),'\n## Prerequisites\n\nIf you are not familiar with [fun-map](https://github.com/robertluo/fun-map), please refer to the doc [Fun-Map: a solution to deps injection in Clojure](https://www.loicblanchard.me/blog/fun-map).\n\n## Goal\n\nIn this document, I will show you how we leverage `fun-map` to create different systems in the website [flybot.sg](https://www.flybot.sg/): `prod-system`, `dev-system`, `test-system` and `figwheel-system`.\n\n## Prod System\n\nIn our backend, we use `life-cycle-map` to manage the life cycle of all our stateful components.\n\n### Describe the system\n\nHere is the system we currently have for production:\n\n```clojure\n(defn system\n [{:keys 
[http-port db-uri google-creds oauth2-callback client-root-path]\n :or {client-root-path "/"}}]\n (life-cycle-map\n {:db-uri db-uri\n :db-conn (fnk [db-uri]\n (let [conn (d/get-conn db-uri db/initial-datalevin-schema)]\n (load-initial-data conn data/init-data)\n (closeable\n {:conn conn}\n #(d/close conn))))\n :oauth2-config (let [{:keys [client-id client-secret]} google-creds]\n (-\x3e config/oauth2-default-config\n (assoc-in [:google :client-id] client-id)\n (assoc-in [:google :client-secret] client-secret)\n (assoc-in [:google :redirect-uri] oauth2-callback)\n (assoc-in [:google :client-root-path] client-root-path)))\n :session-store (memory-store)\n :injectors (fnk [db-conn]\n [(fn [] {:db (d/db (:conn db-conn))})])\n :executors (fnk [db-conn]\n [(handler/mk-executors (:conn db-conn))])\n :saturn-handler handler/saturn-handler\n :ring-handler (fnk [injectors saturn-handler executors]\n (handler/mk-ring-handler injectors saturn-handler executors))\n :reitit-router (fnk [ring-handler oauth2-config session-store]\n (handler/app-routes ring-handler oauth2-config session-store))\n :http-server (fnk [http-port reitit-router]\n (let [svr (http/start-server\n reitit-router\n {:port http-port})]\n (closeable\n svr\n #(.close svr))))}))\n\n(def prod-system\n "The prod system starts a server on port 8123.\n It does not load any init-data on touch and it does not delete any data on halt!.\n You can use it in your local environment as well."\n (let [prod-cfg (config/system-config :prod)]\n (system prod-cfg)))\n```\n\nAt a glance, we can easily understand the dependency injections flow of the app.\n\nIf we were to represent these deps as a simple graph, we could have:\n\n```bash\nlife-cycle-map\n├── :db-conn (closeable)\n├── :oauth2-config\n├── :session-store\n├── :injectors\n│ └── :db-conn\n├── :executors\n│ └── :db-conn\n├── :saturn-handler\n├── :ring-handler\n│ ├── :injectors\n│ ├── :executors\n│ ├── :saturn-handler\n├── :reitit-router\n│ ├── :ring-handler\n│ ├── :oauth2-config\n│ └── :session-store\n└── :http-server (closeable)\n ├── :http-port\n ├── :reitit-router\n```\n\nThe function `prod-system` just fetches some env variables with the necessary configs to start the system.\n\n### Run the system\n\nWe can then easily start the system via the fun-map function `touch` :\n\n```clojure\nclj꞉clj.flybot.core꞉\x3e \n(touch prod-system)\n{:ring-handler #function[clj.flybot.handler/mk-ring-handler/fn--37646],\n :executors [#function[clj.flybot.handler/mk-executors/fn--37616]],\n :injectors [#function[clj.flybot.core/system/fn--38015/fn--38016]],\n :http-server\n #object[aleph.netty$start_server$reify__11448 0x389add75 "AlephServer[channel:[id: 0xd98ed2db, L:/0.0.0.0:8123], transport::nio]"],\n :reitit-router #function[clojure.lang.AFunction/1],\n :http-port 8123,\n :db-uri "datalevin/prod/flybotdb",\n :oauth2-config\n {:google\n {:scopes ["https://www.googleapis.com/auth/userinfo.email" "https://www.googleapis.com/auth/userinfo.profile"],\n :redirect-uri "https://v2.fybot.sg/oauth/google/callback",\n :client-id "client-id",\n :access-token-uri "https://oauth2.googleapis.com/token",\n :authorize-uri "https://accounts.google.com/o/oauth2/auth",\n :launch-uri "/oauth/google/login",\n :client-secret "client-secret",\n :project-id "flybot-website",\n :landing-uri "/oauth/google/success"}},\n :session-store\n #object[ring.middleware.session.memory.MemoryStore 0x1afb7eac "ring.middleware.session.memory.MemoryStore@1afb7eac"],\n :saturn-handler #function[clj.flybot.handler/saturn-handler],\n :db-conn\n 
{:conn\n #\x3cAtom@1ada44a1: \n {:store #object[datalevin.storage.Store 0x4578bf30 "datalevin.storage.Store@4578bf30"],\n :eavt #{},\n :avet #{},\n :veat #{},\n :max-eid 73,\n :max-tx 5,\n :hash nil}\x3e}}\n```\n\n## Dev System\n\nThe `system` described above can easily be adapted to be used for development purposes.\n\nActually, the only differences between the prod and dev systems are the following:\n\n- The configs (db uri, oauth2 callback)\n- How to shutdown the db system (`dev` clears the db, `prod` retains db data)\n\nThus, we just have to assoc a new db component to the `system` and read some dev configs instead of getting prod env variables:\n\n```clojure\n(defn db-conn-system\n "On touch: empty the db and get conn.\n On halt!: close conn and empty the db."\n [init-data]\n (fnk [db-uri]\n (let [conn (d/get-conn db-uri)\n _ (d/clear conn)\n conn (d/get-conn db-uri db/initial-datalevin-schema)]\n (load-initial-data conn init-data)\n (closeable\n {:conn conn}\n #(d/clear conn)))))\n\n(def dev-system\n "The dev system starts a server on port 8123.\n It loads some real data sample. The data is deleted when the system halt!.\n It is convenient if you want to see your backend changes in action in the UI."\n (-\x3e (system (config/system-config :dev))\n (assoc :db-conn (db-conn-system data/init-data))))\n```\n\nThe important thing to remember is that all the modifications to the system must be done before starting the system (via `touch`). If some modifications need to be made to the running system:\n\n1. Shutdown the system (via `halt!`)\n2. Update the system logic\n3. Start the newly modified system (via `touch`)\n\n## Test system\n\nNaturally, the fun-map system also plays well with testing.\n\nSame process as for dev and prod, we just need to adapt the system a bit to run our tests.\n\nThe tests requirement are:\n\n- Dedicated db uri and specific data sample to work with\n- Ignore Oauth2.0.\n\nSo same as for dev, we just read dedicated test configs and assoc a test db system to the default system:\n\n```clojure\n(defn test-system\n []\n (-\x3e (config/system-config :test)\n sys/system\n (dissoc :oauth2-config)\n (assoc :db-conn (sys/db-conn-system test-data))))\n```\n\nThis works well with the clojure.test fixtures:\n\n```clojure\n;; atom required to re-evalualte (test-system) because of fixture `:each`\n(def a-test-system (atom nil))\n\n(defn system-fixture [f]\n (reset! a-test-system (test-system))\n (touch @a-test-system)\n (f)\n (halt! 
@a-test-system))\n\n(use-fixtures :each system-fixture)\n```\n\n## Figwheel system\n\nIt is possible to [provide a ring-handler](https://figwheel.org/docs/ring-handler.html) to figwheel configs which will be passed to a server figwheel starts for us.\n\nWe just need to specify a ring-handler in `figwheel-main.edn` like so:\n\n```clojure\n{:ring-handler flybot.server.systems/figwheel-handler\n :auto-testing true}\n```\n\nOur system does have a ring-handler we can supply to figwheel, it is called `reitit-router` in our system (it returns a ring-handler).\n\nSince figwheel starts the server, we do not need the aleph server dependency in our system anymore, se we can dissoc it from the system.\n\nSo here is the `figwheel-system` :\n\n```clojure\n(def figwheel-system\n "Figwheel automatically touches the system via the figwheel-main.edn on port 9500.\n Figwheel just needs a handler and starts its own server hence we dissoc the http-server.\n If some changes are made in one of the backend component (such as handler for instance),\n you can halt!, reload ns and touch again the system."\n (-\x3e (config/system-config :figwheel)\n system\n (assoc :db-conn (db-conn-system data/init-data))\n (dissoc :http-port :http-server)))\n\n(def figwheel-handler\n "Provided to figwheel-main.edn.\n Figwheel uses this handler to starts a server on port 9500.\n Since the system is touched on namespace load, you need to have\n the flag :figwheel? set to true in the config."\n (when (:figwheel? CONFIG)\n (-\x3e figwheel-system\n touch\n :reitit-router)))\n```\n\nThe `figheel-handler` is the value of the key `:reitit-router` of our running system.\n\nSo the system is started first via `touch` and its handler is provided to the servers figwheel starts that will be running while we work on our frontend.\n', +new S(null,1,5,T,[new S(null,2,5,T,["Flybot","https://github.com/skydread1/flybot.sg"],null)],null),"\n",iu,"Fun-Map applied to flybot.sg","fun-map-applied-to-flybot",new n(null,3,[Zo,"/assets/loic-blog-logo.png",Wo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-fun-map-flybot"]),ij([Sl,xm,Vn,Lp,Mp,oq,Cr,Ws,st],[new S(null,4,5,T,["Clojure","Datomic","Cassandra","Docker"],null),new S(null,1,5,T,["2022-12-02"],null),'\n## Introduction\n\nWhile working on [flybot.sg](http://flybot.sg) , I experimented with `datomic-free`, datomic `starter-pro` with Cassandra and datomic starter-pro with embedded storage.\n\n## Rational\n\nYou can read the rationale of Datomic from their [on-prem documentation](https://docs.datomic.com/on-prem/getting-started/brief-overview.html)\n\nStuart Sierra explained very well how datomic works in the video [Intro to Datomic](https://www.youtube.com/watch?v\x3dR6ObrDWTlYA\x26t\x3d2776s).\n\nBasically, Datomic works as a layer on top of your underlying storage (in this case, we will use Cassandra db).\n\nYour `application` and a Datomic `transactor` are contained in a `peer`. \n\nThe transactor is the process that controls inbounds, and coordinates persistence to the storage services.\n\nThe process acts as a single authority for inbound transactions. 
A single transactor process allows the to be ACID compliant and fully consistent.\n\nThe peer is the process that will query the persisted data.\n\nSince Datomic leverages existing storage services, you can change persistent storage fairly easily.\n\n## Datomic Starter Pro with Cassandra\n\n### Datomic pro starter version\n\nDatomic is closed-source and commercial.\n\nYou can see the different pricing models in the page [Get Datomic On-Prem](https://www.datomic.com/get-datomic.html).\n\nThere are a few way to get started for free. The first one being to use the [datomic-free](https://blog.datomic.com/2012/07/datomic-free-edition.html) version which comes with in-mem database storage and local-storage transactor. You don’t need any license to use it so it is a good choice to get familiar with the datomic Clojure API.\n\nThen, there is `datomic pro starter` renamed `datomic starter` which is free and maintained for 1 year. After the one year threshold, you won’t benefit from support and you won’t get new versions of Datomic. You need to register to Datomic to get the license key.\n\n### Cassandra, Java and Python version caveats\n\nDatomic only support Cassandra up to version 3.x.x\n\nDatomic start pro version of Cassandra at the time of writting: 3.7.1\n\nClosest stable version of Cassandra: 3.11.10\n\n**Problem 1: Datomic does not support java 11 so we have to have a java 8 version on the machine**\n\nSolution: use [jenv](https://github.com/jenv/jenv) to manage multiple java version\n\n```bash\n# jenv to manage java version\nbrew install jenv\necho \'export PATH\x3d"$HOME/.jenv/bin:$PATH"\' \x3e\x3e ~/.bash_profile\necho \'eval "$(jenv init -)"\' \x3e\x3e ~/.bash_profile\n# add cask version\nbrew tap homebrew/cask-versions\n# install java 8 cask\nbrew install --cask adoptopenjdk8\n# add java 11 (current java version) to jenv\njenv add "$(/usr/libexec/java_home)"\n# add java 8 to jenv\njenv add /Library/Java/JavaVirtualMachines/adoptopenjdk-8.jdk/Contents/Home\n# update the ${JAVA_HOME} everytim we change version\njenv enable-plugin export\n#swith to java 8\njenv global 1.8\n```\n\n**Problem 2: cqlsh does not work with python3 with Cassandra running on java8**\n\nSolution: download the python2 pkg directly from [python.org](https://www.python.org/downloads/release/python-2718/)\n\n**Problem 3: `brew install cassandra@3` triggers an execution error hard to debug**\n\nSolution: download the tar.gz directly on [apache.org](https://www.apache.org/dyn/closer.lua/cassandra/3.11.14/apache-cassandra-3.11.14-bin.tar.gz)\n\n### Setup Cassandra locally and run start the transactor\n\nTo test Cassandra and datomic locally, we can use the Test Cluster of Cassandra which comes up with only one node.\n\nDatomic instruction for Cassandra [here](https://docs.datomic.com/on-prem/overview/storage.html#cassandra)\n\n```bash\n# Check if all the versions are ok\njava -version\nopenjdk version "1.8.0_292"\nOpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_292-b10)\nOpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.292-b10, mixed mode)\npython2 -V\nPython 2.7.18\ncqlsh\nConnected to Test Cluster at 127.0.0.1:9042.\n[cqlsh 5.0.1 | Cassandra 3.11.14 | CQL spec 3.4.4 | Native protocol v4]\nUse HELP for help.\n\n# Start cassandra\ncassandra -f\n\n# \x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\n# in other terminal\n\n# Only 
setup replica to 1 for the test cluster locally\n# add datomic keyspace and table\ncqlsh\nCREATE KEYSPACE IF NOT EXISTS datomic WITH replication \x3d {\'class\': \'SimpleStrategy\', \'replication_factor\' : 1};\nCREATE TABLE IF NOT EXISTS datomic.datomic\n(\n id text PRIMARY KEY,\n rev bigint,\n map text,\n val blob\n);\n\n# \x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\n# in other terminal\n\n# start datomic transactor\n# A sample of the cassandra transactor properties is provided in the datomic distribution samples.\n# the documentation of datomic mentioned we should have a msg of the shape:\n# System starter URI but I do not have URI but it seems to work nonetheless\ncd datomic-pro-1.0.6527/\nbin/transactor ~/workspaces/myproj/config/cassandra-transactor.properties\nLaunching with Java options -server -Xms1g -Xmx1g -XX:+UseG1GC -XX:MaxGCPauseMillis\x3d50\nSystem started\n\n# \x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\x3d\n# in other terminal\n\n# Test if the peer works properly on our localhost single node\nbin/shell\nDatomic Java Shell\nType Shell.help(); for help.\ndatomic % uri \x3d "datomic:cass://localhost:9042/datomic.datomic/myproj";\n\x3cdatomic:cass://localhost:9042/datomic.datomic/myproj\x3e\ndatomic % Peer.createDatabase(uri);\n\x3ctrue\x3e\ndatomic % conn \x3d Peer.connect(uri);\n\x3c{:unsent-updates-queue 0, :pending-txes 0, :next-t 1000, :basis-t 66, :index-rev 0, :db-id "myproj-some-id-here"}\x3e\n```\n\nIt’s important to note that we do not add `ssl` in the database URI so we don’t have to deal with the [KeyStore and TrustStore](https://docs.datomic.com/on-prem/overview/storage.html#troubleshooting) (for local use only)\n\n### Use Clojure API to create db and perform transactions\n\nSince the peer works using the datomic shell, we can confidently use the Clojure API from our code now.\n\nWe just need to add the datomic and Cassandra deps in the `deps.edn`:\n\n```clojure\n;; deps.edn : versions are provided upon subscription to datomic-pro\ncom.datomic/datomic-pro {:mvn/version "1.0.6527"}\ncom.datastax.cassandra/cassandra-driver-core {:mvn/version "3.1.0"}\n```\n\n## Datomic Starter Pro with embedded storage\n\nIn case of embedded DB, we only need to start a transactor and that’s it.\n\nThe URI to connect to the peer is of the shape:\n\n```clojure\n"datomic:dev://localhost:4334/myproj-db?password\x3dmy-secret"\n;; the password is the `storage-datomic-password` setup in the transactor properties.\n```\n\n## Datomic in docker container\n\nIn case we want to run datomic in a container (and maybe having our app in another container), we can do the following:\n\n- create DockerFile for our app\n- create DockerFile for Datomic Starter Pro (you could do the same with datomic-free)\n- create docker-compose file to run both the containers\n- update the transactors properties to be sure the app and transactor can communicate.\n\n### DockerFiles\n\nWe assume that the app has its own DockerFile and run on port 8123 in this example.\n\nHere is a DockerFile example to have Datomic running in a container:\n\n```docker\nFROM clojure:lein-2.6.1-alpine\n\nENV DATOMIC_VERSION 1.0.6527\nENV DATOMIC_HOME 
/opt/datomic-pro-$DATOMIC_VERSION\nENV DATOMIC_DATA $DATOMIC_HOME/data\n\nRUN apk add --no-cache unzip curl\n\n# Datomic Pro Starter as easy as 1-2-3\n# 1. Create a .credentials file containing user:pass\n# for downloading from my.datomic.com\nADD .credentials /tmp/.credentials\n\n# 2. Make sure to have a config/ folder in the same folder as your\n# Dockerfile containing the transactor property file you wish to use\nRUN curl -u $(cat /tmp/.credentials) -SL https://my.datomic.com/repo/com/datomic/datomic-pro/$DATOMIC_VERSION/datomic-pro-$DATOMIC_VERSION.zip -o /tmp/datomic.zip \\\n \x26\x26 unzip /tmp/datomic.zip -d /opt \\\n \x26\x26 rm -f /tmp/datomic.zip\n\nADD config $DATOMIC_HOME/config\n\nWORKDIR $DATOMIC_HOME\nRUN echo DATOMIC HOME: $DATOMIC_HOME\n\n# 3. Provide a CMD argument with the relative path to the transactor.properties\nVOLUME $DATOMIC_DATA\n\nEXPOSE 4334 4335 4336\n\nCMD bin/transactor -Ddatomic.printConnectionInfo\x3dtrue config/dev-transactor.properties\n```\n\n### Docker Compose\n\nHere is a `docker-compose.yml` we could use describing our app and datomic transactor containers\n\n```yaml\nversion: \'3.0\'\nservices:\n datomicdb:\n image: datomic-img\n hostname: datomicdb\n ports:\n - "4336:4336"\n - "4335:4335"\n - "4334:4334"\n volumes:\n - "/data"\n myprojapp:\n image: myproj-img\n ports:\n - "8123:8123"\n depends_on:\n - datomicdb\n```\n\nHere are the commands to create the images and run 2 containers.\n\n```docker\n# Create datomic transactor image\ndocker build -t datomic-img .\n\n# Create app image\ndocker build -t myproj-img .\n\n# run the 2 images in containers\ndocker-compose up\n```\n\nHowever, this will not work right away as we need to add a few configurations to the datomic transactor properties to make sure the app can communicate with the transactor.\n\n### Transactors Properties\n\nRegarding the transactor properties (datomic provides a template for a transactor with Cassandra storage), when we use docker, we need to pay attention to 3 properties:\n\n- The `localhost` is now 0.0.0.0\n- `alt-host` must be added with the container name (or IP) or the container running the app.\n- `storage-access` must be set to `remote`\n\nHere are the difference between containerized and not containerized properties for a `dev-transactor`: \n\n```yaml\n# If datomic not in container\nprotocol\x3ddev\nhost\x3dlocalhost\nport\x3d4334\n\n# If datomic in container\nprotocol\x3ddev\nhost\x3d0.0.0.0\nport\x3d4334\nalt-host\x3ddatomicdb\nstorage-access\x3dremote\n```\n\nAfter updating the transactor properties, you should be able to see the app running on port 8123 and be able to perform transactions as expected.\n', +"\n",iu,"Datomic Setup examples: embedded, cassandra, docker.","datomic-setup-examples",new n(null,3,[Zo,"/assets/loic-blog-logo.png",Wo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-datomic-setup"]),ij([Sl,xm,Vn,Lp,Mp,oq,Cr,Ws,st],[new S(null,4,5,T,["Clojure","Java 8 time","Tick","Duration/Period"],null),new S(null,1,5,T,["2024-04-20"],null),'\n## Introduction\n\nIt is always very confusing to deal with time in programming. In fact there are so many time representations, for legacy reasons, that sticking to one is not possible as our dependencies, databases or even programming languages might use different ways of representing time!\n\nYou might have asked yourself the following questions:\n- Why so many time formats? 
`timestamp`, `date-time`, `offset-date-time`, `zoned-date-time`, `instant`, `inst`?\n- What is `UTC`, `DST`?\n- why use Java `Instant` instead of Java `Date`?\n- Why not only deal with `timestamp`?\n- How to go from one time representation to the other without getting lost?\n- What is the difference between a `duration` and a `period`?\n\nThis article will answer these questions and will illustrate the answers with Clojure code snippets using the `juxt/tick` library.\n\n## What is `Tick`?\n\n[juxt/tick](https://github.com/juxt/tick) is an excellent open-source **Clojure** library to deal with `date` and `time` as values. The [documentation](https://juxt.github.io/tick/) is of very good quality as well.\n\n## Time since epoch (timestamp)\n\nThe `time since epoch`, or `timestamp`, is a way of measuring time by counting the number of time units that have elapsed since a specific point in time, called the **epoch**. It is often represented in either milliseconds or seconds, depending on the level of precision required for a particular application.\n\nSo basically, it is just an `int` such as `1705752000000`\n\nThe obvious advantage is the universal simplicity of representing time. The disadvantage is the human readability. So we need to find a more human-friendly representation of time.\n\n## Local time\n\n*Alice is having some fish and chips for her lunch in the UK. She checks her clock on the wall and it shows 12pm. She checks her calendar and it shows the day is January the 20th.*\n\nThe local time is the time in a specific time zone, usually represented using a date and time-of-day without any time zone information. In java it is called `java.time.LocalDateTime`. However, `tick` mentioned that when you asked someone the time, it is always going to be "local", so they prefer to call it `date-time` as the local part is implicit.\n\nSo if we ask Alice for the time and date, she will reply:\n```clojure\n(-\x3e (t/time "12:00")\n (t/on "2024-01-20"))\n;\x3d\x3e #time/date-time "2024-01-20T12:00"\n```\n\n*At the same time and date Alice is having lunch in London, Bob is having some fish soup for dinner in his Singapore\'s nearby food court. He checked the clock on the wall and reads 8pm.*\n\nSo if we ask Bob for the time, he will reply that it is 8pm. So we can see that the local time is indeed local as Bob and Alice have different times.\n\nThe question is: how to have a common time representation for Bob and Alice?\n\n## offset-date-time\n\nOne of the difference between Bob and Alice times is due to the Coordinated Universal Time (**UTC**). The UTC offset is the difference between the local time and the UTC time, and it is usually represented using a plus or minus sign followed by the number of hours ahead or behind UTC\n\nThe United Kingdom is located on the prime meridian, which is the reference line for measuring longitude and the basis for the UTC time standard. Therefore, the local time in the UK is always the same as UTC time, and the time zone offset is `UTC+0` (also called `Z`). Alice is on the prime meridian, therefore the time she sees is the UTC time, the universal time reference.\n\nAs you go east, the difference with UTC increase. For example, Singapore is located at approximately 103.8 degrees east longitude, which means that it is eight hours ahead of UTC, and its time zone offset is `UTC+8`. That is why Bob is 8 hours ahead of Alice (8 hours in the "future")\n\nAs you go west, the difference with UTC decrease. 
For example, New York City is located at approximately 74 degrees west longitude, which means that it is four hours behind UTC during standard time, and its time zone offset is `UTC-4` (4 hours behind - 4 hours in the "past").\n\nSo, going back to our example, Bob is 8 hours ahead (in the "future") of Alice as we can see via the `UTC+8`:\n\n```clojure\n;; Alice time\n(-\x3e (t/time "12:00")\n (t/on "2024-01-20")\n (t/offset-by 0))\n;\x3d\x3e #time/offset-date-time "2024-01-20T12:00Z"\n\n;; Bob time\n(-\x3e (t/time "12:00")\n (t/on "2024-01-20")\n (t/offset-by 8))\n;\x3d\x3e #time/offset-date-time "2024-01-20T12:00+08:00"\n```\n\nWe added the offset to our time representation, note the tick name for that representation: `offset-date-time`. In java, it is called `java.time.OffsetDateTime`. We can see for Bob\'s time a `+08:00`. This represents The Coordinated Universal Time (**UTC**) offset.\n\nSo we could assume that the UTC offset remains the same within the same **zone** (country or region), but it is not the case. Let\'s see why in the next section.\n\n## zoned-date-time\n\nSo far we have the following components to define a time:\n- date\n- time\n- UTC offset\n\nHowever, counter-intuitively, the UTC offset for Alice is not the same all year long. Sometimes it is `UTC+0` (`Z`) in winter (as we saw earlier) but sometimes it is `UTC+1` in summer.\n\nLet me prove it to you:\n```clojure\n;; time for Alice in winter\n(-\x3e (t/time "12:00")\n (t/on "2024-01-20") ;; January - a winter month\n (t/in "Europe/London")\n (t/offset-date-time))\n;\x3d\x3e #time/offset-date-time "2024-01-20T12:00Z"\n\n;; time for Alice in summer\n(-\x3e (t/time "12:00")\n (t/on "2024-08-20") ;; August - a summer month\n (t/in "Europe/London")\n (t/offset-date-time))\n;\x3d\x3e #time/offset-date-time "2024-08-20T12:00+01:00"\n```\n\nThis UTC offset difference is due to the Daylight Saving Time (**DST**).\n\nDaylight Saving Time (DST) is a system of adjusting the clock in order to make better use of daylight during the summer months by setting the clock forward by one hour in the spring and setting it back by one hour in the fall. This way, Alice can enjoy more of the sunlight in summer since the days are "longer" (more sunlight duration) while keeping her same working hours!\n\nIt is important to note that not all countries implement DSL. Some countries do not use DSL because they don\'t need. That is the case of Singapore. In Singapore, the sunset/sunrise is almost happening at the same time everyday so technically, there is no Winter/Summer. Some country chose not to use it. That\'s the case of Japan for instance. Japan could benefit from the DSL but chose not to implement it for diverse reasons.\n\nSo we can conclude that a UTC offset is not representative of a Zone because some country might implement DST and other not. Also, for the country implementing DST, their UTC is therefore not fix throughout the year. Thus, we need another parameter to fully define a time: the **Zone**:\n\n```clojure\n(-\x3e (t/time "12:00")\n (t/on "2024-01-20") ;; January - a winter month\n (t/in "Europe/London"))\n;\x3d\x3e #time/zoned-date-time "2024-01-20T12:00Z[Europe/London]"\n```\n\nYou can notice that it is the same code as before but I remove the conversion to an `offset-date-time`. 
Indeed, Adding the zone like in `(t/in "Europe/London")` is already considering the **Zone** obviously (and therefore the **UTC**) thus creating a `zoned-date-time`.\n\nA `#time/zoned-date-time` in Java is called a `java.time.ZonedDateTime`.\n\nSo we now have a complete way to describe the time:\n- a date\n- a time\n- a zone (that includes the location and the UTC encapsulating the DST)\n\nSo the time for Bob is:\n```clojure\n(-\x3e (t/time "12:00")\n (t/on "2024-01-20")\n (t/in "Asia/Singapore"))\n;\x3d\x3e #time/zoned-date-time "2024-01-20T12:00+08:00[Asia/Singapore]"\n```\n\nSo to recap:\n- the **Zone** `Asia/Singapore` always has the same **UTC** all year long because no **DST**\n- the **Zone** `Europe/London` has a different **UTC** in summer and winter\n- thus Bob is ahead of Alice by 8 hours during winter and Bob is ahead of Alice by 7 hours during summer.\n- This is due by the fact that the UK implements **DST** which makes its own **UTC** throughout the year.\n\nSo a **Zone** encapsulates the notion of **UTC** and **DST**.\n\n## instant\n\nYou might thought we were done here but actually the recommended time representation would be an `instant`. In java, it is called `java.time.Instant`. Why do we want to use instant is actually to avoid confusion. When you store a time in your DB, or when you want to add 10 days to this time, you actually don\'t want to deal with time zone. In programming, we always want to have a solution as simple as possible. Remember the very first time representation I mentioned? The **time since epoch**. The `epoch` in the prime meridian (`UTC+0`) is the same for everybody. So the time since epoch (to current UTC+0 time) in ms is a universal way of representing the time.\n\n```clojure\n;; instant time for Alice\n(-\x3e (t/time "12:00")\n (t/on "2024-01-20")\n (t/in "Europe/London")\n (t/instant))\n;\x3d\x3e #time/instant "2024-01-20T12:00:00Z"\n\n;; instant time for Bob\n(-\x3e (t/time "20:00")\n (t/on "2024-01-20")\n (t/in "Asia/Singapore")\n (t/instant))\n;\x3d\x3e #time/instant "2024-01-20T12:00:00Z"\n```\n\nWe can see in the example above, that since Singapore is 8 hours ahead of London, 12pm in London and 8pm in Singapore are indeed the same `instant`.\n\nThe `instant` is the human-friendly time representation of the timestamp (time since epoch). You can then store that format in your DB or do operation on it such as adding/substituting duration or period to it (more on this later).\n\nThe `epoch` in time-since-epoch is equivalent to #time/instant "1970-01-01T00:00:00Z":\n```clojure\n(t/epoch)\n;\x3d\x3e #time/instant "1970-01-01T00:00:00Z"\n```\n\n## Alice and Bob don\'t care about instants\n\nThat is correct, if we have a web page, we want Alice to see the time in London time and Bob the time in Singapore time. This is easy to do. we can derive the `zoned-date-time` from an `instant` since we know the zone of Bob and Alice:\n\n```clojure\n;; in Alice\'s browser\n(t/format (t/formatter "yyyy-MM-dd HH:mm:ss")\n (t/in #time/instant "2024-01-20T12:00:00Z" "Europe/London"))\n"2024-01-20 12:00:00"\n\n;; in Bob\'s browser\n(t/format (t/formatter "yyyy-MM-dd HH:mm:ss")\n (t/in #time/instant "2024-01-20T12:00:00Z" "Asia/Singapore"))\n"2024-01-20 20:00:00"\n```\n\n## inst\n\nLast time format I promise. As a clojure developer, you might often see `inst`. It is **different** from `instant`. In java `inst` is called `java.util.Date`. 
The `java.util.Date` class is an old and flawed class that was replaced by the Java 8 time API, and it should be avoided when possible.\n\nHowever, some libraries might require you to pass `inst` instead of `instant` still, and it is easy to convert between the two using the Tick library:\n\n```clojure\n(t/inst #time/instant "2024-01-20T04:00:00Z")\n;\x3d\x3e #inst "2024-01-20T04:00:00.000-00:00"\n```\n\nWhat about the other way around?\n\n```clojure\n(t/instant #inst "2024-01-20T04:00:00.000-00:00")\n;\x3d\x3e #time/instant "2024-01-20T04:00:00Z"\n```\n\n## All theses time formats are confusing\n\nJust remember these key points:\n- to store or do operations on time, use `instant` (java.time.Instant)\n- to represent time locally for users, convert your instant to `zoned-date-time` (java.time.ZonedDateTime)\n- to have a human readable format aka browser, parse your `zoned-date-time` using string formatter\n- if a third party lib needs other format, use tick intuitive conversion functions (t/inst, t/instant etc)\n\n## Duration vs Period\n\nWe now know that we need to use `instant` to perform operations on time. However, sometimes we use `duration` and sometimes we use `period`:\n\n```clojure\n(t/new-duration 10 :seconds)\n;\x3d\x3e #time/duration "PT10S"\n\n(t/new-period 10 :weeks)\n;\x3d\x3e #time/period "P70D"\n```\n\nThey are not interchangeable:\n```clojure\n(t/new-period 10 :seconds)\n; Execution error (IllegalArgumentException) at tick.core/new-period (core.cljc:649).\n; No matching clause: :seconds\n```\n\nSo what is the difference? I will give you a clue:\n- all units from `nanosecond` to `day` (included) are `durations`\n- all units from `day` such as a `week` for instance are a `period`.\n\nThere is one unit that can be both a `duration` and a `period`: a `day`:\n\n```clojure\n;; day as duration\n(t/new-duration 10 :days)\n#time/duration "PT240H"\n\n;; day as period\n(t/new-period 10 :days)\n#time/period "P10D"\n```\n\nTherefore, a simple definition could be:\n- a `duration` measures an amount of time using time-based values (seconds, nanoseconds). \n- a `period` uses date-based (we can also calendar-based) values (years, months, days)\n- a `day` can be both `duration` and `period`: a duration of one day is exactly 24 hours long but a period of one day, when considering the calendar, may vary.\n\nFirst, here is how you would add a day as duration or as a period to the proper format:\n\n```clojure\n;; time-based so use duration\n(-\x3e (t/time "10:00")\n (t/\x3e\x3e (t/new-duration 4 :hours)))\n;\x3d\x3e #time/time "14:00"\n\n;; date-based so use period\n(-\x3e (t/date "2024-04-01")\n (t/\x3e\x3e (t/new-period 1 :days)))\n;\x3d\x3e #time/date "2024-04-02"\n```\n\nNow, let me prove to you that we need to be careful to chose the right format for a day. In London, at 1am on the last Sunday of March, the clocks go forward 1 hour (DST increase by one because we enter summer months). 
So in 2024, at 1am, on March 31st, clocks go forward 1 hour.\n\n```clojure\n;; we add a period of 1 day\n(-\x3e (t/time "08:00")\n (t/on "2024-03-30")\n (t/in "Europe/London")\n (t/\x3e\x3e (t/new-period 1 :days)))\n#time/zoned-date-time "2024-03-31T08:00+01:00[Europe/London]"\n\n;; we add a duration of 1 day\n(-\x3e (t/time "08:00")\n (t/on "2024-03-30")\n (t/in "Europe/London")\n (t/\x3e\x3e (t/new-duration 1 :days)))\n#time/zoned-date-time "2024-03-31T09:00+01:00[Europe/London]"\n```\n\nWe can see that since in this specific DST update to summer month, the day 03/31 "gained" an hour so it has a `duration` of 25 hours, therefore our new time is `09:00`. However, the `period` taking into consideration the date in a calendar system, does not see a day as 24 hours (time-base) but as calendar unit (date-based) and therefore the new time is still `08:00`.\n\n## Conclusion\n\nA **Zone** encapsulates the notion of **UTC** and **DST**.\n\nThe **time since epoch** is the universal *computer-friendly* of representing time whereas the **Instant** is the universal *human-friendly* of representing time.\n\nA `duration` measures an amount of time using time-based values whereas a `period` uses date-based (calendar) values.\n\nFinally, for Clojure developers, I highly recommend using `juxt/tick` as it allows us to handle time efficiently (conversion, operations) and elegantly (readable, as values) and I use it in several of my projects. It is also of course possible to do interop with the `java.time.Instant` class directly if you prefer.\n', +"\n",iu,"Time as a value with Tick","tick",new n(null,3,[Zo,"/assets/loic-blog-logo.png",Wo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"juxt-tick"]),ij([Sl,xm,Vn,Bp,Lp,Mp,oq,Cr,Ws,st],[new S(null,2,5,T,["Clojure","Pull Pattern"],null),new S(null,1,5,T,["2023-04-08"],null),'\n## Prerequisites\n\nIf you are not familiar with [lasagna-pull](https://github.com/flybot-sg/lasagna-pull), please refer to the doc [Lasagna Pull: Precisely select from deep nested data](https://www.loicblanchard.me/blog/lasagna-pull)\n\n## Goal\n\nIn this document, I will show you how we leverage `lasagna-pull` in the [flybot app](https://github.com/skydread1/flybot.sg) to define a pure data API.\n\n## Defines API as pure data\n\nA good use case of the pattern is as parameter in a post request.\n\nIn our backend, we have a structure representing all our endpoints:\n\n```clojure\n;; BACKEND data structure\n(defn pullable-data\n "Path to be pulled with the pull-pattern.\n The pull-pattern `:with` option will provide the params to execute the function\n before pulling it."\n [db session]\n {:posts {:all (fn [] (get-all-posts db))\n :post (fn [post-id] (get-post db post-id))\n :new-post (with-role session :editor\n (fn [post] (add-post db post)))\n :removed-post (with-role session :editor\n (fn [post-id user-id] (delete-post db post-id user-id)))}\n :users {:all (with-role session :owner\n (fn [] (get-all-users db)))\n :user (fn [id] (get-user db id))\n :removed-user (with-role session :owner\n (fn [id] (delete-user db id)))\n :auth {:registered (fn [id email name picture] (register-user db id email name picture))\n :logged (fn [] (login-user db (:user-id session)))}\n :new-role {:admin (with-role session :owner\n (fn [email] (grant-admin-role db email)))\n :owner (with-role session :owner\n (fn [email] (grant-owner-role db email)))}\n :revoked-role {:admin (with-role session :owner\n (fn [email] (revoke-admin-role db email)))}}})\n```\n\nThis resembles a REST API 
structure.\n\nSince the API “route” information is contained within the pattern keys themselves, all the http requests with a pattern as params can hit the same backend URI.\n\nSo we have a single route for all pattern http request:\n\n```clojure\n(into (auth/auth-routes oauth2-config)\n [["/pattern" {:post ring-handler}] ;; all requests with pull pattern go here\n ["/users/logout" {:get (auth/logout-handler client-root-path)}]\n ["/oauth/google/success" {:get ring-handler :middleware [[auth/authentification-middleware client-root-path]]}]\n ["/*" {:get {:handler index-handler}}]])\n```\n\nTherefore the pull pattern:\n\n- Describes the API routes\n- Provides the data expected by the server in its `:with` option for the concerned endpoints\n- Describes what is asked by the client to only return relevant data\n- Can easily perform authorization\n\n## Example: pull a post\n\nFor instance, getting a specific post, meaning with the “route”: `:posts :post`, can be done this way:\n\n```clojure\n((pull/qfn\n {:posts\n {(list :post :with [s/post-1-id]) ;; provide required params to pullable-data :post function\n {:post/id \'?\n :post/page \'?\n :post/css-class \'?\n :post/creation-date \'?\n :post/last-edit-date \'?\n :post/author {:user/id \'?\n :user/email \'?\n :user/name \'?\n :user/picture \'?\n :user/roles [{:role/name \'?\n :role/date-granted \'?}]}\n :post/last-editor {:user/id \'?\n :user/email \'?\n :user/name \'?\n :user/picture \'?\n :user/roles [{:role/name \'?\n :role/date-granted \'?}]}\n :post/md-content \'?\n :post/image-beside {:image/src \'?\n :image/src-dark \'?\n :image/alt \'?}\n :post/default-order \'?}}}\n \'\x26? ;; bind the whole data\n ))\n; \x3d\x3e \n{:posts\n {:post\n #:post{:id #uuid "64cda032-b4e4-431e-bd85-0dbe34a8feeb" ;; s/post-1-id\n :page :home\n :css-class "post-1"\n :creation-date #inst "2023-01-04T00:00:00.000-00:00"\n :last-edit-date #inst "2023-01-05T00:00:00.000-00:00"\n :author #:user{:id "alice-id"\n :email "alice@basecity.com"\n :name "Alice"\n :picture "alice-pic"\n :roles [#:role{:name :editor\n :date-granted\n #inst "2023-01-02T00:00:00.000-00:00"}]}\n :last-editor #:user{:id "bob-id"\n :email "bob@basecity.com"\n :name "Bob"\n :picture "bob-pic"\n :roles [#:role{:name :editor\n :date-granted\n #inst "2023-01-01T00:00:00.000-00:00"}\n #:role{:name :admin\n :date-granted\n #inst "2023-01-01T00:00:00.000-00:00"}]}\n :md-content "#Some content 1"\n :image-beside #:image{:src "https://some-image.svg"\n :src-dark "https://some-image-dark-mode.svg"\n :alt "something"}\n :default-order 0}}}\n```\n\nIt is important to understand that the param `s/post-1-id` in `(list :post :with [#uuid s/post-1-id])` was passed to `(fn [post-id] (get-post db post-id))` in `pullable-data`. 
\n\nThe function returned the post fetched from the db.\n\nWe decided to fetch all the information of the post in our pattern but we could have just fetch some of the keys only:\n\n```clojure\n((pull/qfn\n {:posts\n {(list :post :with [s/post-1-id]) ;; only fetch id and page even though all the other keys have been returned here\n {:post/id \'?\n :post/page \'?}}}\n \'\x26?))\n\x3d\x3e {:posts\n {:post\n {:post/id #uuid "64cda032-b4e4-431e-bd85-0dbe34a8feeb"\n :post/page :home}}}\n```\n\nThe function `(fn [post-id] (get-post db post-id))` returned **all** the post keys but we only select the `post/id` and `post/page`.\n\nSo we provided the required param `s/post-1-id` to the endpoint `:post` and we also specified what information we want to pull: `:post/id` and `:post/page`.\n\nYou can start to see how convenient that is as a frontend request to the backend. our post request body can just be a `pull-pattern`! (more on this further down in the doc).\n\n## Post data validation\n\nIt is common to use [malli](https://github.com/metosin/malli) schema to validate data.\n\nHere is the malli schema for the post data structure we used above:\n\n```clojure\n(def post-schema\n [:map {:closed true}\n [:post/id :uuid]\n [:post/page :keyword]\n [:post/css-class {:optional true} [:string {:min 3}]]\n [:post/creation-date inst?]\n [:post/last-edit-date {:optional true} inst?]\n [:post/author user-schema]\n [:post/last-editor {:optional true} user-schema]\n [:post/md-content [:and\n [:string {:min 10}]\n [:fn\n {:error/message "Level 1 Heading `#` missing in markdown."}\n md/has-valid-h1-title?]]]\n [:post/image-beside\n {:optional true}\n [:map\n [:image/src [:string {:min 10}]]\n [:image/src-dark [:string {:min 10}]]\n [:image/alt [:string {:min 5}]]]]\n [:post/default-order {:optional true} nat-int?]])\n```\n\n## Pattern data validation\n\n`lasagna-pull` also allows us to provide schema alongside the pattern to validate 2 things:\n\n- the pattern format is correct\n- the pattern content respects a malli schema\n\nThis is very good because we can have a malli schema for the entire `pullable-data` structure like so:\n\n```clojure\n(def api-schema\n "All keys are optional because it is just a data query schema.\n maps with a property :preserve-required set to true have their keys remaining unchanged."\n (all-keys-optional\n [:map\n {:closed true}\n [:posts\n [:map\n [:post [:\x3d\x3e [:cat :uuid] post-schema]] ;; route from our get post example \n [:all [:\x3d\x3e [:cat] [:vector post-schema]]]\n [:new-post [:\x3d\x3e [:cat post-schema-create] post-schema]]\n [:removed-post [:\x3d\x3e [:cat :uuid :string] post-schema]]]]\n [:users\n [:map\n [:user [:\x3d\x3e [:cat :string] user-schema]]\n [:all [:\x3d\x3e [:cat] [:vector user-schema]]]\n [:removed-user [:\x3d\x3e [:cat :string] user-schema]]\n [:auth [:map\n [:registered [:\x3d\x3e [:cat :string user-email-schema :string :string] user-schema]]\n [:logged [:\x3d\x3e [:cat] user-schema]]]]\n [:new-role [:map\n [:admin [:\x3d\x3e [:cat user-email-schema] user-schema]]\n [:owner [:\x3d\x3e [:cat user-email-schema] user-schema]]]]\n [:revoked-role [:map\n [:admin [:\x3d\x3e [:cat user-email-schema] user-schema]]]]]]]))\n```\n\nIf we go back to the scenario where we want to fetch a specific post from the DB, we can see that we are indeed having a function as params of the key `:post` that expects one param: a uuid:\n\n```clojure\n[:post [:\x3d\x3e [:cat :uuid] post-schema]] \n```\n\nIt corresponds to the pattern part:\n\n```clojure\n(list :post :with 
[s/post-1-id])\n```\n\nAnd `lasagna-pull` provides validation of the function’s params which is very good to be sure the proper data is sent to the server!\n\nPlus, in case the params given to one of the routes are not valid, the function won’t even be executed.\n\nSo now we have a way to do post request to our backend providing a pull-pattern as the request body and our server can validate this pattern format and content as the data is being pulled.\n\n## Pattern query context\n\n### How it works\n\nEarlier, I asked you to assume that the function from `pullable-data` was returning a post data structure.\n\nIn reality, it is a bit more complex than this because what is returned by the different functions (endpoints) in `pullable-data` is a map. For instance:\n\n```clojure\n;; returned by get-post\n{:response (db/get-post db post-id)} ;; note the response key here\n\n;; returned by register-user\n{:response user\n :effects {:db {:payload [user]}} ;; the db transaction description to be made\n :session {:user-id user-id} ;; the user info to be added to the session\n}\n```\n\nThis is actually a problem because our pattern for a post is:\n\n```clojure\n{:posts\n {(list :post :with [s/post-1-id])\n {:post/id \'?}}}\n```\n\nand with what is returned by `(fn [post-id] (get-post db post-id))`, we should have:\n\n```clojure\n{:posts\n {(list :post :with [s/post-1-id])\n {:response ;; note the response here\n \t {:post/id \'?}}}}\n```\n\nAlso, in case of a user registration for instance, you saw that we have other useful information such as\n- effects: the db transaction to add the user to the db\n- session: some user info to add to the session. \n\nHowever we do not want to pull the `effects` and `session`. We just want a way to accumulate them somewhere.\n\nWe could perform the transaction directly and return the post, but we don\'t want that.\n\nWe prefer to accumulate side effects descriptions and execute them all at once in a dedicated `executor`.\n\nThe `response` needs to be added to the pulled data, but the `effects` and `session` need to be stored elsewhere and executed later on.\n\nThis is possible via a `modifier` and a `finalizer` context in the `pull/query` API.\n\nIn our case, we have a `mk-query` function that uses a `modifier` and `finalizer` to achieve what I described above:\n\n```clojure\n(defn mk-query\n "Given the pattern, make an advance query using a context:\n modifier: gather all the effects description in a coll\n finalizer: assoc all effects descriptions in the second value of pattern."\n [pattern]\n (let [effects-acc (transient [])\n session-map (transient {})]\n (pull/query\n pattern\n (pull/context-of\n (fn [_ [k {:keys [response effects session error] :as v}]]\n (when error\n (throw (ex-info "executor-error" error)))\n (when session ;; assoc session to the map session\n (reduce\n (fn [res [k v]] (assoc! res k v))\n session-map\n session))\n (when effects ;; conj the db transaction description to effects vector\n (conj! effects-acc effects))\n (if response\n [k response]\n [k v]))\n #(assoc % ;; returned the whole pulled data and assoc the effects and session to it\n :context/effects (persistent! effects-acc)\n :context/sessions (persistent! session-map))))))\n```\n\n### Example of post creation\n\nLet’s have a look at an example:\n\nWe want to add a new post. 
When we make a request for a new post, if everything works fine, the pullable-data function at the route `:new-post` returns a map such as:\n\n```clojure\n{:response full-post ;; the pullable data to return to the client\n :effects {:db {:payload posts}} ;; the new posts to be added to the db\n}\n```\n\nThe pull pattern for such request can be like this:\n\n```clojure\n{:posts\n {(list :new-post :with [post-in]) ;; post-in is a full post to be added with all required keys\n {:post/id \'?\n :post/page \'?\n :post/default-order \'?}}}\n```\n\nThe `post-in` is provided to the pullable-data function of the key `:new-post`.\n\nThe function of `add-post` actually determine all the new `:post/default-order` of the posts given the new post. That is why we see in the side effects that several `posts` are returned because we need to have their order updated in db.\n\nRunning this pattern with the pattern **context** above returns:\n\n```clojure\n{\x26? {:posts {:new-post {:post/id #uuid "64cda032-3dae-4845-b7b2-e4a6f9009cbd"\n :post/page :home\n :post/creation-date #inst "2023-01-07T00:00:00.000-00:00"\n :post/default-order 2}}}\n :context/effects [{:db {:payload [{:post/id #uuid "64cda032-3dae-4845-b7b2-e4a6f9009cbd"\n :post/page :home\n :post/md-content "#Some content 3"\n :post/creation-date #inst "2023-01-07T00:00:00.000-00:00"\n :post/author {:user/id "bob-id"}\n :post/default-order 2}]}}]\n :context/sessions {}}\n```\n\n- the response has been returned from the :with function to the pattern in the ‘\x26? key\n- the effects have been accumulated and assoc in `:context/effects`\n- there was no data to be added to the session\n\nThen, in the ring response, we can just return the value of `\x26?`\n\nAlso, the effects can be executed in a dedicated executor functions all at once.\n\nThis allows us to deal with pure data until the very last moment when we run all the side effects (db transaction and session) in one place only we call `executor`.\n\n## Saturn handler\n\nIn our system, we have a component called the `saturn-handler`. The component `ring-handler` depends on it.\n\nIn order to isolate the side effects as much as we can, our endpoints from our `pullable-data`, highlighted previously, do not perform side effects but return **descriptions** in pure data of the side effects to be done. These side effects are the ones we gather in `:context/effects` and `:context/sessions` using the pull-pattern\'s query context.\n\nThe saturn-handler returns a map with the `response` (data pulled and requested in the client pattern) to be sent to the client, the `effect-desc` to be perform (in our case, just db transactions) and the `session` update to be done:\n\n```clojure\n(defn saturn-handler\n "A saturn handler takes a ring request enhanced with additional keys form the injectors.\n The saturn handler is purely functional.\n The description of the side effects to be performed are returned and they will be executed later on in the executors."\n [{:keys [params body-params session db]}]\n (let [pattern (if (seq params) params body-params)\n data (op/pullable-data db session)\n {:context/keys [effects sessions] :as resp}\n (pull/with-data-schema v/api-schema ((mk-query pattern) data))]\n {:response (\'\x26? resp)\n :effects-desc effects\n :session (merge session sessions)}))\n```\n\nYou can also notice that the data is being validated via `pull/with-data-schema`. 
In case of validation error, since we do not have any side effects done during the pulling, an error will be thrown and no mutations will be done.\n\nHaving no side-effects at all makes it way easier to tests and debug and it is more predictable.\n\nFinally, the `ring-handler` will be the component responsible to **execute** all the side effects at once. \n\nSo the `saturn-handler` purpose was to be sure the data is being pulled properly, validated using malli, and that the side effects descriptions are gathered in one place to be executed later on.\n', +new S(null,1,5,T,[new S(null,2,5,T,["Flybot","https://github.com/skydread1/flybot.sg"],null)],null),"\n",iu,"Lasagna-pull applied to flybot.sg","lasagna-pull-applied-to-flybot",new n(null,3,[Zo,"/assets/loic-blog-logo.png",Wo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-lasagna-pull-flybot"]),ij([Sl,xm,Vn,Lp,Mp,oq,Cr,Ws,st],[new S(null,4,5,T,["Clojure","Reagent","Figwheel","Mono Repo"],null),new S(null,1,5,T,["2023-02-16"],null),'\n## Context\n\nOur app [skydread1/flybot.sg](https://github.com/skydread1/flybot.sg) is a full-stack Clojure **web** and **mobile** app.\n\nWe opted for a mono-repo to host:\n- the `server`: Clojure app\n- the `web` client: Reagent (React) app using Re-Frame\n- the `mobile` client: Reagent Native (React Native) app using Re-Frame\n\nNote that the web app does not use NPM at all. However, the React Native mobile app does use NPM and the `node_modules` need to be generated.\n\nBy using only one `deps.edn`, we can easily starts the different parts of the app.\n\n## Goal\n\nThe goal of this document is to highlight the mono-repo structure and how to run the different parts (dev, test, build etc).\n\n## Repo structure\n\n```\n├── client\n│ ├── common\n│ │ ├── src\n│ │ │ └── flybot.client.common\n│ │ └── test\n│ │ └── flybot.client.common\n│ ├── mobile\n│ │ ├── src\n│ │ │ └── flybot.client.mobile\n│ │ └── test\n│ │ └── flybot.client.mobile\n│ └── web\n│ ├── src\n│ │ └── flybot.client.web\n│ └── test\n│ └── flybot.client.web\n├── common\n│ ├── src\n│ │ └── flybot.common\n│ └── test\n│ └── flybot.common\n├── server\n│ ├── src\n│ │ └── flybot.server\n│ └── test\n│ └── flybot.server\n```\n\n- `server` dir contains then `.clj` files\n- `common` dir the `.cljc` files\n- `clients` dir the `.cljs` files.\n\n## Deps Management\n\nYou can have a look at the [deps.edn](https://github.com/skydread1/flybot.sg/blob/master/deps.edn).\n\nWe can use namespaced aliases in `deps.edn` to make the process clearer.\n\nI will go through the different aliases and explain their purposes and how to I used them to develop the app.\n\n## Common libraries\n\n### clj and cljc deps\n\nFirst, the root deps of the deps.edn, inherited by all aliases:\n\n#### Both frontend and backend\n- org.clojure/clojure\n- metosin/malli\n- metosin/reitit\n- metosin/muuntaja\n- sg.flybot/lasagna-pull\n\n#### Backend\n- ring/ring-defaults \n- aleph/aleph\n- robertluo/fun-map\n- datalevin/datalevin\n- skydread1/reitit-oauth2\n \nThe deps above are used in both `server/src` and `common/src` (clj and cljc files).\n\nSo every time you start a `deps` REPL or a `deps+figwheel` REPL, these deps will be loaded.\n\n### Sample data\n\nIn the [common/test/flybot/common/test_sample_data.cljc](https://github.com/skydread1/flybot.sg/blob/master/common/test/flybot/common/test_sample_data.cljc) namespace, we have sample data that can be loaded in both backend dev system of frontend dev systems.\n\nThis is made possible by reader conditionals 
clj/cljs.\n\n### IDE integration\n\nI use the `calva` extension in VSCode to jack-in deps and figwheel REPLs but you can use Emacs if you prefer for instance.\n\nWhat is important to remember is that, when you work on the backend only, you just need a `deps` REPL. There is no need for figwheel since we do not modify the cljs content.\nSo in this scenario, the frontend is fixed (the main.js is generated and not being reloaded) but the backend changes (the `clj` files and `cljc` files).\n\nHowever, when you work on the frontend, you need to load the backend deps to have your server running but you also need to recompile the js when a cljs file is saved. Therefore your need both `deps+figwheel` REPL. So in this scenario, the backend is fixed and running but the frontend changes (the `cljs` files and `cljc` files)\n\nYou can see that the **common** `cljc` files are being watched in both scenarios which makes sense since they "become" clj or cljs code depending on what REPL type you are currently working in.\n\n## Server aliases\n\nFollowing are the aliases used for the server:\n\n- `:jvm-base`: JVM options to make datalevin work with java version \x3e java8\n- `:server/dev`: clj paths for the backend systems and tests\n- `:server/test`: Run clj tests\n\n## Client common aliases\n\nFollowing is the alias used for both web and mobile clients:\n\n- `:client`: deps for frontend libraries common to web and react native.\n\nThe extra-paths contains the `cljs` files.\n\nWe can note the `client/common/src` path that contains most of the `re-frame` logic because most subscriptions and events work on both web and react native right away!\n\nThe main differences between the re-frame logic for Reagent and Reagent Native have to do with how to deal with Navigation and oauth2 redirection. That is the reason we have most of the logic in a **common** dir in `client`.\n\n## Mobile Client\n\nFollowing are the aliases used for the **mobile** client:\n\n- `:mobile/rn`: contains the cljs deps only used for react native. 
They are added on top of the client deps.\n- `:mobile/ios`: starts the figwheel REPL to work on iOS.\n\n## Web Client\n\nFollowing are the aliases used for the **web** client:\n\n- `:web/dev`: starts the dev REPL\n- `:web/prod`: generates the optimized js bundle main.js\n- `:web/test`: runs the cljs tests\n- `:web/test-headless`: runs the headless cljs tests (fot GitHub CI)\n\n## CI/CD aliases\n\n### build.clj\n\nFollowing is the alias used to build the js bundle or a uberjar:\n\n- `:build`: [clojure/tools.build](https://github.com/clojure/tools.build) is used to build the main.js and also an uber jar for local testing, we use .\n\nThe build.clj contains the different build functions:\n\n- Build frontend js bundle: `clj -T:build js-bundle`\n- Build backend uberjar: `clj -T:build uber`\n- Build both js and jar: `clj -T:build uber+js`\n\n### Jibbit\n\nFollowing is the alias used to build an image and push it to local docker or AWS ECR:\n\n- `:jib`: build image and push to image repo\n\n## Antq\n\nFollowing is the alias used to points out outdated dependencies\n\n- `:outdated`: prints the outdated deps and their last available version\n\n\n## Notes on Mobile CD\n\nWe have not released the mobile app yet, that is why there is no aliases related to CD for react native yet.\n\n## Conclusion\n\nThis is one solution to handle server and clients in the same repo.\n\nFeel free to consult the complete [deps.edn](https://github.com/skydread1/flybot.sg/blob/master/deps.edn) content.\n\nIt is important to have a clear directory structure to only load required namespaces and avoid errors.\n\nUsing `:extra-paths` and `:extra-deps` in deps.edn is important because it prevent deploying unnecessary namespaces and libraries on the server and client.\n\nAdding namespace to the aliases make the distinction between backend, common and client (web and mobile) clearer.\n\nUsing `deps` jack-in for server only work and `deps+figwheel` for frontend work is made easy using `calva` in VSCode (work in other editors as well).\n', +"\n",iu,"Clojure Mono Repo example : server + 2 clients","clojure-mono-repo",new n(null,3,[Zo,"/assets/loic-blog-logo.png",Wo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-mono-repo"]),ij([Sl,xm,Vn,Lp,Mp,oq,Cr,Ws,st],[new S(null,3,5,T,["Clojure","Pull Pattern","Malli"],null),new S(null,1,5,T,["2022-04-12"],null),"\n## Context\n\n[flybot-sg/lasagna-pull](https://github.com/flybot-sg/lasagna-pull) by [@robertluo](https://github.com/robertluo) aims at precisely select from deep data structure in Clojure.\n\n## Goal\n\nIn this document, I will show you the benefit of `pull-pattern` in pulling nested data.\n\n## Rational\n\nIn Clojure, it is very common to have to precisely select data in nested maps. the Clojure core `select-keys` and `get-in` functions do not allow to easily select in deeper levels of the maps with custom filters or parameters.\n\nOne of the libraries of the `lasagna-stack` is [flybot-sg/lasagna-pull](https://github.com/flybot-sg/lasagna-pull). It takes inspiration from the [datomic pull API](https://docs.datomic.com/on-prem/query/pull.html) and the library [redplanetlabs/specter](https://github.com/redplanetlabs/specter).\n\n`lasagna-pull` aims at providing a clearer pattern than the datomic pull API.\n\nIt also allows the user to add options on the selected keys (filtering, providing params to values which are functions etc). 
It supports less features than the `specter` library but the syntax is more intuitive and covers all major use cases you might need to select the data you want.\n\nFinally, a [metosin/malli](https://github.com/metosin/malli) schema can be provided to perform data validation directly using the provided pattern. This allows the client to prevent unnecessary pulling if the pattern does not match the expected shape (such as not providing the right params to a function, querying the wrong type etc).\n\n## A query language to select deep nested structure\n\nSelecting data in nested structure is made intuitive via a pattern that describes the data to be pulled following the shape of the data.\n\n### Simple query cases\n\nHere are some simple cases to showcase the syntax:\n\n- query a map\n\n```clojure\n(require '[sg.flybot.pullable :as pull])\n\n((pull/query '{:a ? :b {:b1 ?}})\n {:a 1 :b {:b1 2 :b2 3}})\n;\x3d\x3e {\x26? {:a 1, :b {:b1 2}}}\n```\n\n- query a sequence of maps\n\n```clojure\n((pull/query '[{:a ? :b {:b1 ?}}])\n [{:a 1 :b {:b1 2 :b2 3}}\n {:a 2 :b {:b1 2 :b2 4}}])\n;\x3d\x3e {\x26? [{:a 1, :b {:b1 2}} {:a 2, :b {:b1 2}}]}\n```\n\n- query nested sequences and maps\n\n```clojure\n((pull/query '[{:a ?\n :b [{:c ?}]}])\n [{:a 1 :b [{:c 2}]}\n {:a 11 :b [{:c 22}]}])\n;\x3d\x3e {\x26? [{:a 1, :b [{:c 2}]} {:a 11, :b [{:c 22}]}]}\n```\n\nLet’s compare datomic pull and lasagna pull query with a simple example:\n\n- datomic pull\n\n```clojure\n(def sample-data\n [{:a 1 :b {:b1 2 :b2 3}}\n {:a 2 :b {:b1 2 :b2 4}}])\n\n(pull ?db\n [:a {:b [:b1]}]\n sample-data)\n```\n\n- Lasagna pull\n```clojure\n((pull/query '[{:a ? :b {:b1 ?}}])\n sample-data)\n;\x3d\x3e {\x26? [{:a 1, :b {:b1 2}} {:a 2, :b {:b1 2}}]}\n```\n\nA few things to note\n\n- lasagna-pull uses a map to query a map and surround it with a vector to query a sequence which is very intuitive to use.\n- `?` is just a placeholder on where the value will be after the pull.\n- lasagna-pull returns a map with your pulled data in a key `\x26?`.\n\n### Query specific keys\n\nYou might not want to fetch the whole path down to a leaf key, you might want to query that key and store it in a dedicated var. It is possible to do this by providing a var name after the placeholder `?` such as `?a` for instance. The key `?a` will then be added to the result map along side the `\x26?` that contains the whole data structure.\n\nLet’s have a look at an example.\n\nLet’s say we want to fetch specific keys in addition to the whole data structure:\n\n```clojure\n((pull/query '{:a ?a\n :b {:b1 ?b1 :b2 ?}})\n {:a 1 :b {:b1 2 :b2 3}})\n; \x3d\x3e {?\x26 {:a 1 :b {:b1 2 :b2 3}} ;; all nested data structure\n; ?a 1 ;; var a\n; ?b1 2 ;; var b1\n }\n```\n\nThe results now contain the logical variable we selected via `?a` and `?b1`. Note that the `:b2` key has just a `?` placeholder so it does not appear in the results map keys.\n\nIt works also for sequences:\n\n```clojure\n;; logical variable for a sequence\n((pull/query '{:a [{:b1 ?} ?b1]})\n {:a [{:b1 1 :b2 2} {:b1 2} {}]})\n;\x3d\x3e {?b1 [{:b1 1} {:b1 2} {}]\n; \x26? {:a [{:b1 1} {:b1 2} {}]}}\n```\n\nNote that `'{:a [{:b1 ?b1}]}` does not work because the logical value cannot be the same for all the `b1` keys:\n\n```clojure\n((pull/query '{:a [{:b1 ?b1}]})\n {:a [{:b1 1 :b2 2} {:b1 2} {}]})\n;\x3d\x3e {\x26? {:a [{:b1 1} nil nil]}} ;; not your expected result\n```\n\n## A query language to select structure with params and filters\n\nMost of the time, just selecting nested keys is not enough. 
We might want to select the key if certain conditions are met, or even pass a parameter if the value of the key is a function so we can run the function and get the value.\n\nWith library like [redplanetlabs/specter](https://github.com/redplanetlabs/specter), you have different possible transformations using diverse [macros](https://github.com/redplanetlabs/specter/wiki/List-of-Macros) which is an efficient way to select/transform data. The downside is that it introduces yet another syntax to get familiar with.\n\n`lasagna-pull` supports most of the features at a key level.\n\nInstead of just providing the key you want to pull in the pattern, you can provide a list with the key as first argument and the options as the rest of the list.\n\nThe transformation is done at the same time as the selection, the pattern can be enhanced with options:\n\n- not found\n\n```clojure\n((pull/query '{(:a :not-found ::not-found) ?}) {:b 5})\n;\x3d\x3e {\x26? {:a :user/not-found}}\n```\n\n- when\n\n```clojure\n((pull/query {(:a :when even?) '?}) {:a 5})\n;\x3d\x3e {\x26? {}} ;; empty because the value of :a is not even\n```\n\n- with\n\nIf the value of a query is a function, using `:with` option can invoke it and returns the result instead:\n\n```clojure\n((pull/query '{(:a :with [5]) ?}) {:a #(* % 2)})\n;\x3d\x3e {\x26? {:a 10}} ;; the arg 5 was given to #(* % 2) and the result returned\n```\n\n- batch\n\nBatched version of :with option:\n\n```clojure\n((pull/query '{(:a :batch [[5] [7]]) ?}) {:a #(* % 2)})\n;\x3d\x3e {\x26? {:a (10 14)}}\n```\n\n- seq\n\nApply to sequence value of a query, useful for pagination:\n\n```clojure\n((pull/query '[{:a ? :b ?} ? :seq [2 3]]) [{:a 0} {:a 1} {:a 2} {:a 3} {:a 4}])\n;\x3d\x3e {\x26? ({:a 2} {:a 3} {:a 4})}\n```\n\nAs you can see with the different options above, the transformations are specified within the selected keys. Unlike specter however, we do not have a way to apply transformation to all the keys for instance.\n\n## Pattern validation with Malli schema\n\nWe can optionally provide a [metosin/malli](https://github.com/metosin/malli) schema to specify the shape of the data to be pulled.\n\nThe client malli schema provided is actually internally \"merged\" to a internal schema that checks the pattern shape so both the pattern syntax and the pattern shape are validated.\n\n## Context\n\nYou can provide a context to the query. 
You can provide a `modifier` and a `finalizer`.\n\nThis context can help you gathering information from the query and apply a function on the results.\n\n## Lasagna Pull applied to flybot.sg\n\nTo see Lasagna Pull in action, refer to the doc [Lasagna Pull applied to flybot.sg](https://www.loicblanchard.me/blog/lasagna-pull-applied-to-flybot).\n", +"\n",iu,"Lasagna Pull: Precisely select from deep nested data","lasagna-pull",new n(null,3,[Zo,"/assets/loic-blog-logo.png",Wo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-lasagna-pull"]),ij([Sl,xm,Vn,Bp,Lp,Mp,oq,Cr,Ws,st],[new S(null,4,5,T,["Clojure","React Native","Reagent","Figwheel"],null),new S(null,1,5,T,["2023-02-03"],null),'\n## Prerequisites\n\nThis project is stored alongside the backend and the web frontend in the mono-repo: [skydread1/flybot.sg](https://github.com/skydread1/flybot.sg)\n\nThe codebase is a full-stack **Clojure(Script)** app.\nThe backend is written in **Clojure** and the web and mobile clients are written in **ClojureScript**.\n\nFor the web app, we use [reagent](https://github.com/reagent-project/reagent), a ClojureScript interface for `React`.\n\nFor the mobile app, we use [reagent-react-native](https://github.com/vouch-opensource/reagent-react-native), a ClojureScript interface for `React Native`.\n\nThe mono-repo structure is as followed:\n\n```\n├── client\n│   ├── common\n│   │   ├── src\n│   │   │   └── flybot.client.common\n│   │   └── test\n│   │   └── flybot.client.common\n│   ├── mobile\n│   │   ├── src\n│   │   │   └── flybot.client.mobile\n│   │   └── test\n│   │   └── flybot.client.mobile\n│   └── web\n│   ├── src\n│   │   └── flybot.client.web\n│   └── test\n│   └── flybot.client.web\n├── common\n│   ├── src\n│   │   └── flybot.common\n│   └── test\n│   └── flybot.common\n├── server\n│   ├── src\n│   │   └── flybot.server\n│   └── test\n│   └── flybot.server\n```\n\nSo far, the RN app has only been tested on iOS locally.\n\n## Rational\n\nThe goal was to have a mobile app targeting both iOS and Android, written in `ClojureScript`, which can reuse most of our web frontend logic.\n\nTo do so, I used `React Native` for the following reasons:\n\n- Integrate very well with [figwheel-main](https://github.com/bhauman/figwheel-main) and [re-frame](https://github.com/day8/re-frame)\n- Target both iOS and Android\n- Does not necessitate too much configuration to get it running\n- React Native has an overall good documentation\n\n## Setup\n\nTo get React Native working, you need to follow a few steps.\n\nThe setup steps are well described in the [Figwheel doc](https://figwheel.org/docs/react-native.html).\n\n### npm\n\nThe Figwheel doc has a [dedicated section](https://figwheel.org/docs/npm.html) to install and setup NPM in a project. The best way to install npm is to use [nvm](https://github.com/nvm-sh/nvm).\n\n### React Native\n\nTo do mobile dev, some tools need to be installed and the react native [doc](https://reactnative.dev/docs/next/environment-setup) has the instructions on how to prepare the environment.\n\n### Ruby\n\nThe default Ruby version installed on MacOS is not enough to work with React Native. Actually, React Native needs a specific version of Ruby hence the use of a ruby version manager. 
I used [rbenv](https://github.com/rbenv/rbenv).\n\n```bash\n~:brew install rbenv ruby-build\n\n~:rbenv -v\nrbenv 1.2.0\n```\n\nReact Native uses [this version](https://github.com/facebook/react-native/blob/main/template/_ruby-version) of ruby so we need to download it.\n\n```bash\n# install proper ruby version\n~:rbenv install 2.7.6\n\n# set ruby version as default\n~:rbenv global 2.7.6\n```\n\nWe also need to add these 2 lines to the .zshrc\n\n```bash\nexport PATH\x3d"$HOME/.rbenv/bin:$PATH"\neval "$(rbenv init -)"\n```\n\nFinally we make sure we have the correct version:\n\n```bash\n~:ruby -v\nruby 2.7.6p219 (2022-04-12 revision c9c2245c0a) [arm64-darwin22]\n```\n\n### Ruby\'s Bundler\n\nFrom the doc:\n\nRuby\'s [Bundler](https://bundler.io/) is a Ruby gem that helps managing the Ruby dependencies of your project. We need Ruby to install Cocoapods and using Bundler will make sure that all the dependencies are aligned and that the project works properly.\n\n```bash\n# install the bundler\n~:gem install bundler\nFetching bundler-2.4.5.gem\nSuccessfully installed bundler-2.4.5\n...\n\n# Check the location where gems are being installed\n~:gem env home\n/Users/loicblanchard/.rbenv/versions/2.7.6/lib/ruby/gems/2.7.0\n```\n\n### Xcode\n\nFrom the doc:\n\n\x3e The easiest way to install `Xcode` is via the [Mac App Store](https://itunes.apple.com/us/app/xcode/id497799835?mt\x3d12)\n. Installing Xcode will also install the iOS Simulator and all the necessary tools to build your iOS app.\n\nI downloaded it from the apple store.\n\n`Xcode command line` tools also needs to be installed. It can be chosen in `Xcode→Settings→Locations`\n\n```bash\n~:xcode-select -p\n/Library/Developer/CommandLineTools\n```\n\n### Installing an iOS Simulator in Xcode\n\nIt should be already installed.\n\n### React Native Command Line Interface\n\nWe can use `npx` directly because it was shipped with `npm`.\n\n### CocoaPods\n\n[CocoaPods](https://github.com/CocoaPods/CocoaPods) is required to use the Ruby’s Bundler and we can install it using [rubygems](https://github.com/rubygems/rubygems):\n\n```bash\nsudo gem install cocoapods\n\n# check version\n~:gem which cocoapods\n/Users/loicblanchard/.rbenv/versions/2.7.6/lib/ruby/gems/2.7.0/gems/cocoapods-1.11.3/lib/cocoapods.rb\n```\n\n### Troubleshooting\n\nIn case of the error [Multiple Profiles](https://github.com/CocoaPods/CocoaPods/issues/11641), we need to switch to the Xcode cli manually like so:\n\n```bash\nsudo xcode-select --switch /Applications/Xcode.app\n```\n\n## Create Project\n\nWe now should have all the tools installed to start a React Native project on Mac targeting iOS.\n\n```bash\n# setup project\nnpx react-native init MyAwesomeProject\n```\n\n### Running the project\n\n```bash\nnpx react-native run-ios\n```\n\nThis should open a simulator with the welcome React Native display.\n\n## Integrate RN with Clojure and Figwheel\n\nAdd an alias to the deps.edn:\n\n```clojure\n:cljs/ios {:main-opts ["--main" "figwheel.main"\n "--build" "ios"\n "--repl"]}\n```\n\nNote: We need to use cljs version `1.10.773` because the latest version causes this [error](https://github.com/log4js-node/log4js-node/issues/1171) which is hard to debug.\n\nAlso, we need to add the figwheel config for `ios` in `ios.cljs.edn` :\n\n```clojure\n^{:react-native :cli\n :watch-dirs ["client/mobile/src" "client/common/src"]}\n{:main flybot.client.mobile.core\n :closure-defines {flybot.client.common.db.event/BASE-URI "http://localhost:9500"}}\n```\n\nAnd then we add the source files in the src 
folder like explained in the [figwheel doc](https://figwheel.org/docs/react-native.html).\n\nTo run the project, we start a REPLs (clj and cljs) with the proper aliases and in another terminal, we can run `run npm ios` to start the Xcode simulator.\n\nFor more details regarding the aliases: have a look at the [README](https://github.com/skydread1/flybot.sg)\n\n## Deps management\n\nIf we want to add a npm package, we need 2 steps:\n\n```bash\nnpm i my-npm-package\ncd ios\npod install\ncd ..\n```\n\n## Troubleshooting\n\nIn case of the error [RNSScreenStackHeaderConfig](https://stackoverflow.com/questions/73268848/i-am-trying-to-work-with-react-navigation-library-but-this-issue-keeps-coming), we need to:\n\n```bash\nnpm i react-native-gesture-handler\ncd ios\npod install\ncd ..\n\n# We restart the similutor and the error should be gone\n```\n\n## APP architecture and features\n\n### HTTP\n\nRegarding the http request made by the re-frame fx `http-xhrio`, it should work right away, same as for the web, but we just need to manually pass the cookie to the header as RN do not manage cookie for us like the web does.\n\nPassing the cookie in the request was quite straight forward, I just added `:headers {:cookie my-cookie}` to the `:http-xhrio` fx for all the requests that require a session for the mobile app.\n\n### Markdown to Native components\n\nI use [react-native-markdown-package](https://github.com/andangrd/react-native-markdown-package)\n\n```bash\nnpm i react-native-markdown-package --save\n```\n\n### Font\n\nOn iOS, I had to add the fonts in the `info.plist` like so:\n\n```xml\n\x3ckey\x3eUIAppFonts\x3c/key\x3e\n\t\x3carray\x3e\n\t \x3cstring\x3eAntDesign.ttf\x3c/string\x3e\n\t \x3cstring\x3eEntypo.ttf\x3c/string\x3e\n\t \x3cstring\x3eEvilIcons.ttf\x3c/string\x3e\n\t \x3cstring\x3eFeather.ttf\x3c/string\x3e\n\t \x3cstring\x3eFontAwesome.ttf\x3c/string\x3e\n\t \x3cstring\x3eFontAwesome5_Brands.ttf\x3c/string\x3e\n\t \x3cstring\x3eFontAwesome5_Regular.ttf\x3c/string\x3e\n\t \x3cstring\x3eFontAwesome5_Solid.ttf\x3c/string\x3e\n\t \x3cstring\x3eFoundation.ttf\x3c/string\x3e\n\t \x3cstring\x3eIonicons.ttf\x3c/string\x3e\n\t \x3cstring\x3eMaterialIcons.ttf\x3c/string\x3e\n\t \x3cstring\x3eMaterialCommunityIcons.ttf\x3c/string\x3e\n\t \x3cstring\x3eSimpleLineIcons.ttf\x3c/string\x3e\n\t \x3cstring\x3eOcticons.ttf\x3c/string\x3e\n\t \x3cstring\x3eZocial.ttf\x3c/string\x3e\n\t\x3c/array\x3e\n```\n\n## Navigation\n\n### Navigators\n\nAs for now we have 2 Navigators:\n\n[Tab Navigator](https://reactnavigation.org/docs/tab-based-navigation/)\n\n- `login` screen\n- `blog` screen: [Stack Navigator](https://reactnavigation.org/docs/stack-navigator/)\n\n[Stack Navigator](https://reactnavigation.org/docs/stack-navigator/)\n\n- `post-lists` screen\n- `post-read` screen\n- `post-edit` screen\n- `preview` screen\n\nSo the Stack Navigator is inside the Tab Navigator blog screen.\n\n#### How to navigate\n\nFor the navigation, we can use `re-frame` dispatch to change the navigation object ref to the new route.\n\nSince we are using re-frame, we might not be able to access `props.navigation.navigate`.\n\nHowever, we could store a reference to the navigation object in our re-frame DB so we can [Navigate without the navigation prop](https://reactnavigation.org/docs/navigating-without-navigation-prop/).\n\nTherefore, just using `re-frame/dispatch` to store the navigation ref to the `re-frame/db` and use `re-frame/subscribe` to get the ref (and so the nav params) is enough to handle navigation in our case. 
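As a rough sketch of this approach (the event, effect, and screen names here are hypothetical, not the exact ones used in flybot.sg), storing the navigation ref once and navigating through a re-frame effect could look like this:

```clojure
(ns my-app.nav
  (:require [re-frame.core :as rf]))

;; Store the navigation object ref once (e.g. when the NavigationContainer is ready).
(rf/reg-event-db
 :evt.nav/set-ref
 (fn [db [_ nav-ref]]
   (assoc db :nav/ref nav-ref)))

;; Side effect: call .navigate on the stored ref instead of props.navigation.navigate.
(rf/reg-fx
 :fx.nav/navigate
 (fn [[nav-ref route params]]
   (.navigate ^js nav-ref (name route) (clj->js params))))

;; Pure event that reads the ref from the db and delegates to the effect.
(rf/reg-event-fx
 :evt.nav/navigate
 (fn [{:keys [db]} [_ route params]]
   {:fx [[:fx.nav/navigate [(:nav/ref db) route params]]]}))

;; In a view: (rf/dispatch [:evt.nav/navigate :post-read {:id 1}])
```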
Thus, we do not use the props at all.\n\nRegarding the hot reloading, the only way I found is to store the js state and navigation objects in atoms via `defonce` so we can remain on the same screen with same params as before the reload.\n\nNote: Maybe I could use the AsyncStorage instead of the atoms even though it is only for dev purposes.\n\n## Env variables\n\nOne of the env variables we need to define is for the `uri`. For the web app, we can use relative path such as `/posts/all` but on mobile, there is no such thing as path and we would need to pass an absolute path such as `http://localhost:9500/posts/all` for instance in our case.\n\nTherefore, we need to have some config to pass to the cljs build. It is possible to do so via the compiler option [:closure-defines](https://clojurescript.org/reference/compiler-options#closure-defines).\n\n`:closure-defines` is a ClojureScript compiler option that allows you to specify a list of key-value pairs to be passed as JavaScript defines to the Google Closure Compiler. These defines can be used to conditionally compile code based on the value of the defined key. For example, you can define `:foo true` as a closure define and then use `#?(:foo some-code)` in your ClojureScript code to include `some-code` only when `:foo` is true.\n\nLuckily, figwheel allows us to [setup the closures-define in the config files](https://figwheel.org/docs/compile_config.html).\n\n## OAuth2.0\n\nI redirect the request back to an intermediate end point that will directly fetch the user info and create a ring-session that contains the google tokens, the user-name and user-permissions. Then ring encrypts that for us and put that `ring-session` in a cookie that is sent to the client.\n\nThus, my clients only receive this ring-session id that will be passed to every request made (automatic for browser, manually added to request for mobile).\n\nWhen the user logout, ring still passes a `ring-session` but it will be nil once decrypted by the server.\n\n### How to redirect back to the mobile app\n\nTo go back to the app after OAuth2.0 success, I had to add the scheme following to the `info.plist` for iOS:\n\n```xml\n\x3ckey\x3eCFBundleURLTypes\x3c/key\x3e\n\t\x3carray\x3e\n\t\x3cdict\x3e\n\t\t\x3ckey\x3eCFBundleURLSchemes\x3c/key\x3e\n\t\t\x3carray\x3e\n\t\t\x3cstring\x3eflybot-app\x3c/string\x3e\n\t\t\x3c/array\x3e\n\t\x3c/dict\x3e\n```\n\nAlso, in `ios/AppDelegate.mm`, I added:\n\n```jsx\n#import \x3cReact/RCTLinkingManager.h\x3e\n\n/// listen to incoming app links during your app\'s execution\n- (BOOL)application:(UIApplication *)application\n openURL:(NSURL *)url\n options:(NSDictionary\x3cUIApplicationOpenURLOptionsKey,id\x3e *)options\n{\n return [RCTLinkingManager application:application openURL:url options:options];\n}\n```\n\n## Cookie management\n\nI store the cookie in async-storage for this because it is enough for our simple use case.\n\n```jsx\nnpm install @react-native-async-storage/async-storage\n```\n\n### AsyncStorage with re-frame\n\nOnce the `ring-session` cookie is received from the server, a re-frame dispatch is triggered to set a cookie name `ring-session` in the device AsyncStorage. This event also updates the re-frame db value of `:user/cookie`.\n\nOne of the issues with AsyncStorage is that it returns a `Promise`. Therefore, we cannot access the value directly but only do something in the `.then` method. 
So, once the Promise is resolved, in the .then, we `re-frame/dispatch` an event that will update the re-frame/db.\n\nThe Promises to get or set a cookie from storage, being side effects, are done in a re-frame `reg-fx`. These `reg-fx` will be done inside `reg-event-fx` event. We want to respect the principle: `reg-fx` for pulling with side effect and `reg-event-fx` for pushing pure event.\n\n### Ensure order of events\n\nWe want to be sure the cookie is pulled from AsyncStorage before the db is initialised and all the posts and the user pulled. However, we cannot just dispatch the event to pull the cookie from AsyncStorage (returns a Promise that will then dispatch another event to update re-frame/db), and then dispatch the event to get all the posts from the server because there is no guarantee the cookie will be set before the request is made.\n\nThe solution is to dispatch the initialisation event inside the event from the Promise like so:\n\n```clojure\n;; setup all db param and do get request to get posts, pages and user using cookie\n(rf/reg-event-fx\n :evt.app/initialize\n (fn [{:keys [db]} _] \n {:db (assoc db ...)\n :http-xhrio {:method :post\n :uri (base-uri "/pages/all")\n :headers {:cookie (:user/cookie db)}\n :params ...\n :format (edn-request-format {:keywords? true})\n :response-format (edn-response-format {:keywords? true})\n :on-success [:fx.http/all-success]\n :on-failure [:fx.http/failure]}}))\n\n;; Impure fx to fet cookie from storage and dispatch new event to update db\n(rf/reg-fx ;; 2)\n :fx.app/get-cookie-async-store\n (fn [k]\n (-\x3e (async-storage/get-item k) ;; Promise\n (.then #(rf/dispatch [:evt.cookie/get %])))))\n\n;; Pure event triggered at the start of the app\n(rf/reg-event-fx ;; 1)\n :evt.app/initialize-with-cookie\n (fn [_ [_ cookie-name]]\n {:fx [[:fx.app/get-cookie-async-store cookie-name]]}))\n\n;; Pure event triggered by :fx.app/get-cookie-async-store\n(rf/reg-event-fx ;; 3)\n :evt.cookie/get\n (fn [{:keys [db]} [_ cookie-value]]\n {:db (assoc db :user/cookie cookie-value)\n :fx [[:dispatch [:evt.app/initialize]]]}))\n```\n\n## Styling\n\nAs for now, the styling is directly done in the `:style` keys of the RN component’s hiccups. Some more complex components have some styling that takes functions and or not in the `:style` keyword.\n\n## Conclusion\n\nI hope that this unusual mobile app stack made you want to consider `ClojureScript` as a good alternative to build mobile apps.\n\nIt is important to note that the state management logic (re-frame) is the same at 90% for both the web app and the mobile app which is very convenient.\n\nFinally, the web app is deployed but not the mobile app. All the codebase is open-source so feel free to take inspiration.\n', +new S(null,1,5,T,[new S(null,2,5,T,["Flybot","https://github.com/skydread1/flybot.sg"],null)],null),"\n",iu,"Reagent React Native Mobile App","reagent-native-app",new n(null,3,[Zo,"/assets/loic-blog-logo.png",Wo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-reagent-native"]),ij([Sl,xm,Vn,Bp,Lp,Mp,oq,Cr,Ws,st],[new S(null,4,5,T,["Clojure","Compiler","CLR","Unity"],null),new S(null,1,5,T,["2022-04-08"],null),'\nIn this article, I will show you:\n\n1. how to handle CLR interop to prepare your Clojure code for the CLR\n2. how to use type hints to have your code more performant on the CLR\n3. how to manage dependencies\n4. how to compile to the CLR using Nostrand\n5. 
how to test in the CLR using Nostrand\n\nNote: the steps for packing the code into nugget package, pushing it to remote github and fetching it in Unity are highlighted in another article.\n\n## Rational\n\n### What is the Magic Compiler\n\nMagic is a bootsrapped compiler writhen in Clojure that take Clojure code as input and produces dotnet assemblies (.dll) as output.\n\nCompiler Bootstrapping is the technique for producing a self-compiling compiler that is written in the same language it intends to compile. In our case, MAGIC is a **Clojure** compiler that compiles **Clojure** code to .**NET** assemblies (.dll and .exe files).\n\nIt means we need the old dlls of MAGIC to generate the new dlls of the MAGIC compiler. We repeat this process until the compiler is good enough. \n\nThe very first magic dlls were generated with the [clojure/clojure-clr](https://github.com/clojure/clojure-clr) project which is also a Clojure compiler to CLR but written in **C#** with limitations over the dlls generated (the problem MAGIC is intended to solve).\n\n### Why the Magic Compiler\n\nThe already existing clojure-\x3eclr compiler [clojure/clojure-clr](https://github.com/clojure/clojure-clr). However, clojure-clr uses a technology called the DLR (dynamic language runtime) to optimize dynamic call sites but it emits self modifying code which make the assemblies not usable on mobile devices (IL2CPP in Unity). So we needed a way to have a compiler that emit assemblies that can target both Desktop and mobile (IL2CPP), hence the Magic compiler.\n\n## Step 1: Interop\n\n### Reader conditionals\n\nWe don’t want separate branches for JVM and CLR so we use reader conditionals.\n\nYou can find how to use the reader conditionals in this [guide](https://clojure.org/guides/reader_conditionals).\n\nYou will mainly need them for the `require` and `import` as well as the function parameters.\n\nDon’t forget to change the extension of your file from `.clj` to `.cljc`.\n\n### Clj-kondo Linter supporting reader conditionals\n\nIn `Emacs` (with `spacemacs` distribution), you might encounter some lint issues if you are using reader conditionals and some configuration might be needed.\n\nThe Clojure linter library [clj-kondo/clj-kondo](https://github.com/clj-kondo/clj-kondo) supports the reader conditionals.\n\nAll the instruction on how to integrate it to the editor you prefer [here](https://github.com/clj-kondo/clj-kondo/blob/master/doc/editor-integration.md).\n\nTo use [clj-kondo](https://github.com/clj-kondo/clj-kondo) with [syl20bnr/spacemacs](https://github.com/syl20bnr/spacemacs), you need the layer [borkdude/flycheck-clj-kondo](https://github.com/borkdude/flycheck-clj-kondo).\n\nHowever, there is no way to add configuration in the `.spacemacs` config file.\n\nThe problem is that we need to set `:clj` as the default language to be checked.\n\nIn `VScode` I did not need any config to make it work.\n\n### Setting up the default reader conditionals of the Clj-kondo linter\n\nIt has nothing to do with the `:default` reader conditional key such as:\n\n```clojure\n#?(:clj (Clojure expression)\n :cljs (ClojureScript expression)\n :cljr (Clojure CLR expression)\n :default (fallthrough expression))\n```\n\nIn the code above, the `:default` reader is used if none of the other reader matches the platform the code is run on. 
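For reference, here is a minimal sketch of what this typically looks like in a `.cljc` namespace (the namespace and interop classes are hypothetical); note that no `:default` branch is needed when only `:clj` and `:cljr` are targeted:

```clojure
(ns my-proj.time-utils
  ;; platform-specific imports behind reader conditionals
  #?(:clj  (:import [java.time Instant])
     :cljr (:import [System DateTimeOffset])))

(defn now-ms
  "Returns the current time in milliseconds since the Unix epoch."
  []
  #?(:clj  (.toEpochMilli (Instant/now))
     :cljr (.ToUnixTimeMilliseconds DateTimeOffset/Now)))
```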
There is no need to add the `:default` tag everywhere as the code will be ran only on 2 potential environment: `:clj` and `:cljr`.\n\nFor our linter, on your Clojure environment (in case of Emacs with [syl20bnr/spacemacs](https://github.com/syl20bnr/spacemacs) distribution), you can highlight the codes for the `:clj` reader only.\n\nThe `:cljr` code will be displayed as comments. \n\nTo add the default `:clj` reader, we need to add it in the config file : `~/.config/clj-kondo/config.edn` (to affect all our repos). It is possible to add config at project level as well as stated [here](https://cljdoc.org/d/clj-kondo/clj-kondo/2020.09.09/doc/configuration).\n\nHere is the config to setup `:clj` as default reader:\n\n```clojure\n{:cljc {:features #{:clj}}}\n```\n\nIf you don’t specify a default reader, `clj-kondo` will trigger lots of error if you don’t provide the `:default` reader because it assumes that you might run the code on a platform that doesn’t match any of the provided reader.\n\n## Step 2 (optional): Add type hints\n\nMagic supports the same shorthands as in Clojure: [Magic types shorthands](https://github.com/nasser/magic/blob/master/src/magic/analyzer/types.clj#L37).\n\n### Value Type hints\n\nWe want to add Magic type hints in our Clojure code to prevent slow argument boxing at run time.\n\nThe main place we want to add the type hints are the function arguments such as in:\n\n```clojure\n(defn straights-n\n "Returns all possible straights with given length of cards."\n [n cards wheel?]\n #?(:clj [n cards wheel?]\n :cljr [^int n cards ^Boolean wheel?])\n (...))\n```\n\nNote the user conditionals here to not affect our Clojure codes and tests to be run on the JVM. \n\nI did not remove the reader conditionals here (the shorthands being the same in both Clojure and Magic It would run), because we don’t want our Clojure tests to be affected and we want to keep the dynamic idiom of Clojure. Also `wheel?` could very likely have the value `nil`, passed by one of the tests, which is in fact not a boolean.\n\nSo we want to keep our type hints in the `:cljr` reader to prevent Magic from doing slow reflection but we don’t want to affect our `:clj` reader that must remain dynamic and so type free to not alter our tests.\n\n### Ref Type hints\n\nOne of the best benefit of type hinting for Magic is to type hint records and their fields.\n\nHere is an example of a record fields type hinting:\n\n```clojure\n(defrecord GameState #?(:clj [players next-pos game-over?]\n :cljr [players ^long next-pos ^boolean game-over?])\n(...))\n```\n\nAs you can see, not all fields are type hinted because for some, we don’t have a way to do so.\n\nThere is no way to type hints a collection parameter in Magic.\n\n`players` is a vector of `Players` records. We don’t have a way to type hints such type. Actually we don’t have a way to type hints a collection in Magic. 
In Clojure (Java), we can type hint a collection of a known types such as:\n\n```clojure\n;; Clojure file\nuser\x3e (defn f\n "`poker-cards` is a vector of `PokerCard`."\n [^"[Lmyproj.PokerCard;" poker-cards]\n (map :num poker-cards))\n;\x3d\x3e #\'myproj.combination/f\n\n;; Clojure REPL\nuser\x3e (f [(-\x3ePokerCard :d :3) (-\x3ePokerCard :c :4)])\n;\x3d\x3e (:3 :4)\n```\n\nHowever, in Magic, such thing is not possible.\n\nparameters which are `maps` do not benefit much from type hinting because a map could be a `PersistentArrayMap`, a `PersistentHashMap` or even a `PersistentTreeMap` so we would need to just `^clojure.lang.APersistentMap` just to be generic which is not really relevant.\n\nTo type hint a record as parameter, it is advices to `import` it first to avoid having to write the fully qualified namespace:\n\n```clojure\n;; Import the Combination class so we can use type hint format ^Combination\n#?(:cljr (:import [myproj.combination Combination]))\n```\n\nThen we can type hint a parameter which is a record conveniently such as:\n\n```clojure\n(defn pass?\n "Returns true it the combi is a pass."\n #?(:clj [combi]\n :cljr [^Combination combi])\n (combi/empty-combi? combi))\n```\n\nA record field can also a be a known record types such as:\n\n```clojure\n(defrecord Player #?(:clj [combi penalty?]\n :cljr [^Combination combi\n ^boolean penalty?]))\n```\n\n### Type hints and testing\n\nSince in Clojure, we tend to use simplified parameters to our function to isolate the logic being tested (a map instead of a record, nil instead of false, a namespaced keyword instead of a map etc.), naturally lots of tests will fail in the CLR because of the type hints.\n\nWe don’t want to change our test suite with domain types so you can just add a reader conditionals to the tests affected by the type hints in the CLR.\n\n### Interop common cases\n\n#### Normal case\n\nFor interop, you can use the reader conditionals such as in:\n\n```clojure\n(defn round-perc\n "Rounds the given `number`."\n [number]\n #?(:clj (-\x3e number double Math/round)\n :cljr (-\x3e number double Math/Round long)))\n```\n\n#### Deftype equals methods override\n\nFor the `deftype` to work in the CLR, we need to override different equals methods than the Java ones. In Java we use `hashCode` and `equal` but in .net we use `hasheq` and `equiv`.\n\nHere is an example on how to override such methods:\n\n```clojure\n(deftype MyRecord [f-conj m rm]\n ;; Override equals method to compare two MyRecord.\n #?@(:clj\n [Object\n (hashCode [_] (.hashCode m))\n (equals [_ other]\n (and (instance? MyRecord other) (\x3d m (.m other))))]\n :cljr\n [clojure.lang.IHashEq\n (hasheq [_] (hash m))\n clojure.lang.IPersistentCollection\n (equiv [_ other]\n (and (instance? 
MyRecord other) (\x3d m (.m other))))]))\n```\n\n#### Defecord empty method override for IL2CCP\n\nFor the `defrecord` to work in case we target **IL2CPP** (all our apps), you need to override the default implementation of the `empty` method such as:\n\n```clojure\n(defrecord PokerCard [^clojure.lang.Keyword suit ^clojure.lang.Keyword num]\n #?@(:cljr\n [clojure.lang.IPersistentCollection\n (empty [_] nil)]))\n```\n\nNote the vector required with the **splicing** reader conditional `#?@`.\n\n## Step 3: Manage dependencies\n\nSince magic was created before `tools.deps` or `leiningen`, it has its own deps management system and the dedicated file for it is `project.edn`.\n\nHere is an example of a project.edn:\n```clojure\n{:name "My project"\n :source-paths ["src" "test"]\n :dependencies [[:github skydread1/clr.test.check "magic"\n :sha "a23fe55e8b51f574a63d6b904e1f1299700153ed"\n :paths ["src"]]\n [:gitlab my-private-lib1 "master"\n :paths ["src"]\n :sha "791ef67978796aadb9f7aa62fe24180a23480625"\n :token "r7TM52xnByEbL6mfXx2x"\n :domain "my.domain.sg"\n :project-id "777"]]}\n```\n\nRefer to the Nostrand [README](https://github.com/nasser/nostrand/blob/master/README.md) for more details.\n\nSo you need to add a `project.edn`at the root of your directory with other libraries.\n\n## Step 4: Compile to the CLR\n\n### Nostrand\n\n[nasser/nostrand](https://github.com/nasser/nostrand) is for magic what [tools.deps](https://github.com/clojure/tools.deps.alpha) or [leiningen](https://github.com/technomancy/leiningen) are for a regular Clojure project. Magic has its own dependency manager and does not use tools.deps or len because it was implemented before these deps manager came out!\n\nYou can find all the information you need to build and test your libraries in dotnet in the [README](https://github.com/nasser/nostrand/blob/master/README.md).\n\nIn short, you need to clone nostrand and create a dedicated Clojure namespace at the root of your project to run function with Nostrand.\n\n### Build your Clojure project to .net\n\nIn my case I named my nostrand namespace `dotnet.clj`.\n\nYou cna have a look at the [clr.test.check/dotnet.clj](https://github.com/skydread1/clr.test.check/blob/magic/dotnet.clj), it is a port of clojure/test.check that compiles in both JVM and CLR.\n\nWe have the following require:\n```clojure\n(:require [clojure.test :refer [run-all-tests]]\n [magic.flags :as mflags])\n```\n\nDon’t forget to set the 2 magic flags to true:\n\n```clojure\n(defn build\n "Compiles the project to dlls.\n This function is used by `nostrand` and is called from the terminal in the root folder as:\n nos dotnet/build"\n []\n (binding [*compile-path* "build"\n *unchecked-math* *warn-on-reflection*\n mflags/*strongly-typed-invokes* true\n mflags/*direct-linking* true\n mflags/*elide-meta* false]\n (println "Compile into DLL To : " *compile-path*)\n (doseq [ns prod-namespaces]\n (println (str "Compiling " ns))\n (compile ns))))\n```\n\nTo build to the `*compile-path*` folder, just run the `nos` command at the root of your project:\n\n```clojure\nnos dotnet/build\n```\n\n## Step 5: Test your Clojure project to .net\n\nSame remark as for the build section:\n\n```clojure\n(defn run-tests\n "Run all the tests on the CLR.\n This function is used by `nostrand` and is called from the terminal in the root folder as:\n nos dotnet/run-tests"\n []\n (binding [*unchecked-math* *warn-on-reflection*\n mflags/*strongly-typed-invokes* true\n mflags/*direct-linking* true\n mflags/*elide-meta* false]\n (doseq [ns (concat 
prod-namespaces test-namespaces)]\n (require ns))\n (run-all-tests)))\n```\n\nTo run the tests, just run the `nos` command at the root of your project:\n\n```clojure\nnos dotnet/run-tests\n```\n\n## Example of a Clojure library ported to Magic\n\nAn example of a Clojure library that has been ported to Magic is [skydread1/clr.test.check](https://github.com/skydread1/clr.test.check/tree/magic), a fork of clojure/clr.test.check.\nMy fork uses reader conditionals so it can be run and tested in both JVM and CLR.\n\n## Learn more\n\nNow that your library is compiled to dotnet, you can learn how to package it to nuget, push it in to your host repo and import in Unity in this article:\n- [Pack, Push and Import Clojure to Unity](https://www.loicblanchard.me/blog/clojure-in-unity).\n', +new S(null,2,5,T,[new S(null,2,5,T,["Magic","https://github.com/nasser/magic"],null),new S(null,2,5,T,["Nostrand","https://github.com/nasser/nostrand"],null)],null),"\n",iu,"Port your Clojure lib to the CLR with MAGIC","port-clj-lib-to-clr",new n(null,3,[Zo,"/assets/loic-blog-logo.png",Wo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-port-clj-lib"]),ij([Sl,xm,Vn,Lp,Mp,oq,Cr,Ws,st],[new S(null,6,5,T,"Clojure;Kaocha;Malli;Rich Comment Tests;Instrumentation;Data validation/generation".split(";"), null),new S(null,1,5,T,["2024-08-10"],null),'\n## Introduction\n\nThis article introduces effective testing libraries and methods for those new to Clojure.\n\nWe\'ll explore using the [kaocha](https://github.com/lambdaisland/kaocha) test runner in both REPL and terminal, along with configurations to enhance feedback. Then we will explain how tests as documentation can be done using [rich-comment-tests](https://github.com/matthewdowney/rich-comment-tests).\n\nWe will touch on how to do data validation, generation and instrumentation using [malli](https://github.com/metosin/malli).\n\nFinally, I will talk about how I manage integrations tests with eventual external services involved.\n\n## Test good code\n\n### Pure functions\n\nFirst of all, always remember that it is important to have as many pure functions as possible. It means, the same input passed to a function always returns the same output. 
This will simplify the testing and make your code more robust.\n\nHere is an example of unpredictable **impure** logic:\n\n```clojure\n(defn fib\n "Read the Fibonacci list length to be returned from a file,\n Return the Fibonacci sequence."\n [variable]\n (when-let [n (-\x3e (slurp "config/env.edn") edn/read-string (get variable) :length)]\n (-\x3e\x3e (iterate (fn [[a b]] [b (+\' a b)])\n [0 1])\n (map first)\n (take n))))\n\n(comment\n ;; env.edn has the content {:FIB 10}\n (fib :FIB) ;\x3d\x3e 10\n ;; env.edn is empty\n (fib :FIB) ;\x3d\x3e nil\n )\n```\n\nFor instance, reading the `length` value from a file before computing the Fibonacci sequence is **unpredictable** for several reasons:\n\n- the file could not have the expected value\n- the file could be missing\n- in prod, the env variable would be read from the system not a file so the function would always return `nil`\n- what if the FIB value from the file has the wrong format.\n\nWe would need to test too many cases unrelated to the Fibonacci logic itself, which is bad practice.\n\nThe solution is to **isolate** the impure code:\n\n```clojure\n(defn fib\n "Return the Fibonacci sequence with a lenght of `n`."\n [n]\n (-\x3e\x3e (iterate (fn [[a b]] [b (+\' a b)])\n [0 1])\n (map first)\n (take n)))\n\n^:rct/test\n(comment\n (fib 10) ;\x3d\x3e [0 1 1 2 3 5 8 13 21 34]\n (fib 0) ;\x3d\x3e []\n )\n\n(defn config\x3c-file\n "Reads the `config/env.edn` file, gets the value of the given key `variable`\n and returns it as clojure data."\n [variable]\n (-\x3e (slurp "config/env.edn") edn/read-string (get variable)))\n\n(comment\n ;; env.edn contains :FIB key with value {:length 10}\n (config\x3c-file :FIB) ;\x3d\x3e {:length 10}\n ;; env.edn is empty\n (config\x3c-file :FIB) ;\x3d\x3e {:length nil}\n )\n```\n\nThe `fib` function is now **pure** and the same input will always yield the same output. I can therefore write my unit tests and be confident of the result. You might have noticed I added `^:rct/test` above the comment block which is actually a unit test that can be run with RCT (more on this later).\n\nThe **impure** code is isolated in the `config\x3c-file` function, which handles reading the environment variable from a file.\n\nThis may seem basic, but it\'s the essential first step in testing: ensuring the code is as pure as possible for easier testing is one of the strengths of **data-oriented** programming!\n\n## Test runner: Kaocha\n\nFor all my personal and professional projects, I have used [kaocha](https://github.com/lambdaisland/kaocha) as a test-runner. 
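As a side note, if your editor has no test-runner integration, Kaocha can also be invoked programmatically from the REPL through its `kaocha.repl` namespace. Here is a minimal sketch, assuming the `:test` alias shown below is on the classpath:

```clojure
(require '[kaocha.repl :as k])

;; Run a single suite defined in tests.edn (here the :unit suite).
(k/run :unit)

;; Run everything, like `clj -M:dev:test` in the terminal.
(k/run-all)
```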
\n\nThere are 2 main ways to run the tests that developers commonly use:\n\n- Within the **REPL** as we implement our features or fix bugs\n- In the **terminal**: to verify that all tests pass or to target a specific group of tests\n\nHere is the `deps.edn` I will use in this example:\n\n```clojure\n{:deps {org.clojure/clojure {:mvn/version "1.11.3"}\n org.slf4j/slf4j-nop {:mvn/version "2.0.15"}\n metosin/malli {:mvn/version "0.16.1"}}\n :paths ["src"]\n :aliases\n {:dev {:extra-paths ["config" "test" "dev"]\n :extra-deps {io.github.robertluo/rich-comment-tests {:git/tag "v1.1.1", :git/sha "3f65ecb"}}}\n :test {:extra-paths ["test"]\n :extra-deps {lambdaisland/kaocha {:mvn/version "1.91.1392"}\n lambdaisland/kaocha-cloverage {:mvn/version "1.1.89"}}\n :main-opts ["-m" "kaocha.runner"]}\n :jib {:paths ["jibbit" "src"]\n :deps {io.github.atomisthq/jibbit {:git/url "https://github.com/skydread1/jibbit.git"\n :git/sha "bd873e028c031dbbcb95fe3f64ff51a305f75b54"}}\n :ns-default jibbit.core\n :ns-aliases {jib jibbit.core}}\n :outdated {:deps {com.github.liquidz/antq {:mvn/version "RELEASE"}}\n :main-opts ["-m" "antq.core"]}\n :cljfmt {:deps {io.github.weavejester/cljfmt {:git/tag "0.12.0", :git/sha "434408f"}}\n :ns-default cljfmt.tool}}}\n```\n\n### Kaocha in REPL\n\nRegarding the bindings to run the tests From the REPL, refer to your IDE documentation. I have experience using both Emacs (spacemacs distribution) and VSCode and running my tests was always straight forward. If you are starting to learn Clojure, I recommend using VSCode, as the Clojure extension [calva](https://github.com/BetterThanTomorrow/calva) is of very good quality and well documented. I’ll use VSCode in the following example.\n\nLet’s say we have the following test namespace:\n\n```clojure\n(ns my-app.core.fib-test\n (:require [clojure.test :refer [deftest is testing]]\n [my-app.core :as sut]))\n\n(deftest fib-test\n (testing "The Fib sequence is returned."\n (is (\x3d [0 1 1 2 3 5 8 13 21 34]\n (sut/fib 10)))))\n```\n\nAfter I `jack-in` using my *dev* alias form the `deps.edn` file, I can load the `my-app.core-test` namespace and run the tests. Using Calva, the flow will be like this:\n\n1. *ctrl+alt+c* *ctrl+alt+j*: jack-in (select the `dev` alias in my case)\n2. *ctrl+alt+c* *enter* (in the `fib-test` namespace): load the ns in the REPL\n3. *ctrl+alt+c* *t* (in the `fib-test` namespace): run the tests\n\nIn the REPL, we see:\n```clojure\nclj꞉user꞉\x3e\n; Evaluating file: fib_test.clj\n#\'my-app.core.fib-test/system-test\nclj꞉my-app.core.fib-test꞉\x3e \n; Running tests for the following namespaces:\n; my-app.core.fib-test\n; my-app.core.fib\n\n; 1 tests finished, all passing \ud83d\udc4d, ns: 1, vars: 1\n```\n\n### Kaocha in terminal\n\nBefore committing code, it\'s crucial to run all project tests to ensure new changes haven\'t broken existing functionalities.\n\nI added a few other namespaces and some tests.\n\nLet’s run all the tests in the terminal:\n\n```clojure\nclj -M:dev:test\nLoading namespaces: (my-app.core.cfg my-app.core.env my-app.core.fib my-app.core)\nTest namespaces: (:system :unit)\nInstrumented my-app.core.cfg\nInstrumented my-app.core.env\nInstrumented my-app.core.fib\nInstrumented my-app.core\nInstrumented 4 namespaces in 0.4 seconds.\nmalli: instrumented 1 function vars\nmalli: dev-mode started\n[(.)][(()(..)(..)(..))(.)(.)]\n4 tests, 9 assertions, 0 failures.\n```\n\nNote the `Test namespaces: (:system :unit)`. By default, Kaocha runs all tests. 
When no metadata is specified on the `deftest`, it is considered in the Kaocha `:unit` group. However, as the project grows, we might have slower tests that are system tests, load tests, stress tests etc. We can add metadata to their `deftest` in order to group them together. For instance:\n\n```clojure\n(ns my-app.core-test\n (:require [clojure.test :refer [deftest is testing]]\n [malli.dev :as dev]\n [malli.dev.pretty :as pretty]\n [my-app.core :as sut]))\n\n(dev/start! {:report (pretty/reporter)})\n\n(deftest ^:system system-test ;; metadata to add this test in the `system` kaocha test group \n (testing "The Fib sequence is returned."\n (is (\x3d [0 1 1 2 3 5 8 13 21 34]\n (sut/system #:cfg{:app #:app{:name "app" :version "1.0.0"}\n :fib #:fib{:length 10}})))))\n```\n\nWe need to tell Kaocha when and how to run the system test. Kaocha configurations are provided in a `tests.edn` file:\n\n```clojure\n#kaocha/v1\n {:tests [{:id :system :focus-meta [:system]} ;; only system tests\n {:id :unit}]} ;; all tests\n```\n\nThen in the terminal:\n\n```bash\nclj -M:dev:test --focus :system\nmalli: instrumented 1 function vars\nmalli: dev-mode started\n[(.)]\n1 tests, 1 assertions, 0 failures.\n```\n\nWe can add a bunch of metrics on top of the tests results. These metrics can be added via the `:plugins` keys:\n\n```clojure\n#kaocha/v1\n {:tests [{:id :system :focus-meta [:system]}\n {:id :unit}]\n :plugins [:kaocha.plugin/profiling\n :kaocha.plugin/cloverage]}\n```\n\nIf I run the tests again:\n\n```clojure\nclj -M:dev:test --focus :system\nLoading namespaces: (my-app.core.cfg my-app.core.env my-app.core.fib my-app.core)\nTest namespaces: (:system :unit)\nInstrumented my-app.core.cfg\nInstrumented my-app.core.env\nInstrumented my-app.core.fib\nInstrumented my-app.core\nInstrumented 4 namespaces in 0.4 seconds.\nmalli: instrumented 1 function vars\nmalli: dev-mode started\n[(.)]\n1 tests, 1 assertions, 0 failures.\n\nTop 1 slowest kaocha.type/clojure.test (0.02208 seconds, 97.0% of total time)\n system\n 0.02208 seconds average (0.02208 seconds / 1 tests)\n\nTop 1 slowest kaocha.type/ns (0.01914 seconds, 84.1% of total time)\n my-app.core-test\n 0.01914 seconds average (0.01914 seconds / 1 tests)\n\nTop 1 slowest kaocha.type/var (0.01619 seconds, 71.1% of total time)\n my-app.core-test/system-test\n 0.01619 seconds my_app/core_test.clj:9\nRan tests.\nWriting HTML report to: /Users/loicblanchard/workspaces/clojure-proj-template/target/coverage/index.html\n\n|-----------------+---------+---------|\n| Namespace | % Forms | % Lines |\n|-----------------+---------+---------|\n| my-app.core | 44.44 | 62.50 |\n| my-app.core.cfg | 69.57 | 74.07 |\n| my-app.core.env | 11.11 | 44.44 |\n| my-app.core.fib | 100.00 | 100.00 |\n|-----------------+---------+---------|\n| ALL FILES | 55.26 | 70.59 |\n|-----------------+---------+---------|\n```\n\n### Kaocha in terminal with options\n\nThere are a bunch of options to enhance the development experience such as:\n\n```bash\nclj -M:dev:test --watch --fail-fast\n```\n\n- `watch` mode makes Kaocha rerun the tests on file save.\n- `fail-fast` option makes Kaocha stop running the tests when it encounters a failing test\n\nThese 2 options are very convenient for unit testing.\n\nHowever, when a code base contains slower tests, if the slower tests are run first, the watch mode is not so convenient because it won’t provide instant feedback.\n\nWe saw that we can `focus` on tests with a specific metadata tag, we can also `skip` tests. 
Let’s pretend our `system` test is slow and we want to skip it to only run unit tests:\n\n```bash\n clj -M:dev:test --watch --fail-fast --skip-meta :system\n```\n\nFinally, I don’t want to use the `plugins` (profiling and code coverage) on watch mode as it clutter the space in the terminal, so I want to exclude them from the report.\n\nWe can actually create another kaocha config file for our watch mode.\n\n`tests-watch.edn`:\n\n```clojure\n#kaocha/v1\n {:tests [{:id :unit-watch :skip-meta [:system]}] ;; ignore system tests\n :watch? true ;; watch mode on\n :fail-fast? true} ;; stop running on first failure\n```\n\nNotice that there is no plugins anymore, and watch mode and fail fast options are enabled. Also, the `system` tests are skipped.\n\n```clojure\nclj -M:dev:test --config-file tests_watch.edn\nSLF4J(I): Connected with provider of type [org.slf4j.nop.NOPServiceProvider]\nmalli: instrumented 1 function vars\nmalli: dev-mode started\n[(.)(()(..)(..)(..))]\n2 tests, 7 assertions, 0 failures.\n```\n\nWe can now leave the terminal always on, change a file and save it and the tests will be rerun using all the options mentioned above.\n\n## Documentation as unit tests: Rich Comment Tests\n\nAnother approach to unit testing is to enhance the `comment` blocks to contain tests. This means that we don’t need a test file, we can just write our tests right below our functions and it serves as both documentation and unit tests.\n\nGoing back to our first example:\n\n```clojure\n(ns my-app.core.fib)\n\n(defn fib\n "Return the Fibonacci sequence with a lenght of `n`."\n [n]\n (-\x3e\x3e (iterate (fn [[a b]] [b (+\' a b)])\n [0 1])\n (map first)\n (take n)))\n\n^:rct/test\n(comment\n (fib 10) ;\x3d\x3e [0 1 1 2 3 5 8 13 21 34]\n (fib 0) ;\x3d\x3e []\n )\n```\n\nThe `comment` block showcases example of what the `fib` could return given some inputs and the values after `;\x3d\x3e` are actually verified when the tests are run.\n\n### RC Tests in the REPL\n\nWe just need to evaluate `(com.mjdowney.rich-comment-tests/run-ns-tests! *ns*)` in the namespace we want to test:\n\n```clojure\nclj꞉my-app.core-test꞉\x3e \n; Evaluating file: fib.clj\nnil\nclj꞉my-app.core.fib꞉\x3e \n(com.mjdowney.rich-comment-tests/run-ns-tests! *ns*)\n; \n; Testing my-app.core.fib\n; \n; Ran 1 tests containing 2 assertions.\n; 0 failures, 0 errors.\n{:test 1, :pass 2, :fail 0, :error 0}\n```\n\n### RC Tests in the terminal\n\nYou might wonder how to run all the RC Tests of the project. Actually, we already did that, when we ran Kaocha unit tests in the terminal.\n\nThis is possible by wrapping the RC Tests in a deftest like so:\n\n```clojure\n(ns my-app.rc-test\n "Rich Comment tests"\n (:require [clojure.test :refer [deftest testing]]\n [com.mjdowney.rich-comment-tests.test-runner :as rctr]))\n\n(deftest ^rct rich-comment-tests\n (testing "all white box small tests"\n (rctr/run-tests-in-file-tree! :dirs #{"src"})))\n```\n\nAnd if we want to run just the `rct` tests, we can focus on the metadata (see the metadata in the deftest above).\n\n```clojure\nclj -M:dev:test --focus-meta :rct\n```\n\nIt is possible to run the RC Tests without using Kaocha of course, refer to their doc for that.\n\n## clojure.test vs RCT?\n\nI personally use a mix of both. When the function is not too complex and internal (not supposed to be called by the client), I would use RCT.\n\nFor system tests, which inevitably often involve side-effects, I have a dedicated test namespace. 
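To illustrate, a dedicated system-test namespace with a fixture might look like the following sketch (the `start-system!`/`stop-system!` functions are hypothetical placeholders for whatever resources the app needs):

```clojure
(ns my-app.system-test
  (:require [clojure.test :refer [deftest is testing use-fixtures]]))

;; Hypothetical resource management for this sketch.
(defonce system (atom nil))
(defn start-system! [] (reset! system {:db :connected}))
(defn stop-system!  [] (reset! system nil))

;; :once wraps the whole namespace run: resources are started before
;; the tests and torn down after, even if a test throws.
(use-fixtures :once
  (fn [run-tests]
    (start-system!)
    (try (run-tests)
         (finally (stop-system!)))))

(deftest ^:system db-connected-test
  (testing "The system is up while the tests run."
    (is (= :connected (:db @system)))))
```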
Using `fixture` is often handy and also the tests are way more verbose which would have polluted the src namespaces with a `comment` block.\n\nIn the short example I used in this article, the project tree is as follow:\n\n```bash\n├── README.md\n├── config\n│ └── env.edn\n├── deps.edn\n├── dev\n│ └── user.clj\n├── jib.edn\n├── project.edn\n├── src\n│ └── my_app\n│ ├── core\n│ │ ├── cfg.clj\n│ │ ├── env.clj\n│ │ └── fib.clj\n│ └── core.clj\n├── test\n│ └── my_app\n│ ├── core_test.clj\n│ └── rc_test.clj\n├── tests.edn\n└── tests_watch.edn\n```\n\n`cfg.clj`, `env.clj` and `fib.clj` have RCT and `core_test.clj` has regular deftest.\n\nA rule of thumb could be: use regular deftest if the tests require at least one of the following:\n\n- fixtures: start and tear down resources (db, kafka, entire system etc)\n- verbose setup (configs, logging etc)\n- side-effects (testing the entire system, load tests, stress tests etc)\n\nWhen the implementation is easy to test, using RCT is good for a combo doc+test.\n\n## Data Validation and Generative testing\n\nThere are 2 main libraries I personally used for data validation an generative testing: [clojure/spec.alpha](https://github.com/clojure/spec.alpha) and [malli](https://github.com/metosin/malli). I will not explain in details how both work because that could be a whole article on its own. However, you can guess which one I used in my example project as you might have noticed the `instrumentation` logs when I ran the Kaocha tests: Malli.\n\n### Malli: Data validation\n\nHere is the config namespace that is responsible to validate the env variables passed to our hypothetical app:\n\n```clojure\n(ns my-app.core.cfg\n (:require [malli.core :as m]\n [malli.registry :as mr]\n [malli.util :as mu]))\n\n;; ---------- Schema Registry ----------\n\n(def domain-registry\n "Registry for malli schemas."\n {::app\n [:map {:closed true}\n [:app/name :string]\n [:app/version :string]]\n ::fib\n [:map {:closed true}\n [:fib/length :int]]})\n\n;; ---------- Validation ----------\n\n(mr/set-default-registry!\n (mr/composite-registry\n (m/default-schemas)\n (mu/schemas)\n domain-registry))\n\n(def cfg-sch\n [:map {:closed true}\n [:cfg/app ::app]\n [:cfg/fib ::fib]])\n\n(defn validate\n "Validates the given `data` against the given `schema`.\n If the validation passes, returns the data.\n Else, returns the error data."\n [data schema]\n (let [validator (m/validator schema)]\n (if (validator data)\n data\n (throw\n (ex-info "Invalid Configs Provided"\n (m/explain schema data))))))\n\n(defn validate-cfg\n [cfg]\n (validate cfg cfg-sch))\n\n^:rct/test\n(comment\n (def cfg #:cfg{:app #:app{:name "my-app"\n :version "1.0.0-RC1"}\n :fib #:fib{:length 10}})\n\n (validate-cfg cfg) ;\x3d\x3e\x3e cfg\n (validate-cfg (assoc cfg :cfg/wrong 2)) ;throws\x3d\x3e\x3e some?\n )\n```\n\nNot going into too much details here but you can see that we define a `schema` that follows our data structure. In this case, my data structure I want to spec is my config map.\n\n### Malli: Data Generation\n\nLet’s have a look at a simple example of a test of our system which randomly generates a length and verifies that the result is indeed a sequence of numbers with `length` element:\n\n```clojure\n(ns my-app.core-test\n (:require [clojure.test :refer [deftest is testing]]\n [malli.dev :as dev]\n [malli.dev.pretty :as pretty]\n [malli.generator :as mg]\n [my-app.core :as sut]\n [my-app.core.cfg :as cfg]))\n\n(dev/start! 
{:report (pretty/reporter)})\n\n(deftest ^:system system-test\n (testing "The Fib sequence is returned."\n (is (\x3d [0 1 1 2 3 5 8 13 21 34]\n (sut/system #:cfg{:app #:app{:name "app" :version "1.0.0"}\n :fib #:fib{:length 10}}))))\n (testing "No matter the length of the sequence provided, the system returns the Fib sequence."\n (let [length (mg/generate pos-int? {:size 10})\n cfg #:cfg{:app #:app{:name "app" :version "1.0.0"}\n :fib #:fib{:length length}}\n rslt (sut/system cfg)]\n (is (cfg/validate\n rslt\n [:sequential {:min length :max length} :int])))))\n```\n\nThe second `testing` highlights both data generation (the `length`) and data validation (result must be a sequence of `int` with `length` elements).\n\nThe `dev/start!` starts malli instrumentation. It automatically detects functions which have malli specs and validate it. Let’s see what it does exactly in the next section.\n\n### Malli: Instrumentation\n\nEarlier, we saw tests for the `core/system` functions. Here is the core namespace:\n\n```clojure\n(ns my-app.core\n (:require [my-app.core.cfg :as cfg]\n [my-app.core.env :as env]\n [my-app.core.fib :as fib]))\n\n(defn system\n {:malli/schema\n [:\x3d\x3e [:cat cfg/cfg-sch] [:sequential :int]]}\n [cfg]\n (let [length (-\x3e cfg :cfg/fib :fib/length)]\n (fib/fib length)))\n\n(defn -main [\x26 _]\n (let [cfg (cfg/validate-cfg #:cfg{:app (env/config\x3c-env :APP)\n :fib (env/config\x3c-env :FIB)})]\n (system cfg)))\n```\n\nThe `system` function is straight forward. It takes a config map and returns the fib sequence.\n\nNote the metadata of that function:\n\n```clojure\n{:malli/schema\n [:\x3d\x3e [:cat cfg/cfg-sch] [:sequential :int]]}\n```\n\nThe arrow `:\x3d\x3e` means it is a function schema. So in this case, we expect a config as unique argument and we expect a sequence of int as returned value.\n\nWhen we `instrument` our namespace, we tell malli to check the given argument and returned value and to throw an error if they do not respect the schema in the metadata. It is very convenient.\n\nTo enable the instrumentation, we call `malli.dev/start!` as you can see in the `core-test` namespace code snippet.\n\n### When to use data validation/generation/instrumentation\n\nClojure is a dynamically typed language, allowing us to write functions without being constrained by rigid type definitions. This flexibility encourages rapid development, experimentation, and iteration. Thus, it makes testing a bliss because we can easily mock function inputs or provide partial inputs.\n\nHowever, if we start adding type check to all functions in all namespaces (in our case with malli metadata for instance), we introduce strict typing to our entire code base and therefore all the constraints that come with it.\n\nPersonally, I recommend adding validation for the entry point of the app only. For instance, if we develop a library, we will most likely have a top level namespace called `my-app.core` or `my-app.main` with the different functions our client can call. These functions are the ones we want to validate. All the internal logic, not supposed to be called by the clients, even though they can, do not need to be spec’ed as we want to maintain the flexibility I mentioned earlier.\n\nA second example could be that we develop an app that has a `-main` function that will be called to start our system. A system can be whatever our app needs to perform. It can start servers, connect to databases, perform batch jobs etc. 
Note that in that case the entry point of our program is the `-main` function. What we want to validate is that the proper params are passed to the system that our `-main` function will start. Going back to our Fib app example, our system is very simple, it just returns the Fib sequence given the length. The length is what need to be validated in our case as it is provided externally via env variable. That is why we saw that the system function had malli metadata. However, our internal function have tests but no spec to keep that dynamic language flexibility that Clojure offers.\n\nFinally, note the distinction between `instrumentation`, that is used for development (the metadata with the function schemas) and data validation for production (call to `cfg/validate-cfg`). For overhead reasons, we don\'t want to instrument our functions in production, it is a development tool. However, we do want to have our system throws an error when wrong params are provided to our system, hence the call to `cfg/validate-cfg`.\n\n## Load/stress/integration tests\n\nIn functional programming, and especially in Clojure, it is important to avoid side effects (mutations, external factors, etc) as much as we can. Of course, we cannot avoid mutations as they are inevitable: start a server, connect to a database, IOs, update frontend web state and much more. What we can do is isolate these side effects so the rest of the code base remains pure and can enjoy the flexibility and thus predictable behavior.\n\n### Mocking data\n\nSome might argue that we should never mock data. From my humble personal experience, this is impossible for complex apps. An app I worked on consumes messages from different kafka topics, does write/read from a datomic database, makes http calls to multiple remote servers and produces messages to several kafka topics. So if I don’t mock anything, I need to have several remote http servers in a test cluster just for testing. I need to have a real datomic database with production-like data. I need all the other apps that will produce kafka messages that my consumers will process. In other words, it is not possible.\n\nWe can mock functions using [with-redefs](https://clojuredocs.org/clojure.core/with-redefs) which is very convenient for testing. Using the clojure.test [use-fixtures](https://clojuredocs.org/clojure.test/use-fixtures) is also great to start and tear down services after the tests are done.\n\n### Integration tests\n\nI mentioned above, an app using datomic and kafka for instance. In my integration tests, I want to be able to produce kafka messages and I want to interact with an actual datomic db to ensure proper behavior of my app. The common approach for this is to use `embedded` versions of these services. Our test fixtures can start/delete an embedded datomic database and start/stop kafka consumers/producers as well.\n\nWhat about the http calls? We can `with-redefs` those to return some valid but randomly generated values. 
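Here is a minimal sketch of that idea (the `fetch-user!` function standing in for a real HTTP call is hypothetical):

```clojure
(ns my-app.integration-test
  (:require [clojure.test :refer [deftest is testing]]
            [malli.generator :as mg]))

;; Hypothetical function that performs a real HTTP call in production.
;; In the test we only need its var so it can be redefined.
(defn fetch-user!
  "Fetches a user by id from a remote server (side effect)."
  [id]
  (throw (ex-info "Should not hit the network in tests" {:id id})))

(defn greet-user
  "Business logic under test: builds a greeting from the fetched user."
  [id]
  (str "Hello " (:user/name (fetch-user! id)) "!"))

(deftest greet-user-test
  (testing "The remote call is redefined to return valid, generated data."
    (with-redefs [fetch-user! (fn [_] {:user/name (mg/generate :string)})]
      (is (string? (greet-user 42))))))
```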
Integration tests aim at ensuring that all components of our app work together as expected and embedded versions of external services and redefinitions of vars can make the tests predictable and suitable for CI.\n\nI have not touch on running tests in the CI, but integration tests should be run in the CI and if all services are embedded, there should be no difficulty in setting up a pipeline.\n\n### Load/stress tests\n\nTo be sure an app performs well under heavy load, embedded services won’t work as they are limited in terms of performance, parallel processing etc. In our example above, If I want to start lots of kafka consumers and to use a big datomic transactor to cater lots of transactions, embedded datomic and embedded kafka won’t suffice. So I have to run a datomic transactor on my machine (maybe I want the DB to be pre-populated with millions or entities as well) and I will need to run kafka on my machine as well (maybe using confluent [cp-all-in-one](https://github.com/confluentinc/cp-all-in-one) container setup). Let’s get fancy, and also run prometheus/grafana to monitor the performance of the stress tests.\n\nYour intuition is correct, it would be a nightmare for each developer of the project to setup all services. One solution is to containerized all these services. a datomic transactor can be run in docker, confluent provides a docker-compose to run kafka zookeeper, broker, control center etc, prometheus scrapper can be run in a container as well as grafana. So providing docker-compose files in our repo so each developer can just run `docker-compose up -d` to start all necessary services is the solution I recommend.\n\nNote that I do not containerized my clojure app so I do not have to change anything in my workflow. I deal with load/stress tests the same way I deal with my unit tests. I just start the services in the containers and my Clojure REPL as per usual.\n\nThis setup is not the only solution to load/stress tests but it is the one I successfully implemented in my project and it really helps us being efficient.\n\n## Conclusion\n\nI highlighted some common testing tools and methods that the Clojure community use and I explained how I personally incorporated these tools and methods to my projects. Tools are common to everybody, but how we use them is considered opinionated and will differ depending on the projects and team decision.\n\nIf you are starting your journey as a Clojure developer, I hope you can appreciate the quality of open-source testing libraries we have access to. Also, please remember that keeping things pure is the key to easy testing and debugging; a luxury not so common in the programming world. Inevitably, you will need to deal with side effects but isolate them as much as you can to make your code robust and your tests straight forward.\n\nFinally, there are some tools I didn’t mention to keep things short so feel free to explore what the Clojure community has to offer. The last advice I would give is to not try to use too many tools or only the shiny new ones you might find. 
Keep things simple and evaluate if a library is worth being added to your deps.\n\n', -"\n",ju,"Testing in Clojure","testing-in-clojure",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"testing-in-clojure"]),ij([Sl,ym,Wn,Mp,Np,pq,Dr,Zs,tt],[new S(null,4,5,T,["Git","Workflows","Branching","CI/CD"],null),new S(null,1,5,T,["2024-05-12"],null),"\n## Introduction\n\nDepending on the size of the projects and its CI/CD requirements, one might choose one of the popular [Git Workflows](https://www.atlassian.com/git/tutorials/comparing-workflows). Some are good for some scenarios, some are never good and some are questionable to say the least.\n\nIn this article, I will explain how the main workflows work and which one to use and when in my opinion.\n\n## Trunk-Based Development\n\n### Timeline Example\n\n![Trunked Based Dev](/assets/git-workflows/trunk-based-dev.png)\n\n### No Branching\n\nThat’s it. You have your `main` branch and everybody pushes to it. Some might call it madness others would say that excellent CI/CD setup does not require branching.\n\nIf you are the only one working on your project, you *could* push to `main` directly. If you are an excellent developer and have the confidence to push to main and have very good CI/CD in place or none (so merging to `main` is not critical), you could use that strategy. I see this strategy quite often in small open-source projects maintained by a single developer with manual release (so no CD, just CI for testing).\n\n### Should you use it?\n\nYou might have realized already that this strategy applies to very few teams and I don’t think you will encounter that one-branch strategy a lot at your daily jobs. I don’t recommend that strategy as in my humble opinion, PRs are essentials in a good development process. Some people tend to view PR as someone having authority on their code but that’s the wrong way of seeing it. PR offers a second opinion on the code and **everybody** can suggest good changes. I make Junior Developers review my code from the moment they join the company and they have good suggestions in the comments of the PRs regularly.\n\nBack to `TBD`, you need good trust in your colleagues as there is no code review. That is the reason I mentioned that it might be suitable for experience developers.\n\nAnyway, don’t use trunk-based dev unless you know exactly what you are doing and have lots of experience already or a pretty non-critical project and you want very **fast** code base updates.\n\n## Feature Branches\n\n### Timeline Example\n\n![Feature Branching](/assets/git-workflows/feature-branching.png)\n\n### Pull Requests\n\nEverybody should be familiar with that one. Bob pulls a branch from main, implements the feature and pushes that feature branch to remote. Bob then opens a PR/MR (Github call it Pull Request, Gitlab call it Merge Request) and Alice reviews Bob's code before merging to `main`.\n\nIf Alice suggests some changes, Bob pushes new commits to his `feature` branch. Once Alice approves the changes, Bob can merge to `main`.\n\n### Solo Dev\n\nI think that even for personal projects, you should create PR to merge into `main`. This allows you to define properly what is the `scope` of the changes you are working on. 
Furthermore, you might have CI that checks format, runs tests, etc., and those jobs can differ between pushing to a `feature` branch and merging to `main`.\n\nFor example, I have a portfolio website (Single Page Application) that is hosted on Netlify. When I open a PR, Netlify builds a js bundle and shows me a preview of what the new version of the website will look like on Web and Mobile. This is very convenient. Once I merge to `main`, Netlify deploys the new js bundle to my domain. So my PR triggers test checks and a UI preview (CI) and merging to `main` deploys the js bundle to my domain (CD).\n\n### Working with others\n\nHaving `feature` branches that are merged to `main` is the bare minimum when working with other developers, in my opinion.\n\nTherefore, for the feature you want to implement, I suggest creating a branch from `main`, solving the issue and raising a PR to get your colleagues’ feedback. In your CI, describe the jobs you want to run on commit to a feature branch and the jobs you want to run when the code is merged to `main`.\n\nYour `main` branch should be protected, meaning only reviewed code can be merged to it and nobody can push directly to it (thus the CI jobs cannot be bypassed).\n\nThis workflow is suitable for simple projects with one or a few contributors and with simple CI/CD.\n\nFinally, the feature branches should be **short lived.** Some people refer to CI (Continuous Integration) strictly as a way of saying we merge quickly to main even if the feature is only partially implemented, as long as it works in production (or is hidden behind a flag, for instance).\n\n### GitHub Flow\n\nFeature branching is what they use at GitHub; they call it `GitHub Flow` but it is the same as `feature branching`. See for yourself from their doc:\n\n\x3e So, what is GitHub Flow?\n\x3e \n\x3e - Anything in the `main` branch is deployable\n\x3e - To work on something new, create a descriptively named branch off of `main` (ie: `new-oauth2-scopes`)\n\x3e - Commit to that branch locally and regularly push your work to the same named branch on the server\n\x3e - When you need feedback or help, or you think the branch is ready for merging, open a [pull request](http://help.github.com/send-pull-requests/)\n\x3e - After someone else has reviewed and signed off on the feature, you can merge it into main\n\x3e - Once it is merged and pushed to 'main', you can and *should* deploy immediately\n\n### Should you use it?\n\nYes. Actually, pretty much everybody uses feature branches.\n\n## Forking\n\n### Timeline Example\n\n![Forking](/assets/git-workflows/forking.png)\n\n### Open Source Contributions\n\nForking is the method used for open-source project contributions. In short, you could **clone** the repo locally but you won’t be able to push any branches because the author won't allow you. Just imagine if anybody could freely push branches to your repo! So the trick is to **fork** the repo (a personal copy on a version control platform) to your own GitHub account. Then you clone that repository instead and work from there. The original Github repo is called the `upstream` and your own copy of the Github repo is called the `origin`.\n\nThen, once your feature is implemented, you can push the code to `origin` (your fork) and then raise a PR to merge the feature `origin/my-feature` to the `upstream/main` branch. 
When the authors/maintainers of the upstream repo approve your PR and merge it to `upstream/main`, you can then “sync” (merge `upstream/main` to `origin/main`) and start working on another feature.\n\nTo link the forking to our previous strategies, you can see that you are basically doing **feature branching** again.\n\nSome open-source authors might push directly to their `main` branch while accepting PRs from forks. In that specific scenario, we can see that the authors are doing **Trunk-Based Development** while requiring external contributors to follow **feature branching**. Interesting, isn’t it?\n\n## Release Branches\n\n### Timeline Example\n\n![Release Branching](/assets/git-workflows/release-branching.png)\n\n### It’s getting ugly\n\nIndeed, some projects might have multiple versions deployed and accessible by clients at the same time. The common example would be the need to still support old products or old API versions.\n\nIn the timeline chart above, you can see that it is getting a bit more verbose but not so difficult to grasp. We branch `release-1.0` from `main`. Bob starts working on features and merges them to `release-1.0`. At some point, the code is deemed ready to be deployed and therefore merged to `main`. Bob quickly moves on to build features for the next release, `release-1.1`.\n\nUnfortunately, a bug is discovered in production and needs urgent fixing. Alice merges a hotfix into `main` to patch the issue. Production is now stable and a new version arises from the patch: `v1.0.1`. We then sync `release-1.0` with `main` so our version on `release-1.0` is also `v1.0.1`.\n\nWhile Alice was patching `production`, Bob already pushed some features to the new release branch. So we need to merge the patches made by Alice into Bob’s new code, and that is why we also need to sync `release-1.1` with `main`. After syncing, Bob can merge his new release as `1.1.1` to `main`.\n\nIf you got confused with the version numbers, I redirect you to [SemVer](https://semver.org/) but in short, a version is of the format *Major.Minor.Patch*. **Major** is used for incompatible code (like 2 independent API versions). **Minor** is in our example the `release` and **Patch** is the `hotfix` from Alice. This way, when Bob merged his branch `release-1.1`, he did include the hotfix of Alice, making the new version in `main` not `1.1.0` but indeed `1.1.1`.\n\n### Should you use it?\n\nIf you don’t need to support multiple releases at once, no, don’t use it. Ideally, you merge your features quite frequently and one release does not break the other ones. It is actually very often the case that we do not need to support old versions. So if you can, don’t use it.\n\n## GitFlow\n\n### Timeline Example\n\n![GitFlow](/assets/git-workflows/gitflow.png)\n\n### Fatality\n\nTo quote [Atlassian](https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow):\n\n\x3e Gitflow is a legacy Git workflow that was originally a disruptive and novel strategy for managing Git branches. Gitflow has fallen in popularity in favor of [trunk-based workflows](https://www.atlassian.com/continuous-delivery/continuous-integration/trunk-based-development), which are now considered best practices for modern continuous software development and [DevOps](https://www.atlassian.com/devops/what-is-devops) practices. 
Gitflow also can be challenging to use with [CI/CD](https://www.atlassian.com/continuous-delivery).\n\x3e \n\nSo GitFlow is obsolete and you will soon understand why.\n\nIt is similar to what we just saw with the **release branching** but now we have another branch called `develop`. So every feature is merged to `develop`. Once a version is ready we merge it to the corresponding `release` branch. On that release branch, some additional commits might be pushed before merging to `main`. On new version merged to `main`, we need to sync A LOT. You can see on the chart above all the potential merge conflicts represented by a sword. I hope this visual representation highlights the problem: too many potential merge conflicts.\n\n### But why?\n\nIt is a good question, I am not sure. The idea of having a `develop` branch is very common in a lot of projects, but why combine it with `release` branches like that I am not sure to be frank. I don’t recommend to use GitFlow and it seems obsolete for a reason. In general we want the following:\n\n- as few branches as possible\n- short lived branches with small or partial but workable features to be deployed\n\nI see `GitFlow` as the opposite of `Continuous Integration` (in the sense of merging frequently new features and having new deployable codes ready regularly). For fun, let’s have a look at what happens after a hotfix in prod:\n\n- hotfix-1.0.1 ⚔️ main\n- main ⚔️ release-1.0\n- main ⚔️ release-1.1\n- main ⚔️ develop\n- develop ⚔️ feature\n\nI am grateful that I never had to work with `GitFlow` and I have the feeling that implementing it would mean having a dedicated engineer to take care of the branching, a sort of *Git gardener*.\n\n## Feature branching on develop\n\n### Timeline Example\n\n![Feature Branching on Develop](/assets/git-workflows/feature-branching-on-develop.png)\n\n### Develop branch\n\nThe GitFlow aspect that most people still use is the `develop` branch. All the feature branches are merged to `develop` instead of `main`. Once `develop` is deemed ready for release, it is merged to `main`.\n\nThis is useful for a few reasons:\n\n- at any time, we know the commit of the stable release (code in prod) via the `main` branch\n- at any time, we know what is the latest commit of the ongoing new version via the `develop` branch\n\nThis seems like the sweet spot for most cases and that is why it is popular.\n\nMerging a `feature` to `develop` triggers a bunch of CI jobs (the usual, format check, test checks etc)\n\nMerging `develop` to `main` triggers a bunch of CI jobs (build a docker image, push it to a container registry for instance)\n\n### Should you use it?\n\nYes. It is simple yet efficient.\n\n## Release Candidate workflow\n\n### Timeline Example\n\n![Release Candidate Workflow](/assets/git-workflows/RC-workflow.png)\n\nIt is very similar to **Feature Branching to Develop**. The only difference is that when `develop` is merged to `main` it creates a **Release Candidate** (RC) to be tested in a test/staging environment. If an issue in the test environment arises, a hotfix is done and we have a new RC (RC2 in this case). Once everything is ok in the test env, we have a stable release (we just tag a branch basically).\n\nThe advantage of this strategy is that `main` is the line of truth for both test and prod env. `main` contains the RC and stable versions which is great for reporting what went wrong in the test cluster and what is stable in prod.\n\nThis strategy works if `main` does not automatically deploy to production. 
It could deploy something non-critical, such as a docker image of the app to a container registry for instance.\n\n### Tagging Example\n\n- Bob has merged a few features to `develop` and deemed `develop` ready to be merged to `main`. It is a release candidate with version `v1.0.0-RC1`\n- Alice approves Bob's changes and merges `develop` to `main`\n- Alice deploys the app to **staging** and realizes one feature needs correction.\n- Alice branches out of `main` and implement the RC fix and the code is merged to `main`. The new version is `v1.0.0-RC2`.\n- Alice redeploys to **staging** and everything works as expected. Thus Alice bumps the version to stable: `v1.0.0`. She then deploys to **prod**.\n- Unfortunately, in a very edge case, a feature fails in production and needs urgent fixing.\n- Alice branches out of `main` and implements the *hotfix* and merges back to `main`. The version is now `v1.0.1`.\n- All is well now and it's time to *sync* `develop` with `main`.\n\n### Recap\n\n- `feature` branches are merged to `develop`\n- `develop` branch is merged to `main` as version *x.y.z-RCp*\n- `RC-fixes` branches are merged to `main` as new RCs until test passes in test env. Version is *x.y.z-RC(p+1)*\n- `hotfix` branches are merged to `main` if urgent bug in prod env and version is incremented like so: *x.y.z+1*\n- `main` branch is merged to `develop` (Sync) and eventual conflicts with new features are resolved\n- new `features` are implemented for the version *x.(y+1)+z*\n\n### Should you use it?\n\nIf you need a test/staging environment to test changes, RC strategy is good for you. However, if you have only one env and your CD is not critical, prefer the **Feature branching to develop**\n\n## Conclusion\n\nUse **trunk-based** development if you are working alone on a project or with experienced developers you can trust.\n\nPrefer **feature branching** for the PR dedicated CI/feedback from colleagues or yourself.\n\nHaving a **develop** branch between the `features` and `main` branches helps you follow the “Continuous Integration” philosophy in the sense of frequently merging short-lived feature branches to a development line of truth (even if a bit ahead/diverging from main, production line of truth).\n\nOnly use **release branching** if it is absolutely required because of older release maintenance constraints.\n\nIf you have a test/staging env that needs to go through integration testing before going to prod, the **Release Candidate workflow** is advisable.\n\nI think people tend to refer to CI as the test jobs done on PRs from `feature` to `develop` and CD to refer to the build jobs happening on merge to `main`. Others refer to CI as the philosophy of merging short/partial (but working) features as quickly as possible. This can be applied in **Feature branching to develop** in my opinion.\n\nTaking the time to have the simplest branching strategy possible for your project can really make the development experience a bliss for all developers of your team. 
People should focus on implementing quality features and not doing some botanic (lots of branches… anybody?).\n", -"\n",ju,"What Git workflow is suitable for your project","git-workflows",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"git-workflows"]),ij([Sl,ym,Wn,Mp,Np,pq,Dr,Zs,tt],[new S(null,2,5,T,["Clojure","MCTS"],null),new S(null,1,5,T,["2021-08-13"],null),"\n## Objective\n\nAt [Flybot Pte Ltd](https://www.flybot.sg/), we wanted to have a robot-player that can play several rounds of some of our card games (such as `big-two`) at a decent level.\n\nThe main goal of this robot-player was to take over an AFK player for instance.\n\nWe are considering using it for an offline mode with different level of difficulty.\n\nVocabulary:\n\n- `big-two`: popular Chinese Card game (锄大地)\n- `AI` or `robot`: refer to a robot-player in the card game.\n\n2 approaches were used:\n\n- **MCTS**\n- **Domain knowledge**\n\nThe repositories are closed-source because private to Flybot Pte. Ltd.\nThe approaches used are generic enough so they can be applied to any kind of games.\n\nIn this article, I will explain the general principle of MCTS applied to our specific case of `big-two`.\n\n## MCTS theory\n\n### What is MCTS\n\n**Monte Carlo Tree Search** (MCTS) is an important algorithm behind many major successes of recent AI applications such as **AlphaGo’s** striking showdown in 2016.\n\nEssentially, MCTS uses Monte Carlo simulation to accumulate value estimates to guide towards highly rewarding trajectories in the search tree. In other words, MCTS pays more attention to nodes that are more promising, so it avoids having to brute force all possibilities which is impractical to do.\n\nAt its core, MCTS consists of repeated iterations (ideally infinite, in practice constrained by computing time and resources) of 4 steps: `selection`, `expansion`, `simulation` and `update`.\n\nFor more information, this [MCTS article](https://towardsdatascience.com/monte-carlo-tree-search-an-introduction-503d8c04e168) explains the concept very well.\n\n### MCTS applied to big-two\n\nMCTS algorithm works very well on deterministic games with perfect information. In other words, games in which each player perfectly knows the current state of the game and there are no chance events (e.g. draw a card from a deck, dice rolling) during the game.\n\nHowever, there are a lot of games in which there is not one or both of the two components: these types of games are called stochastic (chance events) and games with imperfect information (partial observability of states).\n\nThus, in **big-two**, we don’t know the cards of the other players, so it is a game with imperfect information (more info in this [paper](https://teaching.csse.uwa.edu.au/units/CITS3001/project/2017/paper1.pdf)).\n\nSo we can apply the MCTS to **big-two** but we will need to do 1 of the 2 at least:\n\n- Pre-select moves by filtering the dumb moves and establish a game-plan\n- access to hidden information (the other player’s hand). 
This method is called **Determinization** or also **Perfect** **Information Monte Carlo Sampling**.\n\n## MCTS implementation\n\n### Tree representation\n\nOur tree representation looks like this:\n\n```clojure\n{:S0 {::sut/visits 11 ::sut/score [7 3] ::sut/chldn [:S1 :S2]}\n :S1 {::sut/visits 5 ::sut/score [7 3] ::sut/chldn [:S3 :S4]}\n :S3 {::sut/visits 1 ::sut/score [7 3]}}\n```\n\nIn the big-two case, `S0` is the init-state, `S1` and `S2` are the children states of `S0`.\n\n`S1` is the new state after a possible play is played\n\n`S2` is the new state if another possible play is played etc.\n\n`S1` is a key of the tree map so it means it has been explored before to run simulations.\n\n`S1` has been selected 5 times.\n\n`S2` has never been explored before so it does not appear as a key.\n\nIn games when only the win matters (not the score), you could just use something like `::sut/wins`.\n\n### Selection\n\nTo select the child we want to run simulation from, we proceed like this:\n\n- If some children have not been explored yet, we select randomly one of them\n- If all children have been explored already, we use the UCT to determine the child we select.\n\n`UCT` is the `UCB` (Upper Confidence Bound 1) applied to trees. It provides a way to balance exploration/exploitation. You can read more about it in this [article](https://towardsdatascience.com/the-upper-confidence-bound-ucb-bandit-algorithm-c05c2bf4c13f).\n\nIn the algorithm behind **AlphaGo**, a **UCB** based policy is used. More specifically, each node has an associated UCB value and during selection we always chose the child node with the highest UCB value.\n\nThe **UCB1** formula is the following:\n\n![UCB1 formula](/assets/mcts/ucb1.png)\n\n\x3e With `xi` the mean node value, `ni` the number of visits of node `i`, `N` the number of visits of the parent node.\n\x3e \n\nThe equation includes the 2 following components:\n\n![UCB1 formula parts](/assets/mcts/ucb1_2.png)\n\nThe first part of the equation is the `exploitation` based on the *optimism in the fact of uncertainty*.\n\nThe second part of the equation is the `exploration` that allows the search to go through a very rarely visited branch from time to time to see if some good plays might be hidden there.\n\nIn the **big-two** case, the `exploitation` is the total number of points divided by the number of visits of the node. For every simulation of the games, we add up the number of points the AI has made. We want the average points per game simulation so we divide by the number of times we have visited the node.\n\nIn the **big-two** case, the `exploration` considers the number of visits of the parent node (previous state of the game) and the number of visits of the current node (current state of the game). The more we visit the parent without visiting the specific child the bigger the exploration term becomes. Thus, if we have not visited a child for a long time, since we take the `log10` of `N`, this term becomes dominant and the child will be visited once more.\n\nThe coefficient `c`, called confidence value, allows us to change the proportion of exploration we want.\n\nTo recap, The `UCB` will often return the state that led to the most points in the past simulation. 
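As an illustration only (simplified tree keys, not the article's actual implementation), the selection step based on this UCB value could look like the sketch below; `c` is the confidence value and, as mentioned above, `log10` of the parent visits is used in the exploration term:

```clojure
;; Sketch of the selection step, assuming a simplified tree where each known
;; state maps to {:visits n, :score total-points}.
(defn ucb1
  "UCB value of a child state: average points per simulation (exploitation)
   plus an exploration term that grows when the child is rarely visited."
  [c {:keys [visits score]} parent-visits]
  (+ (/ score visits)
     (* c (Math/sqrt (/ (Math/log10 parent-visits) visits)))))

(defn select-child
  "Pick an unexplored child at random if any, otherwise the child with the
   highest UCB value."
  [c tree parent-state children]
  (let [unexplored (remove tree children)]
    (if (seq unexplored)
      (rand-nth (vec unexplored))
      (apply max-key
             #(ucb1 c (get tree %) (get-in tree [parent-state :visits]))
             children))))
```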
However, from time to time, it will explore and return a child that did not lead to good reward in the past but that might lead to a stronger play.\n\nThe formula applied to **big-two** is the following:\n\n![UCB1 applied to BT](/assets/mcts/ucb_bt.png)\n\n### Expansion\n\nThis step just consists in adding the new selected child to the tree.\n\nIn the **big-two** case, the newly selected state is added to the tree.\n\n### Simulation\n\nFor a given node (state), we run several games with everybody playing random moves and we evaluate the total score of the AI. The total amount of points taken from all the simulations is taken into account in the **UCT** formula explained above.\n\nWe do not consider the win because what matters in **big-two**, more than winning the game, is to score a lot of points (cards remaining in opponents hands) to make more money. Sometimes, it is even better to lose the game as long as the other losers have a lot of cards left in their hands. The win matters for your position in the next round however.\n\n### Update\n\nAfter all the simulations are done, we **back-propagate** all the rewards (sum up the scores of each simulation) to the branch nodes.\n\n### MCTS Iteration\n\nWe call `MCTS iteration` the 4 steps described above: `expand-\x3eselect-\x3esimulate-\x3eupdate`\n\nWe run those 4 steps several times to have a tree that shows the path that has the most chance to lead to the best reward (highest score).\n\nSo, for each AI move, we run several MCTS iterations to build a good tree.\n\nThe more iterations we run, the more accurate the tree is but also the bigger the computing time.\n\n### MCTS properties\n\nWe have 2 properties that can be changed:\n\n- `nb-rollouts`: number of simulations per mcts iteration.\n- `budget`: number of mcts iterations (tree growth)\n\n### MCTS applied to a game with more than 2 players\n\nHaving more than 2 players (4 in **big-two** for instance) makes the process more complex as we need to consider the score of all the players. The default way of handling this case, is to back-propagate all the players scores after different simulations. Then, each robot (position) plays to maximize their score. The UCB value will be computed for the score of the concerned robot.\n\n### Caching\n\nBy caching the function that returns the possible children states, we don’t have to rerun that logic when we are visiting a similar node. The node could have been visited during the simulation of another player before so it saves time.\n\nBy caching the sample function, we do not simulate the same state again. Some states might have been simulated by players before during their mcts iterations. This allows us to go directly a level down the tree without simulating the state again and reusing the rewards back-propagated by a previous move.\n\n### Performance issue\n\nIn Clojure, even with caching, I was not able to run a full game because it was too slow, especially at the beginning of the game which can contain hundreds of different possible moves.\n\nFor `{:nb-rollouts 10 :budget 30}` (10 simulations per state and 30 iterations of mcts), the first move can take more than a minute to compute.\n\nAs a workaround, I had the idea of using MCTS only if a few cards are remaining in the player's hands so at least the branches are not that big in the tree. 
I had decent results in Clojure for big-two.\n\nFor `{:nb-rollouts 10 :budget 30 :max-cards 16}` (16 total cards remaining), in Clojure, it takes less than 3 seconds.\n\nBecause of this problem, I worked on a big-two AI that only uses the **domain knowledge** to play.\n\n## Domain Knowledge\n\nThe problem with MCTS is that even if we don’t brute force all the possibilities, the computing time is still too big if we want to build the tree using random moves.\n\nMost of the possible plays are dumb. Most of the time, we won’t break a fiver just to cover a single card for instance. In case there are no cards on table, we won’t care about having a branch for all the singles if we can play fivers. There are many situations like this. There are lots of branches we don’t need to explore at all.\n\nAs a human player, we always have a `game-plan`, meaning we arrange our cards in our hands with some combinations we want to play if possible and the combination we don’t want to “break\".\n\nWe can use this `game-plan` as an alternative to MCTS, at least for the first moves of the games.\n\nThe details of this `game-plan` are confidential for obvious reasons.\n\n## Conclusion\n\nHaving an hybrid approach, meaning using a `game-plan` for the first moves of the game when the possible plays are too numerous, and then use MCTS at the end of the game allowed us to have a decent AI we can use.\n\nAs of the time I write this article, the implementation is being tested (as part of a bigger system) and not yet in production.\n", -"\n",ju,"MCTS applied to card games","article-mcts",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-post-mcts"]),ij([Sl,ym,Wn,Cp,Mp,Np,pq,Dr,Zs,tt],[new S(null,3,5,T,["Clojure","AWS","Full-Stack"],null),new S(null,1,5,T,["2023-01-20"],null),'\nThis is an example of how to deploy a containerized full-stack Clojure app in AWS EC2.\n\nI will use the [flybot.sg website](https://github.com/skydread1/flybot.sg) as example of app to deploy.\n\n## Prerequisites\n\n- Use an external DNS manager such as goDaddy for instance\n- The app does not handle SSL and domain/protocols redirect\n- The app used `datalevin` as embedded database which resides alongside the Clojure code inside a container\n- The app is an open-source mono-repo and hosted on my GitHub\n- We use ALB for redirects and certificates validations and ELB for static IP entry point.\n\n## Use Jibbit to push to ECR\n\nInstead of using datomic pro and having the burden to have a separate containers for the app and transactor, we decided to use [juji-io/datalevin](https://github.com/juji-io/datalevin) and its embedded storage on disk. 
Thus, we only need to deploy one container with the app.\n\nTo do so, we can use the library [atomisthq/jibbit](https://github.com/atomisthq/jibbit) baed on [GoogleContainerTools/jib](https://github.com/GoogleContainerTools/jib) (Build container images for Java applications).\n\nIt does not use docker to generate the image, so there is no need to have docker installed to generate images.\n\n[jibbit](https://github.com/atomisthq/jibbit) can be added as `alias` in deps.edn:\n\n```clojure\n:jib\n {:deps {io.github.atomisthq/jibbit {:git/tag "v0.1.14" :git/sha "ca4f7d3"}}\n :ns-default jibbit.core\n :ns-aliases {jib jibbit.core}}\n```\n\nThe `jib.edn` can be added in the project root with the configs to generate and push the image.\n\n### Testing the app image locally\n\nExample of jibbit config to just create a local docker image:\n\n```clojure\n;; example to create an docker image to be run with docker locally\n{:main clj.flybot.core\n :aliases [:jvm-base]\n :user "root"\n :group "root"\n :base-image {:image-name "openjdk:11-slim-buster"\n :type :registry}\n :target-image {:image-name "flybot/image:test"\n :type :docker}}\n```\n\nThen we can run the container:\n```\ndocker run \\\n--rm \\\n-it \\\n-p 8123:8123 \\\n-v db-v2:/datalevin/dev/flybotdb \\\n-e OAUTH2\x3d"secret" \\\n-e ADMIN_USER\x3d"secret" \\\n-e SYSTEM\x3d"{:http-port 8123, :db-uri \\"datalevin/dev/flybotdb\\", :oauth2-callback \\"http://localhost:8123/oauth/google/callback\\"}" \\\nflybot/image:test\n```\n\n### AWS profile for CI\n\n[jibbit](https://github.com/atomisthq/jibbit) can also read your local AWS credentials to directly push the generated image to your ECR (Elastic Container Registry).\n\nYou need to have aws cli installed (v2 or v1) and you need an env variable `$ECR_REPO` setup with the ECR repo string.\n\nYou have several [possibilities](https://github.com/atomisthq/jibbit/blob/main/src/jibbit/aws_ecr.clj) to provide credentials to login to your AWS ECR.\n\nHere is the `jib.edn` for the CI:\n\n```clojure\n{:main clj.flybot.core\n :target-image {:image-name "$ECR_REPO"\n :type :registry\n :authorizer {:fn jibbit.aws-ecr/ecr-auth\n :args {:type :profile\n :profile-name "flybot"\n :region "region"}}}}\n```\n\n### ENV variables\n\nI used [repository secrets](https://docs.github.com/en/actions/security-guides/encrypted-secrets) to handle AWS credentials on the GitHub repo:\n\n- `AWS_ACCESS_KEY_ID` (must be named like that)\n- `AWS_SECRET_ACCESS_KEY` (must be named like that)\n- `ECR_REPO`\n\n## AWS EC2\n\nThis [article](https://medium.com/appgambit/part-1-running-docker-on-aws-ec2-cbcf0ec7c3f8) explained quite well how to setup docker in EC2 and pull image from ECR.\n\n### IAM policy and role, Security group\n\nThe UserData to install docker at first launch of the EC2 instance is the following:\n\n```bash\n#! 
/bin/sh\n# For Amazon linux 2022 (might differ in 2023 but the principle remains)\nyum update -y\namazon-linux-extras install docker\nservice docker start\nusermod -a -G docker ec2-user\nchkconfig docker on\n```\n\nTo allow the EC2 to pull from ECR we need to add an `IAM policy` and `IAM role`.\n\nLet’s first create the policy `flybot-ECR-repo-access` :\n\n```bash\n{\n "Version": "2012-10-17",\n "Statement": [\n {\n "Sid": "ListImagesInRepository",\n "Effect": "Allow",\n "Action": [\n "ecr:ListImages"\n ],\n "Resource": "arn:aws:ecr:region:acc:repository/flybot-website"\n },\n {\n "Sid": "GetAuthorizationToken",\n "Effect": "Allow",\n "Action": [\n "ecr:GetAuthorizationToken"\n ],\n "Resource": "*"\n },\n {\n "Sid": "ManageRepositoryContents",\n "Effect": "Allow",\n "Action": [\n "ecr:BatchCheckLayerAvailability",\n "ecr:GetDownloadUrlForLayer",\n "ecr:GetRepositoryPolicy",\n "ecr:DescribeRepositories",\n "ecr:ListImages",\n "ecr:DescribeImages",\n "ecr:BatchGetImage",\n "ecr:InitiateLayerUpload",\n "ecr:UploadLayerPart",\n "ecr:CompleteLayerUpload",\n "ecr:PutImage"\n ],\n "Resource": "arn:aws:ecr:region:acc:repository/flybot-website"\n }\n ]\n}\n```\n\nWe then attached the policy `flybot-ECR-repo-access` to a role `flybot-ECR-repo-access-role`\n\nFinally, we attach the role `flybot-ECR-repo-access-role` to our EC2 instance.\n\nWe also need a `security group` to allow http(s) request and open our port 8123 for our [aleph](https://github.com/clj-commons/aleph) server.\n\nWe attached this SG to the EC2 instance as well.\n\n### Run docker on EC2 instance and pull image from ECR\n\nThen inside the EC2 instance, we can pull the image from ECR and run it:\n\n```bash\n# Login to ECR, this command will return a token\naws ecr get-login-password \\\n--region region \\\n| docker login \\\n--username AWS \\\n--password-stdin acc.dkr.ecr.region.amazonaws.com\n\n# Pull image\ndocker pull acc.dkr.ecr.region.amazonaws.com/flybot-website:test\n\n# Run image\ndocker run \\\n--rm \\\n-d \\\n-p 8123:8123 \\\n-v db-volume:/datalevin/prod/flybotdb \\\n-e OAUTH2\x3d"secret" \\\n-e ADMIN_USER\x3d"secret" \\\n-e SYSTEM\x3d"{:http-port 8123, :db-uri \\"/datalevin/prod/flybotdb\\", :oauth2-callback \\"https://www.flybot.sg/oauth/google/callback\\"}" \\\nacc.dkr.ecr.region.amazonaws.com/flybot-website:test\n```\n\n## Load Balancers\n\nEven if we have one single EC2 instance running, there are several benefits we can get from AWS load balancers.\n\nIn our case, we have an Application Load Balancer (ALB) as target of a Network Load Balancer (NLB). Easily adding an ALB as target of NLB is a recent [feature](https://aws.amazon.com/blogs/networking-and-content-delivery/using-aws-lambda-to-enable-static-ip-addresses-for-application-load-balancers/) in AWS that allows us to combine the strength of both LBs.\n\n### ALB\n\nThe internal ALB purposes:\n\n- redirect naked domain (flybot.sg) to sub domain (www.flybot.sg)\n- redirect http to https using the SSL certificates from AWS Certificate Manager (`ACM`)\n\nACM allows us to requests certificates for `www.flybot.sg` and `flybot.sg` and attach them to the ALB rules to perform path redirection in our case. This is convenient as we do not need to install any ssl certificates or handle any redirects in the instance directly or change the code base.\n\n### NLB\n\nSince the ALB has dynamic IPs, we cannot use it in our goDaddy `A` record for `flybot.sg`. 
One solution is to use AWS route53 because AWS added the possibility to register the ALB DNS name in a A record (which is not possible with external DNS managers). However, we already use goDaddy as DNS host and we don’t want to depend on route53 for that.\n\nAnother solution is to place an internet-facing NLB behind the ALB because NLB provides static IP.\n\nALB works at level 7 but NLB works at level 4.\n\nThus, we have for the NLB:\n\n- TCP rule that forwards request to ALB on port 80 (for http)\n- TCP rules that forwards request on port 443 (for https)\n\n### Target group\n\nThe target group is where the traffic from the load balancers is sent. We have 3 target groups.\n\n- The first target group contains the EC2 instance in which the ALB forward request.\n- The second target group contains the ALB with the protocol TCP 80 in which the NLB forward http requests.\n- The third target group contains the ALB with the protocol TCP 443 in which the NLB forward https request.\n\n### DNS records\n\nSince the ELB is the internet-facing entry points, we use a `CNAME` record for `www` resolving to the ELB DNS name.\n\nFor the root domain `flybot.sg`, we use a `A` record for `@` resolving to the static IP of the ELB (for the AZ where the EC2 resides).\n\n## Learn More\n\nYou can have a look at the open-source repo: [skydread1/flybot.sg](https://github.com/skydread1/flybot.sg)\n', -new S(null,1,5,T,[new S(null,2,5,T,["Flybot","https://github.com/skydread1/flybot.sg"],null)],null),"\n",ju,"Deploy full stack Clojure website to AWS","deploy-clj-app-to-aws",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-deploy-clj-aws"]),ij([Sl,ym,Wn,Mp,Np,pq,Dr,Zs,tt],[new S(null,4,5,T,["AWS","Load Balancers","DNS","GoDaddy"],null),new S(null,1,5,T,["2023-11-09"],null),"\n## Context\n\nMy goal was to redirect the domain `flybot.sg` to the subdomain `www.flybot.sg`.\n\nThe domain and subdomain are resolved with GoDaddy.\n\nThe app is deployed in an AWS EC2 which resides behind an ALB (Application Load Balancer).\n\nWe can map www.flybot.sg to the ALB DNS name with a `CNAME` record.\n\n**However, we cannot map flybot.sg (the naked domain) to the ALB because we cannot use CNAME for the domain, only `A` records are valid.**\n\n## GoDaddy forwarding\n\nGoDaddy provides a way to redirect from domain to subdomain which is great news.\n\nHowever, as of 2018, it cannot redirect paths:\n\n- [flybot.sg](http://flybot.sg/) -\x3e [www.flybot.sg](http://www.flybot.sg/) (OK)\n- [flybot.sg/blog](http://flybot.sg/blog) -\x3e [www.flybot.sg/blog](http://www.flybot.sg/blog) (error 404)\n\nTherefore, this simple solution is not viable.\n\n## Using AWS route53 as Name Server\n\n**Since the ALB has `dynamic` IPs, we cannot use it in our goDaddy `A` record for `flybot.sg`.**\n\nOne solution is to use AWS route53 because AWS added the possibility to **register the ALB DNS name in a special ALIAS record**.\n\nSo we could add NS records in GoDaddy to specify that for the domain `flybot.sg`, we let AWS handle it. However, we cannot add NS records for the `domain`, only for `subdomain`. 
The only way to make sure the domain is handled by AWS is to change the `default Name Servers` in our GoDaddy DNS.\n\nThis would work, however, since we change the **default** Name Servers, all the subdomains will also be handled by AWS, so we are basically letting AWS handle all our subdomains which is not what we wanted.\n\n### Note\n\nIf we wanted to let AWS handle a subdomain such as `test.flybot.sg` for instance, that would be totally possible without affecting the other subdomains (and the domain), because we can add NS records for subdomain to specify what Name Servers to use. The problem arises when we deal with the naked domain.\n\n## ALB+NLB\n\nThe solution I chose was to add a Network Load Balancer (NLB) in front of the ALB. The NLB can provide a **static** IP so we can resolve our @ `A` record to the NLB subnet static IP.\n\nAdding an ALB as target of NLB is a recent [feature](https://aws.amazon.com/blogs/networking-and-content-delivery/using-aws-lambda-to-enable-static-ip-addresses-for-application-load-balancers/) in AWS that allows us to combine the strength of both LBs.\n\n### ALB\n\nThe internal ALB purposes:\n\n- redirect naked domain (flybot.sg) to sub domain (www.flybot.sg)\n- redirect http to https using the SSL certificates from ACM\n\nAmazon Certificate Manager allows us to requests certificates for `www.flybot.sg` and `flybot.sg` and attach them to the ALB rules to perform path redirection in our case. This is convenient as we do not need to install any ssl certificates or handle any redirects in the instance directly or change the code base.\n\n### NLB\n\nALB works at level 7 but NLB works at level 4.\n\nThus, we have for the NLB:\n\n- TCP rule that forwards request to ALB on port 80 (for http)\n- TCP rules that forwards request on port 443 (for https)\n\n### Target group\n\nThe target group is where the traffic from the load balancers is sent. We have 3 target groups.\n\nThe first target group contains the **EC2** instance in which the ALB forward request.\n\nThe second target group contains the **ALB** with the protocol TCP 80 in which the NLB forward **http** requests.\n\nThe third target group contains the **ALB** with the protocol TCP 443 in which the NLB forward **https** request.\n\n### DNS records\n\nSince the ELB is the internet-facing entry points, we use a `CNAME` record for `www` resolving to the ELB DNS name\n\nFor the root domain `@`, we use an `A` record resolving to the static IP of the NLB (for the AZ where the EC2 resides).\n\n### Trade-off\n\nThe trade-off on adding an extra load balancer is the cost. Once the free-tier period is over, the minimum cost for a load balancer is $18 per month.\n\n## Conclusion\n\nGoDaddy domain to subdomain forwarding does not support path so it is not viable at all.\n\nUsing AWS route53 to enjoy the feature of mapping a domain to the ALB DNS name via the special record ALIAS comes at a cost: all subdomains would need to be resolved using AWS Name Servers.\n\nALB+NLB is the setup that worked well for me. Having the internal ALB handling redirect to https and to the subdomain is very convenient. 
Using an internet-facing NLB solves the static IP problem to resolve the domain record at an extra cost of minimum $18 per month.\n", -"\n",ju,"Redirecting Domain to Subdomain using AWS ALB+NLB","redirect-domain-to-subdomain",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-redirect-domain"]),ij([Sl,ym,Wn,Cp,Mp,Np,pq,Dr,Zs,tt],[new S(null,4,5,T,["Python","Django","AWS","Elastic Beanstalk"],null),new S(null,1,5,T,["2023-08-08"],null),"\n## Context\n\nMy open-source tech blog used to be deployed on AWS (during my AWS free tier period).\n\nIn this article, I am going to highlight the different libraries/settings I used to develop and deploy my Django app.\n\n## Project Setup\n\nYou might be familiar with Django app setup but I will just recap the different common commands I ran in my case.\n\n### Start python env\n\n```bash\n# create env\npython -m venv blog_venv\n\n# activate env (mac)\nsource blog_venv/bin/activate\n\n```\n\n### Install Django\n\n```bash\npip install django\n```\n\n### Start Django project\n\n```bash\ndjango-admin startproject loicblog\n```\n\n### Migrations\n\nMigrations are Django’s way of propagating changes you make to your models (adding a field, deleting a model, etc.) into your database schema. They’re designed to be mostly automatic, but you’ll need to know when to make migrations, when to run them, and the common problems you might run into.\n\n```bash\n# create new migrations based on the changes made to the models\npython manage.py makemigrations\n\n# apply and unapply migrations.\npython manage.py migrate\n```\n\n### Run server\n\n```bash\n# be sure to migrate first\npython manage.py runserver\n```\n\n### Admin\n\nIt is very common to have a superuser to handle admin tasks:\n\n```bash\npython manage.py createsuperuser --username\x3dmyname --email\x3dme@mymail.com\n```\n\n## Start app\n\nOne project can have multiple apps such as a blog, an authentication system etc.\nSo `loicblog` is the project and `blog` is one app inside the project.\n\n```bash\npython manage.py startapp blog\n```\n\n- Add the `blog` app to the INSTALLED_APPS array in the project `loicblog/common.py`.\n- Add `path('', include('blog.urls'))` to the `urlpatterns` in `loic/blog/urls.py`.\n\n## Blog Post Content in Markdown\n\nI like writing my articles in Markdown with a preview button (like on GitHub for instance). This is how I write the articles in this blog. To do so, I used [django-markdownx](https://neutronx.github.io/django-markdownx/).\n\n### django-markdownx\n\n```bash\npip install markdown django-markdownx\n```\n\n- Then we need to add the `markdownx` app to the INSTALLED_APPS array in the project `loicblog/common.py`.\n- Add the path to [urls.py](http://urls.py/): `path('markdownx/', include('markdownx.urls'))`.\n- Collect MarkdownX assets to your STATIC_ROOT:\n\n```bash\npython manage.py collectstatic\n```\n\n### Code block syntax highlighting\n\nBy default the html code blocks `pre` do not have syntax highlighting. Since, this blog gives code examples in different programming languages, it is important to support syntax highlighting of the code blocks. 
I used [codehilite](https://python-markdown.github.io/extensions/code_hilite/) for that.\n\nAdding `MARKDOWNX_MARKDOWN_EXTENSIONS \x3d ['fenced_code', 'codehilite']` to the settings enable syntax highlighting.\n\n`codehilite` required the [pygments](https://pygments.org/) package:\n\n```bash\npip install pygments\n```\n\n## Env variables\n\n### django-environ\n\nA common python library to deal with ENV variable in django is [django-environ](https://django-environ.readthedocs.io/en/latest/)\n\n```bash\npip install django-environ\n```\n\n### Django project settings\n\nI advise to separate your settings in multiple files instead of keeping one default `settings.py`.\n\nI use 3 settings files: `common.py` , `dev.py` and `prod.py`. \n\nHere is the `common.py` (partial content):\n\n```python\nfrom pathlib import Path\nimport os\n\n# Build paths inside the project like this: BASE_DIR / 'subdir'.\nBASE_DIR \x3d Path(__file__).resolve().parent.parent.parent\n\n# Env variables\n\nimport environ\n# Initialise environment variables\nenv \x3d environ.Env()\nbase \x3d environ.Path(__file__) - 3 # 3 folders back\nenviron.Env.read_env(env_file\x3dbase('.env'), overwrite\x3dTrue) # reading .env file\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY \x3d env('SECRET_KEY')\n\nALLOWED_HOSTS \x3d []\n\n# Application definition\n\n...\n\n# Static files (CSS, JavaScript, Images)\n# https://docs.djangoproject.com/en/4.2/howto/static-files/\n\nSTATIC_URL \x3d \"static/\"\n\n# Default primary key field type\n# https://docs.djangoproject.com/en/4.2/ref/settings/#default-auto-field\n\nDEFAULT_AUTO_FIELD \x3d \"django.db.models.BigAutoField\"\n\n# Login/Logout redirects\nLOGIN_REDIRECT_URL \x3d 'home'\nLOGOUT_REDIRECT_URL \x3d 'home'\n\n# Markdown extensions to handle code blocks and code block highlighting\nMARKDOWNX_MARKDOWN_EXTENSIONS \x3d ['fenced_code', 'codehilite']\n```\n\nThen, the main differences between `dev` and `prod` are the DB settings and where to store the static files.\n\nHere is the `dev.py`:\n\n```python\nfrom loicblog.settings.common import *\n\nDEBUG \x3d True\n\n# SQLite Database\n\nDATABASES \x3d {\n \"default\": {\n \"ENGINE\": \"django.db.backends.sqlite3\",\n \"NAME\": BASE_DIR / \"db.sqlite3\",\n }\n}\n\nSTATIC_ROOT \x3d \"/Users/loicblanchard/workspaces/blog-statics\"\n```\n\nAnd here is the `prod.py`:\n\n```python\nfrom loicblog.settings.common import *\n\nDEBUG \x3d False\n\nALLOWED_HOSTS \x3d [\"blog.loicblanchard.me\", \"*\"] # add localhost for local testing\n\nCSRF_TRUSTED_ORIGINS \x3d ['https://blog.loicblanchard.me']\n\n# Amazon S3 configuration\nAWS_ACCESS_KEY_ID \x3d env.str('AWS_ACCESS_KEY_ID')\nAWS_SECRET_ACCESS_KEY \x3d env.str('AWS_SECRET_ACCESS_KEY')\nAWS_STORAGE_BUCKET_NAME \x3d env.str('AWS_STORAGE_BUCKET_NAME')\n\nINSTALLED_APPS +\x3d [\n 'storages',\n]\n\nSTORAGES \x3d {\n \"staticfiles\": {\n \"BACKEND\": \"storages.backends.s3boto3.S3StaticStorage\"\n }\n}\n\nAWS_S3_CUSTOM_DOMAIN \x3d '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME\n\nAWS_S3_FILE_OVERWRITE \x3d True\n\n# Set the static root to the S3 bucket path\nSTATIC_ROOT \x3d 's3://%s/static' % AWS_STORAGE_BUCKET_NAME\n\n## Admin styling adjustment\n\nADMIN_MEDIA_PREFIX \x3d '/static/admin/'\n\n# PostgreSQL Database\n\nDATABASES \x3d {\n 'default': {\n 'ENGINE': 'django.db.backends.postgresql',\n 'NAME': env.str('DB_NAME'),\n 'USER': env.str('DB_USER'),\n 'PASSWORD': env.str('DB_PASSWORD'),\n 'HOST' : env.str('DB_HOST'),\n 'PORT': env.str('DB_PORT', default\x3d'5432'),\n 
}\n}\n```\n\n### .env and .env.dist\n\nYou can see that all the sensitive data is stored in env variables and in my case in a `.env` file at the root of the project. Of course, do not push this file to any repo and keep it in a safe place (I use GitHub private Gist or sometimes directly Bitwarden password manager for some credentials).\n\nAnother good practice is to have an `env.dist` file that describes the env variables expected to be provided without the actual values.\n\nHere is mine:\n\n```bash\nDJANGO_SETTINGS_MODULE\x3d\nSECRET_KEY\x3d\n\nAWS_ACCESS_KEY_ID\x3d\nAWS_SECRET_ACCESS_KEY\x3d\nAWS_STORAGE_BUCKET_NAME\x3d\nAWS_S3_REGION_NAME\x3d\n\nDB_NAME\x3d\nDB_USER\x3d\nDB_PASSWORD\x3d\nDB_HOST\x3d\n```\n\nIn case other developers or your future self want to know what are the env variables required for the project to work (especially in prod), having a look at the `.env.dist` show me what I need to know right away.\n\nNote the `DJANGO_SETTINGS_MODULE` I use for switching from `dev` to `prod` env and therefore load the proper setting file.\n\nIf you look at the `prod.py` , you can see that I use AWS S3 to store the static files and AWS RDS Postgres to store the users/posts data.\n\n## AWS S3\n\nThe static files are stored in a public AWS S3 bucket.\n\n### django-storages\n\nDjango-storages is a Python library that provides a storage backend system for Django web applications.\n\n```bash\npip install -U django-storages\n```\n\n### boto3\n\nOfficial AWS SDK (Software Development Kit) for Python. Boto3 allows Python developers to interact with various Amazon Web Services (AWS) resources and services programmatically.\n\n```bash\npip install -U boto3\n```\n\n### Push static files to S3\n\nFirst, I make sure to use the prod env variable in `.env`:\n\n```bash\nDJANGO_SETTINGS_MODULE\x3dloicblog.settings.prod\n```\n\nThen in `loicblog`:\n\n```bash\npython manage.py collectstatic\n```\n\nThis command collects all the static files and store them in the location specified via `STATIC_ROOT` (a local dir for `dev` and the S3 bucket for `prod`).\n\nBe sure to have the AWS env variables setup before running the command.\n\nIn prod, this command will gather your static files and push it to AWS S3 bucket (you need to have the bucket public so the files can be served everywhere).\n\n## AWS RDS Postgres\n\nFirst, create AWS RDS Postgres db and Security Group.\n\nFor the SG, it needs to have inbound rules for Postgres.\n\n### psycopg2-binary\n\nThe package `psycopg2-binary` is a PostgreSQL adapter for Python. It allows Python programs to communicate with a PostgreSQL database.\n\n```bash\npip install psycopg2-binary\n```\n\nAll the DB configs can be added in `.env` as well.\n\nIn order to migrate and setup the superuser properly, we need to run a few commands:\n\n```bash\n# apply migrations.\npython manage.py migrate\n\n# Create superuser\npython manage.py createsuperuser\n```\n\n## AWS Elastic Beanstalk\n\nOne straight forward way to run the Django app is in an AWS Elastic Beanstalk.\n\nYou can read more about it in this guide: [EB guide](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/create-deploy-python-django.html).\n\nFirst, we need to be sure all the packages are present in the `requirements.txt` as the EB needs it to setup the app:\n\n```bash\npip freeze \x3e requirements.txt\n```\n\n### gunicorn\n\n[Gunicorn](https://gunicorn.org/) (Green Unicorn) is a commonly used HTTP server for deploying Python web applications, including Django apps. 
When deploying a Django app on AWS Elastic Beanstalk, Gunicorn is often used as the application server to handle incoming HTTP requests and serve the Django application.\n\n```bash\npip install gunicorn\n```\n\n### EB config\n\nThe EB configurations can be found in `.ebextensions/django.config`\n\n```\noption_settings:\n aws:elasticbeanstalk:container:python:\n WSGIPath: loicblog.wsgi:application\n```\n\n**WSGI Application**: The Web Server Gateway Interface application is responsible for handling the communication between the web server (like Apache or nginx) and the Django application. It translates incoming HTTP requests into a format that Django can process and then sends the responses back to the web server.\n\nIn my example, **`loicblog.wsgi`** is the module path, and **`application`** is the variable within that module that represents my WSGI application.\n\n### AWS CLI EB\n\nWe can use the AWS CLI to manage the Elastic Beanstalk creation and deployment of new environment and app.\n\n```bash\n## first leave python virtual env\ndeactivate\n\n## then proceed with eb cli\nbrew install awsebcli\n\n## init eb\neb init\n\n## BE SURE TO HAVE DJANGO_SETTINGS_MODULE\x3dloicblog.settings.prod\n\n## Create all resources\neb create\n\n## (re)deploy\neb deploy\n```\n\n## Domain name\n\n### ALB DNS\n\nBy default, creating an EB also setup an Application Load Balancer (ALB). The ALB has its own DNS and we want to map our own DNS name to it. The type of record to achieve this is called `CNAME`.\n\n### SSL Certificate\n\nIn my case, I own the domain `loicblanchard.me`. I want to have my blog on the subdomain `blog.loicblanchard.me`. I use GoDaddy for DNS provider but the process is quite similar for most providers.\n\nFor HTTPS, we can create a SSL certificate using AWS Certificate Manager for the subdomain `blog.loicblanchard.me`.\n\n*Note: ACM provides the CNAME record name and value. For the name, it will provide something like this `_SOME-NUMBERS-HERE.blog.loicblanchard.me.`*\n\n*However, we need to only enter `_SOME-NUMBERS-HERE.blog` for it to work in GoDaddy.*\n\n### Mapping Subdomain to ALB\n\nThen in GoDaddy, to resolve `blog.loicblanchard.me` to the ALB name, we need to add another CNAME record for the `blog` subdomain.\n\nAfter that, we need to add rules to the ALB to redirect http to https using the ACM certificate.\n\nFinally, we need to be sure the Security Group of the ALB allows inbound HTTPS.\n\nUpdate the `ALLOWED_HOSTS` and `CSRF_TRUSTED_ORIGINS` with the subdomain and redeploy the EB.\n\n## Conclusion\n\nI provided some general guidelines on how you could develop and deploy a Django app using the example of my own project.\n\nUsing AWS EB is very straight forward to setup and cheap solution for low traffic website such as my blog.\n\nUsing AWS S3 to serve the static files and AWS RDS to store your production data are common ways to handle your production data.\n\nBe sure to keep your env variables safe and split dev and prod settings to avoid confusion and accidental sensitive data leak.\n\nSince EB comes with ALB, you can easily use a CNAME record to map your own personal subdomain to the ALB.\n\nBe aware of the hosting costs. 
I moved the blog content to my clojure SPA instead because after my AWS free tier expired, the monthly cost for hosting the blog was around $50 which was too much for a simple blog like that.\n", -new S(null,1,5,T,[new S(null,2,5,T,["Blog","https://github.com/skydread1/blog"],null)],null),"\n",ju,"Deploy Django Blog in AWS Beanstalk","deploy-django-aws-beanstalk",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-django-aws"]),ij([Sl,ym,Wn,Cp,Mp,Np,pq,Dr,Zs,tt],[new S(null,4,5,T,["Clojure","Compiler","CLR","Unity"],null),new S(null,1,5,T,["2022-04-22"],null),'\n## Prerequisites\n\nYour Clojure library is assumed to be already compiled to dotnet.\n\nTo know how to do this, refer to the article: [Port your Clojure lib to the CLR with MAGIC](https://www.loicblanchard.me/blog/port-clj-lib-to-clr)\n\n## Goal\n\nIn this article, I will show you:\n- how to package your lib to nuget\n- push it in to your host repo\n- import in Unity in this article\n\n## Build the dlls with Nostrand\n\nJust use the command `nos dotnet/build` at the root of the Clojure project.\n\nThe dlls are by default generated in a `/build` folder.\n\n## Dependency management\n\nA `.csproj` file (XML) must be added at the root of the Clojure project.\n\nYou can find an example here: [clr.test.check.csproj](https://github.com/skydread1/clr.test.check/blob/magic/clr.test.check.csproj)\n\n```xml\n\x3cProject Sdk\x3d"Microsoft.NET.Sdk"\x3e\n \x3cPropertyGroup\x3e\n \x3cTargetFrameworks\x3enetstandard2.0\x3c/TargetFrameworks\x3e\n \x3c/PropertyGroup\x3e\n \x3cPropertyGroup\x3e\n \x3cNuspecFile\x3eclr.test.check.nuspec\x3c/NuspecFile\x3e\n \x3cRestoreAdditionalProjectSources\x3e\n https://api.nuget.org/v3/index.json\n \x3c/RestoreAdditionalProjectSources\x3e\n \x3c/PropertyGroup\x3e\n\x3c/Project\x3e\n```\n\nThere is no need to add References as they were already built by Nostrand in the `/build` folder.\n\nNote the `NuspecFile` that is required to use the nuspec.\n\n## Package Manager\n\nA `.nuspec` file (XML) must be added at the root of the Clojure project.\n\nThe `references` are the references to the dlls in `/build`.\n\nYou can find an example here: [clr.test.check.nuspec](https://github.com/skydread1/clr.test.check/blob/magic/clr.test.check.nuspec)\n\n```xml\n\x3c?xml version\x3d"1.0" encoding\x3d"utf-8"?\x3e\n\x3cpackage\x3e\n \x3cmetadata\x3e\n \x3cid\x3eclr.test.check\x3c/id\x3e\n \x3cversion\x3e1.1.1\x3c/version\x3e\n \x3ctitle\x3eclr.test.check\x3c/title\x3e\n \x3cauthors\x3eskydread1\x3c/authors\x3e\n \x3cdescription\x3eContains the core references for the Clojure lib test.check.\x3c/description\x3e\n \x3crepository type\x3d"git" url\x3d"https://github.com/skydread1/clr.test.check" /\x3e\n \x3cdependencies\x3e\n \x3cgroup targetFramework\x3d"netstandard2.0"\x3e\x3c/group\x3e\n \x3c/dependencies\x3e\n \x3c/metadata\x3e\n \x3cfiles\x3e\n \x3cfile src\x3d"build\\*.clj.dll" target\x3d"lib\\netstandard2.0" /\x3e\n \x3c/files\x3e\n\x3c/package\x3e\n```\n\nThe `dependency` tag is required to indicate the targeted framework.\n\nThe `file` (using a wild card to avoid adding the files one by one) is required to add the dlls files that will be available for the consumer. 
So the target must be `lib\\TFM`.\n\nIn our case, Unity recommends to use `netstandard2.0` so our target is `lib\\netstandard2.0`.\n\n## GitHub/GitLab local config\n\nTo push the package to a git host, one of the most convenient way is to have a `nuget.config` (XML) locally at the root of the project.\n\n### The nuget.config for GitHub\n\n```xml\n\x3c?xml version\x3d"1.0" encoding\x3d"utf-8"?\x3e\n\x3cconfiguration\x3e\n \x3cpackageSources\x3e\n \x3cclear /\x3e\n \x3cadd key\x3d"github" value\x3d"https://nuget.pkg.github.com/skydread1/index.json" /\x3e\n \x3c/packageSources\x3e\n \x3cpackageSourceCredentials\x3e\n \x3cgithub\x3e\n \x3cadd key\x3d"Username" value\x3d"skydread1" /\x3e\n \x3cadd key\x3d"ClearTextPassword" value\x3d"PAT" /\x3e\n \x3c/github\x3e\n \x3c/packageSourceCredentials\x3e\n\x3c/configuration\x3e\n```\n\nIn order to push a Package to a `Package Registry` to your GitHub project repo, you will need to create a **PAT** (Personal Access Token) with the `write:packages` ,`:read:packages` and `delete:packages` permissions.\n\nReplace Username value by your Github username\n\nReplace Token value by your newly created access token\n\nReplace the repo URL by the path to your GitHub **account page** (not the repo).\n\n*Note: Do not push your config in GitHub as it contains sensitive info (your PAT), it is just for local use.*\n\n### The nuget.config for GitLab\n\n```xml\n\x3c?xml version\x3d"1.0" encoding\x3d"utf-8"?\x3e\n\x3cconfiguration\x3e\n \x3cpackageSources\x3e\n \x3cclear /\x3e\n \x3cadd key\x3d"gitlab" value\x3d"https://sub.domain.sg/api/v4/projects/777/packages/nuget/index.json" /\x3e\n \x3c/packageSources\x3e\n \x3cpackageSourceCredentials\x3e\n \x3cgitlab\x3e\n \x3cadd key\x3d"Username" value\x3d"deploy-token-name" /\x3e\n \x3cadd key\x3d"ClearTextPassword" value\x3d"deploy-token-value" /\x3e\n \x3c/gitlab\x3e\n \x3c/packageSourceCredentials\x3e\n\x3c/configuration\x3e\n```\n\nIn order to push a Package to a `Package Registry` to your GitLab project repo, you will need to create a **deploy token** (not access token) with the `read_package_registry` and `write_package_registry` permissions.\n\nReplace Username value by your token username\n\nReplace Token value by your newly created deploy token\n\nReplace the domain (for private server) and project number in the GitLab URL. (don’t forget the index.json at the end)\n\n*Note: Do not push your config in GitLab as it contains sensitive info (your deploy token), it is just for local use.*\n\n## Pack and Push nuget packages with Nostrand\n\nAt the root of the project, the `dotnet.clj` contains the convenient function to be used with [nasser/nostrand](https://github.com/nasser/nostrand).\n\nYou can find an example here: [dotnet.clj](https://github.com/skydread1/clr.test.check/blob/magic/dotnet.clj)\n\nWe added to our Clojure library a convenient function to avoid having to manually use the dotnet commands, you can just run at the root at the Clojure directory:\n\n```bash\nnos dotnet/nuget-push\n```\n\nThis will create the nuget code package `.nupkg` file in the folder `bin/Release`. 
the name is the package name and the version such as `clr.test.check.1.1.1.nupkg`.\n\nIt will then push it to either Gitlab or Github depending on the host using the credentials in `nuget.config`.\n\nIt is equivalent to the 2 dotnet commands:\n\n```bash\ndotnet pack --configuration Release\ndotnet nuget push "bin/Release/clr.test.check.1.1.1.nupkg" --source "github"\n```\n\n**Note**: for a Clojure project, you can let the default option for the packing. There is no need to build in theory as we already have our dlls ready in our `/build` folder. The `dotnet build` will just create a unique dll with the name of your library that you can just ignore.\n\n## Download nuget Packages\n\nUsing package references is the new way of doing this but it does not work with Unity.\n\n### Import nuget packages to a regular C# project\n\nThe new way of importing the nuget packages is to use the `PackageReference` tag directly in the `.csproj` file such as:\n\n```bash\n\x3cPackageReference Include\x3d"Sitecore.Kernel" Version\x3d"12.0.*" /\x3e\n```\n\nBut this method only works if you are using the `.csproj` file which we don’t use in Unity as we use the `manifest.json`.\n\n## Import nuget packages to a Unity project\n\nUnity uses a json file in `Packages/manifest.json` to download deps. However it does not work for nuget packages.\n\nThere is no `.csproj` at the root so we cannot use the method above, and all the other underlying `csproj` are generated by Unity so we cannot change them.\n\nThe only choice we have is to use the old way of importing the nuget packages which is to use a `packages.config` and then use the command `nuget restore` to fetch the packages last versions.\n\nSo we need to add 2 config files in our root of our Unity project:\n\n- `nuget.config` : github/gitlab credentials\n- `packages.config` : packages name and their version/target\n\n### nuget.config\n\nIn order to fetch all the packages at once using `nuget restore`, we need to add locally the `nuget.config` with the different sources and credentials.\n\nSo to restore our GitHub and GitLab packages from our example, we use the following `nuget.restore`:\n\n```xml\n\x3c?xml version\x3d"1.0" encoding\x3d"utf-8"?\x3e\n\x3cconfiguration\x3e\n \x3cconfig\x3e\n\t \x3cadd key\x3d"repositoryPath" value\x3d"Assets/ClojureLibs" /\x3e\n\t \x3c/config\x3e\n \x3cpackageSources\x3e\n \x3cclear /\x3e\n \x3cadd key\x3d"gitlab" value\x3d"https://sub.domain.sg/api/v4/projects/777/packages/nuget/index.json" /\x3e\n \x3cadd key\x3d"github" value\x3d"https://nuget.pkg.github.com/skydread1/index.json" /\x3e\n \x3c/packageSources\x3e\n \x3cpackageSourceCredentials\x3e\n \x3cgitlab\x3e\n \x3cadd key\x3d"Username" value\x3d"deploy-token-name" /\x3e\n \x3cadd key\x3d"ClearTextPassword" value\x3d"deploy-token-value" /\x3e\n \x3c/gitlab\x3e\n \x3cgithub\x3e\n \x3cadd key\x3d"Username" value\x3d"skydread1" /\x3e\n \x3cadd key\x3d"ClearTextPassword" value\x3d"PAT" /\x3e\n \x3c/github\x3e\n \x3c/packageSourceCredentials\x3e\n\x3c/configuration\x3e\n```\n\nThe `repositoryPath` allows us to get our packages in a specific directory.\nIn our case, we put it in `Assets/ClojureLibs` (it needs to be in the `Asset` dir anywhere)\n\n### packages.config\n\nTo tell Unity which packages to import while running `nuget restore`, we need to provide the `packages.config`. 
Here is the config in our example:\n\n```bash\n\x3c?xml version\x3d"1.0" encoding\x3d"utf-8"?\x3e\n\x3cpackages\x3e\n \x3cpackage id\x3d"Magic.Unity" version\x3d"1.0.0" targetFramework\x3d"netstandard2.0" /\x3e\n \x3cpackage id\x3d"my-private-proj" version\x3d"1.0.0" targetFramework\x3d"netstandard2.0" /\x3e\n \x3cpackage id\x3d"clr.test.check" version\x3d"1.1.1" targetFramework\x3d"netstandard2.0" /\x3e\n\x3c/packages\x3e\n```\n\n### Magic.Unity\n\nTo run clojure in Unity, you need [Magic.Unity](https://github.com/nasser/Magic.Unity). It is a the runtime for Clojure compiles with Magic in Unity.\n\nNote the `Magic.Unity` in the `packages.config` above. Magic.Unity has its own nuget package deployed the same way as you would deploy a Clojure library, so you import it along side your nuget packages with your compiles clojure libs.\n\n### nuget restore\n\nOnce you have the github/gitlab credentials ready in `nuget.config` and the packages and their version/target listed in `packages.config`, you can run the command `nuget restore` at the root of the unity project.\n\nIf running `nuget restore` do not fetch the last version, it is because it is using the local cache.\nIn this case you need to force restore using those [commands](https://docs.microsoft.com/en-us/nuget/consume-packages/package-restore#force-restore-from-package-sources).\n\nMost of the time, ignoring the cache is fixing this issue:\n\n```bash\nnuget restore -NoCache\n```\n\nHere is the packages tree of our project for instance:\n\n```bash\n~/workspaces/unity-projects/my-proj:\n.\n├── clr.test.check-legacy.1.1.1\n│   ├── clr.test.check-legacy.1.1.1.nupkg\n│   └── lib\n│   └── netstandard2.0\n│   ├── clojure.test.check.clj.dll\n│   ├── clojure.test.check.clojure_test.assertions.clj.dll\n│   ├── clojure.test.check.clojure_test.clj.dll\n│   ├── clojure.test.check.generators.clj.dll\n│   ├── clojure.test.check.impl.clj.dll\n│   ├── clojure.test.check.random.clj.dll\n│   ├── clojure.test.check.results.clj.dll\n│   └── clojure.test.check.rose_tree.clj.dll\n├── my-private-lib.1.0.0\n│   ├── my-private-lib.1.0.0.nupkg\n│   └── lib\n│   └── netstandard2.0\n│   ├── domain.my_prate_lib.core.clj.dll\n│      └── domain.my_prate_lib.core.utils.clj.dll\n```\n\nFinally, You can add Magic.Unity (runtime for magic inside Unity) in the manifest.json like so:\n\n```json\n{\n "dependencies": {\n\t ...,\n "sr.nas.magic.unity": "https://github.com/nasser/Magic.Unity.git"\n\t}\n}\n```\n\n## Conclusion\n\nOnce you have the proper required config files ready, you can use `Nostrand` to\nBuild your dlls:\n```\nnos dotnet/build\n```\nPack your dlls in a nuget package and push to a remote host:\n```\nnos dotnet/nuget-push\n```\nImport your packages in Unity:\n```\nnuget restore\n```\n\n`Magic.Unity` is the Magic runtime for Unity and is already nuget packaged on its public repo\n', -new S(null,3,5,T,[new S(null,2,5,T,["Magic","https://github.com/nasser/magic"],null),new S(null,2,5,T,["Nostrand","https://github.com/nasser/nostrand"],null),new S(null,2,5,T,["Magic.Unity","https://github.com/nasser/Magic.Unity"],null)],null),"\n",ju,"Pack, Push and Import Clojure to Unity","clojure-in-unity",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-clj-in-unity"]),ij([Sl,ym,Wn,Mp,Np,pq,Dr,Zs,tt],[new S(null, +"\n",iu,"Testing in Clojure","testing-in-clojure",new n(null,3,[Zo,"/assets/loic-blog-logo.png",Wo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture 
Science"],null),"testing-in-clojure"]),ij([Sl,xm,Vn,Lp,Mp,oq,Cr,Ws,st],[new S(null,4,5,T,["Git","Workflows","Branching","CI/CD"],null),new S(null,1,5,T,["2024-05-12"],null),"\n## Introduction\n\nDepending on the size of the projects and its CI/CD requirements, one might choose one of the popular [Git Workflows](https://www.atlassian.com/git/tutorials/comparing-workflows). Some are good for some scenarios, some are never good and some are questionable to say the least.\n\nIn this article, I will explain how the main workflows work and which one to use and when in my opinion.\n\n## Trunk-Based Development\n\n### Timeline Example\n\n![Trunked Based Dev](/assets/git-workflows/trunk-based-dev.png)\n\n### No Branching\n\nThat’s it. You have your `main` branch and everybody pushes to it. Some might call it madness others would say that excellent CI/CD setup does not require branching.\n\nIf you are the only one working on your project, you *could* push to `main` directly. If you are an excellent developer and have the confidence to push to main and have very good CI/CD in place or none (so merging to `main` is not critical), you could use that strategy. I see this strategy quite often in small open-source projects maintained by a single developer with manual release (so no CD, just CI for testing).\n\n### Should you use it?\n\nYou might have realized already that this strategy applies to very few teams and I don’t think you will encounter that one-branch strategy a lot at your daily jobs. I don’t recommend that strategy as in my humble opinion, PRs are essentials in a good development process. Some people tend to view PR as someone having authority on their code but that’s the wrong way of seeing it. PR offers a second opinion on the code and **everybody** can suggest good changes. I make Junior Developers review my code from the moment they join the company and they have good suggestions in the comments of the PRs regularly.\n\nBack to `TBD`, you need good trust in your colleagues as there is no code review. That is the reason I mentioned that it might be suitable for experience developers.\n\nAnyway, don’t use trunk-based dev unless you know exactly what you are doing and have lots of experience already or a pretty non-critical project and you want very **fast** code base updates.\n\n## Feature Branches\n\n### Timeline Example\n\n![Feature Branching](/assets/git-workflows/feature-branching.png)\n\n### Pull Requests\n\nEverybody should be familiar with that one. Bob pulls a branch from main, implements the feature and pushes that feature branch to remote. Bob then opens a PR/MR (Github call it Pull Request, Gitlab call it Merge Request) and Alice reviews Bob's code before merging to `main`.\n\nIf Alice suggests some changes, Bob pushes new commits to his `feature` branch. Once Alice approves the changes, Bob can merge to `main`.\n\n### Solo Dev\n\nI think that even for personal projects, you should create PR to merge into `main`. This allows you to define properly what is the `scope` of the changes you are working on. Furthermore, you might have CI that checks format, run tests etc that would be different depending on pushing to a `feature` branch and merging to `main`.\n\nFor example, I have a portfolio website (Single Page Application) that is hosted on Netlify. When I open a PR, Netlify builds a js bundle and shows me a preview of what the new version of the website will look like on Web and Mobile. This is very convenient. 
Once I merge to `main`, Netlify deploys the new js bundle to my domain. So my PR triggers test checks and a UI preview (CI) and merging to `main` deploys the js bundle to my domain (CD).\n\n### Working with others\n\nHaving `feature` branches that are merged to `main` is the bare minimum when working with other developers, in my opinion.\n\nTherefore, for the feature you want to implement, I suggest creating a branch from `main`, solving the issue and raising a PR to get your colleagues’ feedback. In your CI, describe the jobs you want to run on commit to a feature branch and the jobs you want to run when the code is merged to `main`.\n\nYour `main` branch should be protected, meaning only reviewed code can be merged to it and nobody can push directly to it (thus the CI jobs cannot be bypassed).\n\nThis workflow is suitable for a simple project with one or a few contributors and with simple CI/CD.\n\nFinally, the feature branches should be **short-lived.** Some people refer to CI (Continuous Integration) strictly as a way of saying we merge quickly to main even if the feature is partially implemented, as long as it works in production (or is hidden behind a flag for instance).\n\n### GitHub Flow\n\nFeature branching is what they use at GitHub; they call it `GitHub Flow` but it is the same as `feature branching`. See for yourself from their doc:\n\n\x3e So, what is GitHub Flow?\n\x3e \n\x3e - Anything in the `main` branch is deployable\n\x3e - To work on something new, create a descriptively named branch off of `main` (ie: `new-oauth2-scopes`)\n\x3e - Commit to that branch locally and regularly push your work to the same named branch on the server\n\x3e - When you need feedback or help, or you think the branch is ready for merging, open a [pull request](http://help.github.com/send-pull-requests/)\n\x3e - After someone else has reviewed and signed off on the feature, you can merge it into main\n\x3e - Once it is merged and pushed to 'main', you can and *should* deploy immediately\n\n### Should you use it?\n\nYes. Actually, pretty much everybody uses feature branches.\n\n## Forking\n\n### Timeline Example\n\n![Forking](/assets/git-workflows/forking.png)\n\n### Open Source Contributions\n\nForking is the method used for open-source project contributions. In short, you could **clone** the repo locally but you won’t be able to push any branches because the author won't allow you. Just imagine if anybody could freely push branches to your repo! So the trick is to **fork** the repo (make a personal copy on the version control platform) to your own GitHub account. Then you clone that repository instead and work from there. The original GitHub repo is called the `upstream` and your own copy of the GitHub repo is called the `origin`.\n\nThen, once your feature is implemented, you can push the code to `origin` (your fork) and then raise a PR to merge the feature `origin/my-feature` to the `upstream/main` branch. When the authors/maintainers of the upstream repo approve your PR and merge it to `upstream/main`, you can then “sync” (merge `upstream/main` to `origin/main`) and start working on another feature.\n\nTo link the forking to our previous strategies, you can see that you are basically doing **feature branching** again.\n\nSome open-source authors might push directly to their `main` branch while accepting PRs from forks. In that specific scenario, we can see that authors are doing **Trunk-Based Development** while requiring external contributors to follow **feature branching**. 
Interesting, isn’t it?\n\n## Release Branches\n\n### Timeline Example\n\n![Release Branching](/assets/git-workflows/release-branching.png)\n\n### It’s getting ugly\n\nIndeed, some projects might have multiple versions deployed and accessible by clients at the same time. The common example would be the need to still support old products or old API versions.\n\nIn the timeline chart above, you can see that it is getting a bit more verbose but not so difficult to grasp. We branch `release-1.0` from `main`. Bob starts working on features and merges them to `release-1.0`. At some point, the code is deemed ready to be deployed and therefore merged to `main`. Bob quickly moves on to building features for the next release `release-1.1`.\n\nUnfortunately, a bug is discovered in production and needs urgent fixing. Alice merges a hotfix into `main` to patch the issue. The production is now stable and a new version arises from the patch: `v1.0.1`. We then sync `release-1.0` with `main` so our version on `release-1.0` is also `v1.0.1`.\n\nWhile Alice was patching `production`, Bob already pushed some features to the new release branch. So, we need to merge the patches made by Alice into Bob’s new code and that is why we also need to sync `release-1.1` with `main`. After syncing, Bob can merge his new release as `1.1.1` to `main`.\n\nIf you got confused with the version numbers, I redirect you to [SemVer](https://semver.org/) but in short, a version is of the format *Major.Minor.Patch*. **Major** is used for incompatible changes (like 2 independent API versions). **Minor** is in our example the `release` and **Patch** is the `hotfix` from Alice. This way, when Bob merged his branch `release-1.1`, he did include Alice’s hotfix, making the new version in `main` not `1.1.0` but indeed `1.1.1`.\n\n### Should you use it?\n\nIf you don’t need to support multiple releases at once, no, don’t use it. Ideally, you merge your features quite frequently and one release does not break the other ones. It is actually very often the case that we do not need to support old versions. So if you can, don’t use it.\n\n## GitFlow\n\n### Timeline Example\n\n![GitFlow](/assets/git-workflows/gitflow.png)\n\n### Fatality\n\nTo quote [Atlassian](https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow):\n\n\x3e Gitflow is a legacy Git workflow that was originally a disruptive and novel strategy for managing Git branches. Gitflow has fallen in popularity in favor of [trunk-based workflows](https://www.atlassian.com/continuous-delivery/continuous-integration/trunk-based-development), which are now considered best practices for modern continuous software development and [DevOps](https://www.atlassian.com/devops/what-is-devops) practices. Gitflow also can be challenging to use with [CI/CD](https://www.atlassian.com/continuous-delivery).\n\x3e \n\nSo GitFlow is obsolete and you will soon understand why.\n\nIt is similar to what we just saw with the **release branching** but now we have another branch called `develop`. So every feature is merged to `develop`. Once a version is ready, we merge it to the corresponding `release` branch. On that release branch, some additional commits might be pushed before merging to `main`. On every new version merged to `main`, we need to sync A LOT. You can see on the chart above all the potential merge conflicts represented by a sword. I hope this visual representation highlights the problem: too many potential merge conflicts.\n\n### But why?\n\nIt is a good question, I am not sure. 
The idea of having a `develop` branch is very common in a lot of projects, but why combine it with `release` branches like that I am not sure to be frank. I don’t recommend to use GitFlow and it seems obsolete for a reason. In general we want the following:\n\n- as few branches as possible\n- short lived branches with small or partial but workable features to be deployed\n\nI see `GitFlow` as the opposite of `Continuous Integration` (in the sense of merging frequently new features and having new deployable codes ready regularly). For fun, let’s have a look at what happens after a hotfix in prod:\n\n- hotfix-1.0.1 ⚔️ main\n- main ⚔️ release-1.0\n- main ⚔️ release-1.1\n- main ⚔️ develop\n- develop ⚔️ feature\n\nI am grateful that I never had to work with `GitFlow` and I have the feeling that implementing it would mean having a dedicated engineer to take care of the branching, a sort of *Git gardener*.\n\n## Feature branching on develop\n\n### Timeline Example\n\n![Feature Branching on Develop](/assets/git-workflows/feature-branching-on-develop.png)\n\n### Develop branch\n\nThe GitFlow aspect that most people still use is the `develop` branch. All the feature branches are merged to `develop` instead of `main`. Once `develop` is deemed ready for release, it is merged to `main`.\n\nThis is useful for a few reasons:\n\n- at any time, we know the commit of the stable release (code in prod) via the `main` branch\n- at any time, we know what is the latest commit of the ongoing new version via the `develop` branch\n\nThis seems like the sweet spot for most cases and that is why it is popular.\n\nMerging a `feature` to `develop` triggers a bunch of CI jobs (the usual, format check, test checks etc)\n\nMerging `develop` to `main` triggers a bunch of CI jobs (build a docker image, push it to a container registry for instance)\n\n### Should you use it?\n\nYes. It is simple yet efficient.\n\n## Release Candidate workflow\n\n### Timeline Example\n\n![Release Candidate Workflow](/assets/git-workflows/RC-workflow.png)\n\nIt is very similar to **Feature Branching to Develop**. The only difference is that when `develop` is merged to `main` it creates a **Release Candidate** (RC) to be tested in a test/staging environment. If an issue in the test environment arises, a hotfix is done and we have a new RC (RC2 in this case). Once everything is ok in the test env, we have a stable release (we just tag a branch basically).\n\nThe advantage of this strategy is that `main` is the line of truth for both test and prod env. `main` contains the RC and stable versions which is great for reporting what went wrong in the test cluster and what is stable in prod.\n\nThis strategy works if `main` does not automatically deploy to production. It could deploy something non-critical, such as a docker image of the app to a container registry for instance.\n\n### Tagging Example\n\n- Bob has merged a few features to `develop` and deemed `develop` ready to be merged to `main`. It is a release candidate with version `v1.0.0-RC1`\n- Alice approves Bob's changes and merges `develop` to `main`\n- Alice deploys the app to **staging** and realizes one feature needs correction.\n- Alice branches out of `main` and implement the RC fix and the code is merged to `main`. The new version is `v1.0.0-RC2`.\n- Alice redeploys to **staging** and everything works as expected. Thus Alice bumps the version to stable: `v1.0.0`. 
She then deploys to **prod**.\n- Unfortunately, in a very edge case, a feature fails in production and needs urgent fixing.\n- Alice branches out of `main` and implements the *hotfix* and merges back to `main`. The version is now `v1.0.1`.\n- All is well now and it's time to *sync* `develop` with `main`.\n\n### Recap\n\n- `feature` branches are merged to `develop`\n- `develop` branch is merged to `main` as version *x.y.z-RCp*\n- `RC-fixes` branches are merged to `main` as new RCs until test passes in test env. Version is *x.y.z-RC(p+1)*\n- `hotfix` branches are merged to `main` if urgent bug in prod env and version is incremented like so: *x.y.z+1*\n- `main` branch is merged to `develop` (Sync) and eventual conflicts with new features are resolved\n- new `features` are implemented for the version *x.(y+1)+z*\n\n### Should you use it?\n\nIf you need a test/staging environment to test changes, RC strategy is good for you. However, if you have only one env and your CD is not critical, prefer the **Feature branching to develop**\n\n## Conclusion\n\nUse **trunk-based** development if you are working alone on a project or with experienced developers you can trust.\n\nPrefer **feature branching** for the PR dedicated CI/feedback from colleagues or yourself.\n\nHaving a **develop** branch between the `features` and `main` branches helps you follow the “Continuous Integration” philosophy in the sense of frequently merging short-lived feature branches to a development line of truth (even if a bit ahead/diverging from main, production line of truth).\n\nOnly use **release branching** if it is absolutely required because of older release maintenance constraints.\n\nIf you have a test/staging env that needs to go through integration testing before going to prod, the **Release Candidate workflow** is advisable.\n\nI think people tend to refer to CI as the test jobs done on PRs from `feature` to `develop` and CD to refer to the build jobs happening on merge to `main`. Others refer to CI as the philosophy of merging short/partial (but working) features as quickly as possible. This can be applied in **Feature branching to develop** in my opinion.\n\nTaking the time to have the simplest branching strategy possible for your project can really make the development experience a bliss for all developers of your team. People should focus on implementing quality features and not doing some botanic (lots of branches… anybody?).\n", +"\n",iu,"What Git workflow is suitable for your project","git-workflows",new n(null,3,[Zo,"/assets/loic-blog-logo.png",Wo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"git-workflows"]),ij([Sl,xm,Vn,Lp,Mp,oq,Cr,Ws,st],[new S(null,2,5,T,["Clojure","MCTS"],null),new S(null,1,5,T,["2021-08-13"],null),"\n## Objective\n\nAt [Flybot Pte Ltd](https://www.flybot.sg/), we wanted to have a robot-player that can play several rounds of some of our card games (such as `big-two`) at a decent level.\n\nThe main goal of this robot-player was to take over an AFK player for instance.\n\nWe are considering using it for an offline mode with different level of difficulty.\n\nVocabulary:\n\n- `big-two`: popular Chinese Card game (锄大地)\n- `AI` or `robot`: refer to a robot-player in the card game.\n\n2 approaches were used:\n\n- **MCTS**\n- **Domain knowledge**\n\nThe repositories are closed-source because private to Flybot Pte. 
Ltd.\nThe approaches used are generic enough so they can be applied to any kind of game.\n\nIn this article, I will explain the general principle of MCTS applied to our specific case of `big-two`.\n\n## MCTS theory\n\n### What is MCTS\n\n**Monte Carlo Tree Search** (MCTS) is an important algorithm behind many major successes of recent AI applications such as **AlphaGo’s** striking showdown in 2016.\n\nEssentially, MCTS uses Monte Carlo simulation to accumulate value estimates to guide towards highly rewarding trajectories in the search tree. In other words, MCTS pays more attention to nodes that are more promising, so it avoids having to brute force all possibilities, which is impractical to do.\n\nAt its core, MCTS consists of repeated iterations (ideally infinite, in practice constrained by computing time and resources) of 4 steps: `selection`, `expansion`, `simulation` and `update`.\n\nFor more information, this [MCTS article](https://towardsdatascience.com/monte-carlo-tree-search-an-introduction-503d8c04e168) explains the concept very well.\n\n### MCTS applied to big-two\n\nThe MCTS algorithm works very well on deterministic games with perfect information. In other words, games in which each player perfectly knows the current state of the game and there are no chance events (e.g. drawing a card from a deck, dice rolling) during the game.\n\nHowever, there are a lot of games that lack one or both of these components: these types of games are called stochastic games (chance events) and games with imperfect information (partial observability of states).\n\nThus, in **big-two**, we don’t know the cards of the other players, so it is a game with imperfect information (more info in this [paper](https://teaching.csse.uwa.edu.au/units/CITS3001/project/2017/paper1.pdf)).\n\nSo we can apply MCTS to **big-two** but we will need at least one of the following:\n\n- Pre-select moves by filtering out the dumb moves and establishing a game-plan\n- Access hidden information (the other players’ hands). This method is called **Determinization** or also **Perfect Information Monte Carlo Sampling**.\n\n## MCTS implementation\n\n### Tree representation\n\nOur tree representation looks like this:\n\n```clojure\n{:S0 {::sut/visits 11 ::sut/score [7 3] ::sut/chldn [:S1 :S2]}\n :S1 {::sut/visits 5 ::sut/score [7 3] ::sut/chldn [:S3 :S4]}\n :S3 {::sut/visits 1 ::sut/score [7 3]}}\n```\n\nIn the big-two case, `S0` is the init-state, `S1` and `S2` are the children states of `S0`.\n\n`S1` is the new state after a possible play is played.\n\n`S2` is the new state if another possible play is played etc.\n\n`S1` is a key of the tree map, meaning it has been explored before to run simulations.\n\n`S1` has been selected 5 times.\n\n`S2` has never been explored before so it does not appear as a key.\n\nIn games where only the win matters (not the score), you could just use something like `::sut/wins`.\n\n### Selection\n\nTo select the child we want to run simulations from, we proceed like this:\n\n- If some children have not been explored yet, we randomly select one of them\n- If all children have been explored already, we use the UCT to determine the child we select.\n\n`UCT` is the `UCB` (Upper Confidence Bound 1) applied to trees. It provides a way to balance exploration/exploitation. You can read more about it in this [article](https://towardsdatascience.com/the-upper-confidence-bound-ucb-bandit-algorithm-c05c2bf4c13f).\n\nIn the algorithm behind **AlphaGo**, a **UCB**-based policy is used. 
More specifically, each node has an associated UCB value and during selection we always choose the child node with the highest UCB value.\n\nThe **UCB1** formula is the following:\n\n![UCB1 formula](/assets/mcts/ucb1.png)\n\n\x3e With `xi` the mean node value, `ni` the number of visits of node `i`, `N` the number of visits of the parent node.\n\x3e \n\nThe equation includes the following 2 components:\n\n![UCB1 formula parts](/assets/mcts/ucb1_2.png)\n\nThe first part of the equation is the `exploitation`, based on *optimism in the face of uncertainty*.\n\nThe second part of the equation is the `exploration` that allows the search to go through a very rarely visited branch from time to time to see if some good plays might be hidden there.\n\nIn the **big-two** case, the `exploitation` is the total number of points divided by the number of visits of the node. For every simulation of the games, we add up the number of points the AI has made. We want the average points per game simulation so we divide by the number of times we have visited the node.\n\nIn the **big-two** case, the `exploration` considers the number of visits of the parent node (previous state of the game) and the number of visits of the current node (current state of the game). The more we visit the parent without visiting the specific child, the bigger the exploration term becomes. Thus, if we have not visited a child for a long time, since we take the `log10` of `N`, this term becomes dominant and the child will be visited once more.\n\nThe coefficient `c`, called the confidence value, allows us to change the proportion of exploration we want.\n\nTo recap, the `UCB` will often return the state that led to the most points in past simulations. However, from time to time, it will explore and return a child that did not lead to a good reward in the past but that might lead to a stronger play.\n\nThe formula applied to **big-two** is the following:\n\n![UCB1 applied to BT](/assets/mcts/ucb_bt.png)\n\n### Expansion\n\nThis step just consists of adding the newly selected child to the tree.\n\nIn the **big-two** case, the newly selected state is added to the tree.\n\n### Simulation\n\nFor a given node (state), we run several games with everybody playing random moves and we evaluate the total score of the AI. The total number of points from all the simulations is taken into account in the **UCT** formula explained above.\n\nWe do not consider the win because what matters in **big-two**, more than winning the game, is to score a lot of points (cards remaining in opponents’ hands) to make more money. Sometimes, it is even better to lose the game as long as the other losers have a lot of cards left in their hands. 
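To make the selection step more concrete, here is a minimal, self-contained Clojure sketch of the UCB1 computation described above. The names and the single aggregated score are illustrative only (the real tree stores a score vector per player, e.g. `::sut/score [7 3]`); this is not the actual Flybot implementation.

```clojure
(ns mcts.ucb-sketch)

(defn ucb1
  "UCB1 value of a child node:
   exploitation (average points per visit) + c * exploration term,
   using log10 of the parent visits as described above."
  [{:keys [score visits]} parent-visits c]
  (+ (/ score visits)
     (* c (Math/sqrt (/ (Math/log10 parent-visits) visits)))))

(defn select-child
  "Returns the child key with the highest UCB1 value.
   `tree` maps a state key to {:score total-points, :visits n}."
  [tree parent-k child-ks c]
  (let [parent-visits (get-in tree [parent-k :visits])]
    (apply max-key #(ucb1 (tree %) parent-visits c) child-ks)))

(comment
  (select-child {:S0 {:score 10 :visits 11}
                 :S1 {:score 7 :visits 5}
                 :S2 {:score 3 :visits 6}}
                :S0 [:S1 :S2] 2)
  ;; => :S1
  )
```

Unexplored children would be picked at random before this formula kicks in, as described in the Selection section.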
The win matters for your position in the next round however.\n\n### Update\n\nAfter all the simulations are done, we **back-propagate** all the rewards (sum up the scores of each simulation) to the branch nodes.\n\n### MCTS Iteration\n\nWe call `MCTS iteration` the 4 steps described above: `expand-\x3eselect-\x3esimulate-\x3eupdate`\n\nWe run those 4 steps several times to have a tree that shows the path that has the most chance to lead to the best reward (highest score).\n\nSo, for each AI move, we run several MCTS iterations to build a good tree.\n\nThe more iterations we run, the more accurate the tree is but also the bigger the computing time.\n\n### MCTS properties\n\nWe have 2 properties that can be changed:\n\n- `nb-rollouts`: number of simulations per mcts iteration.\n- `budget`: number of mcts iterations (tree growth)\n\n### MCTS applied to a game with more than 2 players\n\nHaving more than 2 players (4 in **big-two** for instance) makes the process more complex as we need to consider the score of all the players. The default way of handling this case, is to back-propagate all the players scores after different simulations. Then, each robot (position) plays to maximize their score. The UCB value will be computed for the score of the concerned robot.\n\n### Caching\n\nBy caching the function that returns the possible children states, we don’t have to rerun that logic when we are visiting a similar node. The node could have been visited during the simulation of another player before so it saves time.\n\nBy caching the sample function, we do not simulate the same state again. Some states might have been simulated by players before during their mcts iterations. This allows us to go directly a level down the tree without simulating the state again and reusing the rewards back-propagated by a previous move.\n\n### Performance issue\n\nIn Clojure, even with caching, I was not able to run a full game because it was too slow, especially at the beginning of the game which can contain hundreds of different possible moves.\n\nFor `{:nb-rollouts 10 :budget 30}` (10 simulations per state and 30 iterations of mcts), the first move can take more than a minute to compute.\n\nAs a workaround, I had the idea of using MCTS only if a few cards are remaining in the player's hands so at least the branches are not that big in the tree. I had decent results in Clojure for big-two.\n\nFor `{:nb-rollouts 10 :budget 30 :max-cards 16}` (16 total cards remaining), in Clojure, it takes less than 3 seconds.\n\nBecause of this problem, I worked on a big-two AI that only uses the **domain knowledge** to play.\n\n## Domain Knowledge\n\nThe problem with MCTS is that even if we don’t brute force all the possibilities, the computing time is still too big if we want to build the tree using random moves.\n\nMost of the possible plays are dumb. Most of the time, we won’t break a fiver just to cover a single card for instance. In case there are no cards on table, we won’t care about having a branch for all the singles if we can play fivers. There are many situations like this. 
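To illustrate this kind of move pre-selection, here is a small, self-contained Clojure sketch. The move representation and the predicates are made up for the example; the actual big-two game-plan is confidential and more elaborate.

```clojure
(ns mcts.pruning-sketch)

;; Illustrative representation: a move is a vector of cards.
(defn single? [move] (= 1 (count move)))
(defn fiver?  [move] (= 5 (count move)))

(defn dumb-move?
  "Example rule from above: when the table is empty and a fiver is
   available, expanding every possible single is a waste of tree budget."
  [{:keys [table possible-plays]} move]
  (and (empty? table)
       (single? move)
       (some fiver? possible-plays)))

(defn candidate-moves
  "Moves actually worth adding as branches of the MCTS tree."
  [{:keys [possible-plays] :as state}]
  (remove #(dumb-move? state %) possible-plays))

(comment
  (candidate-moves {:table []
                    :possible-plays [[:d3] [:s4] [:h5 :h6 :h7 :h8 :h9]]})
  ;; => ([:h5 :h6 :h7 :h8 :h9])
  )
```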
There are lots of branches we don’t need to explore at all.\n\nAs a human player, we always have a `game-plan`, meaning we arrange our cards in our hands with some combinations we want to play if possible and the combination we don’t want to “break\".\n\nWe can use this `game-plan` as an alternative to MCTS, at least for the first moves of the games.\n\nThe details of this `game-plan` are confidential for obvious reasons.\n\n## Conclusion\n\nHaving an hybrid approach, meaning using a `game-plan` for the first moves of the game when the possible plays are too numerous, and then use MCTS at the end of the game allowed us to have a decent AI we can use.\n\nAs of the time I write this article, the implementation is being tested (as part of a bigger system) and not yet in production.\n", +"\n",iu,"MCTS applied to card games","article-mcts",new n(null,3,[Zo,"/assets/loic-blog-logo.png",Wo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-post-mcts"]),ij([Sl,xm,Vn,Bp,Lp,Mp,oq,Cr,Ws,st],[new S(null,3,5,T,["Clojure","AWS","Full-Stack"],null),new S(null,1,5,T,["2023-01-20"],null),'\nThis is an example of how to deploy a containerized full-stack Clojure app in AWS EC2.\n\nI will use the [flybot.sg website](https://github.com/skydread1/flybot.sg) as example of app to deploy.\n\n## Prerequisites\n\n- Use an external DNS manager such as goDaddy for instance\n- The app does not handle SSL and domain/protocols redirect\n- The app used `datalevin` as embedded database which resides alongside the Clojure code inside a container\n- The app is an open-source mono-repo and hosted on my GitHub\n- We use ALB for redirects and certificates validations and ELB for static IP entry point.\n\n## Use Jibbit to push to ECR\n\nInstead of using datomic pro and having the burden to have a separate containers for the app and transactor, we decided to use [juji-io/datalevin](https://github.com/juji-io/datalevin) and its embedded storage on disk. 
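To give an idea of what "embedded" means in practice, here is a minimal datalevin sketch: the database is just a directory opened in-process, so there is no separate transactor or database server to run. The path, schema and data are illustrative, not the actual flybot.sg ones.

```clojure
(ns storage.sketch
  (:require [datalevin.core :as d]))

;; Open (or create) the embedded store: a plain directory on disk.
(def conn (d/get-conn "/datalevin/dev/flybotdb" {}))

;; Transact and query in-process, no transactor container involved.
(d/transact! conn [{:post/title "Hello Flybot"}])

(d/q '[:find ?title
       :where [_ :post/title ?title]]
     (d/db conn))
;; => #{["Hello Flybot"]}

(d/close conn)
```

The `:db-uri` passed via the `SYSTEM` env variable in the docker commands further down points to this kind of on-disk directory.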
Thus, we only need to deploy one container with the app.\n\nTo do so, we can use the library [atomisthq/jibbit](https://github.com/atomisthq/jibbit) baed on [GoogleContainerTools/jib](https://github.com/GoogleContainerTools/jib) (Build container images for Java applications).\n\nIt does not use docker to generate the image, so there is no need to have docker installed to generate images.\n\n[jibbit](https://github.com/atomisthq/jibbit) can be added as `alias` in deps.edn:\n\n```clojure\n:jib\n {:deps {io.github.atomisthq/jibbit {:git/tag "v0.1.14" :git/sha "ca4f7d3"}}\n :ns-default jibbit.core\n :ns-aliases {jib jibbit.core}}\n```\n\nThe `jib.edn` can be added in the project root with the configs to generate and push the image.\n\n### Testing the app image locally\n\nExample of jibbit config to just create a local docker image:\n\n```clojure\n;; example to create an docker image to be run with docker locally\n{:main clj.flybot.core\n :aliases [:jvm-base]\n :user "root"\n :group "root"\n :base-image {:image-name "openjdk:11-slim-buster"\n :type :registry}\n :target-image {:image-name "flybot/image:test"\n :type :docker}}\n```\n\nThen we can run the container:\n```\ndocker run \\\n--rm \\\n-it \\\n-p 8123:8123 \\\n-v db-v2:/datalevin/dev/flybotdb \\\n-e OAUTH2\x3d"secret" \\\n-e ADMIN_USER\x3d"secret" \\\n-e SYSTEM\x3d"{:http-port 8123, :db-uri \\"datalevin/dev/flybotdb\\", :oauth2-callback \\"http://localhost:8123/oauth/google/callback\\"}" \\\nflybot/image:test\n```\n\n### AWS profile for CI\n\n[jibbit](https://github.com/atomisthq/jibbit) can also read your local AWS credentials to directly push the generated image to your ECR (Elastic Container Registry).\n\nYou need to have aws cli installed (v2 or v1) and you need an env variable `$ECR_REPO` setup with the ECR repo string.\n\nYou have several [possibilities](https://github.com/atomisthq/jibbit/blob/main/src/jibbit/aws_ecr.clj) to provide credentials to login to your AWS ECR.\n\nHere is the `jib.edn` for the CI:\n\n```clojure\n{:main clj.flybot.core\n :target-image {:image-name "$ECR_REPO"\n :type :registry\n :authorizer {:fn jibbit.aws-ecr/ecr-auth\n :args {:type :profile\n :profile-name "flybot"\n :region "region"}}}}\n```\n\n### ENV variables\n\nI used [repository secrets](https://docs.github.com/en/actions/security-guides/encrypted-secrets) to handle AWS credentials on the GitHub repo:\n\n- `AWS_ACCESS_KEY_ID` (must be named like that)\n- `AWS_SECRET_ACCESS_KEY` (must be named like that)\n- `ECR_REPO`\n\n## AWS EC2\n\nThis [article](https://medium.com/appgambit/part-1-running-docker-on-aws-ec2-cbcf0ec7c3f8) explained quite well how to setup docker in EC2 and pull image from ECR.\n\n### IAM policy and role, Security group\n\nThe UserData to install docker at first launch of the EC2 instance is the following:\n\n```bash\n#! 
/bin/sh\n# For Amazon linux 2022 (might differ in 2023 but the principle remains)\nyum update -y\namazon-linux-extras install docker\nservice docker start\nusermod -a -G docker ec2-user\nchkconfig docker on\n```\n\nTo allow the EC2 to pull from ECR we need to add an `IAM policy` and `IAM role`.\n\nLet’s first create the policy `flybot-ECR-repo-access` :\n\n```bash\n{\n "Version": "2012-10-17",\n "Statement": [\n {\n "Sid": "ListImagesInRepository",\n "Effect": "Allow",\n "Action": [\n "ecr:ListImages"\n ],\n "Resource": "arn:aws:ecr:region:acc:repository/flybot-website"\n },\n {\n "Sid": "GetAuthorizationToken",\n "Effect": "Allow",\n "Action": [\n "ecr:GetAuthorizationToken"\n ],\n "Resource": "*"\n },\n {\n "Sid": "ManageRepositoryContents",\n "Effect": "Allow",\n "Action": [\n "ecr:BatchCheckLayerAvailability",\n "ecr:GetDownloadUrlForLayer",\n "ecr:GetRepositoryPolicy",\n "ecr:DescribeRepositories",\n "ecr:ListImages",\n "ecr:DescribeImages",\n "ecr:BatchGetImage",\n "ecr:InitiateLayerUpload",\n "ecr:UploadLayerPart",\n "ecr:CompleteLayerUpload",\n "ecr:PutImage"\n ],\n "Resource": "arn:aws:ecr:region:acc:repository/flybot-website"\n }\n ]\n}\n```\n\nWe then attached the policy `flybot-ECR-repo-access` to a role `flybot-ECR-repo-access-role`\n\nFinally, we attach the role `flybot-ECR-repo-access-role` to our EC2 instance.\n\nWe also need a `security group` to allow http(s) request and open our port 8123 for our [aleph](https://github.com/clj-commons/aleph) server.\n\nWe attached this SG to the EC2 instance as well.\n\n### Run docker on EC2 instance and pull image from ECR\n\nThen inside the EC2 instance, we can pull the image from ECR and run it:\n\n```bash\n# Login to ECR, this command will return a token\naws ecr get-login-password \\\n--region region \\\n| docker login \\\n--username AWS \\\n--password-stdin acc.dkr.ecr.region.amazonaws.com\n\n# Pull image\ndocker pull acc.dkr.ecr.region.amazonaws.com/flybot-website:test\n\n# Run image\ndocker run \\\n--rm \\\n-d \\\n-p 8123:8123 \\\n-v db-volume:/datalevin/prod/flybotdb \\\n-e OAUTH2\x3d"secret" \\\n-e ADMIN_USER\x3d"secret" \\\n-e SYSTEM\x3d"{:http-port 8123, :db-uri \\"/datalevin/prod/flybotdb\\", :oauth2-callback \\"https://www.flybot.sg/oauth/google/callback\\"}" \\\nacc.dkr.ecr.region.amazonaws.com/flybot-website:test\n```\n\n## Load Balancers\n\nEven if we have one single EC2 instance running, there are several benefits we can get from AWS load balancers.\n\nIn our case, we have an Application Load Balancer (ALB) as target of a Network Load Balancer (NLB). Easily adding an ALB as target of NLB is a recent [feature](https://aws.amazon.com/blogs/networking-and-content-delivery/using-aws-lambda-to-enable-static-ip-addresses-for-application-load-balancers/) in AWS that allows us to combine the strength of both LBs.\n\n### ALB\n\nThe internal ALB purposes:\n\n- redirect naked domain (flybot.sg) to sub domain (www.flybot.sg)\n- redirect http to https using the SSL certificates from AWS Certificate Manager (`ACM`)\n\nACM allows us to requests certificates for `www.flybot.sg` and `flybot.sg` and attach them to the ALB rules to perform path redirection in our case. This is convenient as we do not need to install any ssl certificates or handle any redirects in the instance directly or change the code base.\n\n### NLB\n\nSince the ALB has dynamic IPs, we cannot use it in our goDaddy `A` record for `flybot.sg`. 
One solution is to use AWS route53 because AWS added the possibility to register the ALB DNS name in a A record (which is not possible with external DNS managers). However, we already use goDaddy as DNS host and we don’t want to depend on route53 for that.\n\nAnother solution is to place an internet-facing NLB behind the ALB because NLB provides static IP.\n\nALB works at level 7 but NLB works at level 4.\n\nThus, we have for the NLB:\n\n- TCP rule that forwards request to ALB on port 80 (for http)\n- TCP rules that forwards request on port 443 (for https)\n\n### Target group\n\nThe target group is where the traffic from the load balancers is sent. We have 3 target groups.\n\n- The first target group contains the EC2 instance in which the ALB forward request.\n- The second target group contains the ALB with the protocol TCP 80 in which the NLB forward http requests.\n- The third target group contains the ALB with the protocol TCP 443 in which the NLB forward https request.\n\n### DNS records\n\nSince the ELB is the internet-facing entry points, we use a `CNAME` record for `www` resolving to the ELB DNS name.\n\nFor the root domain `flybot.sg`, we use a `A` record for `@` resolving to the static IP of the ELB (for the AZ where the EC2 resides).\n\n## Learn More\n\nYou can have a look at the open-source repo: [skydread1/flybot.sg](https://github.com/skydread1/flybot.sg)\n', +new S(null,1,5,T,[new S(null,2,5,T,["Flybot","https://github.com/skydread1/flybot.sg"],null)],null),"\n",iu,"Deploy full stack Clojure website to AWS","deploy-clj-app-to-aws",new n(null,3,[Zo,"/assets/loic-blog-logo.png",Wo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-deploy-clj-aws"]),ij([Sl,xm,Vn,Lp,Mp,oq,Cr,Ws,st],[new S(null,4,5,T,["AWS","Load Balancers","DNS","GoDaddy"],null),new S(null,1,5,T,["2023-11-09"],null),"\n## Context\n\nMy goal was to redirect the domain `flybot.sg` to the subdomain `www.flybot.sg`.\n\nThe domain and subdomain are resolved with GoDaddy.\n\nThe app is deployed in an AWS EC2 which resides behind an ALB (Application Load Balancer).\n\nWe can map www.flybot.sg to the ALB DNS name with a `CNAME` record.\n\n**However, we cannot map flybot.sg (the naked domain) to the ALB because we cannot use CNAME for the domain, only `A` records are valid.**\n\n## GoDaddy forwarding\n\nGoDaddy provides a way to redirect from domain to subdomain which is great news.\n\nHowever, as of 2018, it cannot redirect paths:\n\n- [flybot.sg](http://flybot.sg/) -\x3e [www.flybot.sg](http://www.flybot.sg/) (OK)\n- [flybot.sg/blog](http://flybot.sg/blog) -\x3e [www.flybot.sg/blog](http://www.flybot.sg/blog) (error 404)\n\nTherefore, this simple solution is not viable.\n\n## Using AWS route53 as Name Server\n\n**Since the ALB has `dynamic` IPs, we cannot use it in our goDaddy `A` record for `flybot.sg`.**\n\nOne solution is to use AWS route53 because AWS added the possibility to **register the ALB DNS name in a special ALIAS record**.\n\nSo we could add NS records in GoDaddy to specify that for the domain `flybot.sg`, we let AWS handle it. However, we cannot add NS records for the `domain`, only for `subdomain`. 
The only way to make sure the domain is handled by AWS is to change the `default Name Servers` in our GoDaddy DNS.\n\nThis would work, however, since we change the **default** Name Servers, all the subdomains will also be handled by AWS, so we are basically letting AWS handle all our subdomains which is not what we wanted.\n\n### Note\n\nIf we wanted to let AWS handle a subdomain such as `test.flybot.sg` for instance, that would be totally possible without affecting the other subdomains (and the domain), because we can add NS records for subdomain to specify what Name Servers to use. The problem arises when we deal with the naked domain.\n\n## ALB+NLB\n\nThe solution I chose was to add a Network Load Balancer (NLB) in front of the ALB. The NLB can provide a **static** IP so we can resolve our @ `A` record to the NLB subnet static IP.\n\nAdding an ALB as target of NLB is a recent [feature](https://aws.amazon.com/blogs/networking-and-content-delivery/using-aws-lambda-to-enable-static-ip-addresses-for-application-load-balancers/) in AWS that allows us to combine the strength of both LBs.\n\n### ALB\n\nThe internal ALB purposes:\n\n- redirect naked domain (flybot.sg) to sub domain (www.flybot.sg)\n- redirect http to https using the SSL certificates from ACM\n\nAmazon Certificate Manager allows us to requests certificates for `www.flybot.sg` and `flybot.sg` and attach them to the ALB rules to perform path redirection in our case. This is convenient as we do not need to install any ssl certificates or handle any redirects in the instance directly or change the code base.\n\n### NLB\n\nALB works at level 7 but NLB works at level 4.\n\nThus, we have for the NLB:\n\n- TCP rule that forwards request to ALB on port 80 (for http)\n- TCP rules that forwards request on port 443 (for https)\n\n### Target group\n\nThe target group is where the traffic from the load balancers is sent. We have 3 target groups.\n\nThe first target group contains the **EC2** instance in which the ALB forward request.\n\nThe second target group contains the **ALB** with the protocol TCP 80 in which the NLB forward **http** requests.\n\nThe third target group contains the **ALB** with the protocol TCP 443 in which the NLB forward **https** request.\n\n### DNS records\n\nSince the ELB is the internet-facing entry points, we use a `CNAME` record for `www` resolving to the ELB DNS name\n\nFor the root domain `@`, we use an `A` record resolving to the static IP of the NLB (for the AZ where the EC2 resides).\n\n### Trade-off\n\nThe trade-off on adding an extra load balancer is the cost. Once the free-tier period is over, the minimum cost for a load balancer is $18 per month.\n\n## Conclusion\n\nGoDaddy domain to subdomain forwarding does not support path so it is not viable at all.\n\nUsing AWS route53 to enjoy the feature of mapping a domain to the ALB DNS name via the special record ALIAS comes at a cost: all subdomains would need to be resolved using AWS Name Servers.\n\nALB+NLB is the setup that worked well for me. Having the internal ALB handling redirect to https and to the subdomain is very convenient. 
Using an internet-facing NLB solves the static IP problem to resolve the domain record at an extra cost of minimum $18 per month.\n", +"\n",iu,"Redirecting Domain to Subdomain using AWS ALB+NLB","redirect-domain-to-subdomain",new n(null,3,[Zo,"/assets/loic-blog-logo.png",Wo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-redirect-domain"]),ij([Sl,xm,Vn,Bp,Lp,Mp,oq,Cr,Ws,st],[new S(null,4,5,T,["Python","Django","AWS","Elastic Beanstalk"],null),new S(null,1,5,T,["2023-08-08"],null),"\n## Context\n\nMy open-source tech blog used to be deployed on AWS (during my AWS free tier period).\n\nIn this article, I am going to highlight the different libraries/settings I used to develop and deploy my Django app.\n\n## Project Setup\n\nYou might be familiar with Django app setup but I will just recap the different common commands I ran in my case.\n\n### Start python env\n\n```bash\n# create env\npython -m venv blog_venv\n\n# activate env (mac)\nsource blog_venv/bin/activate\n\n```\n\n### Install Django\n\n```bash\npip install django\n```\n\n### Start Django project\n\n```bash\ndjango-admin startproject loicblog\n```\n\n### Migrations\n\nMigrations are Django’s way of propagating changes you make to your models (adding a field, deleting a model, etc.) into your database schema. They’re designed to be mostly automatic, but you’ll need to know when to make migrations, when to run them, and the common problems you might run into.\n\n```bash\n# create new migrations based on the changes made to the models\npython manage.py makemigrations\n\n# apply and unapply migrations.\npython manage.py migrate\n```\n\n### Run server\n\n```bash\n# be sure to migrate first\npython manage.py runserver\n```\n\n### Admin\n\nIt is very common to have a superuser to handle admin tasks:\n\n```bash\npython manage.py createsuperuser --username\x3dmyname --email\x3dme@mymail.com\n```\n\n## Start app\n\nOne project can have multiple apps such as a blog, an authentication system etc.\nSo `loicblog` is the project and `blog` is one app inside the project.\n\n```bash\npython manage.py startapp blog\n```\n\n- Add the `blog` app to the INSTALLED_APPS array in the project `loicblog/common.py`.\n- Add `path('', include('blog.urls'))` to the `urlpatterns` in `loic/blog/urls.py`.\n\n## Blog Post Content in Markdown\n\nI like writing my articles in Markdown with a preview button (like on GitHub for instance). This is how I write the articles in this blog. To do so, I used [django-markdownx](https://neutronx.github.io/django-markdownx/).\n\n### django-markdownx\n\n```bash\npip install markdown django-markdownx\n```\n\n- Then we need to add the `markdownx` app to the INSTALLED_APPS array in the project `loicblog/common.py`.\n- Add the path to [urls.py](http://urls.py/): `path('markdownx/', include('markdownx.urls'))`.\n- Collect MarkdownX assets to your STATIC_ROOT:\n\n```bash\npython manage.py collectstatic\n```\n\n### Code block syntax highlighting\n\nBy default the html code blocks `pre` do not have syntax highlighting. Since, this blog gives code examples in different programming languages, it is important to support syntax highlighting of the code blocks. 
I used [codehilite](https://python-markdown.github.io/extensions/code_hilite/) for that.\n\nAdding `MARKDOWNX_MARKDOWN_EXTENSIONS \x3d ['fenced_code', 'codehilite']` to the settings enable syntax highlighting.\n\n`codehilite` required the [pygments](https://pygments.org/) package:\n\n```bash\npip install pygments\n```\n\n## Env variables\n\n### django-environ\n\nA common python library to deal with ENV variable in django is [django-environ](https://django-environ.readthedocs.io/en/latest/)\n\n```bash\npip install django-environ\n```\n\n### Django project settings\n\nI advise to separate your settings in multiple files instead of keeping one default `settings.py`.\n\nI use 3 settings files: `common.py` , `dev.py` and `prod.py`. \n\nHere is the `common.py` (partial content):\n\n```python\nfrom pathlib import Path\nimport os\n\n# Build paths inside the project like this: BASE_DIR / 'subdir'.\nBASE_DIR \x3d Path(__file__).resolve().parent.parent.parent\n\n# Env variables\n\nimport environ\n# Initialise environment variables\nenv \x3d environ.Env()\nbase \x3d environ.Path(__file__) - 3 # 3 folders back\nenviron.Env.read_env(env_file\x3dbase('.env'), overwrite\x3dTrue) # reading .env file\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY \x3d env('SECRET_KEY')\n\nALLOWED_HOSTS \x3d []\n\n# Application definition\n\n...\n\n# Static files (CSS, JavaScript, Images)\n# https://docs.djangoproject.com/en/4.2/howto/static-files/\n\nSTATIC_URL \x3d \"static/\"\n\n# Default primary key field type\n# https://docs.djangoproject.com/en/4.2/ref/settings/#default-auto-field\n\nDEFAULT_AUTO_FIELD \x3d \"django.db.models.BigAutoField\"\n\n# Login/Logout redirects\nLOGIN_REDIRECT_URL \x3d 'home'\nLOGOUT_REDIRECT_URL \x3d 'home'\n\n# Markdown extensions to handle code blocks and code block highlighting\nMARKDOWNX_MARKDOWN_EXTENSIONS \x3d ['fenced_code', 'codehilite']\n```\n\nThen, the main differences between `dev` and `prod` are the DB settings and where to store the static files.\n\nHere is the `dev.py`:\n\n```python\nfrom loicblog.settings.common import *\n\nDEBUG \x3d True\n\n# SQLite Database\n\nDATABASES \x3d {\n \"default\": {\n \"ENGINE\": \"django.db.backends.sqlite3\",\n \"NAME\": BASE_DIR / \"db.sqlite3\",\n }\n}\n\nSTATIC_ROOT \x3d \"/Users/loicblanchard/workspaces/blog-statics\"\n```\n\nAnd here is the `prod.py`:\n\n```python\nfrom loicblog.settings.common import *\n\nDEBUG \x3d False\n\nALLOWED_HOSTS \x3d [\"blog.loicblanchard.me\", \"*\"] # add localhost for local testing\n\nCSRF_TRUSTED_ORIGINS \x3d ['https://blog.loicblanchard.me']\n\n# Amazon S3 configuration\nAWS_ACCESS_KEY_ID \x3d env.str('AWS_ACCESS_KEY_ID')\nAWS_SECRET_ACCESS_KEY \x3d env.str('AWS_SECRET_ACCESS_KEY')\nAWS_STORAGE_BUCKET_NAME \x3d env.str('AWS_STORAGE_BUCKET_NAME')\n\nINSTALLED_APPS +\x3d [\n 'storages',\n]\n\nSTORAGES \x3d {\n \"staticfiles\": {\n \"BACKEND\": \"storages.backends.s3boto3.S3StaticStorage\"\n }\n}\n\nAWS_S3_CUSTOM_DOMAIN \x3d '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME\n\nAWS_S3_FILE_OVERWRITE \x3d True\n\n# Set the static root to the S3 bucket path\nSTATIC_ROOT \x3d 's3://%s/static' % AWS_STORAGE_BUCKET_NAME\n\n## Admin styling adjustment\n\nADMIN_MEDIA_PREFIX \x3d '/static/admin/'\n\n# PostgreSQL Database\n\nDATABASES \x3d {\n 'default': {\n 'ENGINE': 'django.db.backends.postgresql',\n 'NAME': env.str('DB_NAME'),\n 'USER': env.str('DB_USER'),\n 'PASSWORD': env.str('DB_PASSWORD'),\n 'HOST' : env.str('DB_HOST'),\n 'PORT': env.str('DB_PORT', default\x3d'5432'),\n 
}\n}\n```\n\n### .env and .env.dist\n\nYou can see that all the sensitive data is stored in env variables and in my case in a `.env` file at the root of the project. Of course, do not push this file to any repo and keep it in a safe place (I use GitHub private Gist or sometimes directly Bitwarden password manager for some credentials).\n\nAnother good practice is to have an `env.dist` file that describes the env variables expected to be provided without the actual values.\n\nHere is mine:\n\n```bash\nDJANGO_SETTINGS_MODULE\x3d\nSECRET_KEY\x3d\n\nAWS_ACCESS_KEY_ID\x3d\nAWS_SECRET_ACCESS_KEY\x3d\nAWS_STORAGE_BUCKET_NAME\x3d\nAWS_S3_REGION_NAME\x3d\n\nDB_NAME\x3d\nDB_USER\x3d\nDB_PASSWORD\x3d\nDB_HOST\x3d\n```\n\nIn case other developers or your future self want to know what are the env variables required for the project to work (especially in prod), having a look at the `.env.dist` show me what I need to know right away.\n\nNote the `DJANGO_SETTINGS_MODULE` I use for switching from `dev` to `prod` env and therefore load the proper setting file.\n\nIf you look at the `prod.py` , you can see that I use AWS S3 to store the static files and AWS RDS Postgres to store the users/posts data.\n\n## AWS S3\n\nThe static files are stored in a public AWS S3 bucket.\n\n### django-storages\n\nDjango-storages is a Python library that provides a storage backend system for Django web applications.\n\n```bash\npip install -U django-storages\n```\n\n### boto3\n\nOfficial AWS SDK (Software Development Kit) for Python. Boto3 allows Python developers to interact with various Amazon Web Services (AWS) resources and services programmatically.\n\n```bash\npip install -U boto3\n```\n\n### Push static files to S3\n\nFirst, I make sure to use the prod env variable in `.env`:\n\n```bash\nDJANGO_SETTINGS_MODULE\x3dloicblog.settings.prod\n```\n\nThen in `loicblog`:\n\n```bash\npython manage.py collectstatic\n```\n\nThis command collects all the static files and store them in the location specified via `STATIC_ROOT` (a local dir for `dev` and the S3 bucket for `prod`).\n\nBe sure to have the AWS env variables setup before running the command.\n\nIn prod, this command will gather your static files and push it to AWS S3 bucket (you need to have the bucket public so the files can be served everywhere).\n\n## AWS RDS Postgres\n\nFirst, create AWS RDS Postgres db and Security Group.\n\nFor the SG, it needs to have inbound rules for Postgres.\n\n### psycopg2-binary\n\nThe package `psycopg2-binary` is a PostgreSQL adapter for Python. It allows Python programs to communicate with a PostgreSQL database.\n\n```bash\npip install psycopg2-binary\n```\n\nAll the DB configs can be added in `.env` as well.\n\nIn order to migrate and setup the superuser properly, we need to run a few commands:\n\n```bash\n# apply migrations.\npython manage.py migrate\n\n# Create superuser\npython manage.py createsuperuser\n```\n\n## AWS Elastic Beanstalk\n\nOne straight forward way to run the Django app is in an AWS Elastic Beanstalk.\n\nYou can read more about it in this guide: [EB guide](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/create-deploy-python-django.html).\n\nFirst, we need to be sure all the packages are present in the `requirements.txt` as the EB needs it to setup the app:\n\n```bash\npip freeze \x3e requirements.txt\n```\n\n### gunicorn\n\n[Gunicorn](https://gunicorn.org/) (Green Unicorn) is a commonly used HTTP server for deploying Python web applications, including Django apps. 
When deploying a Django app on AWS Elastic Beanstalk, Gunicorn is often used as the application server to handle incoming HTTP requests and serve the Django application.\n\n```bash\npip install gunicorn\n```\n\n### EB config\n\nThe EB configurations can be found in `.ebextensions/django.config`\n\n```\noption_settings:\n aws:elasticbeanstalk:container:python:\n WSGIPath: loicblog.wsgi:application\n```\n\n**WSGI Application**: The Web Server Gateway Interface application is responsible for handling the communication between the web server (like Apache or nginx) and the Django application. It translates incoming HTTP requests into a format that Django can process and then sends the responses back to the web server.\n\nIn my example, **`loicblog.wsgi`** is the module path, and **`application`** is the variable within that module that represents my WSGI application.\n\n### AWS CLI EB\n\nWe can use the AWS CLI to manage the Elastic Beanstalk creation and deployment of new environment and app.\n\n```bash\n## first leave python virtual env\ndeactivate\n\n## then proceed with eb cli\nbrew install awsebcli\n\n## init eb\neb init\n\n## BE SURE TO HAVE DJANGO_SETTINGS_MODULE\x3dloicblog.settings.prod\n\n## Create all resources\neb create\n\n## (re)deploy\neb deploy\n```\n\n## Domain name\n\n### ALB DNS\n\nBy default, creating an EB also setup an Application Load Balancer (ALB). The ALB has its own DNS and we want to map our own DNS name to it. The type of record to achieve this is called `CNAME`.\n\n### SSL Certificate\n\nIn my case, I own the domain `loicblanchard.me`. I want to have my blog on the subdomain `blog.loicblanchard.me`. I use GoDaddy for DNS provider but the process is quite similar for most providers.\n\nFor HTTPS, we can create a SSL certificate using AWS Certificate Manager for the subdomain `blog.loicblanchard.me`.\n\n*Note: ACM provides the CNAME record name and value. For the name, it will provide something like this `_SOME-NUMBERS-HERE.blog.loicblanchard.me.`*\n\n*However, we need to only enter `_SOME-NUMBERS-HERE.blog` for it to work in GoDaddy.*\n\n### Mapping Subdomain to ALB\n\nThen in GoDaddy, to resolve `blog.loicblanchard.me` to the ALB name, we need to add another CNAME record for the `blog` subdomain.\n\nAfter that, we need to add rules to the ALB to redirect http to https using the ACM certificate.\n\nFinally, we need to be sure the Security Group of the ALB allows inbound HTTPS.\n\nUpdate the `ALLOWED_HOSTS` and `CSRF_TRUSTED_ORIGINS` with the subdomain and redeploy the EB.\n\n## Conclusion\n\nI provided some general guidelines on how you could develop and deploy a Django app using the example of my own project.\n\nUsing AWS EB is very straight forward to setup and cheap solution for low traffic website such as my blog.\n\nUsing AWS S3 to serve the static files and AWS RDS to store your production data are common ways to handle your production data.\n\nBe sure to keep your env variables safe and split dev and prod settings to avoid confusion and accidental sensitive data leak.\n\nSince EB comes with ALB, you can easily use a CNAME record to map your own personal subdomain to the ALB.\n\nBe aware of the hosting costs. 
I moved the blog content to my clojure SPA instead because after my AWS free tier expired, the monthly cost for hosting the blog was around $50 which was too much for a simple blog like that.\n", +new S(null,1,5,T,[new S(null,2,5,T,["Blog","https://github.com/skydread1/blog"],null)],null),"\n",iu,"Deploy Django Blog in AWS Beanstalk","deploy-django-aws-beanstalk",new n(null,3,[Zo,"/assets/loic-blog-logo.png",Wo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-django-aws"]),ij([Sl,xm,Vn,Bp,Lp,Mp,oq,Cr,Ws,st],[new S(null,4,5,T,["Clojure","Compiler","CLR","Unity"],null),new S(null,1,5,T,["2022-04-22"],null),'\n## Prerequisites\n\nYour Clojure library is assumed to be already compiled to dotnet.\n\nTo know how to do this, refer to the article: [Port your Clojure lib to the CLR with MAGIC](https://www.loicblanchard.me/blog/port-clj-lib-to-clr)\n\n## Goal\n\nIn this article, I will show you:\n- how to package your lib to nuget\n- push it in to your host repo\n- import in Unity in this article\n\n## Build the dlls with Nostrand\n\nJust use the command `nos dotnet/build` at the root of the Clojure project.\n\nThe dlls are by default generated in a `/build` folder.\n\n## Dependency management\n\nA `.csproj` file (XML) must be added at the root of the Clojure project.\n\nYou can find an example here: [clr.test.check.csproj](https://github.com/skydread1/clr.test.check/blob/magic/clr.test.check.csproj)\n\n```xml\n\x3cProject Sdk\x3d"Microsoft.NET.Sdk"\x3e\n \x3cPropertyGroup\x3e\n \x3cTargetFrameworks\x3enetstandard2.0\x3c/TargetFrameworks\x3e\n \x3c/PropertyGroup\x3e\n \x3cPropertyGroup\x3e\n \x3cNuspecFile\x3eclr.test.check.nuspec\x3c/NuspecFile\x3e\n \x3cRestoreAdditionalProjectSources\x3e\n https://api.nuget.org/v3/index.json\n \x3c/RestoreAdditionalProjectSources\x3e\n \x3c/PropertyGroup\x3e\n\x3c/Project\x3e\n```\n\nThere is no need to add References as they were already built by Nostrand in the `/build` folder.\n\nNote the `NuspecFile` that is required to use the nuspec.\n\n## Package Manager\n\nA `.nuspec` file (XML) must be added at the root of the Clojure project.\n\nThe `references` are the references to the dlls in `/build`.\n\nYou can find an example here: [clr.test.check.nuspec](https://github.com/skydread1/clr.test.check/blob/magic/clr.test.check.nuspec)\n\n```xml\n\x3c?xml version\x3d"1.0" encoding\x3d"utf-8"?\x3e\n\x3cpackage\x3e\n \x3cmetadata\x3e\n \x3cid\x3eclr.test.check\x3c/id\x3e\n \x3cversion\x3e1.1.1\x3c/version\x3e\n \x3ctitle\x3eclr.test.check\x3c/title\x3e\n \x3cauthors\x3eskydread1\x3c/authors\x3e\n \x3cdescription\x3eContains the core references for the Clojure lib test.check.\x3c/description\x3e\n \x3crepository type\x3d"git" url\x3d"https://github.com/skydread1/clr.test.check" /\x3e\n \x3cdependencies\x3e\n \x3cgroup targetFramework\x3d"netstandard2.0"\x3e\x3c/group\x3e\n \x3c/dependencies\x3e\n \x3c/metadata\x3e\n \x3cfiles\x3e\n \x3cfile src\x3d"build\\*.clj.dll" target\x3d"lib\\netstandard2.0" /\x3e\n \x3c/files\x3e\n\x3c/package\x3e\n```\n\nThe `dependency` tag is required to indicate the targeted framework.\n\nThe `file` (using a wild card to avoid adding the files one by one) is required to add the dlls files that will be available for the consumer. 
So the target must be `lib\\TFM`.\n\nIn our case, Unity recommends to use `netstandard2.0` so our target is `lib\\netstandard2.0`.\n\n## GitHub/GitLab local config\n\nTo push the package to a git host, one of the most convenient way is to have a `nuget.config` (XML) locally at the root of the project.\n\n### The nuget.config for GitHub\n\n```xml\n\x3c?xml version\x3d"1.0" encoding\x3d"utf-8"?\x3e\n\x3cconfiguration\x3e\n \x3cpackageSources\x3e\n \x3cclear /\x3e\n \x3cadd key\x3d"github" value\x3d"https://nuget.pkg.github.com/skydread1/index.json" /\x3e\n \x3c/packageSources\x3e\n \x3cpackageSourceCredentials\x3e\n \x3cgithub\x3e\n \x3cadd key\x3d"Username" value\x3d"skydread1" /\x3e\n \x3cadd key\x3d"ClearTextPassword" value\x3d"PAT" /\x3e\n \x3c/github\x3e\n \x3c/packageSourceCredentials\x3e\n\x3c/configuration\x3e\n```\n\nIn order to push a Package to a `Package Registry` to your GitHub project repo, you will need to create a **PAT** (Personal Access Token) with the `write:packages` ,`:read:packages` and `delete:packages` permissions.\n\nReplace Username value by your Github username\n\nReplace Token value by your newly created access token\n\nReplace the repo URL by the path to your GitHub **account page** (not the repo).\n\n*Note: Do not push your config in GitHub as it contains sensitive info (your PAT), it is just for local use.*\n\n### The nuget.config for GitLab\n\n```xml\n\x3c?xml version\x3d"1.0" encoding\x3d"utf-8"?\x3e\n\x3cconfiguration\x3e\n \x3cpackageSources\x3e\n \x3cclear /\x3e\n \x3cadd key\x3d"gitlab" value\x3d"https://sub.domain.sg/api/v4/projects/777/packages/nuget/index.json" /\x3e\n \x3c/packageSources\x3e\n \x3cpackageSourceCredentials\x3e\n \x3cgitlab\x3e\n \x3cadd key\x3d"Username" value\x3d"deploy-token-name" /\x3e\n \x3cadd key\x3d"ClearTextPassword" value\x3d"deploy-token-value" /\x3e\n \x3c/gitlab\x3e\n \x3c/packageSourceCredentials\x3e\n\x3c/configuration\x3e\n```\n\nIn order to push a Package to a `Package Registry` to your GitLab project repo, you will need to create a **deploy token** (not access token) with the `read_package_registry` and `write_package_registry` permissions.\n\nReplace Username value by your token username\n\nReplace Token value by your newly created deploy token\n\nReplace the domain (for private server) and project number in the GitLab URL. (don’t forget the index.json at the end)\n\n*Note: Do not push your config in GitLab as it contains sensitive info (your deploy token), it is just for local use.*\n\n## Pack and Push nuget packages with Nostrand\n\nAt the root of the project, the `dotnet.clj` contains the convenient function to be used with [nasser/nostrand](https://github.com/nasser/nostrand).\n\nYou can find an example here: [dotnet.clj](https://github.com/skydread1/clr.test.check/blob/magic/dotnet.clj)\n\nWe added to our Clojure library a convenient function to avoid having to manually use the dotnet commands, you can just run at the root at the Clojure directory:\n\n```bash\nnos dotnet/nuget-push\n```\n\nThis will create the nuget code package `.nupkg` file in the folder `bin/Release`. 
the name is the package name and the version such as `clr.test.check.1.1.1.nupkg`.\n\nIt will then push it to either Gitlab or Github depending on the host using the credentials in `nuget.config`.\n\nIt is equivalent to the 2 dotnet commands:\n\n```bash\ndotnet pack --configuration Release\ndotnet nuget push "bin/Release/clr.test.check.1.1.1.nupkg" --source "github"\n```\n\n**Note**: for a Clojure project, you can let the default option for the packing. There is no need to build in theory as we already have our dlls ready in our `/build` folder. The `dotnet build` will just create a unique dll with the name of your library that you can just ignore.\n\n## Download nuget Packages\n\nUsing package references is the new way of doing this but it does not work with Unity.\n\n### Import nuget packages to a regular C# project\n\nThe new way of importing the nuget packages is to use the `PackageReference` tag directly in the `.csproj` file such as:\n\n```bash\n\x3cPackageReference Include\x3d"Sitecore.Kernel" Version\x3d"12.0.*" /\x3e\n```\n\nBut this method only works if you are using the `.csproj` file which we don’t use in Unity as we use the `manifest.json`.\n\n## Import nuget packages to a Unity project\n\nUnity uses a json file in `Packages/manifest.json` to download deps. However it does not work for nuget packages.\n\nThere is no `.csproj` at the root so we cannot use the method above, and all the other underlying `csproj` are generated by Unity so we cannot change them.\n\nThe only choice we have is to use the old way of importing the nuget packages which is to use a `packages.config` and then use the command `nuget restore` to fetch the packages last versions.\n\nSo we need to add 2 config files in our root of our Unity project:\n\n- `nuget.config` : github/gitlab credentials\n- `packages.config` : packages name and their version/target\n\n### nuget.config\n\nIn order to fetch all the packages at once using `nuget restore`, we need to add locally the `nuget.config` with the different sources and credentials.\n\nSo to restore our GitHub and GitLab packages from our example, we use the following `nuget.restore`:\n\n```xml\n\x3c?xml version\x3d"1.0" encoding\x3d"utf-8"?\x3e\n\x3cconfiguration\x3e\n \x3cconfig\x3e\n\t \x3cadd key\x3d"repositoryPath" value\x3d"Assets/ClojureLibs" /\x3e\n\t \x3c/config\x3e\n \x3cpackageSources\x3e\n \x3cclear /\x3e\n \x3cadd key\x3d"gitlab" value\x3d"https://sub.domain.sg/api/v4/projects/777/packages/nuget/index.json" /\x3e\n \x3cadd key\x3d"github" value\x3d"https://nuget.pkg.github.com/skydread1/index.json" /\x3e\n \x3c/packageSources\x3e\n \x3cpackageSourceCredentials\x3e\n \x3cgitlab\x3e\n \x3cadd key\x3d"Username" value\x3d"deploy-token-name" /\x3e\n \x3cadd key\x3d"ClearTextPassword" value\x3d"deploy-token-value" /\x3e\n \x3c/gitlab\x3e\n \x3cgithub\x3e\n \x3cadd key\x3d"Username" value\x3d"skydread1" /\x3e\n \x3cadd key\x3d"ClearTextPassword" value\x3d"PAT" /\x3e\n \x3c/github\x3e\n \x3c/packageSourceCredentials\x3e\n\x3c/configuration\x3e\n```\n\nThe `repositoryPath` allows us to get our packages in a specific directory.\nIn our case, we put it in `Assets/ClojureLibs` (it needs to be in the `Asset` dir anywhere)\n\n### packages.config\n\nTo tell Unity which packages to import while running `nuget restore`, we need to provide the `packages.config`. 
Here is the config in our example:\n\n```bash\n\x3c?xml version\x3d"1.0" encoding\x3d"utf-8"?\x3e\n\x3cpackages\x3e\n \x3cpackage id\x3d"Magic.Unity" version\x3d"1.0.0" targetFramework\x3d"netstandard2.0" /\x3e\n \x3cpackage id\x3d"my-private-proj" version\x3d"1.0.0" targetFramework\x3d"netstandard2.0" /\x3e\n \x3cpackage id\x3d"clr.test.check" version\x3d"1.1.1" targetFramework\x3d"netstandard2.0" /\x3e\n\x3c/packages\x3e\n```\n\n### Magic.Unity\n\nTo run clojure in Unity, you need [Magic.Unity](https://github.com/nasser/Magic.Unity). It is a the runtime for Clojure compiles with Magic in Unity.\n\nNote the `Magic.Unity` in the `packages.config` above. Magic.Unity has its own nuget package deployed the same way as you would deploy a Clojure library, so you import it along side your nuget packages with your compiles clojure libs.\n\n### nuget restore\n\nOnce you have the github/gitlab credentials ready in `nuget.config` and the packages and their version/target listed in `packages.config`, you can run the command `nuget restore` at the root of the unity project.\n\nIf running `nuget restore` do not fetch the last version, it is because it is using the local cache.\nIn this case you need to force restore using those [commands](https://docs.microsoft.com/en-us/nuget/consume-packages/package-restore#force-restore-from-package-sources).\n\nMost of the time, ignoring the cache is fixing this issue:\n\n```bash\nnuget restore -NoCache\n```\n\nHere is the packages tree of our project for instance:\n\n```bash\n~/workspaces/unity-projects/my-proj:\n.\n├── clr.test.check-legacy.1.1.1\n│   ├── clr.test.check-legacy.1.1.1.nupkg\n│   └── lib\n│   └── netstandard2.0\n│   ├── clojure.test.check.clj.dll\n│   ├── clojure.test.check.clojure_test.assertions.clj.dll\n│   ├── clojure.test.check.clojure_test.clj.dll\n│   ├── clojure.test.check.generators.clj.dll\n│   ├── clojure.test.check.impl.clj.dll\n│   ├── clojure.test.check.random.clj.dll\n│   ├── clojure.test.check.results.clj.dll\n│   └── clojure.test.check.rose_tree.clj.dll\n├── my-private-lib.1.0.0\n│   ├── my-private-lib.1.0.0.nupkg\n│   └── lib\n│   └── netstandard2.0\n│   ├── domain.my_prate_lib.core.clj.dll\n│      └── domain.my_prate_lib.core.utils.clj.dll\n```\n\nFinally, You can add Magic.Unity (runtime for magic inside Unity) in the manifest.json like so:\n\n```json\n{\n "dependencies": {\n\t ...,\n "sr.nas.magic.unity": "https://github.com/nasser/Magic.Unity.git"\n\t}\n}\n```\n\n## Conclusion\n\nOnce you have the proper required config files ready, you can use `Nostrand` to\nBuild your dlls:\n```\nnos dotnet/build\n```\nPack your dlls in a nuget package and push to a remote host:\n```\nnos dotnet/nuget-push\n```\nImport your packages in Unity:\n```\nnuget restore\n```\n\n`Magic.Unity` is the Magic runtime for Unity and is already nuget packaged on its public repo\n', +new S(null,3,5,T,[new S(null,2,5,T,["Magic","https://github.com/nasser/magic"],null),new S(null,2,5,T,["Nostrand","https://github.com/nasser/nostrand"],null),new S(null,2,5,T,["Magic.Unity","https://github.com/nasser/Magic.Unity"],null)],null),"\n",iu,"Pack, Push and Import Clojure to Unity","clojure-in-unity",new n(null,3,[Zo,"/assets/loic-blog-logo.png",Wo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-clj-in-unity"]),ij([Sl,xm,Vn,Lp,Mp,oq,Cr,Ws,st],[new S(null, 4,5,T,["Clojure","System","Component","Deps Injection"],null),new S(null,1,5,T,["2022-04-12"],null),'\n## Context\n\nThe Lasagna stack library 
[fun-map](https://github.com/robertluo/fun-map) by [@robertluo](https://github.com/robertluo) blurs the line between identity, state and function. As a results, it is a very convenient tool to define `system` in your applications by providing an elegant way to perform associative dependency injections.\n\n## Goal\n\nIn this document, I will show you the benefit of `fun-map`, and especially the `life-cycle-map` as dependency injection system.\n\n## Rational\n\n### Managing state\n\nIn any kind of programs, we need to manage the state. In Clojure, we want to keep the mutation parts of our code as isolated and minimum as possible. The different components of our application such as the db connections, queues or servers for instance are mutating the world and sometimes need each other to do so. The talk [Components Just Enough Structure](https://www.youtube.com/watch?v\x3d13cmHf_kt-Q) by Stuart Sierra explains this dependency injection problem very well and provides a Clojure solution to this problem with the library [component](https://github.com/stuartsierra/component).\n\n[fun-map](https://github.com/robertluo/fun-map) is another way of dealing with inter-dependent components. In order to understand why `fun-map` is very convenient, it is interesting to look at other existing solutions first.\n\n### Component\n\nLet’s first have a look at existing solution to deal with life cycle management of components in Clojure, especially the Component library which is a very good library to provide a way to define systems.\n\nIn the Clojure word, we have stateful components (atom, channel etc) and we don’t want it to be scattered in our code without any clear way to link them and also know the order of which to start these external resources. \n\nThe `component` of the library [component](https://github.com/stuartsierra/component) is just a record that implements a `Lifecycle` protocol to properly start and stop the component. As a developer, you just implement the `start` and `stop` methods of the protocol for each of your components (DB, server or even domain model).\n\nA DB component could look like this for instance\n\n```clojure\n(defrecord Database [host port connection]\n component/Lifecycle\n (start [component]\n (let [conn (connect-to-database host port)]\n (assoc component :connection conn)))\n (stop [component]\n (.close connection)\n (assoc component :connection nil)))\n```\n\nAll these components are then combined together in a `system` map that just bounds a keyword to each component. A system is a component that has its own start/stop implementation that is responsible to start all components in dependency order and shut them down in reverse order.\n\nIf a component has dependencies on other components, they are then associated to the system and started first. Since each component returns another state of the system; after all components are started, their return values are assoc back to the system.\n\nHere is an example of a system with 3 components. The `app` components depends on the `db` and `scheduler` components so they will be started first:\n\n```clojure\n(defn system [config-options]\n (let [{:keys [host port]} config-options]\n (component/system-map\n :db (new-database host port)\n :scheduler (new-scheduler)\n :app (component/using\n (example-component config-options)\n {:database :db\n :scheduler :scheduler}))))\n```\n\nSo, in the above example, `db` and `scheduler` have been injected to `app`. 
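For completeness, here is a minimal sketch (reusing the `system` constructor from the snippet above; the host/port values and the `component` alias are placeholders, not code from the original article) of how such a system is started and stopped with the Component library:

```clojure
(require '[com.stuartsierra.component :as component])

;; `system` (and the `new-database` / `new-scheduler` / `example-component`
;; constructors it relies on) are the illustrative ones defined above.
(def running-system
  ;; :db and :scheduler are started before :app, following the declared dependencies
  (component/start (system {:host "localhost" :port 5432})))

;; components are stopped in reverse dependency order
(component/stop running-system)
```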
Stuart Sierra mentioned that contrary to `constructor` injections and `setter` injections OOP often use, we could refer this component injections (immutable map) as `associative` injections.\n\nThis is very convenient way to adapt a system to other different situations such as testing for instance. You could just assoc to an in-memory DB and a simplistic schedular in a test-system to run some tests:\n\n```clojure\n(defn test-system\n\t[...]\n\t(assoc (system some-config)\n\t\t:db (test-db)\n\t\t:scheduler (test-scheduler)))\n\n;; then we can call (start test-system) to start all components in deps order.\n```\n\nThus, you can isolate what you want to test and even run tests in parallel. So, it is more powerful than `with-redefs` and `binding` because it is not limited by time. Your tests could replace a big portion of your logic quite easily instead of individual vars allowing us to decouple the tests from the rest of the code.\n\nFinally, we do not want to pass the whole system to every function in all namespaces. Instead, the components library allows you to specify just the component.\n\n#### Limitations\n\nHowever, there are some limitations to this design, the main ones being:\n\n- `stuartsierra/component` is a whole app buy-in. Your entire app needs to follow this design to get all the benefits from it.\n- It is not easy to visually inspect the whole system in the REPL\n- cannot start just a part of the system\n\n#### Other approaches\n\nOther libraries were created as replacement of component such as [mount](https://github.com/tolitius/mount) and [integrant](https://github.com/weavejester/integrant).\n\n- Mount highlights their differences with Component in [here](https://github.com/tolitius/mount/blob/master/doc/differences-from-component.md#differences-from-component).\n- Integrant highlights their differences with Component in [here](https://github.com/weavejester/integrant/blob/master/README.md#rationale).\n\n## Fun-map\n\n[fun-map](https://github.com/robertluo/fun-map) is yet another replacement of [component](https://github.com/stuartsierra/component), but it does more than just providing state management.\n\nThe very first goal of `fun-map` is to blur the line between identity, state and function, but in a good way. `fun-map` combines the idea of [lazy-map](https://github.com/originrose/lazy-map) and [plumbing](https://github.com/plumatic/plumbing) to allow lazy access to map values regardless of the types or when these values are accessed. 
\n\n### Wrappers\n\nIn order to make the map’s values accessible on demand regardless of the type (delay, future, atom etc), map’s value arguments are wrapped to encapsulate the way the underlying values are accessed and return the values as if they were just data in the first place.\n\nFor instance:\n\n```clojure\n(def m (fun-map {:numbers (delay [3 4])}))\n\nm\n;\x3d\x3e {:numbers [3 4]}\n\n(apply * (:numbers m))\n;\x3d\x3e 12\n\n;; the delay will be evaluated just once\n```\n\nYou can see that the user of the map is not impacted by the `delay` and only see the deref value as if it were just a vector in the first place.\n\n#### Associative dependency injections\n\nSimilar to what we discussed regarding how the [component](https://github.com/stuartsierra/component) library assoc dependencies in order, fun-map as a wrapper macro `fk` to use other `:keys` as arguments of their function.\n\nLet’s have a look at an example of `fun-map`:\n\n```clojure\n(def m (fun-map {:numbers [3 4]\n :cnt (fw {:keys [numbers]}\n (count numbers))\n :average (fw {:keys [numbers cnt]}\n (/ (reduce + 0 numbers) cnt))}))\n```\n\nIn the fun-map above, you can see that the key `:cnt` takes for argument the value of the key `:numbers`. The key `:average` takes for arguments the values of the key `:numbers` and `:cnt`.\n\nCalling the `:average` key will first call the keys it depends on, meaning `:cnt` and `:number` then call the `:average` and returns the results:\n\n```clojure\n(:average m)\n;\x3d\x3e 7/2\n```\n\nWe recognize the same dependency injections process highlighted in the Component section.\n\nFurthermore, fun-map provides a convenient wrapper `fnk` macro to destructure directly the keys we want to focus on:\n\n```clojure\n(def m (fun-map {:numbers [3 4]\n :cnt (fnk [numbers]\n (count numbers))\n :average (fnk [numbers cnt]\n (/ (reduce + 0 numbers) cnt))}))\n```\n\nAs explained above, we could add some more diverse values, it wouldn’t be perceived by the user of the map:\n\n```clojure\n (def m (fun-map {:numbers (delay [3 4])\n :cnt (fnk [numbers]\n (count numbers))\n :multiply (fnk [numbers]\n (atom (apply * numbers)))\n :average (fnk [numbers cnt]\n (/ (reduce + 0 numbers) cnt))}))\n\n(:multiply m)\n;\x3d\x3e 12\n\nm\n;\x3d\x3e {:numbers [3 4] :cnt 2 :multiply 12 :average 7/2}\n\n```\n\n### System\n\n#### Life Cycle Map\n\nWrappers take care of getting other keys’s values (with eventual options we did not talk about so far). However, to get the life cycle we describe in the Component library section, we still need a way to\n\n- start each underlying values (components) in dependency order (other keys)\n- close each underlying values in reverse order of their dependencies\n\nfun-map provides a `life-cycle-map` that allows us to specify the action to perform when the component is getting started/closed via the `closeable`.\n\n- `touch` start the system, meaning it injects all the dependencies in order. the first argument of `closeable` (eventually deref in case it is a delay or atom etc) is returned as value of the key.\n- `halt!` close the system, meaning it executes the second argument of `closeable` which is a function taking no param. 
It does so in reverse order of the dependencies\n\nHere is an example:\n\n```clojure\n(def system\n (life-cycle-map ;; to support the closeable feature\n {:a (fnk []\n (closeable\n 100 ;; 1) returned at touch\n #(println "a closed") ;; 4) evaluated at halt!\n ))\n :b (fnk [a]\n (closeable\n (inc a) ;; 2) returned at touch\n #(println "b closed") ;; 3) evaluated at halt!\n ))}))\n\n(touch system1)\n;\x3d\x3e {:a 100, :b 101}\n\n(halt! system1)\n;\x3d\x3e b closed\n; a closed\n; nil\n```\n\n`closeable` takes 2 params:\n- value returned when we call the key of the fun-map.\n- a no-arg function evaluated in reverse order of dependencies.\n\n#### Testing\n\nSame as for Component, you can easily dissoc/assoc/merge keys in your system for testing purposes. You need to be sure to build your system before `touch`.\n\n```clojure\n(def test-system\n (assoc system :a (fnk []\n (closeable\n 200\n #(println "a closed v2")))))\n\n(touch test-system)\n;\x3d\x3e {:a 200, :b 201}\n\n(halt! test-system)\n;\x3d\x3e b closed\n; a closed v2\n; nil\n```\n\nfun-map also support other features such as function call tracing, value caching or lookup for instance. More info in the readme.\n\n## Fun-Map applied to flybot.sg\n\nTo see Fun Map in action, refer to the doc [Fun-Map applied to flybot.sg](https://www.loicblanchard.me/blog/fun-map-applied-to-flybot).\n', -"\n",ju,"Fun-Map: a solution to deps injection in Clojure","fun-map",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-fun-map"]),ij([Sl,xm,ym,Wn,Ro,Cp,Mp,Np,pq,Dr,Zs,tt],[new S(null,4,5,T,["ClojureScript","Figwheel","Re-Frame","Reagent React Native"],null),"Flybot Pte Ltd",new S(null,2,5,T,["2023-02-13","2023-08-08"],null),"\n## Rational\n\nThe goal is to have a mobile app targeting both iOS and Android, written in ClojureScript, which can reuse most of our web frontend logic.\n\nTo do so, I used React Native for the following reasons:\n\n- Integrate very well with [bhauman/figwheel-main](https://github.com/bhauman/figwheel-main) and [day8/re-frame](https://github.com/day8/re-frame)\n- Target both iOS and Android\n- Does not necessitate too much configuration to get it running\n- React Native has an overall good documentation\n\n## Repo\n\nYou can have a look at the code on my [GitHub repo](https://github.com/skydread1/flybot.sg)\n\nWe use a mono repo structure where the `server` (clj files), and `client` (cljs files) reside alongside each others.\nA `common` (cljc files) top folder is also used for data validation that applies for both server and client.\n\nWe actually have 2 clients: web and mobile.\nSo the mobile app frontend resides in the same repo as the web frontend and the 2 share most of the re-frame events.\n\nThe mono-repo structure is as followed:\n\n```clojure\n├── client\n│   ├── common\n│   │   ├── src\n│   │   │   └── flybot.client.common\n│   │   └── test\n│   │   └── flybot.client.common\n│   ├── mobile\n│   │   ├── src\n│   │   │   └── flybot.client.mobile\n│   │   └── test\n│   │   └── flybot.client.mobile\n│   └── web\n│   ├── src\n│   │   └── flybot.client.web\n│   └── test\n│   └── flybot.client.web\n├── common\n│   ├── src\n│   │   └── flybot.common\n│   └── test\n│   └── flybot.common\n├── server\n│   ├── src\n│   │   └── flybot.server\n│   └── test\n│   └── flybot.server\n```\n\nYou can read more about it in my article: [Clojure Mono Repo example : server + 2 clients](../blog/clojure-mono-repo).\n\n## Stack\n\nThe backend is the same as for the web 
app.\n\nThe `mobile` frontend is very similar to the `web` frontend.\n\nThe main differences with the web frontend are the following:\n- markup: RN using native components, we cannot reuse the hiccup markup used in the web frontend (hence the use of `reagent-react-native`)\n- navigation: Tab and Stack Navigators for mobile instead of reitit.frontend.easy for web\n- markdown support: convert markdown to native components instead of react components for the web\n- cookie management: I manually store the cookie in AsyncStorage and manually pass it to the request\n\nFor the rest, most re-frame events remain the same between the 2 UIs, hence most of the re-frame logic is done in the `client.common` namespace.\n\n## Hot reloading\n\n[figwheel-main](https://github.com/bhauman/figwheel-main) also works on mobile after a few setup steps required to get a react native app ready. To install the different native libraries, I just use npm. Once again, Figwheel is really convenient to use and provide clear configurations to get the hot reloading working.\n\n## CI/CD\n\nI tested the app on iOS only via the iOS Simulator in Xcode locally.\nThe app has not been deployed on any Store yet.\n\n## Learn more\n\nFeel free to visit [flybot.sg](https://www.flybot.sg/) and especially the [blog](https://www.flybot.sg/blog).\n", -new S(null,2,5,T,[new S(null,2,5,T,["Reagent React Native Mobile App","../blog/reagent-native-app"],null),new S(null,2,5,T,["Clojure Mono Repo example : server + 2 clients","../blog/clojure-mono-repo"],null)],null),new S(null,1,5,T,[new S(null,2,5,T,["Flybot","https://github.com/skydread1/flybot.sg"],null)],null),"\nAt Flybot, I developed a mobile app using `ClojureScript` with `React Native` using `Figwheel` and `Reagent React Native` (to interact with reagent in ClojureScript).\n\nThe code for the mobile app resides in the same repo as the server and the web client of the [flybot.sg](https://www.flybot.sg/) website.\n\nThe goal of the mobile app was to allow employees to write blog posts using an app instead of the web UI and also to evaluate if our ClojureScript frontend stack could reuse most of the `re-frame` logic in both the mobile and web UIs (which it does).\n", -ws,"Flybot Mobile App","clojure-mobile-app",new n(null,3,[$o,"https://www.flybot.sg/assets/flybot-logo.png",Xo,"https://www.flybot.sg/assets/flybot-logo.png",Tl,"Flybot Logo"],null),"flybot-mobile-app"]),ij([Sl,xm,ym,Wn,Ro,Cp,Mp,Np,pq,Dr,Zs,tt],[new S(null,8,5,T,"Clojure C# JVM CLR Compiler Interop NuGet Unity".split(" "),null),"Flybot Pte Ltd",new S(null,2,5,T,["2021-02-01","2022-12-09"],null),"\n## Rational\n\nMore acting as a devops this time, I worked on integrating the Magic Compiler and its tooling to Flybot's development workflow.\n\n`Magic` is a bootstrapped compiler written in Clojure that takes Clojure code as input and produces dotnet assemblies (.dll) as output. The dlls produced by Magic can be run in the Game engine `Unity` which Flybot uses for their card game UIs. 
The goal was to be able to compile our backend Clojure APIs to dlls so we can used that logic in Unity directly.\n\nThere are 4 main open-source libraries involved:\n- [nasser/magic](https://github.com/nasser/magic): clojure-\x3edotnet compiler written in Clojure\n- [nasser/nostrand](https://github.com/nasser/nostrand): dependencies manager for the magic compiler\n- [nasser/Magic.Unity](https://github.com/nasser/Magic.Unity): runtime for Unity\n- [magic-clojure/magic](https://github.com/magic-clojure/magic): pipeline to build magic and update tools\n\nWorking closely with the author of the Magic compiler [Ramsey Nasser](https://github.com/nasser), I helped improving the tooling around the Magic compiler so it integrates well with our workflow Clojure Backend/Unity Frontend.\n\nMy contributions were to:\n- Fix some high level issues on the compiler that were preventing us from compiling our Clojure projects\n- Report compiling issues and performance issues to Ramsey Nasser so he can improve the compiler.\n- Improve the tooling around the Magic compiler to make it easier for our developers to compile/test/package Clojure libraries in Dotnet\n- Successfully port our Clojure projects to Unity\n- Improve the way a project and its dependencies are compiled the dlls\n- Make it easy to package the newly compiled dlls in NuGet packages\n- Allow developers to deploy these packages in online GitHub repos (public or private)\n- Package the dlls in a way it is easy to import them into Unity projects\n\n## Nostrand\n\n### Why Nostrand\n\n`nostrand` is for magic what [tools.deps](https://github.com/clojure/tools.deps.alpha) or [leiningen](https://github.com/technomancy/leiningen) are for a regular Clojure project. Magic has its own dependency manager and does not use `tools.deps` or `len` because it was implemented before these deps manager came out!\n\n### Private Gitlab support\n\nSince we wanted to compile private gitlab projects with deps on both private gitlab repos and public repos, I added the Gitlab support and private repo supports using the Github/GitLab tokens.\n\n### Nuget pack and push\n\nAdding a `.csproj` that refers to a `.nuspec` to the Clojure repo at the root allows me to pack and deploy the generated dlls to a nuget package that will be store on the Remote git repo. For private repositories, a `nuget.config` can be added to specify the `PAT` token for GitHub or `Deploy token` for Gitlab. 
The package is then added to GitHub/GitLab Package Registry.\n\n### Example of a Clojure library ported to Magic\n\nAn example of a Clojure library that has been ported to Magic is [skydread1/clr.test.check](https://github.com/skydread1/clr.test.check/tree/magic), a fork of clojure/clr.test.check.\n\nIf you have a look at the [dotnet.clj](https://github.com/skydread1/clr.test.check/blob/magic/dotnet.clj) namespace, you can see the different convenient function that can be called by `nostrand` with the command `nos`:\n\n- compile the clojure codebase to dotnet assemblies:\n```\nnos dotnet/build\n```\n- run all the clojure tests using the CLR:\n```\nnos dotnet/run-tests\n```\n- pack and push NuGet packages to the GitHub/GitLab Package Registries:\n```\nnos dotnet/nuget-push\n```\n\nSo it only 3 commands, a developer can compile a Clojure project and its deps to dotnet assemblies that can be run in Unity, test that all the tests are passing in the CLR and push a NuGet Package in a remote public or private repository.\n\n## Magic Unity\n\nThe goal of Magic.Unity is to provide a Clojure runtime in Unity.\n\n[Magic.Unity](https://github.com/nasser/Magic.Unity) used to have a compilation UI and a runtime. However, there was a mismatch between the Magic dlls of Nostrand and Magic.Unity. Also the compilation UI was not easy to use and we wanted to use Nostrand directly. The compilation has since been removed and Magic.Unity is now only a runtime that can use the last magic dlls.\n\nAlso, I added the `.nuspec` and `dotnet.clj` to the repo so we can easily package it with NuGet and push it to the repo. Therefore, it can be imported in Unity the same way we import our Clojure libraries.\n\n## Magic compiler\n\n### What is the Magic Compiler\n\nMagic is a bootstrapped compiler written in Clojure that takes Clojure code as input and produces dotnet assemblies (.dll) as output.\n\nCompiler Bootstrapping is the technique for producing a self-compiling compiler that is written in the same language it intends to compile. In our case, MAGIC is a **Clojure** compiler that compiles **Clojure** code to .**NET** assemblies (.dll and .exe files).\n\nIt means we need the old dlls of MAGIC to generate the new dlls of the MAGIC compiler. We repeat this process until the compiler is good enough. \n\nThe very first magic dlls were generated with the [clojure/clojure-clr](https://github.com/clojure/clojure-clr) project which is also a Clojure compiler to CLR but written in **C#** with limitations over the dlls generated (the problem MAGIC intends to solve).\n\n### Why the Magic Compiler\n\nThere is already a clojure-\x3eclr compiler [clojure/clojure-clr](https://github.com/clojure/clojure-clr). However, clojure-clr uses a technology called the DLR (dynamic language runtime) to optimize dynamic call sites but it emits self modifying code which make the assemblies not usable on mobile devices (IL2CPP in Unity). So we needed a way to have a compiler that emit assemblies that can target both Desktop and mobile (IL2CPP), hence the Magic compiler.\n\n### Documentations and Bug reports\n\nI do not have the knowledge for such low level compiler implementation, so I did not fix any issues on the compiler myself. However, I could help Ramsey Nasser on improving the documentation for both user and potential contributors and fix some high level small issues. 
I was also reporting all the bugs and creating the issues on the different related repos.\n\n### GitHub Action\n\nI added a GitHub action to perform the bootstrapping at every push to automate the process and make the latest dlls available in the GitHub action artifact to anybody.\n\n## Importing the nuget packages to our frontend Unity projects\n\nSince all the Clojure libraries and the Magic.Unity were packaged via nugget and pushed to the GitHub/GitLab repo, we can use a `packages.config` to list our packages and use the command `nuget restore` to import them. Same as to push packages, a `nuget.config` can be added with the credentials.\n\n## Learn more\n\nYou can learn more about `Magic` in my blog articles below.\n", +"\n",iu,"Fun-Map: a solution to deps injection in Clojure","fun-map",new n(null,3,[Zo,"/assets/loic-blog-logo.png",Wo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-fun-map"]),ij([Sl,wm,xm,Vn,Qo,Bp,Lp,Mp,oq,Cr,Ws,st],[new S(null,4,5,T,["ClojureScript","Figwheel","Re-Frame","Reagent React Native"],null),"Flybot Pte Ltd",new S(null,2,5,T,["2023-02-13","2023-08-08"],null),"\n## Rational\n\nThe goal is to have a mobile app targeting both iOS and Android, written in **ClojureScript**, which can reuse most of our web frontend logic.\n\nTo do so, I used React Native for the following reasons:\n\n- Integrate very well with [bhauman/figwheel-main](https://github.com/bhauman/figwheel-main) and [day8/re-frame](https://github.com/day8/re-frame)\n- Target both iOS and Android\n- Does not necessitate too much configuration to get it running\n- React Native has an overall good documentation\n\n## Repo\n\nYou can have a look at the code on my [GitHub repo](https://github.com/skydread1/flybot.sg)\n\nWe use a mono repo structure where the `server` (clj files), and `client` (cljs files) reside alongside each others.\nA `common` (cljc files) top folder is also used for data validation that applies for both server and client.\n\nWe actually have 2 clients: web and mobile.\nSo the mobile app frontend resides in the same repo as the web frontend and the 2 share most of the re-frame events.\n\nThe mono-repo structure is as followed:\n\n```\n├── client\n│   ├── common\n│   │   ├── src\n│   │   │   └── flybot.client.common\n│   │   └── test\n│   │   └── flybot.client.common\n│   ├── mobile\n│   │   ├── src\n│   │   │   └── flybot.client.mobile\n│   │   └── test\n│   │   └── flybot.client.mobile\n│   └── web\n│   ├── src\n│   │   └── flybot.client.web\n│   └── test\n│   └── flybot.client.web\n├── common\n│   ├── src\n│   │   └── flybot.common\n│   └── test\n│   └── flybot.common\n├── server\n│   ├── src\n│   │   └── flybot.server\n│   └── test\n│   └── flybot.server\n```\n\nYou can read more about it in my article: [Clojure Mono Repo example : server + 2 clients](../blog/clojure-mono-repo).\n\n## Stack\n\nThe backend is the same as for the web app.\n\nThe `mobile` frontend is very similar to the `web` frontend.\n\nThe main differences with the web frontend are the following:\n- markup: RN using native components, we cannot reuse the hiccup markup used in the web frontend (hence the use of `reagent-react-native`)\n- navigation: Tab and Stack Navigators for mobile instead of reitit.frontend.easy for web\n- markdown support: convert markdown to native components instead of react components for the web\n- cookie management: I manually store the cookie in AsyncStorage and manually pass it to the request\n\nFor the rest, most re-frame events remain the same between the 
2 UIs, hence most of the re-frame logic is done in the `client.common` namespace.\n\n## Hot reloading\n\n[figwheel-main](https://github.com/bhauman/figwheel-main) also works on mobile after a few setup steps required to get a react native app ready. To install the different native libraries, I just use npm. Once again, Figwheel is really convenient to use and provide clear configurations to get the hot reloading working.\n\n## CI/CD\n\nI tested the app on iOS only via the iOS Simulator in Xcode locally.\nThe app has not been deployed on any Store yet.\n\n## Learn more\n\nYou can learn more about how the mobile app was setup in the article linked at the top of the page.\n\nAlso, feel free to visit [flybot.sg](https://www.flybot.sg/) and especially the [blog](https://www.flybot.sg/blog).\n", +new S(null,2,5,T,[new S(null,2,5,T,["Reagent React Native Mobile App","../blog/reagent-native-app"],null),new S(null,2,5,T,["Clojure Mono Repo example : server + 2 clients","../blog/clojure-mono-repo"],null)],null),new S(null,1,5,T,[new S(null,2,5,T,["Flybot","https://github.com/skydread1/flybot.sg"],null)],null),"\nAt Flybot, I developed a mobile app using **ClojureScript** with `React Native` using `Figwheel` and `Reagent React Native` (to interact with reagent in ClojureScript).\n\nThe code for the mobile app resides in the same repo as the server and the web client of the [flybot.sg](https://www.flybot.sg/) website.\n\nThe goal of the mobile app was to allow employees to write blog posts using an app instead of the web UI and also to evaluate if our ClojureScript frontend stack could reuse most of the `re-frame` logic in both the mobile and web UIs.\n", +vs,"Flybot Mobile App","clojure-mobile-app",new n(null,3,[Zo,"https://www.flybot.sg/assets/flybot-logo.png",Wo,"https://www.flybot.sg/assets/flybot-logo.png",Tl,"Flybot Logo"],null),"flybot-mobile-app"]),ij([Sl,wm,xm,Vn,Qo,Bp,Lp,Mp,oq,Cr,Ws,st],[new S(null,8,5,T,"Clojure C# JVM CLR Compiler Interop NuGet Unity".split(" "),null),"Flybot Pte Ltd",new S(null,2,5,T,["2021-02-01","2022-12-09"],null),'\n## Rational\n\nI worked on integrating the Magic Compiler and its tooling to Flybot\'s development workflow.\n\n[Magic](https://github.com/nasser/magic) is an open-source bootstrapped compiler written in Clojure that takes Clojure code as input and produces dotnet assemblies (.dll) as output. The dlls produced by Magic can be run in the Game engine `Unity` which Flybot uses for their card game UIs. 
The goal was to be able to compile our backend Clojure APIs to dlls so we can used that logic in Unity directly.\n\nThere are 4 main open-source libraries involved:\n- [nasser/magic](https://github.com/nasser/magic): clojure-\x3edotnet compiler written in Clojure\n- [nasser/nostrand](https://github.com/nasser/nostrand): dependencies manager for the magic compiler\n- [nasser/Magic.Unity](https://github.com/nasser/Magic.Unity): runtime for Unity\n- [magic-clojure/magic](https://github.com/magic-clojure/magic): pipeline to build magic and update tools\n\nWorking closely with the author of the Magic compiler [Ramsey Nasser](https://github.com/nasser), I helped improving the tooling around the Magic compiler so it integrates well with our workflow Clojure Backend/Unity Frontend.\n\nMy contributions were to:\n- Fix some high level issues on the compiler that were preventing us from compiling our Clojure projects\n- Report compiling issues and performance issues to Ramsey Nasser so he can improve the compiler.\n- Improve the tooling around the Magic compiler to make it easier for our developers to compile/test/package Clojure libraries in Dotnet\n- Successfully port our Clojure projects to Unity\n- Improve the way a project and its dependencies are compiled to dlls\n- Make it easy to package the newly compiled dlls in NuGet packages\n- Allow developers to deploy these packages in online GitHub repos (public or private)\n- Package the dlls in a way that makes it easy to import them into Unity projects\n\n## Nostrand\n\n### Why Nostrand\n\n[Nostrand](https://github.com/nasser/nostrand) is for magic what [tools.deps](https://github.com/clojure/tools.deps.alpha) or [leiningen](https://github.com/technomancy/leiningen) are for a regular Clojure project. Magic has its own dependency manager and does not use `tools.deps` or `len` because it was implemented before these deps manager came out!\n\n### Private Gitlab support\n\nSince we wanted to compile private gitlab projects with deps on both private gitlab repos and public repos, I added the Gitlab support and private repo supports using the Github/GitLab tokens.\n\n### NuGet pack and push\n\nAdding a `.csproj` that refers to a `.nuspec` at the root of a Clojure repo allows me to pack and deploy the generated dlls to a **NuGet package** that will be store on the Remote git repo. For private repositories, a `nuget.config` can be added to specify the `PAT` token for GitHub or `Deploy token` for Gitlab. 
The package is then added to GitHub/GitLab Package Registry.\n\n### Example of a Clojure library ported to Magic\n\nAn example of a Clojure library that has been ported to Magic is [skydread1/clr.test.check](https://github.com/skydread1/clr.test.check/tree/magic), a fork of clojure/clr.test.check.\n\nIf you have a look at the [dotnet.clj](https://github.com/skydread1/clr.test.check/blob/magic/dotnet.clj) namespace, you can see the different convenient function that can be called by `nostrand` with the command `nos`:\n\n- compile the clojure codebase to dotnet assemblies:\n```\nnos dotnet/build\n```\n- run all the clojure tests using the CLR:\n```\nnos dotnet/run-tests\n```\n- pack and push NuGet packages to the GitHub/GitLab Package Registries:\n```\nnos dotnet/nuget-push\n```\n\nSo it only 3 commands, a developer can compile a Clojure project and its deps to dotnet assemblies that can be run in Unity, test that all the tests are passing in the CLR and push a NuGet Package in a remote public or private repository.\n\n## Magic Unity\n\nThe goal of Magic.Unity is to provide a Clojure runtime in Unity.\n\n[Magic.Unity](https://github.com/nasser/Magic.Unity) used to have a compilation UI and a runtime. However, there was a mismatch between the Magic dlls of Nostrand and Magic.Unity. Also the compilation UI was not easy to use and we wanted to use Nostrand directly. The compilation has since been removed and Magic.Unity is now only a runtime that can use the last magic dlls.\n\nFinally, You can add Magic.Unity (runtime for magic inside Unity) in the manifest.json like so:\n\n```json\n{\n "dependencies": {\n\t ...,\n "sr.nas.magic.unity": "https://github.com/nasser/Magic.Unity.git"\n\t}\n}\n\n## Magic compiler\n\n### What is the Magic Compiler\n\nMagic is a bootstrapped compiler written in Clojure that takes Clojure code as input and produces dotnet assemblies (.dll) as output.\n\nCompiler Bootstrapping is the technique for producing a self-compiling compiler that is written in the same language it intends to compile. In our case, MAGIC is a **Clojure** compiler that compiles **Clojure** code to .**NET** assemblies (.dll and .exe files).\n\nIt means we need the old dlls of MAGIC to generate the new dlls of the MAGIC compiler. We repeat this process until the compiler is good enough. \n\nThe very first magic dlls were generated with the [clojure/clojure-clr](https://github.com/clojure/clojure-clr) project which is also a Clojure compiler to CLR but written in **C#** with limitations over the dlls generated (the problem MAGIC intends to solve).\n\n### Why the Magic Compiler\n\nThere is already a clojure-\x3eclr compiler [clojure/clojure-clr](https://github.com/clojure/clojure-clr). However, clojure-clr uses a technology called the DLR (dynamic language runtime) to optimize dynamic call sites but it emits self modifying code which make the assemblies not usable on mobile devices (IL2CPP in Unity). So we needed a way to have a compiler that emit assemblies that can target both Desktop and mobile (IL2CPP), hence the Magic compiler.\n\n### Documentations and Bug reports\n\nI do not have the knowledge for such low level compiler implementation, so I did not fix any issues on the compiler myself. However, I could help Ramsey Nasser improving the documentation for both user and potential contributors and fix some high level small issues. 
I was also reporting all the bugs and creating the issues on the different related repos.\n\n### GitHub Action\n\nI added a GitHub action to perform the bootstrapping at every push to automate the process and make the latest dlls available in the GitHub action artifact to anybody.\n\n## Importing the nuget packages to our frontend Unity projects\n\nSince all the Clojure libraries and the Magic.Unity were packaged via nugget and pushed to the GitHub/GitLab repo, we can use a `packages.config` to list our packages and use the command `nuget restore` to import them. Same as to push packages, a `nuget.config` can be added with the credentials.\n\n## Learn more\n\nYou can learn more about `Magic` in my blog articles listed at the top of the page.\n', new S(null,2,5,T,[new S(null,2,5,T,["Port your Clojure lib to the CLR with MAGIC","../blog/port-clj-lib-to-clr"],null),new S(null,2,5,T,["Pack, Push and Import Clojure to Unity","../blog/clojure-in-unity"],null)],null),new S(null,4,5,T,[new S(null,2,5,T,["Magic","https://github.com/nasser/magic"],null),new S(null,2,5,T,["Nostrand","https://github.com/nasser/nostrand"],null),new S(null,2,5,T,["Magic.Unity","https://github.com/nasser/Magic.Unity"],null),new S(null,2,5,T,["magic-pipeline","https://github.com/magic-clojure/magic"], -null)],null),"\nMore acting as a devops this time, I worked on integrating the Magic Compiler and its tooling to Flybot's development workflow.\n\n`Magic` is a bootstrapped compiler written in Clojure that takes Clojure code as input and produces dotnet assemblies (.dll) as output. The dlls produced by Magic can be run in the Game engine `Unity` which Flybot uses for their card game UIs. The goal was to be able to compile our backend Clojure APIs to dlls so we can used that logic in Unity directly.\n\nWorking closely with the author of the Magic compiler [Ramsey Nasser](https://github.com/nasser), I helped improving the tooling around the Magic compiler so it integrates well with our workflow Clojure Backend/Unity Frontend. I notably simplified the way the a clojure project and its dependencies are compiled to dlls, packed to NuGet, deployed to online repos and finally imported to Unity.\n", -ws,"Magic Compiler and Nostrand","magic-clojure-compiler-to-clr",new n(null,3,[$o,"/assets/magic-book.jpg",Xo,"/assets/magic-book.jpg",Tl,"Data Light Wallpaper"],null),"magic-nostrand"]),ij([Sl,xm,ym,Wn,Mp,Np,pq,Dr,Zs,tt],[new S(null,5,5,T,["Clojure","Kafka","Datomic","Kubernetes","AWS EKS"],null),"Flybot Pte Ltd",new S(null,1,5,T,["2023-08-08"],null),"\n## Rational\nI am currently working on a challenge recommender that will suggest personal challenges and their potential rewards to [Golden Island](https://www.80166.com/)'s players. `Golden Island` is a card/board game platforms that offers a dozen of games including Pǎo Dé Kuài (跑得快), Dou dizhu (鬥地主), Mahjong etc.\n\nSome game operators are already creating props (challenges) that are personalized in the sense that they apply only to a subset of users (game, levels, balance etc) in response to a subset of events (game, bet size etc).\n\nThe recommender is a Clojure application deployed in a POD in `AWS EKS` that consume events from `kafka` topics and produces personalized challenge recommendations to a dedicated kafka topic. 
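The actual recommender is closed-source, so the following is only a minimal sketch of the consume → recommend → produce shape described here. It assumes plain Java interop with the official Kafka client; the topic names, broker address and the `recommend` function are hypothetical placeholders:

```clojure
(ns sketch.recommender
  (:import (java.time Duration)
           (org.apache.kafka.clients.consumer KafkaConsumer)
           (org.apache.kafka.clients.producer KafkaProducer ProducerRecord)))

(defn run-loop
  "Consume player events, compute recommendations, push them to the output topic.
  `recommend` is a hypothetical pure function: event value -> seq of recommendations."
  [recommend]
  (let [consumer (KafkaConsumer.
                  {"bootstrap.servers"   "kafka:9092"
                   "group.id"            "challenge-recommender"
                   "key.deserializer"    "org.apache.kafka.common.serialization.StringDeserializer"
                   "value.deserializer"  "org.apache.kafka.common.serialization.StringDeserializer"})
        producer (KafkaProducer.
                  {"bootstrap.servers" "kafka:9092"
                   "key.serializer"    "org.apache.kafka.common.serialization.StringSerializer"
                   "value.serializer"  "org.apache.kafka.common.serialization.StringSerializer"})]
    (.subscribe consumer ["player-events"])          ;; illustrative input topic
    (while true
      (doseq [record (.poll consumer (Duration/ofMillis 500))
              reco   (recommend (.value record))]
        ;; illustrative output topic for the personalized challenges
        (.send producer (ProducerRecord. "challenge-recommendations" reco))))))
```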
It uses `Datomic` as storage solution within the EKS cluster.\n\nThe end goal is to ease the job of game operators by taking care of `when` to recommend challenges and to `whom`, so the game operators can focus on the `what` the challenges could be and have constant performance feedback.\n\nThe repositories are closed-source because private to Flybot Pte. Ltd.\n\n## More\n\nMore details will be coming soon...\n", -"\nI am currently working on a challenge recommender that will suggest personal challenges and their potential rewards to [Golden Island](https://www.80166.com/)'s players. `Golden Island` is a card/board game platforms that offers a dozen of games including Pǎo Dé Kuài (跑得快), Dou dizhu (鬥地主), Mahjong etc.\n\nSome game operators are already creating props (challenges) that are personalized in the sense that they apply only to a subset of users (game, levels, balance etc) in response to a subset of events (game, bet size etc).\n\nThe recommender is a Clojure application deployed in a POD in `AWS EKS` that consume events from `kafka` topics and produces personalized challenge recommendations to a dedicated kafka topic. It uses `Datomic` as storage solution within the EKS cluster.\n\nThe end goal is to ease the job of game operators by taking care of `when` to recommend challenges and to `whom`, so the game operators can focus on the `what` the challenges could be and have constant performance feedback.\n", -ws,"Challenge Recommender","game-challenge-recommender",new n(null,3,[$o,"https://www.flybot.sg/assets/flybot-logo.png",Xo,"https://www.flybot.sg/assets/flybot-logo.png",Tl,"Flybot Logo"],null),"props-recommender"]),ij([Sl,ym,Wn,Cp,Mp,Np,pq,Dr,Zs,tt],[new S(null,6,5,T,"ClojureScript Figwheel Re-Frame Reagent Lasagna-pull Reitit".split(" "),null),new S(null,2,5,T,["2023-04-07","2024-04-21"],null),'\n## Stack\n\nThis website is a Single Page Application written in ClojureScript.\n\nI have the following stack:\n- [figwheel-main](https://figwheel.org/) for live code reloading\n- [reagent](https://github.com/reagent-project/reagent) for react components\n- [hiccup](https://github.com/weavejester/hiccup) for DOM representation\n- [reitit](https://github.com/metosin/reitit) for routing\n- [malli](https://github.com/metosin/malli) to validate some configs at the top of markdown files\n- [markdown-to-hiccup](https://github.com/mpcarolin/markdown-to-hiccup) to allow me to write the page content in markdown.\n- [re-frame](https://github.com/day8/re-frame) a framework for building user interfaces, leveraging [reagent](https://github.com/reagent-project/reagent)\n- [lasagna-pull](https://github.com/flybot-sg/lasagna-pull) to precisely select from deep data structure\n\n## Features\n\nThe website contains a list of the projects I worked on as a Software Engineer and the stack I used. It also contains a page with my resume.\n\nThe content is written in markdown and compiled to hiccup.\n\nThe website is fully responsive and support light/dark mode.\n\n## Content\n\n### Organization\n\nEach post has its own markdown files in the folder of the page it belongs to.\n\n```\n.\n├── about\n│   └── aboutme.md\n└── portfolio\n ├── blog_django.md\n ├── flybot_card_games.md\n ├── flybot_mobile_app.md\n ├── flybot_website.md\n ├── magic_nostrand.md\n └── portfolio_website.md\n```\n\n### Vignette and Post\n\nIn the `/portfolio` route, I showcase all the projects via what I called `vignette`. 
They contain a short description of the post instead of the full content.\n\nWhen the user clicks on a vignette, he goes to a new route with the full post content.\n\n### Config Clojure map\n\nA markdown file of a post is divided into 3 parts:\n- above the demarcation `+ + +` is a clojure map of configs (title, page, order etc.)\n- below the first demarcation `+ + +` is the post short description as markdown.\n- below the second demarcation `+ + +` is the post content as markdown.\n\nHere is an example of clojure config map for a post:\n\n```\n#:post{:id "clojure-full-stack-webapp"\n :page :portfolio\n :employer "Flybot Pte Ltd"\n :date "2022"\n :repos [["Flybot" "https://github.com/skydread1/flybot.sg"]]\n :articles [["How to deploy full stack Clojure website to AWS" "../blog/deploy-clj-app-to-aws"]\n ["Lasagna-pull Pattern applied to flybot.sg backend" "../blog/lasagna-pull-applied-to-flybot"]\n ["Clojure Mono Repo example : server + 2 clients" "../blog/clojure-mono-repo"]]\n :title "Flybot Website"\n :tags ["Clojure" "ClojureScript" "Figwheel" "Re-Frame" "Malli" "Lasagna-pull" "Fun-map" "Datalevin" "Reitit"]\n :css-class "flybot-website"\n :image #:image{:src "https://www.flybot.sg/assets/flybot-logo.png"\n :src-dark "https://www.flybot.sg/assets/flybot-logo.png"\n :alt "Flybot Logo"}}\n```\n\n## Compile\n\nAt CLJ compile time, the following steps happen:\n1. Read all markdown files\n2. Validate the post configs against a `Malli` schema\n3. Assoc the post markdown content to the configs \n4. A macro stores a vector of the posts to be loaded in the re-frame DB\n\nAt CLJS compile time, the following steps happen:\n1. A re-frame event initializes the re-frame DB, loading all the posts from the clojure macro and the theme from local storage.\n2. The `reitit` router is created\n3. The post markdowns are converted to hiccup via `markdown-to-hiccup`. \n\n## Build\n\n### Dev\n\nI use clj/cljs REPL of `figwheel` for hot reloading on file save.\n\n### Prod\n\nThe github action is triggered when the code is pushed.\n\nI use [clojure/tools.build](https://github.com/clojure/tools.build) to create tasks related to the build.\n\nIt runs the build.clj task to generate the main.js bundle:\n\n```\nclojure -T:build js-bundle\n```\n\nThis command compiles the cljs to the optimized js bundle that Netlify will use to generate the preview in the PR.\n\n## Continuous Integration\n\nThe Ci does the following:\n- run the clj tests\n- run the cljs tests in headless mode\n- compile the cljs file into the js bundle `main.js` and commit it to the repo.\n\n## Continuous Deployment\n\n**Opening a pull request (PR)** to merge your changes to master, makes `Netlify` create a preview for you to see how the new version of the website would look like.\n\n**Merging to master** automatically publishes the last version of the website via Netlify.\n\n## Hosted with Netlify\n\nI use **Netlify** for hosting platform because it is easy to setup and the previews of the new website version on GitHub MR is convenient.\n\n## Learn More\n\nHave a look at the repo [README](https://github.com/skydread1/portfolio/blob/master/README.md) for more information.\n', -new S(null,1,5,T,[new S(null,2,5,T,["Portfolio","https://github.com/skydread1/portfolio"],null)],null),"\nThis portfolio website you are currently visiting is a Single Page Application written in `ClojureScript`.\n\nThe website contains a list of the projects I worked on as a Software Engineer and the stacks I used. 
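The compile step described above (read the markdown files, validate the `#:post{...}` config with Malli, embed the posts for the re-frame DB) could be sketched roughly as follows. The schema, the `+ + +` splitting and the macro name are simplified assumptions, not the actual implementation.

```clojure
(ns portfolio.compile-sketch
  (:require [clojure.edn :as edn]
            [clojure.java.io :as io]
            [clojure.string :as str]
            [malli.core :as m]))

;; Simplified schema, only a few of the config keys shown in the example map.
(def post-schema
  [:map [:post/id :string] [:post/page :keyword] [:post/title :string]])

(defn parse-post
  "Splits a post file into its config map, short description and content."
  [file]
  (let [[config short-desc content] (str/split (slurp file) #"\+ \+ \+")]
    (assoc (edn/read-string config)
           :post/md-content-short short-desc
           :post/md-content content)))

(defmacro load-posts!
  "Runs at CLJ compile time (macro in a .clj file): reads every markdown file
   under `dir`, validates the configs and returns the posts as literal data
   to be loaded into the re-frame DB."
  [dir]
  (let [posts (->> (file-seq (io/file dir))
                   (filter #(str/ends-with? (.getName %) ".md"))
                   (mapv parse-post))]
    (assert (every? #(m/validate post-schema %) posts) "invalid post config")
    posts))
```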
It also contains a page with my resume.\n\nThe markdown content is converted into hiccup (a clojure-friendly markup) and the post/vignette configurations are made in EDN which is validated at compile time with a malli schema.\n\nThe website is fully responsive and support light/dark mode.\n", -ws,"Portfolio Website","portfolio-clojurescript-spa",new n(null,3,[$o,"/assets/loic-logo.png",Xo,"/assets/loic-logo.png",Tl,"Logo referencing Aperture Science"],null),"portfolio"]),ij([Sl,xm,ym,Wn,Ro,Cp,Mp,Np,pq,Dr,Zs,tt],[new S(null,4,5,T,["Clojure","Clojure Spec","Magic Compiler","Interop CLR"],null),"Flybot Pte Ltd",new S(null,2,5,T,["2020-01-06","2021-10-29"],null),'\n## Rational\n\nAt Flybot, I had the opportunity to create popular Asian Card Games APIs in `Clojure`.\n\nI developed the backend of games such as Pǎo Dé Kuài (跑得快) and Big two (锄大地) which are climbing card games.\n\nI also worked on a Library we called MetaGame that allows us to compose several `Pǎo Dé Kuài` or `Big two` games (or a mix of both) in tournaments for instance.\n\nThe repositories are closed-source because private to Flybot Pte. Ltd.\n\n## Immutable data\n\nSince we use Clojure, the game state can be represented as pure edn data leveraging `records`, `protocols` and `datafy`.\n\nThere is no need for any atoms, agent or vars as the new state is just another Clojure pure data structure.\n\nThis allows us to represent the game setup and rules as clojure pure data as well so the game is easy to customize.\n\nFor non-clojure developers, you can imagine that you could represent your entire game state using a simple JSON file. In clojure, we use the EDN format (which has a similar syntax to JSON).\n\n## Data validation and Generation\n\nThe Clojure libraries I used are:\n- [clojure/spec.alpha](https://github.com/clojure/spec.alpha) for the data registry that is used for data validation and generation.\n- [clojure/test.check](https://github.com/clojure/test.check) to create custom generators to overcome the interdependence between the API functions inputs.\n\nUsing the libraries above, I design an integration test suite that can run hundreds for semi-random generated games (can be run in the CI as well) which ensure proper behavior of the API.\n\n## CLR interoperability\n\nIt is more common to see interop with JavaScript for ClojureScript. However, in our case, we want our Clojure codebase to be run in the game engin Unity, so a dotnet environment.\n\nIs is now possible to compile a Clojure project to dotnet assemblies and make it work in Unity using the [nasser/magic](https://github.com/nasser/magic) compiler.\n([clojure-clr](https://github.com/clojure/clojure-clr), which is the default clojure compiler to dotnet cannot work in Unity because it relies on the DLR.)\n\nWe use the reader conditionals in `.cljc` files to handle JVM/CLR interop in our project so we can run and test our Clojure project in both environments.\n\n## Composing games\n\nThe first objective was to have a way to compose several games (such as `big-two` or `PDK`).\n\nWe wanted to be able to play several games up to a certain `score` target or up to a certain number of `rounds`.\n\nThe second objective was to allow the user to set up and run `tournaments`. 
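To give a flavour of the validation-and-generation approach mentioned above (these are not the real game specs), a hand of cards could be specified and generated like this:

```clojure
(ns card-game.spec-sketch
  (:require [clojure.spec.alpha :as s]
            [clojure.spec.gen.alpha :as gen]))

;; Illustrative specs: ranks and suits are simplified placeholders.
(s/def :card/rank (into #{:J :Q :K :A} (range 2 11)))
(s/def :card/suit #{:diamond :club :heart :spade})
(s/def :game/card (s/keys :req [:card/rank :card/suit]))
(s/def :game/hand (s/coll-of :game/card :distinct true :min-count 1 :max-count 13))

(s/valid? :game/hand [{:card/rank :A :card/suit :spade}]) ;; => true
(gen/sample (s/gen :game/hand) 3) ;; 3 generated hands, e.g. to seed semi-random test games
```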
A tournament is a sequence of stages in which we play `meta-games`.\n\nOnce again, we could leverage the Clojure data immutability to "describe" our `meta-game` in EDN format.\n\nYou can view a `meta-game` as a wrapper around a given sub-game such as `big-two` for instance.\n\nWe can have a `meta-game` of `meta-game` to create a `tournament`.\n\nThis is made possible via making the different projects implements a specific Game protocol.\n\nTherefore, it is possible to describe the rules of the `meta-game` using pure Clojure data and setup games such as:\n- playing 3 games of `big-two` and add up the score of each players, winner is the one with highest score\n- playing several round of `big-two` until one player wins 2 rounds\n- playing one round of `big-two` then on round of `pdk`.\n- playing a tournament in which the semi-finals are a 3 round `big-two` game and the finals a single `pdk` game\n\nAll our games work with any number of players (relevant to the rules of course), so for the tournaments stages, we can have 3 group of 4 players playing `big-two` in the semi, and the 3 winners playing `pdk` in the finals without any issues.\n\nNote: `meta-game` was also ported to the CLR successfully.\n\nOnce again, for non-clojure developers, you can imagine that your are describing a whole tournament setup using just a JSON file which is very powerful (but instead of JSON, we use EDN, the clojure equivalent).\n\n## Learn more\n\n### Clojure projects\n\nAll the Clojure libraries are private\nFlybot Ptd Ltd All Right Reserved\n\n### Magic compiler\n\nThe magic compiler is open source and you can read more about my contribution in the dedicated section.\n', -new S(null,1,5,T,[new S(null,2,5,T,["Port your Clojure lib to the CLR with MAGIC","../blog/port-clj-lib-to-clr"],null)],null),new S(null,1,5,T,[new S(null,2,5,T,["Magic","https://github.com/nasser/magic"],null)],null),"\nAt Flybot, I had the opportunity to create popular Asian Card Games APIs in `Clojure`.\n\nI developed the backend of games such as Pǎo Dé Kuài (跑得快) and Big two (锄大地) which are climbing card games.\n\nI also worked on a Library we called MetaGame that allows us to compose several `Pǎo Dé Kuài` or `Big two` games (or a mix of both) in tournaments for instance.\n", -ws,"Clojure Card Games Backend APIs","card-games-api",new n(null,3,[$o,"https://www.flybot.sg/assets/flybot-logo.png",Xo,"https://www.flybot.sg/assets/flybot-logo.png",Tl,"Flybot Logo"],null),"card-games"]),ij([Sl,xm,ym,Wn,Ro,Cp,Mp,Np,pq,Dr,Zs,tt],[new S(null,9,5,T,"Clojure ClojureScript Figwheel Re-Frame Malli Lasagna-pull Fun-map Datalevin Reitit".split(" "),null),"Flybot Pte Ltd",new S(null,2,5,T,["2022-05-20","2023-08-04"],null),'\n## Rational\n\n[flybot.sg](https://www.flybot.sg/) is an open-source full-stack Clojure web-app that allows company’s employees to write posts to showcase their open-source libraries, their contributions and all technical knowledge that could interest the functional programming community. HRs can also post job offers. 
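Purely as an illustration of "tournament as data" (the real meta-game format is private), such a setup could be expressed in EDN along these lines:

```clojure
;; Hypothetical EDN, not Flybot's actual meta-game format.
{:meta-game/name :demo-tournament
 :meta-game/stages
 [{:stage/game    :big-two          ;; semi-finals: 3 groups of 4 players
   :stage/groups  3
   :stage/players 4
   :stage/stop    {:rounds 3 :rank-by :score}}
  {:stage/game    :pdk              ;; finals: the 3 group winners, single round
   :stage/players 3
   :stage/stop    {:rounds 1}}]}
```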
Admins can edit any piece of content in any pages as the whole content can be written in Markdown.\n\nThe purpose of this project was to demonstrate how the "lasagna stack" ([flybot-sg/lasagna-pull](https://github.com/flybot-sg/lasagna-pull) and [robertluo/fun-map](https://github.com/robertluo/fun-map)) could ease the web development experience for any Clojure developers.\n\nThe [skydread1/flybot.sg](https://github.com/skydread1/flybot.sg) repo was then created and is open-source so all Clojure developers can see the benefit of the `pull pattern` and `fun map` as well as a good example of the usage of other very good open-source libraries of the Clojure community.\n\n## Stack\n\n### Backend\n\n- [reitit](https://github.com/metosin/reitit) for backend routing\n- [muuntaja](https://github.com/metosin/muuntaja) for http api format negotiation, encoding and decoding\n- [malli](https://github.com/metosin/malli) for data validation\n- [aleph](https://github.com/clj-commons/aleph) as http server\n- [reitit-oauth2](https://github.com/skydread1/reitit-oauth2) for oauth2\n- [datalevin](https://github.com/juji-io/datalevin) as datalog database\n- **[fun-map](https://github.com/robertluo/fun-map) for systems**\n- **[lasagna-pull](https://github.com/flybot-sg/lasagna-pull) to precisely select from deep data structure**\n\n### Frontend\n\n- [figwheel-main](https://github.com/bhauman/figwheel-main) for live code clj/cljs reloading\n- [hiccup](https://github.com/weavejester/hiccup) for DOM representation\n- [reitit](https://github.com/metosin/reitit) for frontend routing\n- [malli](https://github.com/metosin/malli) for data validation\n- [markdown-to-hiccup](https://github.com/mpcarolin/markdown-to-hiccup) to write the content in markdown.\n- [re-frame](https://github.com/day8/re-frame) a framework for building user interfaces, leveraging [reagent](https://github.com/reagent-project/reagent)\n- [re-frame-http-fx](https://github.com/day8/re-frame-http-fx) a re-frame effects handler wrapping [cljs-ajax](https://github.com/JulianBirch/cljs-ajax)\n\n## Repo\n\nYou can have a look at the code on my [GitHub repo](https://github.com/skydread1/flybot.sg)\n\nWe use a mono repo structure where the `server` (clj files), and `client` (cljs files) reside alongside each others.\nA `common` (cljc files) top folder is also used for data validation that applies for both server and client.\n\nWe actually have 2 clients: web and mobile.\nSo the web app frontend resides in the same repo as the mobile frontend and the 2 share most of the re-frame events.\n\nThe mono-repo structure is as followed:\n\n```clojure\n├── client\n│   ├── common\n│   │   ├── src\n│   │   │   └── flybot.client.common\n│   │   └── test\n│   │   └── flybot.client.common\n│   ├── mobile\n│   │   ├── src\n│   │   │   └── flybot.client.mobile\n│   │   └── test\n│   │   └── flybot.client.mobile\n│   └── web\n│   ├── src\n│   │   └── flybot.client.web\n│   └── test\n│   └── flybot.client.web\n├── common\n│   ├── src\n│   │   └── flybot.common\n│   └── test\n│   └── flybot.common\n├── server\n│   ├── src\n│   │   └── flybot.server\n│   └── test\n│   └── flybot.server\n```\n\nYou can read more about it in my article: [Clojure Mono Repo example : server + 2 clients](../blog/clojure-mono-repo).\n\n## Features\n\n### Markdown to write the content\n\nOnce logged in, a software engineer or HR can create/edit/delete a post using Markdown for the post content. 
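To give an idea of what an associative fun-map system looks like (a hedged sketch, not flybot.sg's actual system), components can be declared as a life-cycle map in which `fnk` entries depend on other keys; the component names, port and handler below are illustrative assumptions.

```clojure
(ns flybot.system-sketch
  (:require [robertluo.fun-map :refer [life-cycle-map fnk closeable touch halt!]]
            [aleph.http :as http]))

(def system
  (life-cycle-map
   {:db-uri  "datalevin/dev/flybotdb"
    :handler (fnk [db-uri]
               (fn [_request]
                 {:status 200 :body (str "using db at " db-uri)}))
    :server  (fnk [handler]
               (let [srv (http/start-server handler {:port 8123})]
                 (closeable srv #(.close srv))))}))

(comment
  (touch system)  ;; realizes the components following the key dependencies
  (halt! system)) ;; shuts them down via their closeable hooks
```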
Some configurations such as adding an illustrative image for light mode and dark mode or to display author name and date of the articles are also available.\n\nA preview option is also available to see how the post would look like before being submitted.\n\n### Oauth2\n\nWe use google oauth2 for authentication. Once an employee is logged in via google, the ring session is updated server side and a ring cookie is sent to the client to ensure proper authorization on post submission. There is also an Admin panel to add admin permissions to employees that require specific admin roles. Only admins can edit/delete posts of others.\n\n## Design\n\nThe website is fully responsive and the design is simple but clean. The dark mode is handled using global css variables and by having a theme field in the re-frame DB. The persistence of the dark/light mode is done via local storage.\n\n## Data\n\n### Data Persistence\n\nWe used [datalevin](https://github.com/juji-io/datalevin) as DB, which is an open-source DB that supports datalog storage with a similar syntax to Datomic.\n\n### Data validation\n\nWe use [malli](https://github.com/metosin/malli) for data validation for both backend and frontend.\nFurthermore, [lasagna-pull](https://github.com/flybot-sg/lasagna-pull) can accept a malli schema as optional parameter to be sure the pull pattern provided respects the malli schema for that specific query. It is very convenient as if the query shape does not match the schema provided by the API, a detailed error is thrown and no query is performed.\n\n## Lasagna Stack\n\n- [fun-map](https://github.com/robertluo/fun-map) allows us to define a system and perform associative dependency injections.\n- [lasagna-pull](https://github.com/flybot-sg/lasagna-pull) makes selecting data in nested structure more intuitive via a pattern that describes the data to be pulled following the shape of the data.\n\nI wrote articles about how these 2 libraries benefit web development and design in my tech blog:\n- [Lasagna-pull Pattern applied to flybot.sg backend](../blog/lasagna-pull-applied-to-flybot)\n- [Pull Pattern: Query in deep nested data structure](../blog/lasagna-pull)\n\n## CI/CD\n\n### CI\n\nThe GitHub actions run both backend (clj) and frontend (cljs) tests.\n\nIf all the CI tests pass, the GitHub action proceeds to create the js bundle and finally the app docker image is created and deployed to AWS ECR.\n\n### CD\n\nThe website [flybot.sg](http://flybot.sg) is deployed in an EC2 instance in front of LBs.\n\nThe app image is generated via the `deps.edn` and [atomisthq/jibbit](https://github.com/atomisthq/jibbit) directly and started via docker in the EC2 instance.\n\nWe also have the possibility to create an uberjar using [clojure/tools.build](https://github.com/clojure/tools.build) for local testing.\n\nWhen new GitHub PR is merged, the new container image is automatically generated and sent to AWS ECR via Github Actions.\n\nYou can read more about how I deployed the app to AWS in this article: [How to deploy full stack Clojure website to AWS](../blog/deploy-clj-app-to-aws).\n\n## Hot reloading\n\n[figwheel-main](https://github.com/bhauman/figwheel-main) allows us to do hot reloading when a file is saved and provide clj/cljs REPL to print at anytime the re-frame DB for instant feedback. It also allow us to generate an optimized js bundle from the cljs files. The configuration parameters are very well thought and the library makes the development experience a bliss. 
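The theme handling described above (a theme field in the re-frame DB plus local-storage persistence) could look roughly like the following sketch; the event, effect and db keys are assumptions.

```clojure
(ns flybot.theme-sketch
  (:require [re-frame.core :as rf]))

;; Effect: persist the chosen theme to local storage.
(rf/reg-fx
 :fx.app/save-theme
 (fn [theme]
   (.setItem js/localStorage "theme" (name theme))))

;; Event: flip the theme in the app-db and trigger the persistence effect.
(rf/reg-event-fx
 :evt.app/toggle-theme
 (fn [{:keys [db]} _]
   (let [new-theme (if (= :dark (:app/theme db)) :light :dark)]
     {:db                (assoc db :app/theme new-theme)
      :fx.app/save-theme new-theme})))

;; From a view: (rf/dispatch [:evt.app/toggle-theme])
```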
Figwheel also allows us to run our own ring server by providing a ring-handler in the config. This feature works very well with our fun-map system.\n\n## Learn more\n\nFeel free to visit [flybot.sg](https://www.flybot.sg/) and especially the [blog](https://www.flybot.sg/blog).\n', -new S(null,4,5,T,[new S(null,2,5,T,["Deploy full stack Clojure website to AWS","../blog/deploy-clj-app-to-aws"],null),new S(null,2,5,T,["Lasagna-pull applied to flybot.sg","../blog/lasagna-pull-applied-to-flybot"],null),new S(null,2,5,T,["Lasagna Pull: Precisely select from deep nested data","../blog/lasagna-pull"],null),new S(null,2,5,T,["Clojure Mono Repo example : server + 2 clients","../blog/clojure-mono-repo"],null)],null),new S(null,1,5,T,[new S(null,2,5,T,["Flybot","https://github.com/skydread1/flybot.sg"], -null)],null),"\n[flybot.sg](https://www.flybot.sg/) is an open-source full-stack Clojure web-app that allows company’s employees to write posts to showcase their open-source libraries, their contributions and all technical knowledge that could interest the functional programming community. HRs can also post job offers. Admins can edit any piece of content in any pages as the whole content can be written in Markdown.\n\nThe purpose of this project was to demonstrate how the `lasagna stack` ([flybot-sg/lasagna-pull](https://github.com/flybot-sg/lasagna-pull) and [robertluo/fun-map](https://github.com/robertluo/fun-map)) could ease the web development experience for any Clojure developers.\n", -ws,"Flybot Website","clojure-full-stack-webapp",new n(null,3,[$o,"https://www.flybot.sg/assets/flybot-logo.png",Xo,"https://www.flybot.sg/assets/flybot-logo.png",Tl,"Flybot Logo"],null),"flybot-website"]),ij([Sl,ym,Wn,Ro,Cp,Mp,Np,pq,Dr,Zs,tt],[new S(null,6,5,T,"Python;Django;Elastic Beanstalk;RDS PostgreSQL;S3;AWS".split(";"),null),new S(null,2,5,T,["2023-05-27","2023-11-12"],null),"\n## Stack\n\nThe blog is written in `python` and uses the web framework `Django`.\n\nIt is deployed in AWS Elastic Beanstalk, the production data is stored in an AWS RDS PostgreSQL database and the static files are served from an AWS S3 bucket.\n\n## Features\n\nThe different features of the blog are the following:\n- Users can create an account and login/logout\n- Logged-in users can create/edit/delete posts\n- The posts are written in markdown with preview of what the post will look like before submission\n- There is syntax highlighting for the code blocks\n- The UI supports light/dark mode toggle\n- Posts can be sorted in different categories (such as `clojure`, `python` for instance)\n- Users can search a post using the search bar.\n\n## CI/CD\n\n### Env variable\n\nI used `django-environ` to handle env variables.\n\nMy settings are divided in 3 files `settings/common.clj`, `settings/dev.clj` and `settings/prod.clj`.\n\n### Static files\n\nThe static files are stored in AWS S3 in production.\n\nI use `Django-storages` as a storage backend system for my Django app. Django-storages provides various storage backends, including the one designed to work with AWS S3. 
It abstracts the process of interacting with different storage solutions, making it easier to switch between them if needed.\n\nThen I use `boto3` which is the official AWS SDK for Python so I can programmatically interact with AWS S3.\n\nFinally, I can run the `collectstatic` django command to gather and upload the static files to the S3 bucket\n\n### Storage\n\nIn production, the data is stored in an AWS RDS PostgreSQL database.\n\nThe library `psycopg2-binary` allows my app to communicate with a PostgreSQL database.\n\nFor dev, I used the default SQLite configuration provided by Django.\n\n### Run the server\n\nIn production, I use an AWS Elastic Beanstalk.\n\nI use `gunicorn` as HTTP server.\n\nTo deploy new app versions, I rely on the `AWS CLI EB` so I just have to run `eb deploy` to deploy the new app version on the AWS beanstalk.\n\n### Domain\n\nThe blog used to be hosted at `blog.loicblanchard.me`. I used a CNAME record to map my personal subdomain to the Application Load Balancer's DNS.\n\nHowever I moved the blog content to my clojure SPA instead because after my AWS free tier expired, the monthly cost for hosting the blog was around $50 which was too much for a simple blog.\n\n## Learn more\n\nHave a look at the repo [README](https://github.com/skydread1/blog/blob/master/README.md) for more information.\n", -new S(null,1,5,T,[new S(null,2,5,T,["Deploy Django Blog in AWS Beanstalk","../blog/deploy-django-aws-beanstalk"],null)],null),new S(null,1,5,T,[new S(null,2,5,T,["Blog","https://github.com/skydread1/blog"],null)],null),"\nI developed a tech blog in python using the Django framework. Thus, it is Server-Side Rendered.\n\nThe blog is deployed on AWS Beanstalk, the static files are served from an AWS S3 bucket, and the production data is stored in an AWS RDS Postgres database.\n\nIt uses HTMX for the search bar.\n", -ws,"Tech Blog with Django","blog-django",new n(null,3,[$o,"/assets/loic-blog-logo.png",Xo,"/assets/loic-blog-logo.png",Tl,"Logo referencing Aperture Science"],null),"blog-django"]),ij([Sl,xm,ym,Wn,Ro,Mp,Np,pq,Dr,Zs,tt],[new S(null,4,5,T,["MCTS","Monte Carlo Tree Search","Clojure","Card Game"],null),"Flybot Pte Ltd",new S(null,2,5,T,["2021-07-19","2021-08-13"],null),"\n## Rational\n\nAt Flybot Pte Ltd, one of my projects was to develop a robot-player capable of playing multiple rounds of card games, such as the popular Chinese card game `big-two`.\n\nA few of our games implement the same protocol so the implementation can be independent of the underlying game.\n\nThe primary objective was to create an AI that could replace AFK players and provide varying levels of difficulty for offline gameplay.\n\nTo achieve this, I explored two key approaches: \n- **Monte Carlo Tree Search** (MCTS)\n- **Domain knowledge**\n\n## MCTS Theory\n\nMonte Carlo Tree Search (MCTS) is a powerful algorithm known for its role in the success of AI applications like AlphaGo. At its core, MCTS leverages Monte Carlo simulations to guide the search for highly rewarding paths in a game tree. This approach is essential for games with deterministic rules and perfect information, where players have complete knowledge of the game state and no chance events occur.\n\nHowever, card games like `big-two` introduce imperfect information, as players do not have access to their opponents' cards information. To apply MCTS to such games, we need to do one of the following:\n- Pre-select moves by filtering the dumb moves\n- Access hidden information (the other player’s hand). 
This method is called *Determinization* or also *Perfect Information Monte Carlo Sampling*.\n\n## MCTS Implementation\n\nOur MCTS implementation involves representing the game tree as a collection of nodes, where each node corresponds to a specific game state. These nodes store relevant statistics, including visit counts and scores.\n\nThe MCTS process can be broken down into four key steps:\n\n- `Selection`: Determining which child node to explore next.\n- `Expansion`: Adding newly selected child nodes to the tree.\n- `Simulation`: Running multiple game scenarios with random moves and evaluating the AI's total score.\n- `Update`: Back-propagating rewards from simulations to update branch nodes in the tree.\n\n## MCTS Iteration\n\nA complete MCTS iteration comprises the four steps mentioned above: `expand`, `select`, `simulate`, and `update`.\n\nThe more iterations we run, the more accurate our tree becomes, but this comes at the cost of increased computation time.\n\n## MCTS for Games with More Than 2 Players\n\nWhen dealing with games involving more than two players, such as `big-two` with has four players, we must consider the scores of all participants. Each robot (player) aims at maximizing their score, and `UCT` (Upper Confidence Bound applied to trees) values are computed based on the concerned robot's score.\n\n## Caching\n\nCaching plays a critical role in optimizing performance. By caching possible children states and sampled states, we reduce redundant computations and speed up the AI's decision-making process.\n\n## Performance Issues\n\nPerformance challenges can arise, especially at the beginning of a game with numerous possible moves. To address this, I introduced conditions to trigger MCTS only when the number of remaining cards in players' hands falls below a certain threshold. This significantly improved computational efficiency.\n\n## Domain Knowledge\n\nOne limitation of MCTS is its tendency to explore non-promising branches. To overcome this, I incorporated domain knowledge for the initial game moves. This domain knowledge includes a game plan that guides the AI's decision-making process. However, the specific details of this game plan remain confidential.\n\n## Conclusion\n\nIn conclusion, the fusion of MCTS and domain knowledge has enabled us to create a functional AI for big-two that can replace human players and offer different levels of difficulty. As of the time of writing this article in 2023, the implementation is currently undergoing testing as part of a larger system and is not yet in production. 
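For reference, the UCT value mentioned above balances exploitation (the concerned robot's average score at a node) against exploration (how rarely the node has been visited), roughly score/visits + c·√(ln(parent-visits)/visits). Below is a hedged Clojure sketch with an assumed node shape of `{:visits n :scores {robot-id total}}`, not the actual implementation.

```clojure
(ns mcts.uct-sketch)

(defn uct-value
  "Exploitation term (average score for `robot-id`) plus exploration term."
  [{:keys [visits scores]} parent-visits robot-id
   & {:keys [c] :or {c (Math/sqrt 2)}}]
  (if (zero? visits)
    ##Inf ;; always try unvisited children first
    (+ (/ (get scores robot-id 0) visits)
       (* c (Math/sqrt (/ (Math/log parent-visits) visits))))))

(defn select-child
  "Pick the child node maximizing the UCT value for the robot to play."
  [children parent-visits robot-id]
  (apply max-key #(uct-value % parent-visits robot-id) children))
```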
This hybrid approach represents a promising solution for developing robust AI players in complex card games.\n", -new S(null,1,5,T,[new S(null,2,5,T,["MCTS applied to card games","../blog/article-mcts"],null)],null),"\nAt Flybot, I had the opportunity to work on a Monte Carlo Tree Search (MCTS) **Clojure** implementation for our card games.\n\nI combined `MCTS` with `domain knowledge` of our games to balance quality and performance of the plays.\n\nA few of our games implement the same `Game` protocol so the MCTS implementation is independent of the underlying game.\n",ws,"MCTS applied to card games","mcts", -new n(null,3,[$o,"https://www.flybot.sg/assets/flybot-logo.png",Xo,"https://www.flybot.sg/assets/flybot-logo.png",Tl,"Flybot Logo"],null),"mcts"])],null);var Iv={},Jv={},Kv="undefined"!==typeof console;if("undefined"===typeof Iv||"undefined"===typeof Jv||"undefined"===typeof Lv)var Lv=ph(null); -if("undefined"===typeof Iv||"undefined"===typeof Jv||"undefined"===typeof Mv)var Mv=function(){var a={};a.warn=function(){function b(d){var e=null;if(0