~mrlee/www.kamelasa.dev

2beda01aa6df5215cd26a7a90e78c2c4681eaea3 — Lee Meichin a month ago 1227556
Migrate pollen rewrite
48 files changed, 1581 insertions(+), 1572 deletions(-)

D VERSION
M css/main.css
M css/syntax.css
D images/tc.jpg
D index.html
A index.html.pm
A index.ptree
A pollen.rkt
R posts/{a-damn-good-listen.md => a-damn-good-listen.poly.pm}
R posts/{a-decade-of-work.md => a-decade-of-work.poly.pm}
D posts/agile-lipstick.md
A posts/agile-lipstick.poly.pm
D posts/blog-hacking.md
R posts/{blogging-in-haskell.md => blogging-in-haskell.poly.pm}
D posts/can-you-crack-the-code.md
A posts/can-you-crack-the-code.poly.pm
R posts/{celebrate-each-other.md => celebrate-each-other.poly.pm}
R posts/{devops.md => devops.poly.pm}
R posts/{do-you-really-need-those-microservices.md => do-you-really-need-those-microservices.poly.pm}
R posts/{enough.md => enough.poly.pm}
D posts/floc-off.md
A posts/floc-off.poly.pm
D posts/gettin-ziggy-with-it-pi-zero.md
A posts/gettin-ziggy-with-it-pi-zero.poly.pm
R posts/{growing-up.md => growing-up.poly.pm}
R posts/{hakyll-on-devops-pipelines.md => hakyll-on-devops-pipelines.poly.pm}
D posts/hardening-your-server.md
R posts/{human-after-all.md => human-after-all.poly.pm}
R posts/{i-am-here.md => i-am-here.poly.pm}
R posts/{isolation-aloneness-and-loneliness.md => isolation-aloneness-and-loneliness.poly.pm}
D posts/lonesome-lockdown.md
R posts/{my-favourite-thing-about-programming.md => my-favourite-thing-about-programming.poly.pm}
R posts/{on-sharing-vulnerability.md => on-sharing-vulnerability.poly.pm}
R posts/{on-working-remotely.md => on-working-remotely.poly.pm}
R posts/{past-mistakes.md => past-mistakes.poly.pm}
R posts/{permanent-solutions-to-temporary-problems.md => permanent-solutions-to-temporary-problems.poly.pm}
D posts/testing-drafts.md
R posts/{things-ive-changed-my-mind-on.md => things-ive-changed-my-mind-on.poly.pm}
R posts/{time-travel.md => time-travel.poly.pm}
R posts/{to-simpler-times.md => to-simpler-times.poly.pm}
D posts/using-ruby-c-in-ruby.md
A posts/using-ruby-c-in-ruby.poly.pm
A redirs.caddy
R templates/default.html => template.html.p
D templates/archive.html
D templates/git-log.html
D templates/post-list.html
D templates/post.html
D VERSION => VERSION +0 -1
@@ 1,1 0,0 @@
1.0.1
\ No newline at end of file

M css/main.css => css/main.css +15 -4
@@ 82,6 82,12 @@
  padding-left: 1ch;
}

.post-date {
  float: right;
  padding-right: 1ch;
  padding-left: 1ch;
}

[role="doc-endnote"] > p {
  display: inline;
}


@@ 118,10 124,6 @@ aside::before {
  font-style: normal;
}

pre {
  overflow-x: scroll;
}

.git-log {
  display: flex;
}


@@ 133,3 135,12 @@ pre {
.commit-time {
  margin-left: auto;
}

pre {
  display: block;
}

:target {
  font-weight: bold;
  text-decoration: underline;
}
\ No newline at end of file

M css/syntax.css => css/syntax.css +69 -47
@@ 1,47 1,69 @@
pre > code.sourceCode { white-space: pre; position: relative; padding: 1ch; overflow: auto !important; }
pre > code.sourceCode > span { display: inline-block; line-height: 1.25; }
pre > code.sourceCode > span:empty { height: 1.2em; }
code.sourceCode > span { color: inherit; text-decoration: inherit; }
div.sourceCode { margin: 1ch 0; }
pre.sourceCode { margin: 0; padding: 0; border: unset !important; }

@media screen {
div.sourceCode { overflow: auto; }
}

@media print {
pre > code.sourceCode { white-space: pre-wrap; }
pre > code.sourceCode > span { text-indent: -5em; padding-left: 5em; }
}

pre.numberSource code
  { counter-reset: source-line 0; }
pre.numberSource code > span
  { position: relative; left: -4em; counter-increment: source-line; }
pre.numberSource code > span > a:first-child::before
  { content: counter(source-line);
    position: relative; left: -1em; text-align: right; vertical-align: baseline;
    border: none; display: inline-block;
    -webkit-touch-callout: none; -webkit-user-select: none;
    -khtml-user-select: none; -moz-user-select: none;
    -ms-user-select: none; user-select: none;
    padding: 0 4px; width: 4em;
  }
pre.numberSource { margin-left: 3em;  padding-left: 4px; }

@media screen {
pre > code.sourceCode > span > a:first-child::before { text-decoration: underline; }
}

code span.al { font-weight: bold; } /* Alert */
code span.an { font-style: italic; } /* Annotation */
code span.cf { font-weight: bold; } /* ControlFlow */
code span.co { font-style: italic; } /* Comment */
code span.cv { font-style: italic; } /* CommentVar */
code span.do { font-style: italic; } /* Documentation */
code span.dt { text-decoration: underline; } /* DataType */
code span.er { font-weight: bold; } /* Error */
code span.in { font-style: italic; } /* Information */
code span.kw { font-weight: bold; } /* Keyword */
code span.pp { font-weight: bold; } /* Preprocessor */
code span.wa { font-style: italic; } /* Warning */
\ No newline at end of file
.highlight .hll { background-color: #ffffcc }
.highlight  { background: #f0f0f0; }
.highlight .c { color: #60a0b0; font-style: italic } /* Comment */
.highlight .err { border: 1px solid #FF0000 } /* Error */
.highlight .k { color: #007020; font-weight: bold } /* Keyword */
.highlight .o { color: #666666 } /* Operator */
.highlight .ch { color: #60a0b0; font-style: italic } /* Comment.Hashbang */
.highlight .cm { color: #60a0b0; font-style: italic } /* Comment.Multiline */
.highlight .cp { color: #007020 } /* Comment.Preproc */
.highlight .cpf { color: #60a0b0; font-style: italic } /* Comment.PreprocFile */
.highlight .c1 { color: #60a0b0; font-style: italic } /* Comment.Single */
.highlight .cs { color: #60a0b0; background-color: #fff0f0 } /* Comment.Special */
.highlight .gd { color: #A00000 } /* Generic.Deleted */
.highlight .ge { font-style: italic } /* Generic.Emph */
.highlight .gr { color: #FF0000 } /* Generic.Error */
.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
.highlight .gi { color: #00A000 } /* Generic.Inserted */
.highlight .go { color: #888888 } /* Generic.Output */
.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */
.highlight .gs { font-weight: bold } /* Generic.Strong */
.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
.highlight .gt { color: #0044DD } /* Generic.Traceback */
.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */
.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */
.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */
.highlight .kp { color: #007020 } /* Keyword.Pseudo */
.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */
.highlight .kt { color: #902000 } /* Keyword.Type */
.highlight .m { color: #40a070 } /* Literal.Number */
.highlight .s { color: #4070a0 } /* Literal.String */
.highlight .na { color: #4070a0 } /* Name.Attribute */
.highlight .nb { color: #007020 } /* Name.Builtin */
.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */
.highlight .no { color: #60add5 } /* Name.Constant */
.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */
.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */
.highlight .ne { color: #007020 } /* Name.Exception */
.highlight .nf { color: #06287e } /* Name.Function */
.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */
.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */
.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */
.highlight .nv { color: #bb60d5 } /* Name.Variable */
.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */
.highlight .w { color: #bbbbbb } /* Text.Whitespace */
.highlight .mb { color: #40a070 } /* Literal.Number.Bin */
.highlight .mf { color: #40a070 } /* Literal.Number.Float */
.highlight .mh { color: #40a070 } /* Literal.Number.Hex */
.highlight .mi { color: #40a070 } /* Literal.Number.Integer */
.highlight .mo { color: #40a070 } /* Literal.Number.Oct */
.highlight .sa { color: #4070a0 } /* Literal.String.Affix */
.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */
.highlight .sc { color: #4070a0 } /* Literal.String.Char */
.highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */
.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */
.highlight .s2 { color: #4070a0 } /* Literal.String.Double */
.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */
.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */
.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */
.highlight .sx { color: #c65d09 } /* Literal.String.Other */
.highlight .sr { color: #235388 } /* Literal.String.Regex */
.highlight .s1 { color: #4070a0 } /* Literal.String.Single */
.highlight .ss { color: #517918 } /* Literal.String.Symbol */
.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */
.highlight .fm { color: #06287e } /* Name.Function.Magic */
.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */
.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */
.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */
.highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */
.highlight .il { color: #40a070 } /* Literal.Number.Integer.Long */
\ No newline at end of file

D images/tc.jpg => images/tc.jpg +0 -0
D index.html => index.html +0 -16
@@ 1,16 0,0 @@
---
title: Home
---

<h1 class="prompt">
  &gt;
  <span class="type-in"
    >ls -al <a href="/archive">/var/www/kamelasa.dev/archive</a></span
  >
</h1>
$partial("templates/post-list.html")$

<h1 class="prompt">
  &gt; <span class="type-in">git log --format=pretty | head -n 10</span>
</h1>
$partial("templates/git-log.html")$

A index.html.pm => index.html.pm +50 -0
@@ 0,0 1,50 @@
#lang pollen

◊(define-meta title "Home")

◊h1[#:class "prompt"]{
  > ◊span[#:class "type-in"]{
    ls -al /var/www/kamelasa.dev/archive
  }
}

◊table[#:class "borderless"]{
  ◊tbody{
    ◊for/published-posts[#:as p]{
      ◊tr{
        ◊td[#:class "file-permission"]{-rw-r--r--}
        ◊td[#:class "user"]{mrlee}
        ◊td[#:class "group"]{www}
        ◊td[#:class "size"]{◊(post->size p)}
        ◊td[#:class "date"]{◊(post->date p)}
        ◊td{
          ◊a[#:href (page-url p) #:title (post->title p)]{
            ◊post->title[p]
          }
        }
      }
    }
  }
}

◊h1[#:class "prompt"]{
  > ◊span[#:class "type-in"]{
    git log --format=pretty | head -n 10
  }
}

◊ul{
  ◊for/splice[([log (in-list (post->history))])]{
    ◊li[#:class "git-log"]{
      ◊a[#:href (log->giturl log)]{
        ◊log->commit[log]
      }
      ◊span[#:class "commit-msg"]{
        ◊log->message[log]
      }
      ◊tag-time[#:class "commit-time" #:datetime (log->date log)]{
        (◊log->date[log])
      }
    }
  }
}
\ No newline at end of file

A index.ptree => index.ptree +7 -0
@@ 0,0 1,7 @@
#lang pollen

index.html

◊posts{
  ◊this-pagetree["posts"]
}
\ No newline at end of file

A pollen.rkt => pollen.rkt +114 -0
@@ 0,0 1,114 @@
#lang racket

(provide (all-defined-out))

(require txexpr pollen/pagetree pollen/core pollen/setup pollen/decode pollen/cache pollen/file racket/string pollen/tag pollen/unstable/pygments)

(provide highlight)

(module setup racket/base
  (provide (all-defined-out))
  (define poly-targets '(html)))

(define (root . elements)
  (add-footnotes (decode (txexpr 'root empty elements)
    #:txexpr-elements-proc smart-paragraphs
    #:string-proc (compose1 smart-ellipses smart-quotes smart-dashes)
    #:exclude-tags '(pre code))))

(define (smart-paragraphs elements)
  (decode-paragraphs elements
    #:linebreak-proc (λ (elems) (decode-linebreaks elems #f))))

(define (include-files folder extension)
   (map (λ (str) (string->symbol (path->string (simplify-path (format "~a/~a" folder (string-replace str extension "html"))))))
        (filter (λ (str) (string-suffix? str extension))
                (map path->string (directory-list folder)))))

(define (this-pagetree folder) `(@ ,@(include-files folder "poly.pm")))

(define (latest-posts)
  (sort (children 'posts (get-pagetree "index.ptree"))
        #:key post->date
        string>?))

(define (post->path post) (get-source (path->complete-path (symbol->string post) (current-project-root))))
(define (post->title post) (select-from-metas 'title post))
(define (post->date post) (select-from-metas 'date post))
(define (post->published? post) (select-from-metas 'published post))
(define (post->size post) (number->string (file-size (post->path post))))

(define average-word-length 4.7)
(define words-per-minute 250)
(define (post->ert post)  (exact-round (/ (/ (string->number (post->size post)) average-word-length) words-per-minute)))

(define posthistory '())
(define (post->history [post null])
    (when (empty? posthistory)
      (let ([gitlog (string-split (with-output-to-string 
                                  (λ () (system (format "git log --format='~a' --max-count=~a -- ~a"
                                                        "%h;%s;%ai"
                                                        10
                                                        (if (null? post) "." (post->path post))))))
                                "\n")])
      (for/list ([logline (in-list gitlog)])
        (let ([log (string-split logline ";")])
          (set! posthistory 
                (append posthistory 
                        (list `#hash([commit . ,(first log)] 
                                     [message . ,(second log)] 
                                     [date . ,(third log)]))))))))
    posthistory)
(define (log->giturl log) (format "https://git.sr.ht/~~mrlee/www.kamelasa.dev/commit/~a" (hash-ref log 'commit)))
(define (log->commit log) (hash-ref log 'commit))
(define (log->message log) (hash-ref log 'message))
(define (log->date log) (hash-ref log 'date))

(define (q author date . body) `(blockquote ,@body (p ,(format "--~a, ~a" author date))))
(define (<> url) `(a ((href ,url)) ,url))
(define tag-time (default-tag-function 'time))

(define footnotes '())

(define (add-footnotes tx) 
  (txexpr (get-tag tx) (get-attrs tx) 
    `(,@(get-elements tx)
        (hr)
        (section ((class "footnotes"))
          (ol ,(for/splice ([footnote (in-list footnotes)])
                  `(li ((id ,(format "fn~a" (car footnote)))) 
                       ,@(second footnote)
                       (a ((class "footnote-back")
                           (role "doc-backlink")
                           (href ,(format "#fnref~a" (car footnote))))
                            "↩︎"))))))))

(define (^ ref-num . footnote)
  (if (empty? footnote) 
    `(a ((class "footnote-ref") 
        (role "doc-noteref") 
        (id ,(format "fnref~a" ref-num))
        (href ,(format "#fn~a" ref-num)))
        (sup ,(number->string ref-num)))
      (set! footnotes (append footnotes (list (list ref-num footnote))))))

(define-syntax (for/s stx)
  (syntax-case stx ()
    [(_ thing listofthings result-expr ...)
     #'(for/splice ([thing (in-list listofthings)]) result-expr ...)]))

(define (page-url pagenode)
  (string-replace (symbol->string pagenode) "\\" "/"))

(define-syntax (for/published-posts stx)
  (syntax-case stx ()
    [(_ #:as binding result-expr ...)
      #'(for/splice
          ([binding (in-list (latest-posts))] #:when (post->published? binding))
          result-expr ...)]))

(define-syntax (codeblock stx)
  (syntax-case stx ()
    [(_ lang code ...)
      #'(highlight #:python-executable (if (equal? (system-type) 'windows) "python.exe" "python3") 
                   #:line-numbers? #f lang code ...)]))

R posts/a-damn-good-listen.md => posts/a-damn-good-listen.poly.pm +14 -13
@@ 1,18 1,17 @@
---
title: A damn good listen
date: 2020-08-01
status: published
category: personal
---
#lang pollen

A couple of years ago I enrolled in a coaching course with Coaching Development[^1]. It cost a fair whack and, even though I was unable to complete the course due to the severe decline in my mental health at the time, I still maintain that it's the best money I've ever spent, and what I learned has stuck with me and essentially guided me towards a different path through life that I would otherwise never have taken. The lack of credential means I can't practice as a coach, but the skills acquired are a great benefit to any communication driven profession, especially ones where conflict can easily brew up.
◊define-meta[title]{A damn good listen}
◊define-meta[date]{2020-08-01}
◊define-meta[published #t]
◊define-meta[category]{personal}

Out of all of the things I picked up during those five months, one of a few quotes still stands out to me:
A couple of years ago I enrolled in a coaching course with Coaching Development◊^[1]. It cost a fair whack and, even though I was unable to complete the course due to the severe decline in my mental health at the time, I still maintain that it's the best money I've ever spent, and what I learned has stuck with me and essentially guided me towards a different path through life that I would otherwise never have taken. The lack of credential means I can't practice as a coach, but the skills acquired are a great benefit to any communication driven profession, especially ones where conflict can easily brew up.

> Sometimes all somebody needs is a damn good listening to
>
> *Colin Brett, 2018*
Out of all of the things I picked up during those five months, one of a few quotes still stands out to me:

◊q["Colin Brett" 2018]{
  Sometimes all somebody needs is a damn good listening to
}

To be fair, the first thing I think of whenever I hear "damn good" anything is Special Agent Dale Cooper ordering a cherry pie and a coffee. But I hold Twin Peaks close to my heart; you won't find any other television like it.



@@ 20,7 19,7 @@ Seriously though, I'm the sort of person who genuinely enjoys hearing people out

So many problems we have as individual people exist and grow out of proportion because the space or audience required to express those feelings was never up for offer. Raw emotions are dismissed, ignored, and twisted away from their original intent all the time, and all that arises from it is frustration, resentment, and in the worst case, the realisation of emotional abuse.

For example, I was first diagnosed as clinically depressed back in 2010 after breaking up with my ex-girlfriend at the time. I lost her, and the dog who'd been with me for most of my _entire life_, since I was maybe 2 years old, died at the age of 21. That's crazy for a dog, but the loss hurt like nothing else. And my grandad died and I dropped out of uni just before the final exams. But really my dog Suki welcoming the afterlife was the straw that broke the camel's back. My mum understood, and she came to the doctor with me for emotional support. My dad, upon hearing about it, made it all about himself. Told me I'm living a shit life, it's always been shit, and he should have done more to make it not shit.
For example, I was first diagnosed as clinically depressed back in 2010 after breaking up with my ex-girlfriend at the time. I lost her, and the dog who'd been with me for most of my ◊em{entire life}, since I was maybe 2 years old, died at the age of 21. That's crazy for a dog, but the loss hurt like nothing else. And my grandad died and I dropped out of uni just before the final exams. But really my dog Suki welcoming the afterlife was the straw that broke the camel's back. My mum understood, and she came to the doctor with me for emotional support. My dad, upon hearing about it, made it all about himself. Told me I'm living a shit life, it's always been shit, and he should have done more to make it not shit.

He wasn't listening at all; he just took my extreme vulnerability and swapped it in for his own so he could go on about being a bad father. I must have spent more time listening to him and reassuring him, as if I'd just announced I had late-stage cancer and had a few months to live.



@@ 30,4 29,6 @@ To bring it back to now: the beauty of listening, the sheer magnificence of it, 

To wrap this up: sometimes the best, most enlightening conversations are the ones where you don't say anything.

[^1]: <https://coachingdevelopment.com/>
\ No newline at end of file

◊^[1]{◊<>["https://coachingdevelopment.com/"]}


R posts/a-decade-of-work.md => posts/a-decade-of-work.poly.pm +18 -17
@@ 1,17 1,17 @@
---
title: A decade of work
date: 2020-07-20
status: published
category: personal
---
#lang pollen

◊define-meta[title]{A decade of work}
◊define-meta[date]{2020-07-20}
◊define-meta[published #t]
◊define-meta[category]{personal}

I first 'got into' programming back in 2003, or 2004. I can't remember the exact time, but it was at the point where Geocities and AngelFire were still a thing, as was Myspace. The in-thing was to host your own phpBB forum and, much like people will do with forks on github, branch off existing communities into your own ad-riddled alternative. I remember being involved in a few and, actually, a small number of them turned out to be quite successful.

What also existed back then was the 'dotTk' domain, which allowed you to point all kinds of things to it, typically from weird free hosts like 20M. Of course, they would be ad-riddled too. This was before the time that even popup blockers were mainstream, and most of the ads were, in retrospect, quite goofy. You could 'spank the monkey' or fire darts and stuff in weird ad-based minigames. While you could easily get scammed or end up with a virus that dialled into a premium hotline using your landline, they feel a lot more innocent than the kind of misleading crap you get today, which is sometimes difficult to distinguish from genuine content.

But, I digress, I'm not here to talk about advertising, social media, and the slow descent into madness of the modern internet, I'm here to talk about what it's been like as a software developer (or engineer or whatever) over the past ten years of professional work with a good four or five on top as a newbie learning the ropes. This was the time when CSS was barely even standard, and building a website involved pirating a copy of Macromedia Dreamweaver (long since bought out by Adobe) and dragging layers of boxes around a window to get a layout you want. The HTML it would generate was truly awful and it was still common to do all of your layout with tables. It was generally easier to build for IE6 back then since the Mac didn't enjoy the ubiquity it now does, and Firefox was still in its early-ish days. Javascript wasn't really a thing back then, but DHTML[^0] was all the rage and was what allowed you to put flaming cursors or snowflakes all over your page.
But, I digress, I'm not here to talk about advertising, social media, and the slow descent into madness of the modern internet, I'm here to talk about what it's been like as a software developer (or engineer or whatever) over the past ten years of professional work with a good four or five on top as a newbie learning the ropes. This was the time when CSS was barely even standard, and building a website involved pirating a copy of Macromedia Dreamweaver (long since bought out by Adobe) and dragging layers of boxes around a window to get a layout you want. The HTML it would generate was truly awful and it was still common to do all of your layout with tables. It was generally easier to build for IE6 back then since the Mac didn't enjoy the ubiquity it now does, and Firefox was still in its early-ish days. Javascript wasn't really a thing back then, but DHTML◊^[1] was all the rage and was what allowed you to put flaming cursors or snowflakes all over your page.

I fondly remember that day when I dared to look at the generated HTML from my own example sites, and decided to learn how to write things by hand instead. It was all HTML4 back then, with XHTML on the horizon which promised a bit more strictness. Before then, I'd browse through Albino Blacksheep[^1] and its collection of funny things that we now call 'memes', only back then they were a lot more creative and unique and only occasionally memetic, as in the case of Joel Veitch/rathergood.com and his being commissioned to do a TV ad in his style[^2].
I fondly remember that day when I dared to look at the generated HTML from my own example sites, and decided to learn how to write things by hand instead. It was all HTML4 back then, with XHTML on the horizon which promised a bit more strictness. Before then, I'd browse through Albino Blacksheep◊^[2] and its collection of funny things that we now call 'memes', only back then they were a lot more creative and unique and only occasionally memetic, as in the case of Joel Veitch/rathergood.com and his being commissioned to do a TV ad in his style◊^[3].

Back on topic, before I drown in nostalgia. The reason I mention Albino Blacksheep is because I *still*, after all this time, remember one post the author made about how he built the site. He said he did it all in Notepad. *Notepad*! If you're not aware, Notepad is a barebones text editor on Windows, not unlike TextEdit on Mac. I was fascinated because, to my 15/16-year old mind, that seemed like an impossible feat.



@@ 19,25 19,26 @@ That was about the age that I went to sixth form, and I was lucky enough to get 

I essentially got into programming as a joke, because a friend in my new social circle wanted a website, or at least hinted at it. I can't fully remember what was on the site, but I used part of my part-time income from Tesco to buy a .co.uk domain in his name and point it to a little HTML thing I made and hosted through the registrar's free web hosting service. All I needed was an FTP client and a bit of dragging and dropping.

Before I even knew it I had 'PHP4 for dummies' and 'MySQL for dummies' on the desk under my weird bunk-bed setup, and I only found out about this stuff through faffing around with those phpBB forums and looking at the configs. I remember _why_ I sought that out though: I had a different website and noticed that it always displayed the current time when you refreshed it. I searched for how to do it and found examples in PHP, mostly from the comments section that each page of PHP docs had. It was literally as simple as changing the file extension from `html` (or `htm`) to `php` and then adding `<?php echo date(); ?>` wherever you wanted it. Deploying it was a case of dragging and dropping through FTP as most of these shared hosts offered PHP by default.
Before I even knew it I had 'PHP4 for dummies' and 'MySQL for dummies' on the desk under my weird bunk-bed setup, and I only found out about this stuff through faffing around with those phpBB forums and looking at the configs. I remember ◊em{why} I sought that out though: I had a different website and noticed that it always displayed the current time when you refreshed it. I searched for how to do it and found examples in PHP, mostly from the comments section that each page of PHP docs had. It was literally as simple as changing the file extension from ◊code{html} (or ◊code{htm}) to ◊code{php} and then adding ◊code{<?php echo date(); ?>} wherever you wanted it. Deploying it was a case of dragging and dropping through FTP as most of these shared hosts offered PHP by default.

That was literally my first line of dynamic code.

Skip ahead a few years, all the way to 2012 when I moved to London. I'd worked a PHP job full time before then but it was only at New Bamboo where I would find my form. They wrote everything in Ruby on Rails, and my experience in that was extremely minimal. Somehow I'd set up a redis server and had ruby communicating with it on my own hardware, but it didn't do much and I couldn't really figure out the code a few months after I wrote it. This required learning a lot of new things in terms of building application servers, and deploying code. Capistrano[^3] was the tool of choice in Ruby-land for deploying to a VPS and it was essentially a DSL over a bunch of shell scripts. In all honesty this DSL was great but were I not made to use it, I would be a lot more intimate with the power of SSH and tools like `scp`, and understanding the issue with things like forwarding your SSH agent because you pull from a private git repo on your server.
Skip ahead a few years, all the way to 2012 when I moved to London. I'd worked a PHP job full time before then but it was only at New Bamboo where I would find my form. They wrote everything in Ruby on Rails, and my experience in that was extremely minimal. Somehow I'd set up a redis server and had ruby communicating with it on my own hardware, but it didn't do much and I couldn't really figure out the code a few months after I wrote it. This required learning a lot of new things in terms of building application servers, and deploying code. Capistrano◊^[4] was the tool of choice in Ruby-land for deploying to a VPS and it was essentially a DSL over a bunch of shell scripts. In all honesty this DSL was great but were I not made to use it, I would be a lot more intimate with the power of SSH and tools like ◊code{scp}, and understanding the issue with things like forwarding your SSH agent because you pull from a private git repo on your server.

I won't talk much about the code, although my years at New Bamboo were truly formative. One thing has stuck with me since then though, over the 8 years since I was told it. My boss at the time saw I was struggling with managing the expectations of the client I was working with, and I was trying too hard to do things alone and hoping for the best instead of reaching out for the help that was readily available. I must have only been about 5 months into the job at that point. My boss took me into our boardroom, the table of which doubled up as a pingpong table, asked if I was alright, and then said something I've never forgotten since:

*"Being a great developer is about a lot more than writing great code."*
◊em{"Being a great developer is about a lot more than writing great code."}

Of course, at the time I was ashamed for mucking up and felt embarrassed, but I was being given a piece of advice that would, in hindsight, radically change the direction of my career. I since became fascinated with the role of a scrum master, and took on the position full time myself. And as I moved into different positions at different companies (like Typeform and Friday Pulse), I continually realised that my favourite thing about programming wasn't just the raw challenge of solving a problem in code, but in the _people_ side of it. I'm not talking about management per se, it's more about what else you can do with a good amount of knowledge and a strong desire to mentor and encourage those around you so they too can boost themselves up.
Of course, at the time I was ashamed for mucking up and felt embarrassed, but I was being given a piece of advice that would, in hindsight, radically change the direction of my career. I since became fascinated with the role of a scrum master, and took on the position full time myself. And as I moved into different positions at different companies (like Typeform and Friday Pulse), I continually realised that my favourite thing about programming wasn't just the raw challenge of solving a problem in code, but in the ◊em{people} side of it. I'm not talking about management per se, it's more about what else you can do with a good amount of knowledge and a strong desire to mentor and encourage those around you so they too can boost themselves up.

That's been an ongoing theme since mid-2015, even as I moved to Latvia and then eventually returned to London, and if anyone asks me what I consider to be great achievements when I interview, or have a conversation, I will always point to those people who I saw flourish because they were given the time, space and effort to do so.

Now it's 2020, and not much has changed except that I enjoy the programming side of things a lot more than I used to. You could say that those two sides of the coin have started to merge into one imperfect sphere. Language is all communication and, these days, I enjoy trying to solve problems in different languages just so I can learn how to communicate similar technical things better in terms that I'm more familiar with. The same as I enjoy learning spoken languages to empathise in the same way.

The world has changed _a lot_ in all that time, and I now find myself in my thirties. Not once in my life did I ever think or believe I would be doing this kind of thing as a career, and I've grown to love it. If I were to tell my teenage self anything, knowing all of this... I would keep my mouth shut. He managed to figure it out just fine.
The world has changed ◊em{a lot} in all that time, and I now find myself in my thirties. Not once in my life did I ever think or believe I would be doing this kind of thing as a career, and I've grown to love it. If I were to tell my teenage self anything, knowing all of this... I would keep my mouth shut. He managed to figure it out just fine.


[^0]: <https://en.wikipedia.org/wiki/Dynamic_HTML>
[^1]: <https://www.albinoblacksheep.com/archive/>
[^2]: <https://www.youtube.com/watch?v=3AoNKGwBB74>
[^3]: <https://capistranorb.com/>
\ No newline at end of file
◊^[1]{◊<>["https://en.wikipedia.org/wiki/Dynamic_HTML"]}
◊^[2]{◊<>["https://www.albinoblacksheep.com/archive/"]}
◊^[3]{◊<>["https://www.youtube.com/watch?v=3AoNKGwBB74"]}
◊^[4]{◊<>["https://capistranorb.com/"]}
\ No newline at end of file

D posts/agile-lipstick.md => posts/agile-lipstick.md +0 -52
@@ 1,52 0,0 @@
---
title: Agile lipstick 💄
date: 2021-03-22
status: published
category: programming
---

Over the past decade of my wondrous career I've rather haphazardly stumbled in and out of the realm of agile leadership. I've been a scrum master and an agile coach, even got the PSM1 certification which is nice but not really worth the PDF it's written on these days. My best investment was in a coaching course[^1] where I learned experientially all of the things I didn't learn as part of my job: active listening, rapport, acknowledgment and recognition, transactional analysis, I'm Ok/You're Ok, etc.

In the old-days of the internet, and the world wide web, there was a period of time known as Eternal September[^2]. Basically, the shadow-of-its-former-self that is AOL used to be an ISP that gave its browser away on free CDs. You'd find them _everywhere_. So many adverts at the time would give you an 'AOL keyword' to look for to get to their site, and there was an instant messenger and essentially a predecessor to the walled gardens you see so often today with Facebook, Google, Apple, and so on. They made Usenet accessible to the masses and so the existing Usenet users at the time had to basically deal with onboarding a constant influx of new users, many of whom didn't understand the rules or etiquette of the groups they found themselves joining.

I think agile, or perhaps scrum more specifically, has something of an Eternal September itself. The crux of it is that 'agile development' has become increasingly popular, practically a buzzword, and therefore the discourse has never really evolved beyond the experience of that introduction. Either it works for you after an initial period and you stick with it, or you become one of the legion who writes it off. I think this is the same reason why books like Peopleware[^3] and The Five Dysfunctions of a Team[^4] still feel prophetic: forty years and the world has still not caught up with them.

To be agile, in my mind, is to reject mediocrity and foster a mindset that encourages excellence; actually being excellent with each other and not just writing excellent software. This comes in many forms and the exact manifestation will change from one organisation to another, one culture to another, one team to another.

The subject of this topic, the Agile Manifesto[^5], was penned in 2001, twenty years ago, and has remained frozen in time since. Let's take a look in more detail.

The first line of the manifesto is this:

> We are uncovering better ways of developing software by doing it and helping others do it.

The manifesto, at that point in time, was conceived via the process of introspection and adaptation. I actually think this is the most important part of the manifesto because it tells you how they came to the conclusion they did. By extension, do the same and you may yourself find things that you value over others.

> Through this work we have come to value:

_This work_ is their iterative process. As if to reinforce the previous understanding, they discovered a pattern throughout and realised that things they valued _more_ when developing software were:

> Individuals and interactions
>
> Working software
>
> Customer collaboration
>
> Responding to change

It bothers me just how easily such valuable things as change, collaboration and interaction are discarded in favour of rigid process and other varied forms of control-freakery, trying to change the future through a cascading sequence of futile interventions instead of changing the way you do things or the way you are.

I've been doing this dance for a decade now and just like the Eternal September of Usenet, it feels like retracing the first few steps... repeatedly.

The main problem is, though, that it's so fucking difficult to empower a culture where individuals and interactivity take the stage, and you strive to put software you're proud of into the hands of your clients. It takes time and effort and it is a serious transformation that will shake your company to its very core. We're talking about empowerment, sustainability, humility, compassion.

And you can't just let the business fall back into old habits just because something went wrong, blaming it on the new and not what already was. The process is designed to bring problems to the surface so that they can be addressed in the open, through collaboration and interaction.

Fuck that noise, just put the agile lipstick on your pig instead! Throw in a daily standup, run a retrospective, rationalise the legitimate issues away, and use sprints and estimates to track performance. Write a few blogs about it when you're done, and move on to superficially changing the next unwitting client.

Now _that_ is easy.

[^1]: <https://www.coachingdevelopment.com/> - if you're based in London or Ireland this is _so_ worth it.
[^2]: <https://en.wikipedia.org/wiki/Eternal_September>
[^3]: <https://uk.bookshop.org/books/peopleware-productive-projects-and-teams/9780321934116>
[^4]: <https://uk.bookshop.org/books/the-five-dysfunctions-of-a-team-a-leadership-fable/9780787960759>
[^5]: <https://agilemanifesto.org/>

A posts/agile-lipstick.poly.pm => posts/agile-lipstick.poly.pm +56 -0
@@ 0,0 1,56 @@
#lang pollen

◊define-meta[title]{Agile lipstick 💄}
◊define-meta[date]{2021-03-22}
◊define-meta[published #t]
◊define-meta[category]{programming}

Over the past decade of my wondrous career I've rather haphazardly stumbled in and out of the realm of agile leadership. I've been a scrum master and an agile coach, even got the PSM1 certification which is nice but not really worth the PDF it's written on these days. My best investment was in a coaching course◊^[1] where I learned experientially all of the things I didn't learn as part of my job: active listening, rapport, acknowledgment and recognition, transactional analysis, I'm Ok/You're Ok, etc.

In the old-days of the internet, and the world wide web, there was a period of time known as Eternal September◊^[2]. Basically, the shadow-of-its-former-self that is AOL used to be an ISP that gave its browser away on free CDs. You'd find them ◊em{everywhere}. So many adverts at the time would give you an 'AOL keyword' to look for to get to their site, and there was an instant messenger and essentially a predecessor to the walled gardens you see so often today with Facebook, Google, Apple, and so on. They made Usenet accessible to the masses and so the existing Usenet users at the time had to basically deal with onboarding a constant influx of new users, many of whom didn't understand the rules or etiquette of the groups they found themselves joining.

I think agile, or perhaps scrum more specifically, has something of an Eternal September itself. The crux of it is that 'agile development' has become increasingly popular, practically a buzzword, and therefore the discourse has never really evolved beyond the experience of that introduction. Either it works for you after an initial period and you stick with it, or you become one of the legion who writes it off. I think this is the same reason why books like Peopleware◊^[3] and The Five Dysfunctions of a Team◊^[4] still feel prophetic: forty years and the world has still not caught up with them.

To be agile, in my mind, is to reject mediocrity and foster a mindset that encourages excellence; actually being excellent with each other and not just writing excellent software. This comes in many forms and the exact manifestation will change from one organisation to another, one culture to another, one team to another.

The subject of this topic, the Agile Manifesto◊^[5], was penned in 2001, twenty years ago, and has remained frozen in time since. Let's take a look in more detail.

The first line of the manifesto is this:

◊blockquote{
  We are uncovering better ways of developing software by doing it and helping others do it.
}

The manifesto, at that point in time, was conceived via the process of introspection and adaptation. I actually think this is the most important part of the manifesto because it tells you how they came to the conclusion they did. By extension, do the same and you may yourself find things that you value over others.

◊blockquote{
  Through this work we have come to value:
}

◊em{This work} is their iterative process. As if to reinforce the previous understanding, they discovered a pattern throughout and realised that things they valued ◊em{more} when developing software were:

◊blockquote{
  ◊p{Individuals and interactions}
  ◊p{Working software}
  ◊p{Customer collaboration}
  ◊p{Responding to change}
}

It bothers me just how easily such valuable things as change, collaboration and interaction are discarded in favour of rigid process and other varied forms of control-freakery, trying to change the future through a cascading sequence of futile interventions instead of changing the way you do things or the way you are.

I've been doing this dance for a decade now and just like the Eternal September of Usenet, it feels like retracing the first few steps... repeatedly.

The main problem is, though, that it's so fucking difficult to empower a culture where individuals and interactivity take the stage, and you strive to put software you're proud of into the hands of your clients. It takes time and effort and it is a serious transformation that will shake your company to its very core. We're talking about empowerment, sustainability, humility, compassion.

And you can't just let the business fall back into old habits just because something went wrong, blaming it on the new and not what already was. The process is designed to bring problems to the surface so that they can be addressed in the open, through collaboration and interaction.

Fuck that noise, just put the agile lipstick on your pig instead! Throw in a daily standup, run a retrospective, rationalise the legitimate issues away, and use sprints and estimates to track performance. Write a few blogs about it when you're done, and move on to superficially changing the next unwitting client.

Now ◊em{that} is easy.


◊^[1]{◊<>["https://www.coachingdevelopment.com/"] - if you're based in London or Ireland this is ◊em{so} worth it.}
◊^[2]{◊<>["https://en.wikipedia.org/wiki/Eternal_September"]}
◊^[3]{◊<>["https://uk.bookshop.org/a/6865/9780321934116"]}
◊^[4]{◊<>["https://uk.bookshop.org/a/6865/9781118127308"]}
◊^[5]{◊<>["https://agilemanifesto.org/"]}

D posts/blog-hacking.md => posts/blog-hacking.md +0 -115
@@ 1,115 0,0 @@
---
title: Blog hacking
date: 2021-03-18
category: programming
status: draft
---

One of the reasons I chose Hakyll for this blog, aside from messing around with Haskell, was because it was designed as a library. You build your own static site generator from it, adding in whatever bits and pieces you want.

Ever since I first started with it, I liked the idea of integrating with git. The front page of this site has a commit log at the bottom of the page and I only recently updated it to actually link each commit to the source. This week I improved it so each post would show its own commit history too.

This information probably isn't interesting to many people who read whatever I post, but for me it adds a bit more character to the site.

What struck me is just how insanely abstracted Haskell can get, for better or worse. Take a look at the code I ended up with:

```haskell
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}

data GitLog = GitLog { commitHash :: String, commitMsg :: String, commitDate :: String }
  deriving (Show)

getGitLog :: Integer -> String -> IO [GitLog]
getGitLog limit path = do
  (status, stdout, _) <- readProcessWithExitCode
    "git"
    [ "log"
    , "--format=" ++ logfmt
    , "--max-count=" ++ show limit
    , "--"
    , path
    ]
    ""
  return $ case status of
    ExitSuccess -> map parseGitLog $ splitOn "\n" (trim stdout)
    _           -> []
  where trim = dropWhileEnd isSpace
        logfmt = "%h;%s;%ai"

parseGitLog :: String -> GitLog
parseGitLog log = GitLog {..}
  where [commitHash, commitMsg, commitDate] = splitOn ";" log

gitLogCtx :: Context GitLog
gitLogCtx = field "commit" (return . commitHash . itemBody)
    <> field "message" (return . commitMsg . itemBody)
    <> field "date" (return . commitDate . itemBody)

logItem :: GitLog ->  Item GitLog
logItem log = Item (fromString $ "log/" ++ commitHash log) log

logListFieldWith fieldName limit =
  listFieldWith fieldName gitLogCtx $ \item -> unsafeCompiler $ do
    logs <- getGitLog limit $ show (itemIdentifier item)
    return $ map logItem logs

logListField
  :: String -> Integer -> String -> Context String
logListField fieldName limit path =
  listField fieldName gitLogCtx $ unsafeCompiler $ do
    logs <- getGitLog limit path
    return $ map logItem logs
```

## Language pragmas and records

Haskell has language pragmas for practically _everything_. `RecordWildCards`, for example, is how I can make a `GitLog` more easily. These two snippets of code achieve more or less the same thing (the other one being in Ruby).

```haskell
parseGitLog log = GitLog {..}
  where [commitHash, commitMsg, commitDate] = splitOn ";" log
```

```ruby
GitLog = Struct.new(:commit_hash, :commit_msg, :commit_date)

def parse_git_log(log)
  GitLog.new(*log.split(';'))
end
```

Records are a bit odd in Haskell because of how you access a field, which in this case is like this: `commitHash gitLog`. You can't have two records with the same field names as a result, because that would introduce a naming conflict. Newer language extensions resolve that problem.
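
For illustration, here's a minimal sketch of that clash and the workaround, assuming two made-up record types (`Author` and `Repo`); `DuplicateRecordFields` is one of those newer extensions:

```haskell
{-# LANGUAGE DuplicateRecordFields #-}

-- Hypothetical types, purely for illustration: without the extension,
-- two records declaring a `name` field in the same module won't compile.
data Author = Author { name :: String }
data Repo   = Repo   { name :: String }

-- Pattern matching on the record sidesteps the question of which
-- `name` selector is meant.
authorName :: Author -> String
authorName (Author { name = n }) = n
```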

As for `OverloadedStrings`, string handling is also a bit odd in Haskell-land. A string can be a `[Char]` (as in a list of characters), or it can be a `Text`, or it can be something similar. I find myself importing `Data.Text` quite often.
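
A small, hedged example of what the pragma buys you: with it enabled, an ordinary string literal can stand in directly for a `Text` value.

```haskell
{-# LANGUAGE OverloadedStrings #-}

import qualified Data.Text as T

-- With the pragma enabled, the literal below is a Text rather than a
-- [Char], so no explicit T.pack is needed.
greeting :: T.Text
greeting = "hello, world"

main :: IO ()
main = putStrLn (T.unpack (T.toUpper greeting))
```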

## Where what?

I'm quite fond of how you can define a function in Haskell as if it was a mathematical equation. I think that `let` and `where` solve a similar problem that variables do, in a language that does not have variables, and so you can use these to assign more descriptive names to things.

In this example, I think `where trim = dropWhileEnd isSpace` makes my intention clearer than something like `splitOn "\n" (dropWhileEnd isSpace $ stdout)`. The interesting thing is that you're filling in these terms after, not before, like you would with a variable.

```haskell
getGitLog limit path = do
  -- ...
  return $ case status of
    ExitSuccess -> map parseGitLog $ splitOn "\n" (trim stdout)
    _           -> []
  where trim = dropWhileEnd isSpace
```

## (.)

```haskell
field "commit" (return . commitHash . itemBody)
field "message" (return . commitMsg . itemBody)
field "date" (return . commitDate . itemBody)
```

This would be familiar to any JS dev doing React or Redux these days. Haskell does function composition with `.`, and it would be similar to this:

```javascript
field("commit", compose(_return, commitHash, itemBody));
```

`return` in Haskell isn't the same as `return` in most other languages, though.
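
As a rough sketch of the difference, in the `Maybe` monad: `return` only wraps a value, it doesn't exit early the way `return` does in C or JavaScript.

```haskell
-- `return ()` here only wraps a unit value in Maybe; evaluation simply
-- carries on to the next line instead of leaving the function.
half :: Int -> Maybe Int
half n = do
  return ()
  if even n
    then return (n `div` 2)
    else Nothing
```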

R posts/blogging-in-haskell.md => posts/blogging-in-haskell.poly.pm +57 -57
@@ 1,68 1,68 @@
---
title: Blogging in Haskell
date: 2020-06-27
status: published
category: programming
---
#lang pollen

It's taken me quite a while to settle on a particular look and feel for this blog. Rather than just having an outlet for writing, I wanted the creation of it to be a learning experience too. I tried Hugo[^1], Gatsby[^2] and Zola[^3], with Netlify CMS[^4] as a fancy interface for writing posts on top of it all. Each attempt left me feeling less inspired than the last.
◊define-meta[title]{Blogging in Haskell}
◊define-meta[date]{2020-06-27}
◊define-meta[published #t]
◊define-meta[category]{programming}

Eventually I stumbled across Hakyll[^5] and, after finding a CSS 'framework' that gave the appearance of a terminal UI[^6], I felt like I had enough to get things off the ground.
It's taken me quite a while to settle on a particular look and feel for this blog. Rather than just having an outlet for writing, I wanted the creation of it to be a learning experience too. I tried Hugo◊^[1], Gatsby◊^[2] and Zola◊^[3], with Netlify CMS◊^[4] as a fancy interface for writing posts on top of it all. Each attempt left me feeling less inspired than the last.

Eventually I stumbled across Hakyll◊^[5] and, after finding a CSS 'framework' that gave the appearance of a terminal UI◊^[6], I felt like I had enough to get things off the ground.

The major appeal so far has been the immense ease of customisation. Hakyll itself isn't a static site generator in the same sense that others are, and as a result it offers a layer of customisation that other generators generally defer to templating languages for.

The main difference is that you don't pull down a `hakyll` binary and then throw a `yaml` file together in order to configure a few pre-defined properties; you're instead given a basic implementation of a generator, using hakyll's own library, and thus have complete control over routing, page generation, templating, and so on. This generally lives in a `site.hs` file and it's not difficult to follow even for relative newbies to Haskell. The structure of everything else is entirely up to you.

Once you compile this file, you end up with a nice binary, e.g. `site`, and _that_ is what you use to generate your site. It is beautiful in its elegance and I'm eager to see what I can add to this site while also learning some more Haskell at the same time.

As an example, on the home page, there is a `git log` output section. It's fairly primitive, although I intend to build out the functionality a bit more. Writing the functionality was fairly effortless, with the help of some other authors on the net:

```haskell
data GitLog = Hash | Commit | Full
  deriving (Eq, Read)

instance Show GitLog where
  show content = case content of
    Hash   -> "%h"
    Commit -> "%h: %s"
    Full   -> "%h: %s (%ai)"

getGitLog :: GitLog -> Integer -> FilePath -> IO [String]
getGitLog content limit path = do
  (status, stdout, _) <- readProcessWithExitCode
    "git"
    [ "log"
    , "--format=" ++ show content
    , "--max-count=" ++ show limit
    , "--"
    , path
    ]
    ""

  return $ case status of
    ExitSuccess -> splitOn "\n" (trim stdout)
    _           -> [""]
  where trim = dropWhileEnd isSpace

logListField
  :: String -> String -> GitLog -> Integer -> String -> Context String
logListField pluralName singularName style limit path =
  listField pluralName ctx $ unsafeCompiler $ do
    logs <- getGitLog style limit path
    return $ map logItem logs
 where
  ctx = field singularName (return . show . itemBody)
  logItem log = Item (fromString $ path ++ "/log/" ++ log) log
```
The main difference is that you don't pull down a ◊code{hakyll} binary and then throw a ◊code{yaml} file together in order to configure a few pre-defined properties; you're instead given a basic implementation of a generator, using hakyll's own library, and thus have complete control over routing, page generation, templating, and so on. This generally lives in a ◊code{site.hs} file and it's not difficult to follow even for relative newbies to Haskell. The structure of everything else is entirely up to you.
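
To make that concrete, here is a hedged sketch of what such a ◊code{site.hs} might look like; the rules and template path are illustrative rather than this site's actual configuration.

◊codeblock['haskell]{
  {-# LANGUAGE OverloadedStrings #-}

  import Hakyll

  -- A toy generator: routing, compilation and templating are all plain
  -- Haskell code rather than entries in a yaml file.
  main :: IO ()
  main = hakyll $ do
    match "templates/*" $ compile templateCompiler

    match "posts/*.md" $ do
      route (setExtension "html")
      compile $
        pandocCompiler
          >>= loadAndApplyTemplate "templates/post.html" defaultContext
}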

Once you compile this file, you end up with a nice binary, e.g. ◊code{site}, and ◊em{that} is what you use to generate your site. It is beautiful in its elegance and I'm eager to see what I can add to this site while also learning some more Haskell at the same time.

As an example, on the home page, there is a ◊code{git log} output section. It's fairly primitive, although I intend to build out the functionality a bit more. Writing the functionality was fairly effortless, with the help of some other authors on the net:

◊codeblock['haskell]{
  data GitLog = Hash | Commit | Full
    deriving (Eq, Read)

  instance Show GitLog where
    show content = case content of
      Hash   -> "%h"
      Commit -> "%h: %s"
      Full   -> "%h: %s (%ai)"

  getGitLog :: GitLog -> Integer -> FilePath -> IO [String]
  getGitLog content limit path = do
    (status, stdout, _) <- readProcessWithExitCode
      "git"
      [ "log"
      , "--format=" ++ show content
      , "--max-count=" ++ show limit
      , "--"
      , path
      ]
      ""

    return $ case status of
      ExitSuccess -> splitOn "\n" (trim stdout)
      _           -> [""]
    where trim = dropWhileEnd isSpace

  logListField
    :: String -> String -> GitLog -> Integer -> String -> Context String
  logListField pluralName singularName style limit path =
    listField pluralName ctx $ unsafeCompiler $ do
      logs <- getGitLog style limit path
      return $ map logItem logs
  where
    ctx = field singularName (return . show . itemBody)
    logItem log = Item (fromString $ path ++ "/log/" ++ log) log
}

The result of adding this code, and then inserting it into the template context, is that I have a new template variable that I can loop over, for each log item. The practical use is fairly limited, but I like it because it adds a certain flavour to the site. Later on I will try to use a parser combinator library to be able to present the different parts of the log with more control.
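
As a rough sketch of that wiring (the ◊code{indexCtx} name and field names are hypothetical, and it leans on the ◊code{logListField} defined above plus Hakyll's ◊code{defaultContext}):

◊codeblock['haskell]{
  -- Hypothetical context for the home page: expose the last ten commits
  -- touching the whole repository as a list the templates can loop over.
  indexCtx :: Context String
  indexCtx =
    logListField "gitlogs" "gitlog" Full 10 "."
      <> defaultContext
}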

In any case, I've enjoyed playing around with Haskell in order to deploy this site, and I'm looking forward to seeing what else I can build with the language. It's truly fascinating.


[^1]: <https://gohugo.io/>
[^2]: <https://www.gatsbyjs.org/>
[^3]: <https://www.getzola.org/>
[^4]: <https://www.netlifycms.org/>
[^5]: <https://jaspervdj.be/hakyll/>
[^6]: <https://terminalcss.xyz/>
\ No newline at end of file
◊^[1]{◊<>["https://gohugo.io"]}
◊^[2]{◊<>["https://www.gatsbyjs.org"]}
◊^[3]{◊<>["https://www.getzola.org"]}
◊^[4]{◊<>["https://www.netlifycms.org"]}
◊^[5]{◊<>["https://jaspervdj.be/hakyll"]}
◊^[6]{◊<>["https://terminalcss.xyz"]}
\ No newline at end of file

D posts/can-you-crack-the-code.md => posts/can-you-crack-the-code.md +0 -276
@@ 1,276 0,0 @@
---
title: Can you crack the code?
date: 2020-12-31
category: programming
status: published
---

What better way to spend the final moments of 2020, locked down in London, than with a logic puzzle and a computer that can, well, do logic? Join me for a few minutes on this auspicious eve, and learn how you can spend an order of magnitude more time computing a solution than what it would take if you used your noggin instead.

I presume you've seen this kind of puzzle before: there is a lock that requires a three or four digit code in order for it to open. You don't know what the code is, of course, but you're given a series of clues that will test your powers of deduction and lead you to the right answer. I've actually got such a puzzle here:

---

<div style="text-align: center">
**CAN YOU CRACK THE CODE?**

🔐 \_ \_ \_ \_

**9** **2** **8** **5**  
One number is correct, but in the wrong place.

**1** **9** **3** **7**  
Two numbers are correct, but in the wrong place.

**5** **2** **0** **1**  
One number is correct, and is in the right place.

**6** **5** **0** **7**  
None of the numbers are correct, in any place.

**8** **5** **0** **4**  
Two numbers are correct, but in the wrong place.

</div>

---

## A brief introduction

If you're unaware of Prolog, it's a _logical programming_ language that, in its simplest terms, takes a bunch of facts and rules and then gives you the tools to query them to get the outcome you want. In more complicated terms, a cursory search on the intertubes will lead you to a vast collection of academic papers that explain more. This is not the kind of language that is casually blogged about by the masses, as with more mainstream ones like CSS, HTML, or ColdFusion.

> Programming in Prolog is significantly different from conventional procedural programming and requires a readjustment in the way one thinks about programming. Logical relationships are asserted, and Prolog is used to determine whether or not certain statements are true, and if true, what variable bindings make them true. This leads to a very declarative style of programming.
>
> Dennis Merritt, _Adventure in Prolog_, 2017[^0].

Mr Merritt is, to put it professionally, **god damn right**. Here's a valid Prolog program:

```prolog
% https://swish.swi-prolog.org/p/KfdGtcJr.swinb

president(trump).
president(obama).
president(brie).

cheese(brie).
cheese(wensleydale).
person(trump).
person(obama).
```

What we have here are some facts, both true and technically true. It's a fact that Obama is a president, as is Trump. It's also a fact that there is a brand of cheese in the UK called President. This is quite ambiguous as a result so some extra facts are supplied, namely that brie is a cheese as much as it is a President-brand cheese, and that Wensleydale is also a cheese. It goes without saying that Trump and Obama are people, so with those facts we should be able to do some querying.

If you're doing this on your own machine, you can save those facts into a file (say, `example.pl`) and then import it inside a console, like so: `[example].`. Otherwise, you can load up the Swish notebook[^1] and follow along using an online console, no installation needed!

Let's do some querying then, which will show you how Prolog might seem a bit back to front compared to what you're used to.

```prolog
president(trump). % true.
```

So far, so boring. We stated `president(trump)` as a fact in our first prolog file, so this is basically confirming that we did so. Let's spice it up a little.

```prolog
president(X). % trump; obama; brie.
```

<aside>With the online editor you can click 'Next' to see all of the results, and in the console you can type `;`. This can be done repeatedly until the output ends with `.`, which says there are no more facts that fit the query.</aside>

The fuck? What is `X`?

`X` is a variable, or a placeholder if you like. Any word starting with a capital letter is a variable, and when you pass one in a query Prolog will supply the results of the query to those variables. In this case, we're essentially saying `who are all the presidents? I don't know their names so put them all in X for me`.

Let's try one more thing, which should explain enough about Prolog to be dangerous.

```prolog
president(X), cheese(X). % brie.
```

_Now we're cookin' wi' gas!_ as we'd say back up north. A lot of what you do in Prolog is chain little sentences like this together (using the comma operator `,`, which means `and`), and in this instance we're asking Prolog to get all the presidents, put them in `X`, and then show only the ones that are also a cheese. The `.` finishes the sentence, or the query. Let's do a similar query to wrap this intro up, and you can see if your guess at the answer is the same as what this produces.

```prolog
president(X), person(X). % trump, obama.
```

This is more or less the essence of Prolog: your program is a database of facts and rules, and you use it by querying them. You make a query by providing what you _do_ know, and placing a variable (or a placeholder) in the spots where you don't know the answer. You don't tell Prolog how exactly to compute that answer. And with that explained, I think we can try and crack this code.
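
To make that a bit more concrete before we move on, here's a rule of my own (the name `edible_president` is made up for illustration, it's not from any tutorial) that packages the previous query up so it can be reused like a fact:

```prolog
% A rule: X is an edible_president if X satisfies both of the earlier facts.
edible_president(X) :- president(X), cheese(X).

edible_president(Who). % brie.
```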

## Doing some l33t haxx0ring

Here's the puzzle again, for reference:

---

<div style="text-align: center">
**CAN YOU CRACK THE CODE?**

🔐 \_ \_ \_ \_

**9** **2** **8** **5**  
One number is correct, but in the wrong place.

**1** **9** **3** **7**  
Two numbers are correct, but in the wrong place.

**5** **2** **0** **1**  
One number is correct, and is in the right place.

**6** **5** **0** **7**  
None of the numbers are correct, in any place.

**8** **5** **0** **4**  
Two numbers are correct, but in the wrong place.

</div>

---

According to Leon Sterling and Ehud Shapiro in _The Art of Prolog_[^2], this type of problem falls quite neatly under the umbrella of non-deterministic programming. This is because we're essentially going to build an algorithm that uses what they describe as a `generate and test` solution. We're going to write something that will take our clues and run through all the possible answers until it lands on the only one that fits. We're not aiming for beautiful optimisation here, so this is good enough, although the code we write will be tightly coupled to the exact puzzle provided.

So, let's begin with our set of rules:

```prolog
:- use_module(library(clpfd)). % we're working with numbers, this makes it easier.

clue_1([9, 2, 8, 5]). % one number correct, but in the wrong place
clue_2([1, 9, 3, 7]). % two numbers are correct, but in the wrong place
clue_3([5, 2, 0, 1]). % one number is correct, and is also in the right place
clue_4([6, 5, 0, 7]). % none of the numbers are correct, anywhere
clue_5([8, 5, 2, 4]). % two numbers are correct, but in the wrong place
```

<aside>If you're curious about the first `use_module` statement, beyond knowing that it makes things easier, check out the docs on <em>Constraint Logic Programming over Finite Domains</em>[^3].</aside>

These clues don't really mean anything by themselves; they're simple facts in Prolog terms, so we need to add a bit more to give them some meaning. All of this will go into the same file, as we're not ready to query yet.

```prolog
% rule: a digit is correct but it is in the wrong place
wrong_place(Digit, Index, Digits) :- nth1(Index1, Digits, Digit), Index \== Index1.

% rule: a digit is correct and it is in the right place
right_place(Digit, Index, Digits) :- nth1(Index, Digits, Digit).

% rule: the digit is wrong.
wrong(_, []).
wrong(Digit, [D|Ds]) :- Digit #\= D, wrong(Digit, Ds).
```

I'll leave the in-depth explanation of these rules to another post for the sake of brevity, and also because I'm not that much of a Prolog expert. They're all there to add meaning to the facts: with these rules we can now express logic such as _one number is correct but in the wrong position_, and _none of the numbers are correct_. We just have to painstakingly mix and match them.
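
In the meantime, here are a few throwaway queries of my own (run with the rules above and `clpfd` loaded) that show how the rules behave when handed concrete digits from the first clue, rather than the constrained variables we'll use in a moment:

```prolog
nth1(Index, [9, 2, 8, 5], 8).    % Index = 3: nth1/3 relates a 1-based index to a digit.
right_place(8, 3, [9, 2, 8, 5]). % true: 8 really is the third digit of this guess.
wrong_place(8, 1, [9, 2, 8, 5]). % true: 8 is in the guess, just not at index 1.
wrong(4, [9, 2, 8, 5]).          % true: 4 appears nowhere in the guess.
```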

The next bit is quite long, but this query is where we make the sausage. Commentary will be written inline for ease of copy and paste, until I come back and edit this post with a more digestible version.

```prolog
crack_code(Code) :-
    % A, B, C and D represent the four digits of the code, which are all between 0 and 9.
    A in 0..9,
    B in 0..9,
    C in 0..9,
    D in 0..9,

    % ';' means 'or', whereas ',' means 'and'

    % one digit in D1 is correct, but in the wrong place
    % the other three digits must therefore be incorrect
    % query this for each digit.
    clue_1(D1),
    (
        wrong_place(A, 1, D1), wrong(B, D1), wrong(C, D1), wrong(D, D1);
        wrong_place(B, 2, D1), wrong(A, D1), wrong(C, D1), wrong(D, D1);
        wrong_place(C, 3, D1), wrong(A, D1), wrong(B, D1), wrong(D, D1);
        wrong_place(D, 4, D1), wrong(A, D1), wrong(B, D1), wrong(C, D1)
    ),

    % two digits are correct this time, and they are both in the wrong place
    % exhaustively check every combination where two numbers are correct, and the other two are incorrect.
    clue_2(D2),
    (
        wrong_place(A, 1, D2), wrong_place(B, 2, D2), wrong(C, D2), wrong(D, D2);
        wrong_place(A, 1, D2), wrong_place(C, 3, D2), wrong(B, D2), wrong(D, D2);
        wrong_place(A, 1, D2), wrong_place(D, 4, D2), wrong(B, D2), wrong(C, D2);

        wrong_place(B, 2, D2), wrong_place(A, 1, D2), wrong(C, D2), wrong(D, D2);
        wrong_place(B, 2, D2), wrong_place(C, 3, D2), wrong(A, D2), wrong(D, D2);
        wrong_place(B, 2, D2), wrong_place(D, 4, D2), wrong(A, D2), wrong(C, D2);

        wrong_place(C, 3, D2), wrong_place(A, 1, D2), wrong(B, D2), wrong(D, D2);
        wrong_place(C, 3, D2), wrong_place(B, 2, D2), wrong(A, D2), wrong(D, D2);
        wrong_place(C, 3, D2), wrong_place(D, 4, D2), wrong(A, D2), wrong(B, D2);

        wrong_place(D, 4, D2), wrong_place(A, 1, D2), wrong(B, D2), wrong(C, D2);
        wrong_place(D, 4, D2), wrong_place(B, 2, D2), wrong(A, D2), wrong(C, D2);
        wrong_place(D, 4, D2), wrong_place(C, 3, D2), wrong(A, D2), wrong(B, D2)
    ),

    % one digit is correct, and also in the right place
    % as above, we still don't know which digit that is, so we check each one.
    clue_3(D3),
    (
        right_place(A, 1, D3), wrong(B, D3), wrong(C, D3), wrong(D, D3);
        right_place(B, 2, D3), wrong(A, D3), wrong(C, D3), wrong(D, D3);
        right_place(C, 3, D3), wrong(A, D3), wrong(B, D3), wrong(D, D3);
        right_place(D, 4, D3), wrong(A, D3), wrong(B, D3), wrong(C, D3)
    ),

    % none of the digits are correct, so they can be completely excluded
    % we know for a fact the final result will not contain any of these digits.
    clue_4(D4),
    (
        wrong(A, D4), wrong(B, D4), wrong(C, D4), wrong(D, D4)
    ),

    % again, two digits are correct but not in the right order
    % we do a similar check as before but also need to look
    % back into the previous clue to eliminate wrong candidates;
    % this is why we query D2, as well as D5.
    clue_5(D5),
    (
        wrong_place(A, 1, D5), wrong_place(B, 2, D5), wrong(C, D5), wrong(D, D5);
        wrong_place(A, 1, D5), wrong_place(C, 3, D5), wrong(B, D5), wrong(D, D5);
        wrong_place(A, 1, D5), wrong_place(D, 4, D5), wrong(B, D2), wrong(C, D2);

        wrong_place(B, 2, D5), wrong_place(A, 1, D5), wrong(C, D5), wrong(D, D5);
        wrong_place(B, 2, D5), wrong_place(C, 3, D5), wrong(A, D5), wrong(D, D5);
        wrong_place(B, 2, D5), wrong_place(D, 4, D5), wrong(A, D2), wrong(C, D2);

        wrong_place(C, 3, D5), wrong_place(A, 1, D5), wrong(B, D5), wrong(D, D5);
        wrong_place(C, 3, D5), wrong_place(B, 2, D5), wrong(A, D5), wrong(D, D5);
        wrong_place(C, 3, D5), wrong_place(D, 4, D5), wrong(A, D2), wrong(B, D2);

        wrong_place(D, 4, D5), wrong_place(A, 1, D5), wrong(B, D5), wrong(C, D5);
        wrong_place(D, 4, D5), wrong_place(B, 2, D5), wrong(A, D5), wrong(C, D5);
        wrong_place(D, 4, D5), wrong_place(C, 3, D5), wrong(A, D2), wrong(B, D2)
    ),

    % Take (or cut) the first result, no need for continued backtracking
    % this is probably most similar to an early return or short-circuit.
    !,

    % we've cracked the code! A, B, C, and D each refer to
    % the only answer that makes sense given the previous
    % rules.
    Code = [A, B, C, D].
```

Did you solve the puzzle yourself? Do you remember the answer? If you don't care to copy and paste all of that, you can open up this ready-made notebook[^4], and then run the following:

```prolog
crack_code([A, B, C, D]),
write('The first number is: '), write(A), write('\n'),
write('The second number is: '), write(B), write('\n'),
write('The third number is: '), write(C), write('\n'),
write('The fourth number is: '), write(D), write('\n').
```

The exercise of writing that in a less brute-force manner is left to you, my beloved reader.

## The grand finale

So ends 2020, so ends this post. Did your brain-grown answer match the one this Prolog program gave you? What do you think about logic programming in general now you've seen some of it? Why not share it with your friends or whoever, if they're interested, and see what they think?

Mad propz to the Prolog community on Reddit also, whose example solutions helped point me in the right direction [^5].

[^0]: <https://amzi.com/AdventureInProlog/a1start.php> (buy the book, srlsy...)
[^1]: <https://swish.swi-prolog.org/p/KfdGtcJr.swinb>
[^2]: <https://mitpress.mit.edu/books/art-prolog-second-edition>
[^3]: <https://www.swi-prolog.org/man/clpfd.html>
[^4]: <https://swish.swi-prolog.org/p/MgtEUnSv.swinb>
[^5]: <https://www.reddit.com/r/prolog/comments/fzww7m/cracking_this_puzzle_with_prolog/>

A posts/can-you-crack-the-code.poly.pm => posts/can-you-crack-the-code.poly.pm +279 -0
@@ 0,0 1,279 @@
#lang pollen

◊define-meta[title]{Can you crack the code?}
◊define-meta[date]{2020-12-31}
◊define-meta[published #t]
◊define-meta[category]{programming}

What better way to spend the final moments of 2020, locked down in London, than with a logic puzzle and a computer that can, well, do logic? Join me for a few minutes on this auspicious eve, and learn how you can spend an order of magnitude more time computing a solution than what it would take if you used your noggin instead.

I presume you've seen this kind of puzzle before: there is a lock that requires a three or four digit code in order for it to open. You don't know what the code is, of course, but you're given a series of clues that will test your powers of deduction and lead you to the right answer. I've actually got such a puzzle here:

◊hr{}

◊div[#:style "text-align: center"]{
  ◊strong{CAN YOU CRACK THE CODE?}

  🔐 _ _ _ _

  ◊b{9} ◊b{2} ◊b{8} ◊b{5}  
  ◊p{One number is correct, but in the wrong place.}

  ◊b{1} ◊b{9} ◊b{3} ◊b{7}  
  ◊p{Two numbers are correct, but in the wrong place.}

  ◊b{5} ◊b{2} ◊b{0} ◊b{1}  
  ◊p{One number is correct, and is in the right place.}

  ◊b{6} ◊b{5} ◊b{0} ◊b{7}
  ◊p{None of the numbers are correct, in any place.}

  ◊b{8} ◊b{5} ◊b{0} ◊b{4}  
  ◊p{Two numbers are correct, but in the wrong place.}
}

◊hr{}

◊h2{A brief introduction}

If you're unaware of Prolog, it's a ◊em{logic programming} language that, in its simplest terms, takes a bunch of facts and rules and then gives you the tools to query them to get the outcome you want. In more complicated terms, a cursory search on the intertubes will lead you to a vast collection of academic papers that explain more. This is not the kind of language that is casually blogged about by the masses, as with more mainstream ones like CSS, HTML, or ColdFusion.

◊q["Dennis Merritt" 2017]{
  Programming in Prolog is significantly different from conventional procedural programming and requires a readjustment in the way one thinks about programming. Logical relationships are asserted, and Prolog is used to determine whether or not certain statements are true, and if true, what variable bindings make them true. This leads to a very declarative style of programming.◊^[1]
}

Mr Merritt is, to put it professionally, ◊strong{god damn right}. Here's a valid Prolog program:

◊codeblock['prolog]{
  % https://swish.swi-prolog.org/p/KfdGtcJr.swinb

  president(trump).
  president(obama).
  president(brie).

  cheese(brie).
  cheese(wensleydale).
  person(trump).
  person(obama).
}

What we have here are some facts, both true and technically true. It's a fact that Obama is a president, as is Trump. It's also a fact that there is a brand of cheese in the UK called President. This is quite ambiguous as a result so some extra facts are supplied, namely that brie is a cheese as much as it is a President-brand cheese, and that Wensleydale is also a cheese. It goes without saying that Trump and Obama are people, so with those facts we should be able to do some querying.

If you're doing this on your own machine, you can save those facts into a file (say, ◊code{example.pl}) and then import it inside a console, like so: ◊code{[example].}. Otherwise, you can load up the Swish notebook◊^[2] and follow along using an online console, no installation needed!

Let's do some querying then, which will show you how Prolog might seem a bit back to front compared to what you're used to.

◊codeblock['prolog]{
  president(trump). % true.
}

So far, so boring. We stated ◊code{president(trump)} as a fact in our first Prolog file, so this is basically confirming that we did so. Let's spice it up a little.

◊codeblock['prolog]{
  president(X). % trump; obama; brie.
}

◊aside{
  With the online editor you can click 'Next' to see all of the results, and in the console you can type ◊code{;}. This can be done repeatedly until the output ends with ◊code{.}, which says there are no more facts that fit the query.
}

The fuck? What is ◊code{X}?

◊code{X} is a variable, or a placeholder if you like. Any word starting with a capital letter is a variable, and when you pass one in a query Prolog will supply the results of the query to those variables. In this case, we're essentially saying ◊em{"who are all the presidents? I don't know their names so put them all in ◊code{X} for me"}.
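
For what it's worth, this is roughly what an interactive SWI-Prolog session prints for that query once the facts are loaded; the exact formatting can differ between Prolog systems:

◊codeblock['prolog]{
  ?- president(X).
  X = trump ;
  X = obama ;
  X = brie.
}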

Let's try one more thing, which should explain enough about Prolog to be dangerous.

◊codeblock['prolog]{
  president(X), cheese(X). % brie.
}

◊em{Now we're cookin' wi' gas!} as we'd say back up north. A lot of what you do in Prolog is chain little sentences like this together (using the comma operator ◊code{,}, which means ◊code{and}), and in this instance we're asking Prolog to get all the presidents, put them in ◊code{X}, and then show only the ones that are also a cheese. The ◊code{.} finishes the sentence, or the query. Let's do a similar query to wrap this intro up, and you can see if your guess at the answer is the same as what this produces.

◊codeblock['prolog]{
  president(X), person(X). % trump, obama.
}

This is more or less the essence of Prolog: your program is a database of facts and rules, and you use it by querying them. You make a query by providing what you ◊em{do} know, and placing a variable (or a placeholder) in the spots where you don't know the answer. You don't tell Prolog how exactly to compute that answer. And with that explained, I think we can try and crack this code.

◊h2{Doing some l33t haxx0ring}

Here's the puzzle again, for reference:

◊hr{}

◊div[#:style "text-align: center"]{
  ◊strong{CAN YOU CRACK THE CODE?}

  🔐 _ _ _ _

  ◊b{9} ◊b{2} ◊b{8} ◊b{5}  
  ◊p{One number is correct, but in the wrong place.}

  ◊b{1} ◊b{9} ◊b{3} ◊b{7}  
  ◊p{Two numbers are correct, but in the wrong place.}

  ◊b{5} ◊b{2} ◊b{0} ◊b{1}  
  ◊p{One number is correct, and is in the right place.}

  ◊b{6} ◊b{5} ◊b{0} ◊b{7}
  ◊p{None of the numbers are correct, in any place.}

  ◊b{8} ◊b{5} ◊b{0} ◊b{4}  
  ◊p{Two numbers are correct, but in the wrong place.}
}

◊hr{}

According to Leon Sterling and Ehud Shapiro in ◊em{The Art of Prolog}◊^[3], this type of problem falls quite neatly under the umbrella of non-deterministic programming. This is because we're essentially going to build an algorithm that uses what they describe as a ◊code{generate and test} solution. We're going to write something that will take our clues and run through all the possible answers until it lands on the only one that fits. We're not aiming for beautiful optimisation here, so this is good enough, although the code we write will be tightly coupled to the exact puzzle provided.
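
Before writing the real thing, here's the generate-and-test idea in miniature. This is a toy of my own (the ◊code{candidate} predicate is not part of the puzzle solution): the first goal generates every digit, the second throws away any that appear in a list of excluded digits.

◊codeblock['prolog]{
  % between/3 generates candidates; the negated member/2 call tests them.
  candidate(Digit, Excluded) :-
      between(0, 9, Digit),
      \+ member(Digit, Excluded).

  candidate(D, [6, 5, 0, 7]). % D = 1; 2; 3; 4; 8; 9.
}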

So, let's begin with our set of rules:

◊codeblock['prolog]{
  :- use_module(library(clpfd)). % we're working with numbers, this makes it easier.

  clue_1([9, 2, 8, 5]). % one number correct, but in the wrong place
  clue_2([1, 9, 3, 7]). % two numbers are correct, but in the wrong place
  clue_3([5, 2, 0, 1]). % one number is correct, and is also in the right place
  clue_4([6, 5, 0, 7]). % none of the numbers are correct, anywhere
  clue_5([8, 5, 2, 4]). % two numbers are correct, but in the wrong place
}

◊aside{
  If you're curious about the first ◊code{use_module} statement, beyond knowing that it makes things easier, check out the docs on ◊em{Constraint Logic Programming over Finite Domains}.◊^[4]
}

These clues don't really mean anything by themselves; they're simple facts in Prolog terms, so we need to add a bit more to give them some meaning. All of this will go into the same file, as we're not ready to query yet.

◊codeblock['prolog]{
  % rule: a digit is correct but it is in the wrong place
  wrong_place(Digit, Index, Digits) :- nth1(Index1, Digits, Digit), Index \== Index1.

  % rule: a digit is correct and it is in the right place
  right_place(Digit, Index, Digits) :- nth1(Index, Digits, Digit).

  % rule: the digit is wrong.
  wrong(_, []).
  wrong(Digit, [D|Ds]) :- Digit #\= D, wrong(Digit, Ds).
}

I'll leave the in-depth explanation of these rules to another post for the sake of brevity, and also because I'm not that much of a Prolog expert. They're all there to add meaning to the facts: with these rules we can now express logic such as ◊em{one number is correct but in the wrong position}, and ◊em{none of the numbers are correct}. We just have to painstakingly mix and match them.
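
One detail worth calling out: ◊code{wrong/2} uses clpfd's ◊code{#\=} rather than plain arithmetic, so when it's handed an unconstrained variable it doesn't fail, it just narrows that variable's domain. A quick illustrative query of my own, with the rules and ◊code{clpfd} loaded (the residual constraint formatting below is SWI-Prolog's and may vary):

◊codeblock['prolog]{
  ?- X in 0..9, wrong(X, [6, 5, 0, 7]).
  X in 1..4\/8..9.
}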

The next bit is quite long, but this query is where we make the sausage. Commentary will be written inline for ease of copy and paste, until I come back and edit this post with a more digestible version.

◊codeblock['prolog]{
  crack_code(Code) :-
    % A, B, C and D represent the four digits of the code, which are all between 0 and 9.
    A in 0..9,
    B in 0..9,
    C in 0..9,
    D in 0..9,

    % ';' means 'or', whereas ',' means 'and'

    % one digit in D1 is correct, but in the wrong place
    % the other three digits must therefore be incorrect
    % query this for each digit.
    clue_1(D1),
    (
      wrong_place(A, 1, D1), wrong(B, D1), wrong(C, D1), wrong(D, D1);
      wrong_place(B, 2, D1), wrong(A, D1), wrong(C, D1), wrong(D, D1);
      wrong_place(C, 3, D1), wrong(A, D1), wrong(B, D1), wrong(D, D1);
      wrong_place(D, 4, D1), wrong(A, D1), wrong(B, D1), wrong(C, D1)
    ),

    % two digits are correct this time, and they are both in the wrong place
    % exhaustively check every combination where two numbers are correct, and the other two are incorrect.
    clue_2(D2),
    (
      wrong_place(A, 1, D2), wrong_place(B, 2, D2), wrong(C, D2), wrong(D, D2);
      wrong_place(A, 1, D2), wrong_place(C, 3, D2), wrong(B, D2), wrong(D, D2);
      wrong_place(A, 1, D2), wrong_place(D, 4, D2), wrong(B, D2), wrong(C, D2);

      wrong_place(B, 2, D2), wrong_place(A, 1, D2), wrong(C, D2), wrong(D, D2);
      wrong_place(B, 2, D2), wrong_place(C, 3, D2), wrong(A, D2), wrong(D, D2);
      wrong_place(B, 2, D2), wrong_place(D, 4, D2), wrong(A, D2), wrong(C, D2);

      wrong_place(C, 3, D2), wrong_place(A, 1, D2), wrong(B, D2), wrong(D, D2);
      wrong_place(C, 3, D2), wrong_place(B, 2, D2), wrong(A, D2), wrong(D, D2);
      wrong_place(C, 3, D2), wrong_place(D, 4, D2), wrong(A, D2), wrong(B, D2);

      wrong_place(D, 4, D2), wrong_place(A, 1, D2), wrong(B, D2), wrong(C, D2);
      wrong_place(D, 4, D2), wrong_place(B, 2, D2), wrong(A, D2), wrong(C, D2);
      wrong_place(D, 4, D2), wrong_place(C, 3, D2), wrong(A, D2), wrong(B, D2)
    ),

    % one digit is correct, and also in the right place
    % as above, we still don't know which digit that is, so we check each one.
    clue_3(D3),
    (
      right_place(A, 1, D3), wrong(B, D3), wrong(C, D3), wrong(D, D3);
      right_place(B, 2, D3), wrong(A, D3), wrong(C, D3), wrong(D, D3);
      right_place(C, 3, D3), wrong(A, D3), wrong(B, D3), wrong(D, D3);
      right_place(D, 4, D3), wrong(A, D3), wrong(B, D3), wrong(C, D3)
    ),

    % none of the digits are correct, so they can be completely excluded
    % we know for a fact the final result will not contain any of these digits.
    clue_4(D4),
    (
      wrong(A, D4), wrong(B, D4), wrong(C, D4), wrong(D, D4)
    ),

    % again, two digits are correct but not in the right order
    % we do a similar check as before but also need to look
    % back into the previous clue to eliminate wrong candidates;
    % this is why we query D2, as well as D5.
    clue_5(D5),
    (
      wrong_place(A, 1, D5), wrong_place(B, 2, D5), wrong(C, D5), wrong(D, D5);
      wrong_place(A, 1, D5), wrong_place(C, 3, D5), wrong(B, D5), wrong(D, D5);
      wrong_place(A, 1, D5), wrong_place(D, 4, D5), wrong(B, D2), wrong(C, D2);

      wrong_place(B, 2, D5), wrong_place(A, 1, D5), wrong(C, D5), wrong(D, D5);
      wrong_place(B, 2, D5), wrong_place(C, 3, D5), wrong(A, D5), wrong(D, D5);
      wrong_place(B, 2, D5), wrong_place(D, 4, D5), wrong(A, D2), wrong(C, D2);

      wrong_place(C, 3, D5), wrong_place(A, 1, D5), wrong(B, D5), wrong(D, D5);
      wrong_place(C, 3, D5), wrong_place(B, 2, D5), wrong(A, D5), wrong(D, D5);
      wrong_place(C, 3, D5), wrong_place(D, 4, D5), wrong(A, D2), wrong(B, D2);

      wrong_place(D, 4, D5), wrong_place(A, 1, D5), wrong(B, D5), wrong(C, D5);
      wrong_place(D, 4, D5), wrong_place(B, 2, D5), wrong(A, D5), wrong(C, D5);
      wrong_place(D, 4, D5), wrong_place(C, 3, D5), wrong(A, D2), wrong(B, D2)
    ),

    % Take (or cut) the first result, no need for continued backtracking
    % this is probably most similar to an early return or short-circuit.
    !,

    % we've cracked the code! A, B, C, and D each refer to
    % the only answer that makes sense given the previous
    % rules.
    Code = [A, B, C, D].
}

Did you solve the puzzle yourself? Do you remember the answer? If you don't care to copy and paste all of that, you can open up this ready-made notebook◊^[5], and then run the following:

◊codeblock['prolog]{
  crack_code([A, B, C, D]),
  write('The first number is: '), write(A), write('\n'),
  write('The second number is: '), write(B), write('\n'),
  write('The third number is: '), write(C), write('\n'),
  write('The fourth number is: '), write(D), write('\n').
}

The exercise of writing that in a less brute-force manner is left to you, my beloved reader.
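
If you want a starting point, here's one direction I might take: a rough sketch of my own, where the predicate names ◊code{digits}, ◊code{in_place}, ◊code{shared}, ◊code{clue} and ◊code{crack} are all invented for this sketch rather than taken from the code above. The idea is to score every candidate code against each guess by counting in-place and anywhere matches, so each clue collapses into a single line instead of a dozen disjunctions. It's computationally cruder, but there's far less to hand-enumerate, and it should land on the same four digits.

◊codeblock['prolog]{
  % Generate every four-digit candidate.
  digits([A, B, C, D]) :-
      between(0, 9, A), between(0, 9, B),
      between(0, 9, C), between(0, 9, D).

  % How many guess digits sit in exactly the right position?
  in_place(Code, Guess, N) :-
      findall(x, (nth1(I, Code, X), nth1(I, Guess, X)), Xs),
      length(Xs, N).

  % How many guess digits appear anywhere in the code?
  shared(Code, Guess, N) :-
      findall(x, (member(X, Guess), memberchk(X, Code)), Xs),
      length(Xs, N).

  % A clue: Right digits are well placed, Misplaced are present but out of place.
  clue(Code, Guess, Right, Misplaced) :-
      in_place(Code, Guess, Right),
      shared(Code, Guess, Shared),
      Misplaced is Shared - Right.

  crack(Code) :-
      digits(Code),
      clue(Code, [9, 2, 8, 5], 0, 1),
      clue(Code, [1, 9, 3, 7], 0, 2),
      clue(Code, [5, 2, 0, 1], 1, 0),
      clue(Code, [6, 5, 0, 7], 0, 0),
      clue(Code, [8, 5, 2, 4], 0, 2).
}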

◊h2{The grand finale}

So ends 2020, so ends this post. Did your brain-grown answer match the one this Prolog program gave you? What do you think about logic programming in general now you've seen some of it? Why not share it with your friends or whoever, if they're interested, and see what they think?

Mad propz to the Prolog community on Reddit also, whose example solutions helped point me in the right direction◊^[6].


◊^[1]{◊<>["https://amzi.com/AdventureInProlog/a1start.php"] - (buy the book, srlsy...)}
◊^[2]{◊<>["https://swish.swi-prolog.org/p/KfdGtcJr.swinb"]}
◊^[3]{◊<>["https://uk.bookshop.org/books/the-art-of-prolog-advanced-programming-techniques/9780262691635"]}
◊^[4]{◊<>["https://www.swi-prolog.org/man/clpfd.html"]}
◊^[5]{◊<>["https://swish.swi-prolog.org/p/MgtEUnSv.swinb"]}
◊^[6]{◊<>["https://www.reddit.com/r/prolog/comments/fzww7m/cracking_this_puzzle_with_prolog/"]}

R posts/celebrate-each-other.md => posts/celebrate-each-other.poly.pm +15 -15
@@ 1,11 1,11 @@
---
title: Celebrate each other
date: 2020-08-03
category: personal
status: published
---
#lang pollen

Back when I worked at Typeform[^0], it really surprised me that they casually used a system of extrinsic motivation to reward good work, and to appreciate and recognise others. That's a long-handed way of saying that they used a service called Bonusly[^1] and integrated it with the company chat app, which at the time was HipChat[^2] (hands up if you remember *not* using Slack?). We had an internal currency called Typecoin (TC)[^3] and you had a budget of 250 a month to offer to your fellow colleagues as an extra way of saying thanks, or shouting out. Those coins actually converted to cash or Amazon gift vouchers, which was pretty cool. Most of the time I'd cash out, go to the Mercadona by the beach at La Vila Olimpica (which was around the corner from our office), and buy around 10€ worth of Haribo sweets for the whole office to enjoy. It was usually either a 'ThursYAY' or 'TuesYAY' depending on what mood I was in.
◊define-meta[title]{Celebrate each other}
◊define-meta[date]{2020-08-03}
◊define-meta[published #t]
◊define-meta[category]{personal}

Back when I worked at Typeform◊^[1], it really surprised me that they casually used a system of extrinsic motivation to reward good work, and to appreciate and recognise others. That's a long-handed way of saying that they used a service called Bonusly◊^[2] and integrated it with the company chat app, which at the time was HipChat◊^[3] (hands up if you remember ◊em{not} using Slack?). We had an internal currency called Typecoin (TC)◊^[4] and you had a budget of 250 a month to offer to your fellow colleagues as an extra way of saying thanks, or shouting out. Those coins actually converted to cash or Amazon gift vouchers, which was pretty cool. Most of the time I'd cash out, go to the Mercadona by the beach at La Vila Olimpica (which was around the corner from our office), and buy around 10€ worth of Haribo sweets for the whole office to enjoy. It was usually either a 'ThursYAY' or 'TuesYAY' depending on what mood I was in.

Of course, I've just linked recognition to financial reward. That wasn't really the main goal of the system though, although it operated as a nice side-effect. You couldn't gift some of your TC without connecting it to a company value and also explaining what they did to earn it, and all of this would be posted publicly to a special channel in HipChat (as well as the application itself) for everyone to see. There was even a leaderboard and it was quite exciting to see who made it to the top each month (I held the record for a few months; it was a nice ego boost).



@@ 15,20 15,20 @@ The reason I bring this up so fondly is because the act of recognition was an ac

The difference is that, for the majority of my career, recognition, acknowledgement and appreciation are not typically given in such an active way. When what you provide is abundant, the appreciation quietly turns into expectation. Only when it is time for you to leave or move on does the scarcity mindset kick in and you are showered with love and support. Because they won't be able to enjoy your presence or benefit from your skills any more.

Before I continue, I'm sure some people (particularly current colleagues) reading this at the time of writing will think... is Lee talking about Babylon[^4] and why he's leaving? Is he airing his laundry? Emphatically, I am not :) but let's talk about Babylon anyway.
Before I continue, I'm sure some people (particularly current colleagues) reading this at the time of writing will think... is Lee talking about Babylon◊^[5] and why he's leaving? Is he airing his laundry? Emphatically, I am not :) but let's talk about Babylon anyway.

When I joined Babylon in early 2019, one of the first things I did after settling in was, well, 'make a name for myself' on Slack. Because of course, HipChat stopped being a thing in 2019. I wanted to take that positivity I experienced at Typeform, in Barcelona, and everything I'd learned and loved since, and see if something similar could happen in London. I made a gratitude channel and basically name-dropped colleagues, explaining what I appreciated through the onboarding process and the initial few weeks. Little did I know a similar scheme was being built in parallel and not long after, we got our Feel Good Fridays, which accompanied a huge list of collated messages of gratitude, recognition, and acknowledgement from one colleague to another.

Honestly, I live for that shit :D I love public displays of recognition because, so often, this stuff never happens until you're gone and people miss whatever you did that made them so happy or grateful. Just a simple line of text with thank you or an explanation, along with hundreds of other similar lines meant for other people, that everybody could read through and really share in that celebration of each other, not just success.

And that, for me, is my key learning after all these years. 'Celebrating success' is such an overused and misunderstood term that you never really see it happen. Besides which, there is plenty of failure that is also worth celebration. And plenty of stuff that doesn't fit into the bucket of success or failure that deserves celebrating too. And you can be successful _within_ a failure.
And that, for me, is my key learning after all these years. 'Celebrating success' is such an overused and misunderstood term that you never really see it happen. Besides which, there is plenty of failure that is also worth celebration. And plenty of stuff that doesn't fit into the bucket of success or failure that deserves celebrating too. And you can be successful ◊em{within} a failure.

What I'm saying in a pretty long-winded way is that we people make the success what it is, and success comes in so many diverse forms! It's not just a project delivered on time, or a feature boosting MRR, or an uptick in retention against churn or whatever abstract work-related metric you can conjure up. It's not just a business goal, it's a _fucking plethora_ of personal goals, desires, likes, dislikes, passions, and serendipitous interactions, all of which can mingle and mesh until you get that moment of genius, or you go home feeling happy and contented. Or whatever it is you want from life.
What I'm saying in a pretty long-winded way is that we people make the success what it is, and success comes in so many diverse forms! It's not just a project delivered on time, or a feature boosting MRR, or an uptick in retention against churn or whatever abstract work-related metric you can conjure up. It's not just a business goal, it's a ◊em{fucking plethora} of personal goals, desires, likes, dislikes, passions, and serendipitous interactions, all of which can mingle and mesh until you get that moment of genius, or you go home feeling happy and contented. Or whatever it is you want from life.

Celebrate each other, celebrate yourselves. 🥳

[^0]: <https://www.typeform.com/>
[^1]: <https://bonus.ly/>
[^2]: <https://en.wikipedia.org/wiki/HipChat>
[^3]: <https://www.mrlee.dev/images/tc.jpg>
[^4]: <https://www.babylonhealth.com/>
◊^[1]{◊<>["https://www.typeform.com"]}
◊^[2]{◊<>["https://bonus.ly"]}
◊^[3]{◊<>["https://en.wikipedia.org/wiki/HipChat"]}
◊^[4]{◊<>["https://www.kamelasa.dev/images/tc.jpg"]}
◊^[5]{◊<>["https://www.babylonhealth.com"]}
\ No newline at end of file

R posts/devops.md => posts/devops.poly.pm +8 -8
@@ 1,9 1,9 @@
---
title: Devops
date: 2020-09-18
status: published
category: programming
---
#lang pollen

◊define-meta[title]{Devops}
◊define-meta[date]{2020-09-18}
◊define-meta[published #t]
◊define-meta[category]{programming}

I left a healthcare company called Babylon a few weeks back. Of all the things I enjoyed there, and the things that made it unique, one has to be how it has managed to dance across the line between startup and enterprise. I don't mean 'enterprise' in a pejorative sense; it's more that you can't really avoid that when you're working in a heavily regulated and audited sector. Mistakes can literally be life or death, and the data collected over time includes people's medical information and health records. There really isn't any room for fucking about, but that doesn't mean that other business functions have to be so serious.



@@ 11,7 11,7 @@ Enterprise might not be the best word, but it's the only one I have right now, a

Let's talk about bugs and production servers. Before Babylon I had not worked in a single place that restricted access to production. As a developer working primarily with Ruby on Rails applications, getting prod access on Heroku or AWS was practically an onboarding step, and that meant I could easily boot up a console and modify the application runtime on the fly. This is an amazingly powerful tool in development and testing environments (it's basically a bootstrapped REPL), but expose that in production and a malicious actor could wreak all sorts of havoc without leaving a trace. This is even worse if your Rails app is running under `root` for some reason (e.g. through a poor Docker setup), as you can quite easily jump into a shell from there.

You couldn't do any of this at Babylon because production and preprod were locked down _tight_, and even seeing production logs required a background check. This didn't really make debugging worse, because instead there was a huge investment in tooling (internal and external) and developer experience to balance it out. One of my favourite outcomes of this is the creation of an open source tool for managing a Kubernetes cluster, called `shipcat`[^1]. You know it's good when it has its own cute logo.
You couldn't do any of this at Babylon because production and preprod were locked down ◊em{tight}, and even seeing production logs required a background check. This didn't really make debugging worse, because instead there was a huge investment in tooling (internal and external) and developer experience to balance it out. One of my favourite outcomes of this is the creation of an open source tool for managing a Kubernetes cluster, called ◊code{shipcat}◊^[1]. You know it's good when it has its own cute logo.

What I've since realised is that this particular crutch (live debugging in production) prevents the business from properly investing in safer and more compliant tools for engineers to investigate issues. You want proper structured logging, good alerting on error conditions, and a whole slew of observability (o11y) tools that can help you diagnose the system from the outside-in without compromising it. You want to have a team of impassioned engineers who enjoy working on internal productivity/efficacy, creating new tools to address pain-points in the organisation's development and support lifecycle.



@@ 29,4 29,4 @@ Before I proselytise too much, I think this is really important because you can 

And if I had one suggestion for any budding project finding itself in the hands of real life users in production, consider what you want your devops culture to be like and, if you can, see how early you can encourage your team without depending on offering wide-scale production access. And maybe even consider what kind of internal tooling you can build to improve the productivity and efficacy of your engineers.

[^1]: <https://github.com/babylonhealth/shipcat>
◊^[1]{◊<>["https://github.com/babylonhealth/shipcat"]}

R posts/do-you-really-need-those-microservices.md => posts/do-you-really-need-those-microservices.poly.pm +12 -12
@@ 1,9 1,9 @@
---
title: Do you really need those microservices?
date: 2020-07-21
category: programming
status: published
---
#lang pollen

◊define-meta[title]{Do you really need those microservices?}
◊define-meta[date]{2020-07-21}
◊define-meta[published #t]
◊define-meta[category]{programming}

I've been through half a dozen rounds of interviewing over the past couple of months, with different companies. Naturally, when you say you've had experience working with microservices, you're practically begging the question. What do you think about them?



@@ 11,15 11,15 @@ I'm not really for-or-against them, and in fact I find it a little strange and f

Anyway, I love being asked this question because after a good five years of working with distributed systems orchestrated by Kubernetes, almost entirely in the form of 'migrating away from the monolith', I've had plenty of time to formulate and adapt my thinking around it.

First and foremost, I believe the main benefit of a business switching to microservices is the manifestation of Conway's Law[^1] in practice. Prior to making the switch in architecture, the business most likely decided on an organisational structure that exchanges large, difficult to manage teams for a distributed collection of smaller, self-sufficient, self-empowered teams. More often than not these follow a squad and chapter model, otherwise reduced to 'the Spotify model', and a hierarchy of cross-functional teams is established. Once that structure is put in place and people are shuffled around a bit, the question of ownership in a mixed-responsibility, mixed-domain codebase becomes apparent. Microservices are thus the solution to a perceived conflict between squads and they shift a lot of that conflict from the teams themselves, to the channels in-between them.
First and foremost, I believe the main benefit of a business switching to microservices is the manifestation of Conway's Law◊^[1] in practice. Prior to making the switch in architecture, the business most likely decided on an organisational structure that exchanges large, difficult to manage teams for a distributed collection of smaller, self-sufficient, self-empowered teams. More often than not these follow a squad and chapter model, otherwise reduced to 'the Spotify model', and a hierarchy of cross-functional teams is established. Once that structure is put in place and people are shuffled around a bit, the question of ownership in a mixed-responsibility, mixed-domain codebase becomes apparent. Microservices are thus the solution to a perceived conflict between squads and they shift a lot of that conflict from the teams themselves, to the channels in-between them.

Under no circumstance is the technical implication of such a change considered, particularly in older codebases for which this change would introduce a significant level of disruption. The organisational benefits of distributing teams and workload are substantial, but the drawbacks of distributing _code_ are similarly worthy of consideration too, and it often becomes a gateway to extra complexity as once-simple tasks find themselves spread apart over various network calls and machines.
Under no circumstance is the technical implication of such a change considered, particularly in older codebases for which this change would introduce a significant level of disruption. The organisational benefits of distributing teams and workload are substantial, but the drawbacks of distributing ◊em{code} are similarly worthy of consideration too, and it often becomes a gateway to extra complexity as once-simple tasks find themselves spread apart over various network calls and machines.

I don't consider this a dealbreaker, but in my experience I've always felt like there's a step missing between the singular, monolithic codebase and the highly distributed microservice architecture. There's a hell of a lot you can do in that singular codebase, in terms of taking smaller steps towards a service-oriented architecture, or a domain-driven one. 

In those situations, you are investing primarily in the work required to understand the different domains in your codebase and how they speak to each other, with much lower risk than fundamentally changing your infrastructure as well as your architecture. In Ruby, you can abstract code into gems and provide solid, public APIs that other parts of the code can use. In Java you have modules and packages. Every language you care to use has the concept of packaging code into bundles or libraries that can be shared as a dependency.

Adopting this workflow introduces much lower risk because, in the event of failure, you can easily adjust your expectations around the domains and how they communicate and fix them in a singular release. It may not be perfect, but if you refactor enough of your code in SOA[^2]/DDD[^3] style then you'll have a much easier time turning those domains into proper microservices further down the line.
Adopting this workflow introduces much lower risk because, in the event of failure, you can easily adjust your expectations around the domains and how they communicate and fix them in a singular release. It may not be perfect, but if you refactor enough of your code in SOA◊^[2]/DDD◊^[3] style then you'll have a much easier time turning those domains into proper microservices further down the line.

This is where a microservice architecture truly shines. If you have clear, well bounded domains, and you've developed solid API contracts as well as standards for versioning, logging, etc to allow for centralised aggregation of useful resources (for debugging or auditing, for example), and if the team in charge of the domain can essentially treat that service as a full-blown product with documentation, support, and its own priorities and backlog, then that is where the power of that infrastructure comes into play.



@@ 29,6 29,6 @@ I believe that's a lot easier to do when you start early, but if you come to it 

If I was to offer anybody advice about how to make all of this happen successfully, I'd say to stop thinking in terms of the existing monolith, and instead look at what individual products you could separate or extract, or even spin-off into their own businesses if the idea was unique enough to sell individually. And don't jump to solutions like Kubernetes until you're dealing with enough of these services that your existing deployment setup is too hard to manage.

[^1]: <https://en.wikipedia.org/wiki/Conway%27s_law>
[^2]: <https://en.wikipedia.org/wiki/Service-oriented_architecture>
[^3]: <https://en.wikipedia.org/wiki/Domain-driven_design>
\ No newline at end of file
◊^[1]{◊<>["https://en.wikipedia.org/wiki/Conway%27s_law"]}
◊^[2]{◊<>["https://en.wikipedia.org/wiki/Service-oriented_architecture"]}
◊^[3]{◊<>["https://en.wikipedia.org/wiki/Domain-driven_design"]}
\ No newline at end of file

R posts/enough.md => posts/enough.poly.pm +15 -15
@@ 1,9 1,9 @@
---
title: Enough
date: 2021-03-06
category: personal
status: published
---
#lang pollen

◊define-meta[title]{Enough}
◊define-meta[date]{2021-03-06}
◊define-meta[published #t]
◊define-meta[category]{personal}

I'm driven by ambition, as are many of us. I don't think that you need to be ambitious to be successful, though, unless you reframe your perception of it. You can be enough; no more, no less.



@@ 15,17 15,17 @@ It's not as healthy as stepping back and getting back into it the next day. The 

And... it's the classic case of doing less with more. The prospect of doing something instead of nothing is a seductive one.

I'm reminded of the Politician's Syllogism[^1], which in this case would sound like this:
I'm reminded of the Politician's Syllogism◊^[1], which in this case would sound like this:

> To be productive, I must do something
>
> I am doing something
>
> Therefore, I am being productive
◊blockquote{
  ◊p{To be productive, I must do something}
  ◊p{I am doing something}
  ◊p{Therefore, I am being productive}
}

It's a circular definition and it also makes me think of the first time an agile team tries to write a user story:

> So that I can log in / As a user / I need a log in system
◊blockquote{So that I can log in / As a user / I need a log in system}

The problem with defining a problem in terms of itself is that it looks like you're doing something for the sake of it. No one really benefits from anything done for the sake of it, but it'll feel good and it'll likely come with a cost.



@@ 33,7 33,7 @@ So it is with ambition for the sake of ambition. Working towards something witho

When is it enough?

At one point in time, I didn't really have a concept of this. There would always be more, always something to do better, always some way to _be_ better, by whatever definition 'better' had at the time.
At one point in time, I didn't really have a concept of this. There would always be more, always something to do better, always some way to ◊em{be} better, by whatever definition 'better' had at the time.

These days that feels more like an endless treadmill, always chasing the carrot dangling an arm's length away and beating yourself with the stick when you fail to reach it.



@@ 55,4 55,4 @@ Am I enough? Certainly!

Do I behave like I am? Not at all.

[^1]: <https://en.wikipedia.org/wiki/Politician%27s_syllogism>
◊^[1]{◊<>["https://en.wikipedia.org/wiki/Politician%27s_syllogism"]}

D posts/floc-off.md => posts/floc-off.md +0 -26
@@ 1,26 0,0 @@
---
title: FLoC Off
date: 2021-04-16
category: web
status: published
---

FLoC (Federated Learning of Cohorts) is Google's answer to the diminishing utility of third party cookies.

Browsing this site will not opt you into this latest experiment in large-scale privacy violation.

You might notice that the site does gather analytics using plausible.io[^1], who themselves go into some more detail about this and how to opt-out[^2].

You can see the analytics for yourself, as I have made them public - you and I see the same thing on that page. It's a glorified hit-counter that lets me see what posts land better than others and it is very easily adblockable. In fact, go ahead and block Javascript on this site - if there's any feature I ever add that depends on it, there will always be an accessible `<noscript>`[^3] fallback if it actually matters to me.

I don't have any issue with that kind of technology, for what it's worth. You're only seeing how people use your site so you can figure out how you might tweak things, or understand what you need to do less of if you're scaring people away. It has practically nothing in common with the invasive tracking and advertising that follows you all across the internet, the likes that Google and Facebook involve themselves with at a scale beyond human comprehension.

Anyway, every page here is served with the `Permissions-Policy: interest-cohort=()` header set. There is a valid argument that this still presents a datapoint that can be tracked, but since the change is happening server-side, it is less useful than if you sent the same thing from your browser in every request, adding to your unique fingerprint (as with `Do-Not-Track`, an abject failure of a standard[^4]).

If you're curious, you can also check out the Security Headers report for this site[^5].

[^1]: <https://plausible.io/kamelasa.dev>
[^2]: <https://plausible.io/blog/google-floc>
[^3]: <https://developer.mozilla.org/en-US/docs/Web/HTML/Element/noscript>
[^4]: <https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/DNT>
[^5]: <https://securityheaders.com/?q=www.kamelasa.dev&followRedirects=on>

A posts/floc-off.poly.pm => posts/floc-off.poly.pm +26 -0
@@ 0,0 1,26 @@
#lang pollen

◊define-meta[title]{FLoC Off}
◊define-meta[date]{2021-04-16}
◊define-meta[published #t]
◊define-meta[category]{programming}

FLoC (Federated Learning of Cohorts) is Google's answer to the diminishing utility of third party cookies.

Browsing this site will not opt you into this latest experiment in large-scale privacy violation.

You might notice that the site does gather analytics using plausible.io◊^[1], who themselves go into some more detail about this and how to opt-out◊^[2].

You can see the analytics for yourself, as I have made them public - you and I see the same thing on that page. It's a glorified hit-counter that lets me see what posts land better than others and it is very easily adblockable. In fact, go ahead and block Javascript on this site - if there's any feature I ever add that depends on it, there will always be an accessible ◊code{<noscript>}◊^[3] fallback if it actually matters to me.

I don't have any issue with that kind of technology, for what it's worth. You're only seeing how people use your site so you can figure out how you might tweak things, or understand what you need to do less of if you're scaring people away. It has practically nothing in common with the invasive tracking and advertising that follows you all across the internet, the likes that Google and Facebook involve themselves with at a scale beyond human comprehension.

Anyway, every page here is served with the ◊code{Permissions-Policy: interest-cohort=()} header set. There is a valid argument that this still presents a datapoint that can be tracked, but since the change is happening server-side, it is less useful than if you sent the same thing from your browser in every request, adding to your unique fingerprint (as with ◊code{Do-Not-Track}, an abject failure of a standard◊^[4]).

If you're curious, you can also check out the Security Headers report for this site◊^[5].

◊^[1]{◊<>["https://plausible.io/kamelasa.dev"]}
◊^[2]{◊<>["https://plausible.io/blog/google-floc"]}
◊^[3]{◊<>["https://developer.mozilla.org/en-US/docs/Web/HTML/Element/noscript"]}
◊^[4]{◊<>["https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/DNT"]}
◊^[5]{◊<>["https://securityheaders.com/?q=www.kamelasa.dev&followRedirects=on"]}
\ No newline at end of file

D posts/gettin-ziggy-with-it-pi-zero.md => posts/gettin-ziggy-with-it-pi-zero.md +0 -284
@@ 1,284 0,0 @@
---
title: Gettin' Ziggy With It On The Pi Zero
date: 2021-01-05
status: published
category: programming
---

Alright, you can read the article first and shoot me later for a title like that, and what will inevitably become a series of Zig-based puns.

Zig, for the unaware, is a fancy language that looks to be to C what Rust is to C++. Honestly, I recommend you read the summary on the main page[^1] to find out more yourself, as the best I can do is to just parrot what has already been written. However, you can see it as a valid _alternative_ to C and Zig itself has claimed that it wants to be a better version of C than C itself. An ambitious challenge, for sure. To that end, Zig itself ships its own C compiler.

I've been interested in giving Zig a spin for quite a while, and once my Raspberry Pi Zero W[^2] and OLED display[^3] arrived in the post, I decided that this would be my best opportunity to try it out. I'm not really going to cover the process of wiring up the hardware, suffice to say that once you've got your Pi Zero you'll need to be able to SSH into it, and that you'll need a [solderless] GPIO header[^4] to plug the OLED display into. I recommend the Zero **W** because the W means 'WiFi', which means that if you connect it to your network you can SSH in without faffing around with USB cables and what not. It's not a requirement, though.

With that out of the way, let's see if we can write something in Zig to power this little display. It's going to be a simple program that fills the entire screen by turning the pixels from black (off) to white (on). As an extra challenge, we will do this without pulling in dependencies like WiringPi[^5], or relying on existing drivers, as lovely as they are.

Instead, we will be directly using the i<sup>2</sup>c dev interface[^6]. If you're using Debian and/or Ubuntu on your Pi and your own machine, you can grab these libraries with a simple `sudo apt install i2c-dev`. You will need to enable i<sup>2</sup>c on your Pi separately though, through `sudo raspi-config`[^7].

Ready to... get Ziggy with it? Oh, I bet you are. 😋 If you want to skip to the end and just grab the code, though, you can find this all on GitHub[^8]. I called it Stardust, like *Zig*gy Stardust. Get it?

🥁

---

## Hello, Pi.

The first and most complicated part of any low-level project is the bit where you try and establish a build system of some sorts. We're going to forget about that completely for now and apply some elbow-grease to the situation.

The next step is to define a `main` function that grabs a file descriptor (or handle) corresponding to our OLED display. According to the aforementioned dev interface docs, we'll need to open a file and check it with `ioctl`.

```zig
const std = @import("std");
const fs = std.fs; // the code below refers to fs directly, so alias it here

const c = @cImport({
  @cInclude("linux/i2c.h");
  @cInclude("linux/i2c-dev.h");
  @cInclude("sys/ioctl.h");
});

const i2c_device = "/dev/i2c-1"; // this is assumed correct on a Pi Zero, but may be i2c-0 on an older Pi.
const i2c_addr: c_int = 0x3c; // this is typed as a C-style int for ABI compatibility with C

pub fn main() !void {
  const stdout = std.io.getStdOut().outStream();

  const fd = try fs.openFileAbsolute(i2c_device, fs.File.OpenFlags{ .write = true, .read = true });
  defer fd.close();

  if (c.ioctl(fd.handle, c.I2C_SLAVE, i2c_addr) < 0) {
    try stdout.print("ioctl failed, errno: {}\n", .{c.errno});
  }

  try stdout.print("Init successful.\n", .{});
}
```

You might have noticed something odd: we're not really writing much Zig here, it's practically 95% interop with C. The beauty of Zig is that this interop is so simple and intuitive that it's the _easiest_ way to get started if you're going to be linking against existing C libraries. Get the software working first, abstract it later, as they say, and you might already start to get an idea of what we could convert into idiomatic Zig libraries in future.

The actual Zig code you see, though, is quite different from the C stuff. That `defer fd.close()`, for example, _ensures_ that the file descriptor we opened up will be closed when we're done. If we don't do that, then it'll stay open and there'll be a leak.

There's also the `try` keyword, used in combination with the `!void` return type, which will be super familiar if you've written some Rust and have dealt with option types. It's shorthand for executing the code and returning early if it produces an error, with `!void` being another shorthand for `anyerror!void`, namely: this function returns either nothing, or an error if there is one.

What we've actually done, however, is open the device file `/dev/i2c-1`, and then used `ioctl` to specify which device in particular we want to talk to. You can find out this value by running `i2cdetect -y 1`, like so:

```
pi@raspberrypi:~ $ i2cdetect -y 1
     0  1  2  3  4  5  6  7  8  9  a  b  c  d  e  f
00:          -- -- -- -- -- -- -- -- -- -- -- -- --
10: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
20: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
30: -- -- -- -- -- -- -- -- -- -- -- -- 3c -- -- --
40: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
50: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
60: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
70: -- -- -- -- -- -- -- --
```

<aside>In my case, the device can be accessed at address `0x3C`, which is how I defined `i2c_addr` above.</aside>

We're at a good point now to try and compile this thing and then run it on the Pi. If we get the message 'Init successful.' then we're golden.

---

## Build and Push

Zig comes with a nice little build system out of the box, but we're not going to use it right now because it's still a work in progress. I'll leave that as an exercise for you, the reader, and I urge you to contribute any documentation you come up with back to Zig. Instead, we'll use the CLI, which is just as powerful and, helpfully, a bit more discoverable for our purposes.

Are you writing this code on the Pi itself? Probably not, I imagine, and nor do you need to.

> Cross-compiling is a first-class use case
>
> Andrew Kelley, Creator of Zig

Let's build a binary, then. Save your code into a file, say, `stardust.zig` and then proceed.

```console
zig build-exe stardust.zig  -target arm-linux-musleabihf -mcpu arm1176jzf_s -O ReleaseSafe -lc
```

To unpack that a little, the `-target` value is a target triple saying that we want to build for 32-bit ARM Linux against the musl[^9] libc ABI. `-mcpu` goes along with that to make sure the resulting binary will work on our Pi Zero. I grabbed these values from an issue on Zig's GitHub repo[^10], so credit goes to the author of that issue for unintentionally guiding me forward.

Passing the optimiser flag (`-O`) isn't strictly necessary, so you can omit this if you require a debug build and stack traces with errors.

`-lc` basically says that this binary needs to be linked against libc.

Once the build finishes, you should find a shiny new executable called `stardust` in the same directory as your code. You can get it onto your Pi with `scp`, like so:

```console
scp stardust pi@raspberrypi:~/stardust
```

<aside>You will need to change `pi@raspberrypi` to whatever else you've configured if you've changed the defaults.</aside>

SSH into your Pi after that, and try and run it! Does it return successfully? I hope so!

Let's move on and make this kitten purr. Meow 🐈.

---

## Getting this show on the road

In true _draw the rest of the fucking owl_ fashion[^11], what follows is a bit of a code-dump, since the primary method of communicating with your OLED display is to, literally, write a few bytes to a file. The registers available and what can be written to them are described in a meticulously detailed datasheet[^12], but that's not exactly light reading and we can save a bit of time by grabbing the info from elsewhere. A lot of the constants that follow are gratefully derived from those listed in a certain `owenosborn`'s WiringPi-based driver[^13]. Credit where credit's due, eh.

```zig
const SET_CONTRAST = 0x81;
const SET_DISPLAY_ALL_ON_RESUME = 0xA4;
const SET_DISPLAY_ALL_ON = 0xA5;
const SET_NORMAL_DISPLAY = 0xA6;
const SET_INVERT_DISPLAY = 0xA7;
const SET_DISPLAY_OFF = 0xAE;
const SET_DISPLAY_ON = 0xAF;
const SET_DISPLAY_OFFSET = 0xD3;
const SET_COLUMN_ADDR = 0x21;
const SET_PAGE_ADDR = 0x22;
const SET_COM_PINS = 0xDA;
const SET_VCOM_DETECT = 0xDB;
const SET_DISPLAY_CLOCK_FREQ = 0xD5;
const SET_PRECHARGE = 0xD9;
const SET_MULTIPLEX_RATIO = 0xA8;
const SET_LOW_COLUMN = 0x00;
const SET_HIGH_COLUMN = 0x10;
const SET_START_LINE = 0x40;
const SET_START_PAGE = 0xB0;
const SET_MEMORY_MODE = 0x20;
const SET_COM_SCAN_INC = 0xC0;
const SET_COM_SCAN_DEC = 0xC8;
const SET_SEG_REMAP = 0xA0;
const SET_CHARGE_PUMP = 0x8D;
```

The registers available to an i<sup>2</sup>c compatible device will depend on the device itself, so it's not really safe to copy and paste these without knowing exactly what you're dealing with. This is driver level code so it's not like you'll get some fancy validation error if you write the wrong bytes, you'll more likely fuck it up and burn down your house[^14].

Next we'll want to init the display and get it into a clean state, with the cursor pointing at the first pixel.

```zig
fn init_display(fd: fs.File) !void {
    const cmds = [_]u8{
        SET_MULTIPLEX_RATIO, 0x3F,                   0x00,
        SET_START_LINE,      SET_SEG_REMAP,          SET_COM_SCAN_DEC,
        SET_COM_PINS,        0x32,                   SET_DISPLAY_ALL_ON_RESUME,
        SET_NORMAL_DISPLAY,  SET_DISPLAY_CLOCK_FREQ, 0x80,
        SET_CHARGE_PUMP,     0x14,                   SET_MEMORY_MODE,
        0x20,
    };

    inline for (cmds) |cmd| {
        _ = try fd.write(&[2]u8{ 0x00, cmd });
    }
}

fn display_off(fd: fs.File) !void {
    _ = try fd.write(&[2]u8{ 0x00, SET_DISPLAY_OFF });
}

fn display_on(fd: fs.File) !void {
    _ = try fd.write(&[2]u8{ 0x00, SET_DISPLAY_ON });
}

fn reset_cursor(fd: fs.File) !void {
    const cmds = [_]u8{
        SET_COLUMN_ADDR,
        0x00,
        0x7F,
        SET_PAGE_ADDR,
        0x00,
        0x07,
    };

    inline for (cmds) |cmd| {
        _ = try fd.write(&[2]u8{ 0x00, cmd });
    }
}
```

Wow, actual Zig code! The formatting may look a little odd because that's what `zig fmt` decides is appropriate.

`init_display` is quite a complex beast that issues a whole series of commands to set up the display for further use. For the sake of brevity, a more detailed explanation will have to wait for another post, but in essence it was adapted from Adafruit's CircuitPython driver for the SSD1306, written in Python[^15].

The recurring theme in all of these new functions is that the entire basis of their existence is to create an array of two bytes and then write them to the file descriptor we opened right at the start. The data structure looks something like this:

```c
buf[0] = 0x00; // the register to be written to
buf[1] = 0x??; // the value to assign to that register
```

The file opened in `main` isn't a traditional file as you know it: it represents the i<sup>2</sup>c bus, so it can reach any device wired to the i<sup>2</sup>c pins on your Pi's GPIO header. Therefore, if you know the hardware at a low enough level, you can control all of them by writing the right bytes to the right register, at the right address.

The rest of the code, e.g. `reset_cursor`, resets the state of the display in such a way that you can write a pixel and the cursor will advance, linearly, to the next one.
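
If you wanted to give that pattern a name, you could wrap it in a couple of helpers. These aren't in the Stardust repo, just a sketch assuming the same file and `fs` alias as the code above, and they make the two control bytes explicit: `0x00` says "the next byte is a command", `0x40` says "the next byte is display data".

```zig
// Hypothetical helpers, not part of the repo: each call is one two-byte
// write to the i2c device file we opened in `main`.
fn write_cmd(fd: fs.File, cmd: u8) !void {
    _ = try fd.write(&[2]u8{ 0x00, cmd }); // 0x00: interpret `cmd` as a command
}

fn write_data(fd: fs.File, byte: u8) !void {
    _ = try fd.write(&[2]u8{ 0x40, byte }); // 0x40: interpret `byte` as pixel data
}
```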

```zig
fn fill(fd: fs.File) !void {
    var i: usize = 0;

    while (i < 1024) {
        _ = try fd.write(&[2]u8{ 0x40, 0xFF });
        i += 1;
    }
}
```

This `fill` function will (rather quickly) turn the display solid white, updating it one byte (that's eight pixels) at a time. Before we continue though, let's go through some more Zig specifics; namely, `inline`.

---

## A zig-a-Zig aaaahhhh...

<aside>Reach out to me at [pleasemakeitstop@mrlee.dev](mailto:pleasemakeitstop@mrlee.dev) if this is too much for you.</aside>

Zig has some nice language features intended to replace and improve upon C/C++ preprocessor macros. The `inline` keyword is one such thing: applied to a `for` or `while` loop, it unrolls the loop at compile time. A simple optimisation, but a useful one. There's also `comptime`, which we don't use here, but it's powerful enough to implement generics if you so desire. We're not going to go into that now, and you can read more about it from a certain Loris Cro[^16].
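
That said, just to show the shape of it, here's a minimal comptime sketch, completely separate from the display code; `maxOf` is a made-up example rather than anything Stardust needs.

```zig
const std = @import("std");

// The type itself is a compile-time parameter, which is how Zig expresses
// generics without preprocessor macros or templates.
fn maxOf(comptime T: type, a: T, b: T) T {
    return if (a > b) a else b;
}

pub fn main() void {
    // One definition, two concrete instantiations resolved at compile time.
    std.debug.print("{} {}\n", .{ maxOf(u8, 3, 7), maxOf(i32, -1, 2) });
}
```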

---

This post is getting pretty long-winded, and all I wanted to do was show how to set some pixels on a tiny display. Let's wrap this up then, since we're almost ready to recompile. Just one finishing touch, which is to call the functions we defined. Update `main` to look like this:

```zig
pub fn main() !void {
    const stdout = std.io.getStdOut().outStream();
    const fd = try fs.openFileAbsolute(i2c_device, fs.File.OpenFlags{ .write = true, .read = true });
    defer fd.close();

    if (c.ioctl(fd.handle, c.I2C_SLAVE, i2c_addr) < 0) {
        try stdout.print("ioctl failed, errno: {}\n", .{c.errno});
        return;
    }

    try stdout.print("init\n", .{});
    try display_off(fd);
    try init_display(fd);
    try display_on(fd);
    try reset_cursor(fd);

    try stdout.print("turn on\n", .{});
    try display_on(fd);

    try stdout.print("fill\n", .{});
    try fill(fd);
}
```

Once you're done, rebuild the binary and `scp` it over, like you did the first time. SSH into your Pi and run it again (i.e. `./stardust`), and see your display light up! 🥳

---

Hopefully that worked, but if it didn't, get in touch with your feedback at [wtf@mrlee.dev](mailto:wtf@mrlee.dev) and help contribute to this post being a better, more informative read. After all, _works on my machine!_ can only go so far.

[^1]: <https://ziglang.org>
[^2]: <https://thepihut.com/products/raspberry-pi-zero-w>
[^3]: <https://thepihut.com/products/adafruit-pioled-128x32-monochrome-oled-add-on-for-raspberry-pi-ada3527>
[^4]: <https://thepihut.com/products/gpio-hammer-header-solderless>
[^5]: <http://wiringpi.com/>
[^6]: <https://www.kernel.org/doc/Documentation/i2c/dev-interface>
[^7]: <https://learn.adafruit.com/adafruits-raspberry-pi-lesson-4-gpio-setup/configuring-i2c>
[^8]: <https://github.com/mrleedev/stardust>
[^9]: <https://musl.libc.org/>
[^10]: <https://github.com/ziglang/zig/issues/4875>
[^11]: <https://knowyourmeme.com/memes/how-to-draw-an-owl>
[^12]: <https://cdn-shop.adafruit.com/datasheets/SSD1306.pdf>
[^13]: <https://github.com/owenosborn/SSD1306-OLED-WiringPi/blob/master/ssd1306.h>
[^14]: Possibly exaggerated for effect. Possibly.
[^15]: <https://github.com/adafruit/Adafruit_CircuitPython_SSD1306/blob/master/adafruit_ssd1306.py>
[^16]: <https://kristoff.it/blog/what-is-zig-comptime/>


A posts/gettin-ziggy-with-it-pi-zero.poly.pm => posts/gettin-ziggy-with-it-pi-zero.poly.pm +284 -0
@@ 0,0 1,284 @@
#lang pollen

◊define-meta[title]{Gettin' Ziggy With It On The Pi Zero}
◊define-meta[date]{2021-01-05}
◊define-meta[published #t]
◊define-meta[category]{programming}

Alright, you can read the article first and shoot me later for a title like that, and what will inevitably become a series of Zig-based puns.

Zig, for the unaware, is a fancy language that looks to be to C what Rust is to C++. Honestly, I recommend you read the summary on the main page◊^[1] to find out more yourself, as the best I can do is to just parrot what has already been written. However, you can see it as a valid ◊em{alternative} to C and Zig itself has claimed that it wants to be a better version of C than C itself. An ambitious challenge, for sure. To that end, Zig itself ships its own C compiler.

I've been interested in giving Zig a spin for quite a while, and once my Raspberry Pi Zero W◊^[2] and OLED display◊^[3] arrived in the post, I decided that this would be my best opportunity to try it out. I'm not really going to cover the process of wiring up the hardware, suffice to say that once you've got your Pi Zero you'll need to be able to SSH into it, and that you'll need a [solderless] GPIO header◊^[4] to plug the OLED display into. I recommend the Zero ◊b{W} because the W means 'WiFi', which means that if you connect it to your network you can SSH in without faffing around with USB cables and what not. It's not a requirement, though.

With that out of the way, let's see if we can write something in Zig to power this little display. It's going to be a simple program that fills the entire screen by turning the pixels from black (off) to white (on). As an extra challenge, we will do this without pulling in dependencies like WiringPi◊^[5], or relying on existing drivers, as lovely as they are.

Instead, we will be directly using the i◊sup{2}c dev interface◊^[6]. If you're using Debian and/or Ubuntu on your Pi and your own machine, you can grab these libraries with a simple ◊code{sudo apt install i2c-dev}. You will need to enable i◊sup{2}c on your Pi separately though, through ◊code{sudo raspi-config}◊^[7].

Ready to... get Ziggy with it? Oh, I bet you are. 😋 If you want to skip to the end and just grab the code, though, you can find this all on GitHub◊^[8]. I called it Stardust, like ◊em{Zig}gy Stardust. Get it?

🥁

◊hr{}

◊h2{Hello, Pi.}

The first and most complicated part of any low-level project is the bit where you try and establish a build system of some sorts. We're going to forget about that completely for now and apply some elbow-grease to the situation.

The next step is to define a ◊code{main} function that grabs a file descriptor (or handle) corresponding to our OLED display. According to the aforementioned dev interface docs, we'll need to open a file and check it with ◊code{ioctl}.

◊codeblock['zig]{
  const std = @import("std");
  const fs = std.fs;

  const c = @cImport({
    @cInclude("linux/i2c.h");
    @cInclude("linux/i2c-dev.h");
    @cInclude("sys/ioctl.h");
  });

  const i2c_device = "/dev/i2c-1"; // this is assumed correct on a Pi Zero, but may be i2c-0 on an older Pi.
  const i2c_addr: c_int = 0x3c; // this is typed as a C-style int for ABI compatibility with C

  pub fn main() !void {
    const stdout = std.io.getStdOut().outStream();

    const fd = try fs.openFileAbsolute(i2c_device, fs.File.OpenFlags{ .write = true, .read = true });
    defer fd.close();

    if (c.ioctl(fd.handle, c.I2C_SLAVE, i2c_addr) < 0) {
      try stdout.print("ioctl failed, errno: {}\n", .{c.errno});
      return;
    }

    try stdout.print("Init successful.\n", .{});
  }
}

You might have noticed something odd: we're not really writing much Zig here, it's practically 95% interop with C. The beauty of Zig is that this interop is so simple and intuitive that it's the ◊em{easiest} way to get started if you're going to be linking against existing C libraries. Get the software working first, abstract it later, as they say, and you might already start to get an idea of what we could convert into idiomatic Zig libraries in future.

The actual Zig code you see though, is quite different to the C stuff. That ◊code{defer fd.close()}, for example, ◊em{ensures} that the file descriptor we opened up will be closed when we're done. If we don't do that, then it'll stay open and there'll be a leak.

There's also the ◊code{try} keyword, used in combination with the ◊code{!void} return type, which will feel super familiar if you've written some Rust and dealt with ◊code{Result} types and the ◊code{?} operator. It's shorthand for evaluating the expression and passing any error straight back up to the caller, with ◊code{!void} meaning: this function returns either nothing, or an error if there is one.

What we've actually done, however, is open the device file ◊code{/dev/i2c-1} and then use the ◊code{ioctl} system call to tell the kernel which device on the bus we want to talk to. You can find out this value by running ◊code{i2cdetect -y 1}, like so:

◊codeblock['text]{
  pi@raspberrypi:~ $ i2cdetect -y 1
      0  1  2  3  4  5  6  7  8  9  a  b  c  d  e  f
  00:          -- -- -- -- -- -- -- -- -- -- -- -- --
  10: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
  20: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
  30: -- -- -- -- -- -- -- -- -- -- -- -- 3c -- -- --
  40: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
  50: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
  60: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
  70: -- -- -- -- -- -- -- --
}

◊aside{
  In my case, the device can be accessed at address ◊code{0x3C}, which is how I defined ◊code{i2c_addr} above.
}

We're at a good point now to try and compile this thing and then run it on the Pi. If we get the message 'Init successful.' then we're golden.

◊hr{}

◊h2{Build and Push}

Zig comes with a nice little build system out of the box, but we're not going to use it right now because it's still a work in progress. I'll leave that as an exercise for you, the reader, and I urge you to contribute any documentation you come up with back to Zig. Instead, we'll use the CLI, which is just as powerful and, helpfully, a bit more discoverable for our purposes.

Are you writing this code on the Pi itself? Probably not, I imagine, and nor do you need to.

◊q["Andrew Kelley" 2020]{Cross-compiling is a first-class use case}

Let's build a binary, then. Save your code into a file, say, ◊code{stardust.zig} and then proceed.

◊codeblock['bash]{
  zig build-exe stardust.zig  -target arm-linux-musleabihf -mcpu arm1176jzf_s -O ReleaseSafe -lc
}

To unpack that a little, the ◊code{-target} value is a target triple saying that we want to build for 32-bit ARM Linux against the musl◊^[9] libc ABI. ◊code{-mcpu} goes along with that to make sure the resulting binary will work on our Pi Zero. I grabbed these values from an issue on Zig's GitHub repo◊^[10], so credit goes to the author of that issue for unintentionally guiding me forward.

Passing the optimiser flag (◊code{-O}) isn't strictly necessary, so you can omit this if you require a debug build and stack traces with errors.

◊code{-lc} basically says that this binary needs to be linked against libc.

Once the build finishes, you should find a shiny new executable called ◊code{stardust} in the same directory as your code. You can get it onto your Pi with ◊code{scp}, like so:

◊codeblock['bash]{
  scp stardust pi@raspberrypi:~/stardust
}

◊aside{
  You will need to change ◊code{pi@raspberrypi} to whatever else you've configured if you've changed the defaults.
}

SSH into your Pi after that, and try and run it! Does it return successfully? I hope so!

Let's move on and make this kitten purr. Meow 🐈.

◊hr{}

◊h2{Getting this show on the road}

In true ◊em{draw the rest of the fucking owl} fashion◊^[11], what follows is a bit of a code-dump, since the primary method of communicating with your OLED display is to, literally, write a few bytes to a file. The registers available and what can be written to them are described in a meticulously detailed datasheet◊^[12], but that's not exactly light reading and we can save a bit of time by grabbing the info from elsewhere. A lot of the constants that follow are gratefully derived from those listed in a certain ◊code{owenosborn}'s WiringPi-based driver◊^[13]. Credit where credit's due, eh.

◊codeblock['zig]{
  const SET_CONTRAST = 0x81;
  const SET_DISPLAY_ALL_ON_RESUME = 0xA4;
  const SET_DISPLAY_ALL_ON = 0xA5;
  const SET_NORMAL_DISPLAY = 0xA6;
  const SET_INVERT_DISPLAY = 0xA7;
  const SET_DISPLAY_OFF = 0xAE;
  const SET_DISPLAY_ON = 0xAF;
  const SET_DISPLAY_OFFSET = 0xD3;
  const SET_COLUMN_ADDR = 0x21;
  const SET_PAGE_ADDR = 0x22;
  const SET_COM_PINS = 0xDA;
  const SET_VCOM_DETECT = 0xDB;
  const SET_DISPLAY_CLOCK_FREQ = 0xD5;
  const SET_PRECHARGE = 0xD9;
  const SET_MULTIPLEX_RATIO = 0xA8;
  const SET_LOW_COLUMN = 0x00;
  const SET_HIGH_COLUMN = 0x10;
  const SET_START_LINE = 0x40;
  const SET_START_PAGE = 0xB0;
  const SET_MEMORY_MODE = 0x20;
  const SET_COM_SCAN_INC = 0xC0;
  const SET_COM_SCAN_DEC = 0xC8;
  const SET_SEG_REMAP = 0xA0;
  const SET_CHARGE_PUMP = 0x8D;
}

The registers available to an i◊sup{2}c compatible device will depend on the device itself, so it's not really safe to copy and paste these without knowing exactly what you're dealing with. This is driver level code so it's not like you'll get some fancy validation error if you write the wrong bytes, you'll more likely fuck it up and burn down your house◊^[14].

Next we'll want to init the display and get it into a clean state, with the cursor pointing at the first pixel.

◊codeblock['zig]{
  fn init_display(fd: fs.File) !void {
      const cmds = [_]u8{
          SET_MULTIPLEX_RATIO, 0x3F,                   0x00,
          SET_START_LINE,      SET_SEG_REMAP,          SET_COM_SCAN_DEC,
          SET_COM_PINS,        0x32,                   SET_DISPLAY_ALL_ON_RESUME,
          SET_NORMAL_DISPLAY,  SET_DISPLAY_CLOCK_FREQ, 0x80,
          SET_CHARGE_PUMP,     0x14,                   SET_MEMORY_MODE,
          0x20,
      };

      inline for (cmds) |cmd| {
          _ = try fd.write(&[2]u8{ 0x00, cmd });
      }
  }

  fn display_off(fd: fs.File) !void {
      _ = try fd.write(&[2]u8{ 0x00, SET_DISPLAY_OFF });
  }

  fn display_on(fd: fs.File) !void {
      _ = try fd.write(&[2]u8{ 0x00, SET_DISPLAY_ON });
  }

  fn reset_cursor(fd: fs.File) !void {
      const cmds = [_]u8{
          SET_COLUMN_ADDR,
          0x00,
          0x7F,
          SET_PAGE_ADDR,
          0x00,
          0x07,
      };

      inline for (cmds) |cmd| {
          _ = try fd.write(&[2]u8{ 0x00, cmd });
      }
  }
}

Wow, actual Zig code! The formatting may look a little odd because that's what ◊code{zig fmt} decides is appropriate.

◊code{init_display} is quite a complex beast that issues a whole series of commands to set up the display for further use. For the sake of brevity, a more detailed explanation will have to wait for another post, but in essence it was adapted from Adafruit's CircuitPython driver for the SSD1306, written in Python◊^[15].

The recurring theme in all of these new functions is that the entire basis of their existence is to create an array of two bytes and then write them to the file descriptor we opened right at the start. The data structure looks something like this:

◊codeblock['c]{
buf[0] = 0x00; // the register to be written to
buf[1] = 0x??; // the value to assign to that register
}

The file opened in ◊code{main} isn't a traditional file as you know it: it represents the i◊sup{2}c bus, so it can reach any device wired to the i◊sup{2}c pins on your Pi's GPIO header. Therefore, if you know the hardware at a low enough level, you can control all of them by writing the right bytes to the right register, at the right address.

The rest of the code, e.g. ◊code{reset_cursor}, resets the state of the display in such a way that you can write a pixel and the cursor will advance, linearly, to the next one.

◊codeblock['zig]{
  fn fill(fd: fs.File) !void {
      var i: usize = 0;

      while (i < 1024) {
          _ = try fd.write(&[2]u8{ 0x40, 0xFF });
          i += 1;
      }
  }
}

This ◊code{fill} function will (rather quickly) turn the display solid white, updating it one byte (that's eight pixels) at a time. Before we continue though, let's go through some more Zig specifics; namely, ◊code{inline}.

◊hr{}

◊h2{A zig-a-Zig aaaahhhh...}

◊aside{
  Reach out to me at pleasemakeitstop@mrlee.dev if this is too much for you.
}

Zig has some nice language features intended to replace and improve upon C/C++ preprocessor macros. The ◊code{inline} keyword is one such thing: applied to a ◊code{for} or ◊code{while} loop, it unrolls the loop at compile time. A simple optimisation, but a useful one. There's also ◊code{comptime}, which we don't use here, but it's powerful enough to implement generics if you so desire. We're not going to go into that now, and you can read more about it from a certain Loris Cro◊^[16].

◊hr{}

This post is getting pretty long-winded, and all I wanted to do was show how to set some pixels on a tiny display. Let's wrap this up then, since we're almost ready to recompile. Just one finishing touch, which is to call the functions we defined. Update ◊code{main} to look like this:

◊codeblock['zig]{
  pub fn main() !void {
      const stdout = std.io.getStdOut().outStream();
      const fd = try fs.openFileAbsolute(i2c_device, fs.File.OpenFlags{ .write = true, .read = true });
      defer fd.close();

      if (c.ioctl(fd.handle, c.I2C_SLAVE, i2c_addr) < 0) {
          try stdout.print("ioctl failed, errno: {}\n", .{c.errno});
          return;
      }

      try stdout.print("init\n", .{});
      try display_off(fd);
      try init_display(fd);
      try display_on(fd);
      try reset_cursor(fd);

      try stdout.print("turn on\n", .{});
      try display_on(fd);

      try stdout.print("fill\n", .{});
      try fill(fd);
  }
}

Once you're done, rebuild the binary and ◊code{scp} it over, like you did the first time. SSH into your Pi and run it again (i.e. ◊code{./stardust}), and see your display light up! 🥳

◊hr{}

Hopefully that worked, but if it didn't, get in touch with your feedback at wtf@mrlee.dev and help contribute to this post being a better, more informative read. After all, ◊em{works on my machine!} can only go so far.

◊^[1]{◊<>["https://ziglang.org"]}
◊^[2]{◊<>["https://thepihut.com/products/raspberry-pi-zero-w"]}
◊^[3]{◊<>["https://thepihut.com/products/adafruit-pioled-128x32-monochrome-oled-add-on-for-raspberry-pi-ada3527"]}
◊^[4]{◊<>["https://thepihut.com/products/gpio-hammer-header-solderless"]}
◊^[5]{◊<>["http://wiringpi.com"]}
◊^[6]{◊<>["https://www.kernel.org/doc/Documentation/i2c/dev-interface"]}
◊^[7]{◊<>["https://learn.adafruit.com/adafruits-raspberry-pi-lesson-4-gpio-setup/configuring-i2c"]}
◊^[8]{◊<>["https://github.com/mrleedev/stardust"]}
◊^[9]{◊<>["https://musl.libc.org"]}
◊^[10]{◊<>["https://github.com/ziglang/zig/issues/4875"]}
◊^[11]{◊<>["https://knowyourmeme.com/memes/how-to-draw-an-owl"]}
◊^[12]{◊<>["https://cdn-shop.adafruit.com/datasheets/SSD1306.pdf"]}
◊^[13]{◊<>["https://github.com/owenosborn/SSD1306-OLED-WiringPi/blob/master/ssd1306.h"]}
◊^[14]{Possibly exaggerated for effect. Possibly.}
◊^[15]{◊<>["https://github.com/adafruit/Adafruit_CircuitPython_SSD1306/blob/master/adafruit_ssd1306.py"]}
◊^[16]{◊<>["https://kristoff.it/blog/what-is-zig-comptime/"]}
\ No newline at end of file

R posts/growing-up.md => posts/growing-up.poly.pm +8 -8
@@ 1,17 1,17 @@
---
title: Growing up
date: 2021-05-17
status: published
category: personal
---
#lang pollen

◊define-meta[title]{Growing up}
◊define-meta[date]{2021-05-17}
◊define-meta[published #t]
◊define-meta[category]{personal}

Way back in 2002, or perhaps 2003, I'd acquired a copy of Macromedia Dreamweaver and started playing about with HTML. The timeline is a bit blurry, considering that it's almost twenty years ago. CSS wasn't really a thing yet, though, and IE6 was still the main browser of choice. Eventually CSS became a thing and one of my first real 'projects' was to create a user style that hid all the ads on the Gamesradar forum I used to spend time on. You had to switch to Firefox and then dump the CSS in your 'profile' folder; it was a total hack and there was more documentation than code.

I didn't realise that I'd be setting myself up for a career in programming, in fact I remained blissfully ignorant of this prospect even as I bought books like _MySQL 5.5 for Dummies_ and _PHP 4 For Dummies_. All I wanted to do was show the current time and date on the page and that quite quickly escalated into building a custom blog, using a database to store the posts. That then snowballed into messing with web frameworks like CakePHP and CodeIgniter.
I didn't realise that I'd be setting myself up for a career in programming, in fact I remained blissfully ignorant of this prospect even as I bought books like ◊em{MySQL 5.5 for Dummies} and ◊em{PHP 4 For Dummies}. All I wanted to do was show the current time and date on the page and that quite quickly escalated into building a custom blog, using a database to store the posts. That then snowballed into messing with web frameworks like CakePHP and CodeIgniter.

I actually didn't want a career in web development, I didn't really know what I wanted. It sounded like 'working with computers' and my mum in particular was pressuring me a lot with that. To her credit, she was technically correct, although I think she was more keen about me doing more hands-on work, like building and repairing computers, so there'd be someone in the family who could do it.

Despite this, and my rebellious teenage opposition to doing anything that wasn't _my_ idea, I'd spent years unknowingly educating myself as I spent time on various web dev forums offering advice, answering questions, and asking questions of my own. Stack Overflow came and many of those forums faded into obscurity, which is a shame since Stack Overflow preferred clear, unambiguous answers, and with a forum you could engage in a conversation around a problem and more organically hash out a solution. The lesson wasn't in the solution really, but in the discussion that preceded it, and it allowed me to develop an intuition.
Despite this, and my rebellious teenage opposition to doing anything that wasn't ◊em{my} idea, I'd spent years unknowingly educating myself as I spent time on various web dev forums offering advice, answering questions, and asking questions of my own. Stack Overflow came and many of those forums faded into obscurity, which is a shame since Stack Overflow preferred clear, unambiguous answers, and with a forum you could engage in a conversation around a problem and more organically hash out a solution. The lesson wasn't in the solution really, but in the discussion that preceded it, and it allowed me to develop an intuition.

After some time doing small-scale freelance work, managing phpBB instances and such like, I landed my first job for a little web marketing company that made websites for local businesses. It was mostly a case of repackaging Drupal apps with some theme on top, but we got given other project work too. For some reason that also had to be done in Drupal. I never wanted to work with Drupal again after all that. However, this was the first time I'd worked with another web developer, let alone a team, face to face in an office.


R posts/hakyll-on-devops-pipelines.md => posts/hakyll-on-devops-pipelines.poly.pm +144 -136
@@ 1,182 1,190 @@
---
title: Hakyll on DevOps Pipelines
date: 2020-08-18
status: published
category: programming
---
#lang pollen

◊define-meta[title]{Hakyll on DevOps Pipelines}
◊define-meta[date]{2020-08-18}
◊define-meta[published #t]
◊define-meta[category]{programming}

In a way, this is total overkill for a static site. If I have the repo cloned on my machine and I want to publish a new post, I can do it in two commands:

```bash
stack exec site build
scp -r _site/ deploy@mrlee.dev:/var/www/www.mrlee.dev/
```
◊codeblock['bash]{
  stack exec site build
  scp -r _site/ deploy@mrlee.dev:/var/www/www.mrlee.dev/
}

It's flawed compared to using `rsync`, as it won't remove existing files, but it does the job in less than a second or two.
It's flawed compared to using ◊code{rsync}, as it won't remove existing files, but it does the job in less than a second or two.

The thing is, this isn't so quick if I want to publish a post from a different computer that doesn't have any programming tools installed. I would have to install `stack`[^1], which is a build tool for Haskell, and then I would have to run `stack build`. This can take at least half an hour as the command will pull down the correct version of `GHC` and a 'snapshot' (basically a huge collection of all the Hackage[^2] libraries available for that build) before it even _thinks_ about compiling my `site.hs` file. It also means committing a few gigs of storage space for all of that.
The thing is, this isn't so quick if I want to publish a post from a different computer that doesn't have any programming tools installed. I would have to install ◊code{stack}◊^[1], which is a build tool for Haskell, and then I would have to run ◊code{stack build}. This can take at least half an hour as the command will pull down the correct version of ◊code{GHC} and a 'snapshot' (basically a huge collection of all the Hackage◊^[2] libraries available for that build) before it even ◊em{thinks} about compiling my ◊code{site.hs} file. It also means committing a few gigs of storage space for all of that.

I like to write from my little Surface Pro when I'm out and about, so I'd rather not do a full-blown compilation on that for the sake of my battery. Enter Azure DevOps Pipelines[^3].
I like to write from my little Surface Pro when I'm out and about, so I'd rather not do a full-blown compilation on that for the sake of my battery. Enter Azure DevOps Pipelines◊^[3].

I've been keen on playing with these pipelines for a while, and much like any dev-tool, it has a free tier for open source repos. So does Github Actions[^4], which actually shares some of the underlying architecture of DevOps Pipelines, but I wanted to play with something different.
I've been keen on playing with these pipelines for a while, and much like any dev-tool, it has a free tier for open source repos. So does Github Actions◊^[4], which actually shares some of the underlying architecture of DevOps Pipelines, but I wanted to play with something different.

Let's do a step-by-step walk through my setup.

----------
◊hr{}

```yaml
trigger:
  - master
pool:
  vmImage: 'ubuntu-latest'
```
◊codeblock['yaml]{
  trigger:
    - master
  pool:
    vmImage: 'ubuntu-latest'
}

This is pretty much CI boilerplate. The build will run on any PR that targets `master`, and it uses Ubuntu as the underlying image. I'm not doing any Docker stuff here.
This is pretty much CI boilerplate. The build will run on any PR that targets ◊code{master}, and it uses Ubuntu as the underlying image. I'm not doing any Docker stuff here.

```yaml
jobs:
- job: build
  steps: ...
```
◊codeblock['yaml]{
  jobs:
  - job: build
    steps: ...
}

I only have a couple of jobs in this pipeline, to keep it simple. The next bunch of steps are nested under this.

```yaml
- script: |
      mkdir -p ~/.local/bin $(Build.BinariesDirectory)
      curl -L https://get.haskellstack.org/stable/linux-x86_64.tar.gz | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack'
  displayName: Install Stack
```
◊codeblock['yaml]{
  - script: |
        mkdir -p ~/.local/bin $(Build.BinariesDirectory)
        curl -L https://get.haskellstack.org/stable/linux-x86_64.tar.gz | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack'
    displayName: Install Stack
}

Won't get far without grabbing the latest stable Stack binary.

```yaml
- task: Cache@2
  displayName: Cache Stack/GHC snapshot
  inputs:
    key: 'stack | root'
    path: .stack/
    cacheHitVar: 'STACK_SNAPSHOT_RESTORED'
```

Later on there will be a step that runs `stack build`, which will take about 40 minutes in CI. It would be a waste to repeatedly download all of that, so I'm caching the root stack folder for good measure. The `cacheHitVar` is something we will reference later.

```yaml
- task: Cache@2
  displayName: Cache local stack deps
  inputs:
    key: 'stack | stack.yaml.lock'
    path: .stack-work/
    cacheHitVar: 'STACK_DEPS_RESTORED'
```
◊codeblock['yaml]{
  - task: Cache@2
    displayName: Cache Stack/GHC snapshot
    inputs:
      key: 'stack | root'
      path: .stack/
      cacheHitVar: 'STACK_SNAPSHOT_RESTORED'
}

Later on there will be a step that runs ◊code{stack build}, which will take about 40 minutes in CI. It would be a waste to repeatedly download all of that, so I'm caching the root stack folder for good measure. The ◊code{cacheHitVar} is something we will reference later.

◊codeblock['yaml]{
  - task: Cache@2
    displayName: Cache local stack deps
    inputs:
      key: 'stack | stack.yaml.lock'
      path: .stack-work/
      cacheHitVar: 'STACK_DEPS_RESTORED'
}

This is the same as the last step, but it's for the dependencies my static site requires. I want to cache these separately so adding a new project dependency doesn't force a full refresh of the Stack snapshot.

```yaml
- script: |
      export PATH=$HOME/.local/bin:$PATH
      stack --no-terminal --stack-root $(System.DefaultWorkingDirectory)/.stack setup
  displayName: Build Snapshot
  condition: ne(variables.STACK_SNAPSHOT_RESTORED, 'true')
```
◊codeblock['yaml]{
  - script: |
        export PATH=$HOME/.local/bin:$PATH
        stack --no-terminal --stack-root $(System.DefaultWorkingDirectory)/.stack setup
    displayName: Build Snapshot
    condition: ne(variables.STACK_SNAPSHOT_RESTORED, 'true')
}

Notice the `STACK_SNAPSHOT_RESTORED` condition at the bottom there? This step sets up GHC and the Stack snapshot, but only if one wasn't restored from the cache. If the cache has it, then it will have already been fetched.
Notice the ◊code{STACK_SNAPSHOT_RESTORED} condition at the bottom there? This step sets up GHC and the Stack snapshot, but only if one wasn't restored from the cache. If the cache has it, then it will have already been fetched.

```yaml
- script: |
      export PATH=$HOME/.local/bin:$PATH
      stack --no-terminal --stack-root  $(System.DefaultWorkingDirectory)/.stack build
  displayName: Build Dependencies
  condition: ne(variables.STACK_DEPS_RESTORED, 'true')
```
◊codeblock['yaml]{
  - script: |
        export PATH=$HOME/.local/bin:$PATH
        stack --no-terminal --stack-root  $(System.DefaultWorkingDirectory)/.stack build
    displayName: Build Dependencies
    condition: ne(variables.STACK_DEPS_RESTORED, 'true')
}

This is the same as above, but for the project dependencies. So far so good. We're almost done now.

```yaml
- script: |
      export PATH=$HOME/.local/bin:$PATH
      stack --no-terminal --stack-root $(System.DefaultWorkingDirectory)/.stack install --local-bin-path $(Build.BinariesDirectory)
  displayName: Build Site Executable
```
◊codeblock['yaml]{
  - script: |
        export PATH=$HOME/.local/bin:$PATH
        stack --no-terminal --stack-root $(System.DefaultWorkingDirectory)/.stack install --local-bin-path $(Build.BinariesDirectory)
    displayName: Build Site Executable
}

Since I've already run `stack build`, this just copies the binary to a different location, which I use to store it as a build artifact. `Build.BinariesDirectory` is a special place on the VM to store compiled build artifacts. It doesn't matter where specifically that is, only that it's the same across steps.
Since I've already run ◊code{stack build}, this just copies the binary to a different location, which I use to store it as a build artifact. ◊code{Build.BinariesDirectory} is a special place on the VM to store compiled build artifacts. It doesn't matter where specifically that is, only that it's the same across steps.

```yaml
- task: PublishBuildArtifacts@1
  displayName: Save static site binary
  inputs:
    pathToPublish: $(Build.BinariesDirectory)
    artifactName: site
```
◊codeblock['yaml]{
  - task: PublishBuildArtifacts@1
    displayName: Save static site binary
    inputs:
      pathToPublish: $(Build.BinariesDirectory)
      artifactName: site
}

This is where that binaries directory comes into play, as I can tell Azure to upload everything in there as a build artifact, which I can then refer to in another job. This isn't quite the same as a cache, as a build is not expected to fail if the cache goes missing. It would fail if the binary isn't there though.

So, that's the first step done, but what about actually publishing a post? I have two jobs for that, which are very similar (one for draft posts/staging, one for prod). I'll describe one of them.

```yaml
- job: deploy_published
  dependsOn: build
  condition: and(succeeded(), eq(variables['build.sourceBranchName'], 'master'))
  steps: ...
```
◊codeblock['yaml]{
  - job: deploy_published
    dependsOn: build
    condition: and(succeeded(), eq(variables['build.sourceBranchName'], 'master'))
    steps: ...
}

The key to this step is the condition. This will run only if the `build` job was successful, *and* the branch being built is the master branch. Practically, this only runs if I push straight to master or merge a PR. The staging version runs only on PRs.
The key to this step is the condition. This will run only if the ◊code{build} job was successful, ◊em{and} the branch being built is the master branch. Practically, this only runs if I push straight to master or merge a PR. The staging version runs only on PRs.

```yaml
- task: DownloadBuildArtifacts@0
  displayName: Download site binary
  inputs:
    artifactName: site
    downloadPath: $(System.DefaultWorkingDirectory)
```
◊codeblock['yaml]{
  - task: DownloadBuildArtifacts@0
    displayName: Download site binary
    inputs:
      artifactName: site
      downloadPath: $(System.DefaultWorkingDirectory)
}

Time to put that binary I compiled to good use. It downloads it into the main working directory and I'll call it directly in a later step. The executable is self-contained (or otherwise dynamically links stuff the image already has), so I don't need to pull down Stack/GHC stuff again.

```yaml
- script: |
      export PATH=$(System.DefaultWorkingDirectory)/site:$PATH
      chmod +x $(System.DefaultWorkingDirectory)/site/site
      site build
  displayName: Build with published posts
```
◊codeblock['yaml]{
  - script: |
        export PATH=$(System.DefaultWorkingDirectory)/site:$PATH
        chmod +x $(System.DefaultWorkingDirectory)/site/site
        site build
    displayName: Build with published posts
}

This is the same as running `stack exec site build` on my local machine. It compiles the static site, so finally I'll have a new version to upload.
This is the same as running ◊code{stack exec site build} on my local machine. It compiles the static site, so finally I'll have a new version to upload.

```yaml
- task: InstallSSHKey@0
  displayName: Setup SSH
  inputs:
    knownHostsEntry: '$(NexusKnownHost)'
    sshKeySecureFile: 'nexus_deploy'
```
◊codeblock['yaml]{
  - task: InstallSSHKey@0
    displayName: Setup SSH
    inputs:
      knownHostsEntry: '$(NexusKnownHost)'
      sshKeySecureFile: 'nexus_deploy'
}

I host this blog on my own little VPS, which means that the server needs to know that the CI is authorised to connect to it with its SSH key. This is the same as having a deploy key on GitHub, and requires generating a keypair to be stored in CI, with the public key being added to your `authorized_keys` file of the appropriate user on the server.
I host this blog on my own little VPS, which means that the server needs to know that the CI is authorised to connect to it with its SSH key. This is the same as having a deploy key on GitHub, and requires generating a keypair to be stored in CI, with the public key being added to your ◊code{authorized_keys} file of the appropriate user on the server.

_(At this point I'll say that if you're doing this yourself, make sure to properly harden your server. I'll describe this more in a follow-up post.)_
◊aside{
  At this point I'll say that if you're doing this yourself, make sure to properly harden your server. I'll describe this more in a follow-up post.
}

There's only step left now, and that's to deploy!

```yaml
- task: CopyFilesOverSSH@0
  displayName: Deploy to prod
  inputs:
    sshEndpoint: 'Nexus'
    sourceFolder: '_site/'
    contents: '**'
    targetFolder: '/var/www/www.mrlee.dev'
    cleanTargetFolder: true
    readyTimeout: '20000'
```

This is similar to running `rsync` to deploy, except that it knows where to get your private key from and where to connect to. This is defined elsewhere in Azure DevOps, through the UI, rather than in the YAML file.

To solve the issue I first mentioned, `cleanTargetFolder` makes sure to delete the previous deployment before copying the new one over. Problem solved!

To see the pipeline in full, you can check out the full YAML file[^5] and also the public builds[^6]. I've been using it with success for the past couple of weeks now.

[^1]: <https://docs.haskellstack.org/en/stable/README/> 
[^2]: <https://hackage.haskell.org/>
[^3]: <https://dev.azure.com/>
[^4]: <https://github.com/features/actions>
[^5]: <https://github.com/mrleedev/www.mrlee.dev/blob/master/azure/pipeline.yml>
[^6]: <https://dev.azure.com/mrleedev/www.mrlee.dev/_build/results?buildId=115>
\ No newline at end of file
◊codeblock['yaml]{
  - task: CopyFilesOverSSH@0
    displayName: Deploy to prod
    inputs:
      sshEndpoint: 'Nexus'
      sourceFolder: '_site/'
      contents: '**'
      targetFolder: '/var/www/www.mrlee.dev'
      cleanTargetFolder: true
      readyTimeout: '20000'
}

This is similar to running ◊code{rsync} to deploy, except that it knows where to get your private key from and where to connect to. This is defined elsewhere in Azure DevOps, through the UI, rather than in the YAML file.

To solve the issue I first mentioned, ◊code{cleanTargetFolder} makes sure to delete the previous deployment before copying the new one over. Problem solved!

To see the pipeline in full, you can check out the full YAML file◊^[5]. I've been using it with success for the past couple of weeks now.

◊^[1]{◊<>["https://docs.haskellstack.org/en/stable/README"]}
◊^[2]{◊<>["https://hackage.haskell.org"]}
◊^[3]{◊<>["https://dev.azure.com"]}
◊^[4]{◊<>["https://github.com/features/actions"]}
◊^[5]{◊<>["https://git.sr.ht/~mrlee/www.kamelasa.dev/tree/9decaf4732dc7bc4510fcf23979af8657bdb01bd/item/azure/pipeline.yml"]}

\ No newline at end of file

D posts/hardening-your-server.md => posts/hardening-your-server.md +0 -62
@@ 1,62 0,0 @@
---
title: Hardening your server
date: 2020-08-30
status: draft
category: devops
---

There was a time before Docker, Kubernetes (K8s for short), and cloud providers when the standard way of deploying your application to production typically involved buying a VPS or two from Linode[^1], Rackspace[^2], or similar. In some cases you might even buy some dedicated hardware. Of course, for many businesses that don't use AWS or Azure, that's still the case.

Using something like Docker or K8S for your own small server is taking overkill to a whole new level, and there are still so many use-cases where you are much better off with your £3/month VPS from Digital Ocean[^3] or some other local provider. Mine is run by Hostworld UK[^4]. Of course, the danger here is that your vanilla OS install isn't going to be well protected against the internet, which will leave it vulnerable to attack. Most likely it would become part of a botnet, used to mine crypto, or otherwise your blogging software might be injected with Javascript-based malware to be delivered to your unwitting readers. Not good.

Luckily enough, there are a few simple steps you can take to bring your new server up to the bare minimum standard of security, although of course this will not be the end of the story for you. Keep in mind that any new software you add to a machine, or any of your own code that you deploy, can open up its own holes in the system. It's not limited to remote code execution (e.g. being able to run shell scripts or execute malicious files because of a vulnerability), but also SQL injection, cross-site scripting (XSS), denial of service (DoS), and so on.

Let's get started, anyway. I'm writing this with Ubuntu in mind as, with all due respect, it tends to be the lowest common denominator of Linux distros these days. Aside from certain installation instructions, such as using `apt` to install software, the meat of this post should be distro agnostic.

You should do these steps roughly in order, as in some cases you will end up locked out of the machine and will need to provision a fresh image with your host.

---

## Create a non-root user

Keeping the root user account open to the world, with only a password to protect it, is asking for trouble. By default your server isn't going to limit the number of attempts to log in, so it's a prime target for a brute-force attack. Once an attacker has root, the game is over.

So, let's first make an account for ourselves and then give it the ability to run `sudo`, which will allow for temporary root privileges:

```bash
adduser my_name  # prompts for a password and creates a home directory
usermod -a -G sudo my_name
```

Make sure to choose a secure password! This will be used to invoke `sudo`, but will **not** be used for logging in.

## Generate an SSH key on your machine

Many people will advocate using more than one SSH key, for the different services you use (Github, your personal server, whatever else...). I think it's a good idea, and we can tweak our SSH client so we don't have to remember which key is being used when connecting to the server.

This key is going to be used to log in _interactively_ to another server, which is another way of saying you'll have a shell to do whatever you want in, and it's not like running `git push` and forgetting about it. To be safe, you should give this key a password, and make it different to the one you used for your account on the server.

```bash
ssh-keygen -f ~/.ssh/my_server
```

You should now have two new files in your `.ssh` directory: `my_server` and `my_server.pub`. The latter one ending in `.pub` is the one we can share publicly, and is the one this new server needs to know about. Go ahead and upload it:

```bash
ssh-copy-id -i ~/.ssh/my_server my_name@my_server
```

It'll ask for your password before uploading. After that, you can try logging into the server again (`ssh -i ~/.ssh/my_server my_name@my_server`) and if you get in using only the password used for your key (if you did use one), then you're good!

## Disable root access and password login

The only time you should require a password when dealing with your server is when you need `sudo`. And you shouldn't depend on direct root access at all in the long run.

In order to achieve that, we can enforce the usage of public key authentication by disabling password login, and we can also disable the ability to log in as the root user.

This requires editing a file. Let's do it in `nano` for the sake of simplicity.

```bash
sudo nano /etc/ssh/sshd_config
```


R posts/human-after-all.md => posts/human-after-all.poly.pm +13 -13
@@ 1,15 1,15 @@
---
title: Human after all
date: 2021-02-20
status: published
category: culture
---
#lang pollen

◊define-meta[title]{Human after all}
◊define-meta[date]{2021-02-20}
◊define-meta[published #t]
◊define-meta[category]{culture}

I'm sure everybody goes through the 'angry programmer' phase at some point in their career. I'm sure that some people enter it and never leave. The angry programmer looks at code and wonders what kind of dumbass would write something so stupid. The angry programmer assumes a codebase to be a load of shit by default.

What I've just done is create an archetype of a person that reduces them to a few simplistic characteristics that might sound amusing if you relate to it, but can just as easily be used as a label to diminish your opinion of someone.

The internet is full of articles that treat interpersonal dynamics at work like a game of Dungeons and Dragons; and the goal of the campaign is for you and your colleagues to seek out and destroy the toxic elements in your team. God forbid you identify The Two-Face[^1], or The Sociopath[^2].
The internet is full of articles that treat interpersonal dynamics at work like a game of Dungeons and Dragons; and the goal of the campaign is for you and your colleagues to seek out and destroy the toxic elements in your team. God forbid you identify The Two-Face◊^[1], or The Sociopath◊^[2].

You might say that these shouldn't be taken so seriously, and to an extent you'd be correct. They're just puff pieces, after all, right? And you can have a self-deprecating chuckle about if you feel like you identify with the archetypes described.



@@ 27,15 27,15 @@ Realistically, everyone will have some combination of all those negative traits 

Whew, better bring this back to the topic I had in mind before I go off on another tangent.

**Compassion**.
◊strong{Compassion}.

I like compassion. Actually no, I _love_ it. It's an amazing word that can mean many things to many people, but I like to think it's what gives this world the soul it has; it's certainly responsible for a lot of good. The Compassionate Mind[^3] dedicates over 500 pages and thousands upon thousands of words to this, and this is what the blurb has to say about it:
I like compassion. Actually no, I ◊em{love} it. It's an amazing word that can mean many things to many people, but I like to think it's what gives this world the soul it has; it's certainly responsible for a lot of good. The Compassionate Mind◊^[3] dedicates over 500 pages and thousands upon thousands of words to this, and this is what the blurb has to say about it:

> Not only does compassion help to soothe distressing emotions, it actually increases feelings of contentment and well-being.

To my mind, spending mental energy on identifying and labelling different kinds of toxic people will never bring you contentment and inner-peace. I think you'll actually end up with more distressing emotions as you figure out how to deal with this knowledge.

What if you're worried about being toxic yourself? Same thing, you're just being dominated by it indirectly and defining yourself by what you're not. Contentment and inner-peace is found in what you _are_, who you are, not the other way around.
What if you're worried about being toxic yourself? Same thing, you're just being dominated by it indirectly and defining yourself by what you're not. Contentment and inner-peace is found in what you ◊em{are}, who you are, not the other way around.

The reason I'm saying this is because it works at a cultural level too. If the first thing you tell to a potential hire is that you don't hire toxic employees, or your culture isn't toxic, you're just begging the question. Why the hell is _that_ on your mind and not all the good stuff you could be saying instead? What are you hiding with your preoccupation?



@@ 45,6 45,6 @@ Of course, it doesn't work out for everyone. It's not about protecting people or

Just, no reason to be so needlessly unkind to each other. We're all human, after all.

[^1]: <https://twentytentalent.com/8-types-of-toxic-managers/>
[^2]: <https://getvoip.com/blog/2015/02/24/toxic-employees/>
[^3]: <https://www.amazon.co.uk/Compassionate-Mind-Compassion-Focused-Therapy/dp/1849010986>
◊^[1]{◊<>["https://twentytentalent.com/8-types-of-toxic-managers/"]}
◊^[2]{◊<>["https://getvoip.com/blog/2015/02/24/toxic-employees/"]}
◊^[3]{◊<>["https://uk.bookshop.org/a/6865/9781849010986"]}

R posts/i-am-here.md => posts/i-am-here.poly.pm +15 -17
@@ 1,25 1,25 @@
---
title: I am here
date: 2020-06-25
status: published
category: personal 
---
#lang pollen

◊define-meta[title]{I am here}
◊define-meta[date]{2020-06-25}
◊define-meta[published #t]
◊define-meta[category]{personal}

I've been no stranger to depression and burnout. The former is more or less something I've grown to become friends with, as bizarre as it sounds. And as dark and troubling as those times have been, at their worst, they're equally the reason where I find myself where I am now, having experienced all that I have. I wish Trump didn't abuse the words 'tremendous' and 'beautiful' so much, because there can be great beauty in these harrowing experiences once you can look back on them and see how you've grown, changed, since.

I can tell when I start to grow burned out not because I feel more depressed, but because I start to try too hard or overcompensate for perceived failures. It's the classic feeling of not being _enough_, and trying to pursue ever loftier goals as a way of becoming _more_ enough. It inevitably ends in abject failure and if I was to ask anyone else around me while that was happening, they would quite rightly tell me that I am being too hard on myself and nobody is beating me up the way I am. The inner critic is strong in this one, and he doesn't always want to admit or accept that he is totally enough as he is, and he should slow down and enjoy this eccentric life of his. I mean, we're all eccentric in our own ways; it's by no means an insult compared to an allegation of being _normal_.
I can tell when I start to grow burned out not because I feel more depressed, but because I start to try too hard or overcompensate for perceived failures. It's the classic feeling of not being ◊em{enough}, and trying to pursue ever loftier goals as a way of becoming ◊em{more} enough. It inevitably ends in abject failure and if I was to ask anyone else around me while that was happening, they would quite rightly tell me that I am being too hard on myself and nobody is beating me up the way I am. The inner critic is strong in this one, and he doesn't always want to admit or accept that he is totally enough as he is, and he should slow down and enjoy this eccentric life of his. I mean, we're all eccentric in our own ways; it's by no means an insult compared to an allegation of being ◊em{normal}.

It's funny, really. Life for me really began in 2012 when I moved from my parents home in Bolton to my own rented room in London. It wasn't glamourous in the slightest but the freedom and independence was well worth it. I'd joined an agency called New Bamboo that specialised in building software in Ruby on Rails, in an agile way, and their decision to hire me--a junior PHP engineer at the time with only a year of professional experience--basically changed my life. Even now, 8 years later, I don't know anyone from there who looks back on that time unfavourably. In fact, we often lament how hard it is to find somewhere even remotely similar to them. The culture was one of a kind and I greatly matured through it, making some excellent friends who are still close-knit to this day.

Three years later, in 2015, I found myself moving to Barcelona for a new job at what was then a tiny startup called Typeform, truly embracing this independence I'd established earlier in the decade. It was a fantastic place to work, with fantastic people from all walks of life, and that was before you got to the weekly beach volleyball, sailing, and the general way of life in that little corner of Spain. It was around this time that my mental health began to diminish further, so I sought out a psychotherapist who could continue the work that the one I had in the UK had started, and so that I had an impartial outlet for the things I was going through.

You might notice I said 'psychotherapist' and not just 'therapist'. In the UK I booked time with a private therapist who offered breathwork[^1] as a way to dive into past trauma and help heal it, alongside the usual talking and listening, and some mild CBT (Cognitive Behavioural Therapy)[^2]. After some retreats in both Trakai, Lithuania and Lesvos, Greece, I moved on to something known as somatic experiencing[^3]. I'm aware that someone reading this might think "what on earth was this guy getting himself into?" or possibly be dismissive of these alternative aspects of therapy. I think that's fine and I accept that it can seem unusual or strange to some, and they are certainly not all-healing panaceas like many alternative medicines are perceived to be: they can very easily cause more harm than good if you place your trust in the wrong hands. I think that is a major issue with many New Age therapies, when they offer simplistic solutions to difficult problems.
You might notice I said 'psychotherapist' and not just 'therapist'. In the UK I booked time with a private therapist who offered breathwork◊^[1] as a way to dive into past trauma and help heal it, alongside the usual talking and listening, and some mild CBT (Cognitive Behavioural Therapy)◊^[2]. After some retreats in both Trakai, Lithuania and Lesvos, Greece, I moved on to something known as somatic experiencing◊^[3]. I'm aware that someone reading this might think "what on earth was this guy getting himself into?" or possibly be dismissive of these alternative aspects of therapy. I think that's fine and I accept that it can seem unusual or strange to some, and they are certainly not all-healing panaceas like many alternative medicines are perceived to be: they can very easily cause more harm than good if you place your trust in the wrong hands. I think that is a major issue with many New Age therapies, when they offer simplistic solutions to difficult problems.

This kind of work continued for a couple of years until I decided I wanted a quieter life than what Barcelona had to offer. You might think that sounds utterly insane, because who would want to leave such a beautiful city? Well, I offer you the chance to work full-time for a week during Festa de Gracia, a week-long festival in the village of Gracia that doesn't start to wrap up until 4 or 5am each morning. Something that is immensely fun, and is an incredible display of Catalunya's culture of celebration, wears a little thin when you desperately want to catch some proper shut-eye before work. So, I moved to Latvia, or Jurmala in Latvia more precisely, and enjoyed a solid year of working remotely beside the beautiful Baltic coast, which is genuinely a sight to behold.

In that time on the continent I travelled to more places, and experienced more new things, than I ever had before. Latvia itself was (and still is) utterly gorgeous. Meanwhile, I enjoyed visiting places as varied as Naxos, Istanbul, Croatia, Estonia, Lisbon, Verona, Vienna, and a few other places I struggle to remember. I was close to the epicentre of the Barcelona terror attack in 2017, and experienced the city in a much more sombre, mournful light. I was close to being in Ankara when the attempted coup against Erdogan took place, until I was encouraged to cancel the journey. I had a genuine thirst for adventure that was hugely facilitated by being able to work remotely.

Not long after that, I moved back to London. Partly due to redundancies in the (London based) startup I was working for out in Latvia, but also due to isolation and missing the people I loved back in London, feeling like I was ready to get closer to the action again. The focus on my health narrowed a little and this time I chose a therapist who specialised in complex PTSD[^4] and post-traumatic growth[^5], and later one who dealt with relationship issues specifically (as a single man, there are still things in that area to explore).
Not long after that, I moved back to London. Partly due to redundancies in the (London based) startup I was working for out in Latvia, but also due to isolation and missing the people I loved back in London, feeling like I was ready to get closer to the action again. The focus on my health narrowed a little and this time I chose a therapist who specialised in complex PTSD◊^[4] and post-traumatic growth◊^[5], and later one who dealt with relationship issues specifically (as a single man, there are still things in that area to explore).

I'm going to skip ahead a bit because I do actually have a point here, besides being autobiographical. Those eight years so far have been almost consistently tinged with the hue of mental illness. We often use the colour blue to represent a low mood, but I'd really see it as a burning greyness...smouldering embers glowing in charred remains like a burned-out log fire. Occasionally a new breath of life will bring the fire back to its original ferocity but, almost inevitably, you'll run out of logs to keep it aflame. Thus starts the slow but necessary process of tending to yourself so that you can refuel that fire after a period of recovery.



@@ 27,12 27,10 @@ That's one hell of a tortured metaphor, but I think we all have our own individu

To bring this back to the start, I would not change a thing as my place in this moment is entirely a function of all that shit, and all that wonder, I've been through over the years, going right back to my birth and my childhood as an adopted, abused kid. All of the pain, both given and received, and the pleasure, have served a purpose and I appreciate those experiences as I would not think or feel the way I currently do were it not for them. I would not value empathy and compassion and inclusivity so massively. And rather than focussing so heavily on just how shitty some of those moments have been, the amazing memories that have come from my struggle through it all are equally incalculable.

This isn't to say all my problems are solved, or I'm finished with my process. That's *pure BS*: this stuff lasts an entire lifetime as you learn, grow and adapt to new situations. But the track record I have must be pretty good if I'm _here_, right?


This isn't to say all my problems are solved, or I'm finished with my process. That's ◊strong{pure BS}: this stuff lasts an entire lifetime as you learn, grow and adapt to new situations. But the track record I have must be pretty good if I'm ◊em{here}, right?

[^1]: <https://en.wikipedia.org/wiki/Breathwork>
[^2]: <https://en.wikipedia.org/wiki/Cognitive_behavioral_therapy>
[^3]: <https://en.wikipedia.org/wiki/Somatic_experiencing>
[^4]: <https://en.wikipedia.org/wiki/Complex_post-traumatic_stress_disorder>
[^5]: <https://en.wikipedia.org/wiki/Posttraumatic_growth>
\ No newline at end of file
◊^[1]{◊<>["https://en.wikipedia.org/wiki/Breathwork"]}
◊^[2]{◊<>["https://en.wikipedia.org/wiki/Cognitive_behavioral_therapy"]}
◊^[3]{◊<>["https://en.wikipedia.org/wiki/Somatic_experiencing"]}
◊^[4]{◊<>["https://en.wikipedia.org/wiki/Complex_post-traumatic_stress_disorder"]}
◊^[5]{◊<>["https://en.wikipedia.org/wiki/Posttraumatic_growth"]}
\ No newline at end of file

R posts/isolation-aloneness-and-loneliness.md => posts/isolation-aloneness-and-loneliness.poly.pm +10 -14
@@ 1,9 1,9 @@
---
title: Isolation, aloneness and loneliness
date: 2020-07-07
category: personal
status: published
---
#lang pollen

◊define-meta[title]{Isolation, aloneness and loneliness}
◊define-meta[date]{2020-07-07}
◊define-meta[published #t]
◊define-meta[category]{personal}

I don't doubt that the COVID-19 situation this year has forced many of us to confront aspects of ourselves we were previously able to hide behind a mask. In some ways, this has been tragic and, unfortunately, traumatic as well. That isolation has deprived partners of space away from their abusers at home, making a bad situation even worse for them. In other cases, it has made domestic violence much more apparent as people reveal an uglier side to themselves, frustrated by the prolonged contact. It's truly devastating and it can be hard to comprehend the sort of horror that takes place in the home unless you've been unfortunate enough to suffer from it yourself. I suffered it myself, at the hands of my sister's abusive boyfriend, and it was enough for me to contemplate life without any of my family in it. Took me a few years to follow up with that and come to terms with it, but I'm glad I did.



@@ 11,7 11,7 @@ That's not the full topic of this post, but I think it's important to acknowledg

Now I've explained where I'm coming from, let's move on.

----------
◊hr{}

For a long time (up until the last few years) I've considered myself a lonely person, or a bit of a loner. It sounds like a label to identify by and, if I were to call myself a loner enough times, it surely would become an identity. What I really mean when I say it, though, is that I don't feel like I have the capacity to provide myself the comfort, the fulfilment, the happiness that I feel I'm lacking. There's a psychological undercurrent to it of course, based on my childhood, but I'm ultimately disempowering myself and also signalling that I don't really have that kind of positive feeling to give out either. There's an absence of it and I'd like it if someone else shared some with me.



@@ 19,7 19,7 @@ Don't get me wrong, everyone needs love, support, acknowledgement, recognition..

Eventually though, that continual desire to receive this energy from other people (as opposed to finding it within yourself somehow) is going to result in a self-fulfilling prophecy where you are convinced that you are lonely, or a loner, purely because you've taught yourself to believe that.

I have to say that I'm endlessly grateful to my closest, bestest friends for pointing out to me that it doesn't really have to be that way, so it was about five years ago that I started learning the difference between _lonely_-ness and _alone_-ness (deliberate mis-spellings for emphasis).
I have to say that I'm endlessly grateful to my closest, bestest friends for pointing out to me that it doesn't really have to be that way, so it was about five years ago that I started learning the difference between ◊em{lonely}-ness and ◊em{alone}-ness (deliberate mis-spellings for emphasis).

I seriously enjoy being alone, and understanding that difference was hugely empowering to me. It's truly a liberating distinction that unlocks so many opportunities that were previously unavailable because of the expectation they had to be done 'with someone' or with a group. Maybe out of a desire to be romantic, or to share with someone, or just because society finds it weird if you don't.



@@ 27,7 27,7 @@ Most of the places I've travelled to for a holiday have been alone. A week in Cr

The difference here is that there is a source of happiness and comfort in the aloneness, and it is self-sufficient. It doesn't completely remove feelings of loneliness, as there can still be underlying reasons for that emotion, but in and of itself it can be hugely enriching, maturing, and enlightening.

----------
◊hr{}

So how does this pertain to the lockdown period we've found ourselves in since March? I have to admit that I've felt pangs of loneliness every now and then, and I seriously miss the social chit-chat at the pub after work, or meeting up with close friends to chill out. At the same time, I've discovered enough in myself over recent years to give me plenty to engage with and enjoy while I'm alone at home. As much as I'd love to dip my feet in more social things, like the dating scene and suchlike, I still feel quite comfortable riding this out by myself until I'm confident that it's safer for me to do so. It's a great feeling to have.



@@ 37,8 37,4 @@ I started off the post talking about relationships, and not in a positive way ei

The lockdown has been a great way to cement that in place for me and when all is said and done, it will definitely have had an impact on how I enjoy life from here on out.

🕉



<small>P.S. I promise I'll write some programming related stuff soon :))</small>
\ No newline at end of file
🕉
\ No newline at end of file

D posts/lonesome-lockdown.md => posts/lonesome-lockdown.md +0 -50
@@ 1,50 0,0 @@
---
title: Lonesome lockdown
category: personal
date: 2021-02-19
status: draft
---

As I write this, I'm thirty-three years old and the last twelve months of my life, like _many_ others, have been spent in some form of quarantine thanks to COVID-19. This isn't a woeful tale about life in the pandemic, though, and it's not a feel-good story about coming together in hard times. It's about intimacy, connection, and love.

I was adopted right after I was born; the decision was made while my biological mother was pregnant. I went through 'the system' in the first twelve weeks of my existence and got the paperwork to show for it. This isn't an identity for me, I'm not 'an adopted kid' but I'm one who still deals with attachment and abandonment issues to this day. I have problems with the definition of 'enough', because that's what I never was, and I now have a hard time <del>dealing with</del> accepting positive feedback. Deep down, this is where I'm coming from: hard on myself, wanting approval.

It's taken a lot of work to turn 'family' into a nice word and not one that makes me drink myself into a stupor out of pure anxiety. Family, for me, is not the birth one or the adoptive one, but the connections I've made since that have held strong and kept me alive through the coldest of hells and the hottest of high waters.

---

You know what insecurity is. Or you're worried because actually, you don't.

---

The last romantic relationship I had started in the summer of 2009, which is over a decade ago now. It ended in 2010 and as well as being the last relationship, it was also the first. Nothing particularly sexual has happened between then and now (as I write this), but it was also the first time I had sex as well as the last.

The fallout from that failed connection, and the pressure from my family (at the time) to find someone to marry and have kids with, was enough to send me deep into the abyss. I was already toe-deep in it for reasons I didn't understand at the time, so it was only a matter of trajectory and I was aiming a solid forty-five degrees downwards.

I'm telling you that because, back then, that's what I thought it was all about: girlfriends and sex. I was massively insecure about it, and jealous to boot. Thank god I changed, but I can't deny I still have insecurities.

It's been a solitary existence then, but one I appreciate a lot. I found my independence, I travelled. I left the UK and spent three years of my life across Barcelona and Jurmala and Riga, in Latvia. Then I came back to London to be closer to who I now consider family, and I haven't moved since. Don't take 'solitary' to mean 'isolated' though: this is where I realised that I loved my friends; and I still do. They were there then and they're here now, and that's where intimacy and love combine to create friendship.

I'm still in the same place I moved to in 2018, and I wrote about what happened to my landlord the other week (no link, find the post on the front page) but I'm still overall good, right now. Right?

I'm gonna speak for everyone and say, no.

What I've learned through the trial-of-fire that is being thrown into this world as a freshly born soul, and being given the decades of time to marinate in it, is that the last twelve months of this collective clusterfuck in the art of humanity _utterly fucking sucks_.

The thing is, we often talk about love and intimacy as romantic or sexual concepts, but they're not that. And as much as I try to put on a strong front and say I can cope with this lack of interaction, I honestly struggle.

I've put on over ten kilograms of weight since the first lockdown in the UK started almost a year ago. At first some of the weight was muscle mass because I was going to the gym over lunch with a close friend. The rest is fast food and shitty diet because we started working from home, and I live alone. It's hard to incentivise myself to cook well, although I do a better job right now.

The feeling of a big hug is one I truly miss. Body language is strong and immensely powerful and you can communicate so much through a hug or a cuddle.

There's the physical feeling of meeting someone face-to-face, sitting down across a table, catching up, and not just listening but feeling what they're saying.

There are the rituals and the routines; the time where you get out your phone when your friend goes for a piss. The serendipitous moments at the bar where you strike up an unexpected conversation. The whole wealth of human interaction that exists precisely because all of these opportunities to connect were implicitly available. So much of _life_ happens in happenstance.

There are a handful of people I truly love too, and while some may know it more than others, it's clear that there are different levels of intensity going on.

This is intimacy. It's the power of the trust you have with someone to reveal an unknown part of you.

And we lose it all when we're reduced to digital connections. It's a loss of a sense.

I'm quite happy in myself being alone, but I've never felt more lonely.

R posts/my-favourite-thing-about-programming.md => posts/my-favourite-thing-about-programming.poly.pm +29 -29
@@ 1,9 1,9 @@
---
title: My favourite thing about programming
date: 2020-08-15
category: programming
status: published
---
#lang pollen

◊define-meta[title]{My favourite thing about programming}
◊define-meta[date]{2020-08-15}
◊define-meta[published #t]
◊define-meta[category]{programming}

In a word: languages.



@@ 19,24 19,24 @@ Currently I'm playing with Haskell and also taking it a bit more seriously, to s

For example, this is the function I have for generating the estimated reading time you see at the top of every post here:

```haskell
ertField :: String -> Snapshot -> Context String
ertField name snapshot = field name $ \item -> do
  body <- itemBody <$> loadSnapshot (itemIdentifier item) snapshot
  let words = length (T.words . T.pack $ body)
  return $ show $ round $ words // 250
```
◊codeblock['haskell]{
  ertField :: String -> Snapshot -> Context String
  ertField name snapshot = field name $ \item -> do
    body <- itemBody <$> loadSnapshot (itemIdentifier item) snapshot
    let words = length (T.words . T.pack $ body)
    return $ show $ round $ words // 250
}

That's a lot of symbols doing a lot of legwork! And while this is difficult for an untrained eye to understand, it becomes more familiar once you've picked up some of the basic 'rules' of Haskell and the 'styles' of programming you can use. Of course, you can always take it too far:

```haskell
(👏) = ($)
◊codeblock['haskell]{
  (👏) = ($)

ertField name snapshot = field name 👏 \item -> do
  body <- itemBody <$> loadSnapshot (itemIdentifier item) snapshot
  let words = length (T.words . T.pack 👏 body)
  return 👏 show 👏 round 👏 words // 250
```
  ertField name snapshot = field name 👏 \item -> do
    body <- itemBody <$> loadSnapshot (itemIdentifier item) snapshot
    let words = length (T.words . T.pack 👏 body)
    return 👏 show 👏 round 👏 words // 250
}

That should go down well with the Twitter crowd.



@@ 46,17 46,17 @@ Moving on, there's Lisp. My familiarity with Lisp comes from customising my emac

With emacs in particular, this made it trivially easy for me to launch a Rails console inside a deployed Kubernetes pod.

```commonlisp
(defun inf-ruby-console-k8s (env)
  (interactive (list (completing-read "Environment: "
                                      '("dev" "staging" "preprod") nil t)))
  (shell-command (concat "kubectl config use-context " env))
  (inf-ruby-console-run "kubectl exec -it ruby-app -- rails c" (concat "k8s-ruby-" env)))
```
◊codeblock['commonlisp]{
  (defun inf-ruby-console-k8s (env)
    (interactive (list (completing-read "Environment: "
                                        '("dev" "staging" "preprod") nil t)))
    (shell-command (concat "kubectl config use-context " env))
    (inf-ruby-console-run "kubectl exec -it ruby-app -- rails c" (concat "k8s-ruby-" env)))
}

I mapped it to a certain keybinding and a panel would open to the side within a second, ready for me to use. I don't think I'd have the patience to try and reproduce that in, say, VS Code, without using a task runner. Emacs itself is entirely programmable so you don't need to worry about setting up extension boilerplate to make minor modifications.
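
Something along these lines, with "C-c k" standing in for whatever key takes your fancy:

◊codeblock['commonlisp]{
  ;; Bind the console command to a key; "C-c k" is only an example.
  (global-set-key (kbd "C-c k") #'inf-ruby-console-k8s)
}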

I should round this post off with an even three examples, so my final two are Smalltalk and Prolog. I haven't managed to build anything in Smalltalk yet, as its learning curve is quite unlike any other. However, aspects of Smalltalk live on in languages like Ruby, where everything is an object and everything is defined in terms of message passing. I think Objective-C can be counted there too, although both languages diverge from some of Smalltalk's ideals in the name of pragmatism. My short-term goal with Smalltalk is to (attempt to) implement a raytracer as described in The Ray Tracer Challenge by Jamis Buck[^0], so I can better understand the language and apply what I've learned elsewhere.
I should round this post off with an even three examples, so my final two are Smalltalk and Prolog. I haven't managed to build anything in Smalltalk yet, as its learning curve is quite unlike any other. However, aspects of Smalltalk live on in languages like Ruby, where everything is an object and everything is defined in terms of message passing. I think Objective-C can be counted there too, although both languages diverge from some of Smalltalk's ideals in the name of pragmatism. My short-term goal with Smalltalk is to (attempt to) implement a raytracer as described in ◊em{The Ray Tracer Challenge} by Jamis Buck◊^[1], so I can better understand the language and apply what I've learned elsewhere.

Prolog is on my list, and I keep coming back to it every couple of months to see what I'm inspired to do. I find it, and logic programming, intriguing, and I wonder how many problems would be solved a bit more easily that way compared to how we usually smush things together in procedural or OOP languages. Watch this space for more insight, I guess.



@@ 64,4 64,4 @@ One thing I've noticed while writing this is that each language selected has lit

For as long as that remains true I think I'll always have new toys to play with.

[^0]: <http://raytracerchallenge.com/>
\ No newline at end of file
◊^[1]{◊<>["http://raytracerchallenge.com"]}
\ No newline at end of file

R posts/on-sharing-vulnerability.md => posts/on-sharing-vulnerability.poly.pm +9 -9
@@ 1,11 1,11 @@
---
title: On Sharing Vulnerability
date: 2020-07-03
category: personal
status: published
---
#lang pollen

One of the most wonderful things I've seen on the internet in recent years is our growing willingness to be vulnerable, together. I have to admit that this wasn't really a 'thing' for me until I'd read Daring Greatly by Brene Brown[^1], as well as Rising Strong[^2]. As far as self help books go, I don't think I would actually categorise them as such.
◊define-meta[title]{On Sharing Vulnerability}
◊define-meta[date]{2020-07-03}
◊define-meta[published #t]
◊define-meta[category]{personal}

One of the most wonderful things I've seen on the internet in recent years is our growing willingness to be vulnerable, together. I have to admit that this wasn't really a 'thing' for me until I'd read Daring Greatly by Brene Brown◊^[1], as well as Rising Strong◊^[2]. As far as self help books go, I don't think I would actually categorise them as such.

Even LinkedIn these days is full of posts of people sharing their failures, admitting mistakes, dropping their egos, and being authentic with each other in favour of presenting a flawless persona. It's a beautiful thing, to allow yourself to be yourself, your very human self, without the pressure of having to appear flawless.



@@ 25,5 25,5 @@ So, considering that we see ourselves becoming more distant as a result of the C


[^1]: <https://www.amazon.co.uk/Daring-Greatly-Courage-Vulnerable-Transforms/dp/1592408419>
[^2]: <https://www.amazon.co.uk/Rising-Strong-Bren%C3%A9-Brown/dp/0091955033>
\ No newline at end of file
◊^[1]{◊<>["https://uk.bookshop.org/a/6865/9780241257401"]}
◊^[2]{◊<>["https://uk.bookshop.org/a/6865/9780091955038"]}
\ No newline at end of file

R posts/on-working-remotely.md => posts/on-working-remotely.poly.pm +6 -6
@@ 1,9 1,9 @@
---
title: On working remotely
category: personal
date: 2020-06-26
status: published
---
#lang pollen

◊define-meta[title]{On working remotely}
◊define-meta[date]{2020-06-26}
◊define-meta[published #t]
◊define-meta[category]{personal}

I imagine that working from home for extended periods is quite new to a lot of people, thanks to the COVID-19 lockdown. I enjoyed a solid year of it back when I worked remotely from Latvia. For me, it was both liberating and isolating at the same time, and while I really loved this semi-digital-nomadic lifestyle at the start, it wasn't long before I grew an appetite for collaborating in person again.


R posts/past-mistakes.md => posts/past-mistakes.poly.pm +9 -9
@@ 1,15 1,15 @@
---
title: Past mistakes
date: 2021-04-24
status: published
category: programming
---
#lang pollen

◊define-meta[title]{Past mistakes}
◊define-meta[date]{2021-04-24}
◊define-meta[published #t]
◊define-meta[category]{programming}

I think everyone needs at least one blog post under their belt that describes mistakes they've previously made in their careers. Now is the time for me.

1. Liberal application of the word 'just'

"Why don't you _just_ do this other thing instead?" Uuughhh...this word, which serves as punctuation as much as the word 'fuck' does in Glasgow, is bound to rile anyone up after the umpteenth attempt at trivialising the problem they have.
"Why don't you ◊em{just} do this other thing instead?" Uuughhh...this word, which serves as punctuation as much as the word 'fuck' does in Glasgow, is bound to rile anyone up after the umpteenth attempt at trivialising the problem they have.

It still slips out occasionally, and I should really wash my mouth out with soap after it, but the true magnitude of this error only becomes apparent when you start paying attention to how often everyone else does it too.



@@ 41,8 41,8 @@ You figure it out along the way and then become comfortable enough that you can 

These days I tell myself that it looks intimidating, but take it easy.

---
◊hr{}

Four is the magic number, so I'll leave it there. I'm not sure how sincere I felt when writing this to begin with, but it's all been part of the experience. I think next time I'll single one or two things out and go into more depth.

---
◊hr{}

R posts/permanent-solutions-to-temporary-problems.md => posts/permanent-solutions-to-temporary-problems.poly.pm +34 -18
@@ 1,23 1,23 @@
---
title: Permanent solutions to temporary problems
category: mental-health
date: 2021-02-01
status: published
---
#lang pollen

◊define-meta[title]{Permanent solutions to temporary problems}
◊define-meta[date]{2021-02-01}
◊define-meta[published #t]
◊define-meta[category]{mental-health}

Depression fucking sucks. It sucks to suffer from it; it sucks to see others suffer from it.

I lost contact with my landlord sometime last year, and obviously, as a private tenant, that raised some concerns: my main point of contact for my flat had vanished off the face of the earth. He'd deleted his email address and his phone number was deactivated. To be honest I thought nothing of it for a while, and the last thing he told me was that he was bogged down with work at the NHS. Not a particularly good sign given the situation with COVID.

At the beginning of the year I received a letter from the solicitors managing his estate: he died last November. Receiving this kind of news presents a veritable cocktail of emotions; a confusing mix of relief, catharsis, and sadness. Finally _you know_, so in that there's some closure, but also... fuck. What a shame.
At the beginning of the year I received a letter from the solicitors managing his estate: he died last November. Receiving this kind of news presents a veritable cocktail of emotions; a confusing mix of relief, catharsis, and sadness. Finally ◊em{you know}, so in that there's some closure, but also... fuck. What a shame.

The letter from the solicitors asked me to start paying my rent to a different account, so naturally I had to ask for some evidence to verify this claim. A death certificate would suffice, I said, and not more than a couple of days later I got an email with one in it, along with a letter from the coroner.

Cause of death: _suspension_.
Cause of death: ◊em{suspension}.

That's a euphemistic way to say that my landlord committed suicide by hanging himself. Fuuuck.

The thing is, I don't have to love this man or feel close to him to be upset by that news. I feel _for_ him, because I can only imagine what it takes to get to a state of sheer torment and utter desperation so deep that the point of no return becomes a beacon of hope. A permanent solution to a temporary problem, as it were, though I am in no place to make any kind of claim about the situation he was in.
The thing is, I don't have to love this man or feel close to him to be upset by that news. I feel ◊em{for} him, because I can only imagine what it takes to get to a state of sheer torment and utter desperation so deep that the point of no return becomes a beacon of hope. A permanent solution to a temporary problem, as it were, though I am in no place to make any kind of claim about the situation he was in.

I can only imagine that because I've been there myself, more than once. It's not something I could put into words, and nor would I want to relive it such that I could. I wouldn't want the people I love, who supported me at that time, to go back and find the words for it either. But I've been there, and those people closest to me have been there by extension. They shouldered some of my emotional burden themselves so that I could try and cope with a lighter load.



@@ 27,20 27,36 @@ Mental health is serious business and we don't always treat it with the seriousn

There are loads of resources out there that could help out if you're starting to struggle, and if one thing doesn't work that doesn't mean nothing else will. Here are some quick suggestions:

- Find a therapist you can connect with and trust (you don't have to stick with the first one you find)
- Ask your closest friends if you can confide in them (try not to treat a 'no' as a rejection, some people are better equipped to support than others)
- If you can chat to people you don't know over the phone, then Samaritans[^1] is a good place to start but not the only place
- Talk to your doctor about Cognitive Behavioural Therapy (CBT)
◊ul{
  ◊li{
    Find a therapist you can connect with and trust (you don't have to stick with the first one you find)
  }
  ◊li{
    Ask your closest friends if you can confide in them (try not to treat a 'no' as a rejection, some people are better equipped to support than others)
  }
  ◊li{
    If you can chat to people you don't know over the phone, then Samaritans◊^[1] is a good place to start but not the only place
  }
  ◊li{
    Talk to your doctor about Cognitive Behavioural Therapy (CBT)
  }
}

A friend has also suggested some other options:

- If you prefer to text rather than call, then shout[^2] might work for you
- And if you're younger, still in school maybe, then Papyrus[^3] can help
◊ul{
  ◊li{
    If you prefer to text rather than call, then shout◊^[2] might work for you
  }
  ◊li{
    And if you're younger, still in school maybe, then Papyrus◊^[3] can help
  }
}

I'm aware that these options are very specific to the UK and you might not have equivalents in the country you live in. Please fire off an email to <me@mrlee.dev> if you have recommendations.

Take care of yourself.

[^1]: <https://www.samaritans.org/>
[^2]: <https://giveusashout.org/>
[^3]: <https://www.papyrus-uk.org/>
◊^[1]{◊<>["https://www.samaritans.org"]}
◊^[2]{◊<>["https://giveusashout.org"]}
◊^[3]{◊<>["https://www.papyrus-uk.org"]}

D posts/testing-drafts.md => posts/testing-drafts.md +0 -7
@@ 1,7 0,0 @@
---
title: Testing drafts
date: 2020-07-28
category: programming
---

This is a test
\ No newline at end of file

R posts/things-ive-changed-my-mind-on.md => posts/things-ive-changed-my-mind-on.poly.pm +6 -6
@@ 1,9 1,9 @@
---
title: Things I've learned after 10 years in the industry
date: 2021-01-24
category: programming
status: published
---
#lang pollen

◊define-meta[title]{Things I've learned after 10 years in the industry}
◊define-meta[date]{2021-01-24}
◊define-meta[published #t]
◊define-meta[category]{programming}

Earlier today I read a blog post titled _Software development topics I've changed my mind on after 6 years in the industry_[^0] and it made me reflect on how my own thinking has (hopefully) evolved over my decade long career. I'm not going to discuss the content of the linked post, except to say that as much as I empathise with the author and have been an angry programmer myself, the overly aggressive tone that occasionally slips out isn't really my cup of tea.


R posts/time-travel.md => posts/time-travel.poly.pm +22 -22
@@ 1,13 1,13 @@
---
title: Time travel
date: 2021-04-06
status: published
category: personal
---
#lang pollen

Once upon a time, a _golden oldie_ referred to a song released in the 50s, 60s or 70s, and modern music doesn't really go much further back than that.
◊define-meta[title]{Time travel}
◊define-meta[date]{2021-04-06}
◊define-meta[published #t]
◊define-meta[category]{personal}

More generally, it's the music your parents would listen to if you're around my age (early/mid thirties). You know, _you whippersnappers wouldn't know good music if it slapped you in the face_, _you kids don't half listen to some tripe these days_. Classic get-off-my-lawn commentary from the older generation.
Once upon a time, a ◊em{golden oldie} referred to a song released in the 50s, 60s or 70s, and modern music doesn't really go much further back than that.

More generally, it's the music your parents would listen to if you're around my age (early/mid thirties). You know, ◊em{you whippersnappers wouldn't know good music if it slapped you in the face}, ◊em{you kids don't half listen to some tripe these days}. Classic get-off-my-lawn commentary from the older generation.

It's only a matter of time before the latest cohort of unruly youth passes their first decade of onboarding and sees _me_ as one of the older generation.



@@ 17,37 17,37 @@ So, I've been binging all the music I used to listen to as a kid that now makes 

Continue reading if you'd like to join me on a musical trip down memory lane.

## Grizzly Bear - Veckatimest (2009)
◊h2{Grizzly Bear - Veckatimest (2009)}

_Two Weeks_ was arguably the hit single from Grizzly Bear's sophomore record and it got major play time on XFM and BBC Radio 6. I would eventually get massively into this band and see them play live in Leeds, but that's not what I remember so vividly.
◊em{Two Weeks} was arguably the hit single from Grizzly Bear's sophomore record and it got major play time on XFM and BBC Radio 6. I would eventually get massively into this band and see them play live in Leeds, but that's not what I remember so vividly.

I worked as a cashier and early-morning change-runner at my local Tesco back then, and during one break time received a Facebook message from the cousin of one of my friends, who was from Sacramento in California if I remember correctly. She asked if I could pick her up from Manchester Airport and I thought, heh, why not? This was the beginning of a whirlwind romance that ended just as quickly as it started (blink and you'd miss it) and _Veckatimest_ was the soundtrack to it all.
I worked as a cashier and early-morning change-runner at my local Tesco back then, and during one break time received a Facebook message from the cousin of one of my friends, who was from Sacramento in California if I remember correctly. She asked if I could pick her up from Manchester Airport and I thought, heh, why not? This was the beginning of a whirlwind romance that ended just as quickly as it started (blink and you'd miss it) and ◊em{Veckatimest} was the soundtrack to it all.

It was the height of summer and the weather was glorious. I showed her the sights around Manchester, she showed me how to dance the way she did back home. It was wild and not meant to be.

Whenever I listen to _Two Weeks_ I'm instantly transported back into my little blue Ford Ka, scooting down the M61 towards Chorley with the windows open, the cool breeze, and the blazing sun bearing down upon us.
Whenever I listen to ◊em{Two Weeks} I'm instantly transported back into my little blue Ford Ka, scooting down the M61 towards Chorley with the windows open, the cool breeze, and the blazing sun bearing down upon us.

This was probably one of the first times I really embraced spontaneity and went with the flow, although it was also a point in time where I was unaware of several issues that I would later struggle with. A lot.

Oh to be ignorant.

## Arctic Monkeys - AM (2013)
◊h2{Arctic Monkeys - AM (2013)}

I was hooked by the Arctic Monkeys as soon as I heard their first single, _I Bet That You Look Good On The Dancefloor_, a little bit before they released their debut album in 2006. I can still put on _Whatever People Say I Am, That's What I'm Not_ and recite the lyrics from heart, as I'd sing along with it in the car so much when driving home from sixth-form, or from meeting friends further up in Lancashire.
I was hooked by the Arctic Monkeys as soon as I heard their first single, ◊em{I Bet That You Look Good On The Dancefloor}, a little bit before they released their debut album in 2006. I can still put on ◊em{Whatever People Say I Am, That's What I'm Not} and recite the lyrics from heart, as I'd sing along with it in the car so much when driving home from sixth-form, or from meeting friends further up in Lancashire.

_AM_, however, is something else. I lived in Poplar in East London then and would commute to work from the DLR at All Saints, passing through Poplar and then into Bank or Tower Hill. By the time I got past Poplar, _Arabella_ would begin and I'd stare out of the window towards Canary Wharf as the DLR pootled past, watching the strange new Crossrail station come to form as each diamond-shaped glass panel was fitted into place across the days, weeks... months.
◊em{AM}, however, is something else. I lived in Poplar in East London then and would commute to work from the DLR at All Saints, passing through Poplar and then into Bank or Tower Hill. By the time I got past Poplar, ◊em{Arabella} would begin and I'd stare out of the window towards Canary Wharf as the DLR pootled past, watching the strange new Crossrail station come to form as each diamond-shaped glass panel was fitted into place across the days, weeks... months.

I've always enjoyed riding the DLR over using the Tube or traditional trains, and I wonder if part of it is because of moments like that.

It was also the time I worked at an agency called New Bamboo, which was my first job in London and still my favourite. My best friend and I connected quite a bit over this album.

## Milky Chance - Blossom (2017)
◊h2{Milky Chance - Blossom (2017)}

Blossom was released while I was living in and working remotely from Jūrmala, Latvia. I had an appointment to get my hair cut in Rīga, which was about 20-30 minutes away on the train depending on the timetable that day.

While I worked remotely, I didn't work very much from my flat at the Dzintari side of Jūrmala. I did a fair bit from the train and from various coffee shops in the capital and near the flat.

On this day in particular, again in the middle of summer, I had my Discover Weekly playlist on and _Doing Good_ started as I walked through the city to my appointment. I was just passing _Latvijas Nacionālā opera_ as it happened and felt compelled to take a quick detour towards it, and the nice garden in front.
On this day in particular, again in the middle of summer, I had my Discover Weekly playlist on and ◊em{Doing Good} started as I walked through the city to my appointment. I was just passing ◊em{Latvijas Nacionālā opera} as it happened and felt compelled to take a quick detour towards it, and the nice garden in front.

This is one of many memories, but it sticks in my mind because I moved to Latvia after spending two years in Barcelona, and I did it fully knowing that I would be leaving my BCN friends and support network behind and essentially starting anew.



@@ 57,21 57,21 @@ Returning to the music, I returned to the beach that same day and worked quite 

I miss that feeling.

## The Killers - Hot Fuss (2004)
◊h2{The Killers - Hot Fuss (2004)}

_Hot Fuss_ was the first album I ever bought with my own money. It's hard to point to one specific moment for this, but it takes me right back to my teenage years.
◊em{Hot Fuss} was the first album I ever bought with my own money. It's hard to point to one specific moment for this, but it takes me right back to my teenage years.

I grew up in Salford, in the north west of England, and so most of my formative years were spent between Manchester and Bolton. A bit of Lancs and a bit of Manc.

Manchester in particular is an amazing cultural hotspot for music and The Killers themselves sought to emulate that with their debut album. There was Joy Division, New Order, Happy Mondays, The Stone Roses, and an entire scene known as _Madchester_. That was itself a bit before my time as I was only a toddler when it all kicked off, but it has left an indelible mark on the city.
Manchester in particular is an amazing cultural hotspot for music and The Killers themselves sought to emulate that with their debut album. There was Joy Division, New Order, Happy Mondays, The Stone Roses, and an entire scene known as ◊em{Madchester}. That was itself a bit before my time as I was only a toddler when it all kicked off, but it has left an indelible mark on the city.

Elbow might be one of my absolute favourite Mancunian exports. Oasis is often met with a sigh but they did put out some pure fuckin' belters.

But I digress. _Mr Brightside_ is the song of every night I went out clubbing with my friends, or with workmates from Tesco at the time, or both.
But I digress. ◊em{Mr Brightside} is the song of every night I went out clubbing with my friends, or with workmates from Tesco at the time, or both.

It's the song of the foam parties at 5th Avenue (fondly known as 5th Chav) we'd feverishly anticipate on every bank holiday, starting at around 8pm on the Sunday night and ending in a taxi trip home at 3am smelling like washing up liquid mixed with £1 vodka red bulls. It's the song of more expensive, foamless nights at 42nd Street (fondly known as 42s), but 5th Ave was where it was at.

Towards the end you'd get _Mr Brightside_ and _I Am The Resurrection_, practically a Manc anthem, and just as the beat dropped and built back up the floodgates would open and unthinkable quantities of lathered up soap would be splattered across the dancefloor, soaking into your clothes, getting into your eyes, tainting your cheap drink, and coating your lungs as you yelled along to the lyrics and bounced blindly across the room to link arms with whoever else was nearby. It was romantic in its own way.
Towards the end you'd get ◊em{Mr Brightside} and ◊em{I Am The Resurrection}, practically a Manc anthem, and just as the beat dropped and built back up the floodgates would open and unthinkable quantities of lathered up soap would be splattered across the dancefloor, soaking into your clothes, getting into your eyes, tainting your cheap drink, and coating your lungs as you yelled along to the lyrics and bounced blindly across the room to link arms with whoever else was nearby. It was romantic in its own way.

It's the song of practically every single night I spent on the town, visiting the handful of indie clubs we had between Manchester and Bolton.


R posts/to-simpler-times.md => posts/to-simpler-times.poly.pm +58 -59
@@ 1,13 1,13 @@
---
title: To simpler times
date: 2021-01-30
category: programming
status: published
---
#lang pollen

I previously wrote about how this site was built[^1] and then deployed[^2]. I'm quite happy experimenting with how I set up this whole shebang because I can learn a lot from it and the worst that happens is that the site goes down for half an hour. The stakes are low.
◊define-meta[title]{To simpler times}
◊define-meta[date]{2021-01-30}
◊define-meta[published #t]
◊define-meta[category]{programming}

You might tell from the design that I'm trying to keep things basic. The most outrageous things on the entire site are two SVG icons, a CSS animation, an embedded font, and poor mobile responsiveness. Oh, and one added script for site stats that you are able to look at yourself[^3] (I just want to see which posts get more traction than others, is all).
I previously wrote about how this site was built◊^[1] and then deployed◊^[2]. I'm quite happy experimenting with how I set up this whole shebang because I can learn a lot from it and the worst that happens is that the site goes down for half an hour. The stakes are low.

You might tell from the design that I'm trying to keep things basic. The most outrageous things on the entire site are two SVG icons, a CSS animation, an embedded font, and poor mobile responsiveness. Oh, and one added script for site stats that you are able to look at yourself◊^[3] (I just want to see which posts get more traction than others, is all).

The thing about the build and deploy process, and I was well aware of it at the time, is that it is wildly overcomplicated. In fact, I imagine a lot of our new processes are more complicated than they need to be, as cloud providers and SaaS compete for developer mindshare and hook people into various novel solutions to highly specific problems.



@@ 15,74 15,73 @@ I've tripped up on this a few times lately because I was locked into a certain w

The first issue was how this site is built. Hakyll is a library for building a static site generator (SSG), so you build your own program using Hakyll's functions, add in whatever else you want because it's a simple Haskell program, and then use the resulting binary to convert your markdowns and org-modes and LaTeXes (LaTiCeS?) into beautiful HTML.
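
For a sense of the shape of such a program, here's a minimal sketch against Hakyll's public API (not the actual generator behind this site):

◊codeblock['haskell]{
  {-# LANGUAGE OverloadedStrings #-}
  import Hakyll

  -- Compile every markdown post to HTML through a template.
  main :: IO ()
  main = hakyll $ do
    match "templates/*" $ compile templateBodyCompiler

    match "posts/*" $ do
      route   $ setExtension "html"
      compile $ pandocCompiler
        >>= loadAndApplyTemplate "templates/post.html" defaultContext
}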

Naturally I had to keep this binary stored somewhere because re-running a 30+ minute build is a little bit wasteful (Hakyll pulls in pretty much _all_ of Pandoc). At first, I gravitated towards using build caches in CI or temporary artifact stores, and so long as they didn't expire I wouldn't trigger a rebuild. Then there was some exploration around binary caches in Haskell, or using a service like bintray to host the file.
Naturally I had to keep this binary stored somewhere because re-running a 30+ minute build is a little bit wasteful (Hakyll pulls in pretty much ◊em{all} of Pandoc). At first, I gravitated towards using build caches in CI or temporary artifact stores, and so long as they didn't expire I wouldn't trigger a rebuild. Then there was some exploration around binary caches in Haskell, or using a service like bintray to host the file.

Even though it worked it all felt a bit... manky. It's clearly not the way software has been distributed for a long time, and the idea of hopping from one SaaS to another on their free or open source accounts is just not a good one. But alas, that's where we find ourselves these days.

I threw all of that out at one point and moved back over to GitHub Pages, because that's where my repo was. The last few posts here were published in a manner not so dissimilar to this:

```
emacs posts/my-new-post.md &
# write the damn post
git add posts/my-new-post.md
git commit -m 'add post...'
git push
stack exec site build # dumps output into www
mv www ../blah2
git co deploy
cp -r ../blah2/* .
git add .
git commit -m 'deploy'
git push
git co main
# find typo
git add .
git commit --amend --no-edit
git push -f
stack exec site build
#.........
```
◊codeblock['bash]{
  emacs posts/my-new-post.md &
  # write the damn post
  git add posts/my-new-post.md
  git commit -m 'add post...'
  git push
  stack exec site build # dumps output into www
  mv www ../blah2
  git co deploy
  cp -r ../blah2/* .
  git add .
  git commit -m 'deploy'
  git push
  git co main
  # find typo
  git add .
  git commit --amend --no-edit
  git push -f
  stack exec site build
  #.........
}

It's not the worst thing ever, except that if I switch to a different computer, like my laptop, I have to do a lot of setup to be able to write and deploy. I'd rather focus on my writing and automate the rest of it away, which brings us back to CI and complication.

So, back to basics!

The site has moved once again, back to a VPS hosted somewhere in the UK. Caddy[^5] is doing the hard work as an elegant alternative to nginx or apache and for simple setups you can't really go wrong with it if you just want a server with HTTPS by default. Here's how I configured this site:
The site has moved once again, back to a VPS hosted somewhere in the UK. Caddy◊^[4] is doing the hard work as an elegant alternative to nginx or apache and for simple setups you can't really go wrong with it if you just want a server with HTTPS by default. Here's how I configured this site:

```
kamelasa.dev {
  redir https://www.kamelasa.dev{uri}
}
◊codeblock['caddy]{
  kamelasa.dev {
    redir https://www.kamelasa.dev{uri}
  }

www.kamelasa.dev {
  root * /var/www/kamelasa.dev
  file_server
  www.kamelasa.dev {
    root * /var/www/kamelasa.dev
    file_server
  }
}
```

Deploying to this server is a case of firing off a couple of `ssh`, `scp` or `rsync` commands using a separate user with its own SSH key, and as soon as the command finishes running, the changes are visible online.[^6]
Deploying to this server is a case of firing off a couple of ◊code{ssh}, ◊code{scp} or ◊code{rsync} commands using a separate user with its own SSH key, and as soon as the command finishes running, the changes are visible online.◊^[5]
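
In practice that boils down to something like this (a sketch; the deploy user and paths here are placeholders rather than the real setup):

◊codeblock['bash]{
  # Build locally, then push the generated HTML to the VPS.
  # The "deploy" user and the paths are stand-ins for illustration.
  rsync -avz --delete www/ deploy@kamelasa.dev:/var/www/kamelasa.dev/

  # scp works just as well for a one-off copy; Caddy simply serves
  # whatever ends up in the web root.
  scp -r www/* deploy@kamelasa.dev:/var/www/kamelasa.dev/
}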

This leads me to the final bit. Modern tech feels more complicated as it tends towards distributed solutions: put thing _x_ here, deploy service _y_ there, sync them up with webhooks, and hope the network holds up to the task. Earlier tech feels more complicated because the documentation is intricate and detailed and requires a fair bit of fiddling around.
This leads me to the final bit. Modern tech feels more complicated as it tends towards distributed solutions: put thing ◊em{x} here, deploy service ◊em{y} there, sync them up with webhooks, and hope the network holds up to the task. Earlier tech feels more complicated because the documentation is intricate and detailed and requires a fair bit of fiddling around.

It took me just about a day to figure out how to host my own `apt` repository for Debian[^7], compiling information from various manuals, blog posts and examples. It was mostly a case of creating a GPG key and setting up a correct directory structure for `apt-ftparchive`[^8] to do its business, with a little bit of extra config. I'll go into detail about that another time, but let it be said it does the job tremendously in any Debian-based CI pipeline.
It took me just about a day to figure out how to host my own ◊code{apt} repository for Debian◊^[6], compiling information from various manuals, blog posts and examples. It was mostly a case of creating a GPG key and setting up a correct directory structure for ◊code{apt-ftparchive}◊^[7] to do its business, with a little bit of extra config. I'll go into detail about that another time, but let it be said it does the job tremendously in any Debian-based CI pipeline.
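
The gist of it, as a rough sketch rather than the exact commands used here (the suite layout and signing key are placeholders):

◊codeblock['bash]{
  # pool/ holds the .deb files; dists/ is what apt clients actually read.
  apt-ftparchive packages pool > dists/stable/main/binary-amd64/Packages
  gzip -kf dists/stable/main/binary-amd64/Packages

  # Generate the Release file, then sign it with the repository's GPG key.
  apt-ftparchive release dists/stable > dists/stable/Release
  gpg --default-key "repo@example.com" -abs -o dists/stable/Release.gpg dists/stable/Release
  gpg --default-key "repo@example.com" --clearsign -o dists/stable/InRelease dists/stable/Release
}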

```
◊codeblock['bash]{
cd www.kamelasa.dev
sudo apt install kamelasa
kamelasa build
```

---

On another note, this site has now also left GitHub for Sourcehut[^9] and, at risk of being a bit narcissistic, a comments section lives on a mailing list there[^10]. Should you feel that the stuff I post is worth talking about, of course. You don't need a Sourcehut account to get involved, although you'll need to join the list (without signing up for Sourcehut) if you want more than read-only access.

[^1]: <https://www.kamelasa.dev/programming/blogging-in-haskell>
[^2]: <https://www.kamelasa.dev/programming/hakyll-on-devops-pipelines>
[^3]: <https://plausible.io/kamelasa.dev>
[^4]: <https://sourcehut.org/>
[^5]: <https://caddyserver.com/v2>
[^6]: I should probably sort out proper HTTP caching though...
[^7]: <https://pkg.kamelasa.dev>
[^8]: <https://manpages.debian.org/buster/apt-utils/apt-ftparchive.1.en.html>
[^9]: <https://sourcehut.org/>
[^10]: <https://lists.sr.ht/~mrlee/kamelasa.dev-discuss>
}

◊hr{}

On another note, this site has now also left GitHub for Sourcehut◊^[8] and, at risk of being a bit narcissistic, a comments section lives on a mailing list there◊^[9]. Should you feel that the stuff I post is worth talking about, of course. You don't need a Sourcehut account to get involved, although you'll need to join the list (without signing up for Sourcehut) if you want more than read-only access.

◊^[1]{◊<>["https://www.kamelasa.dev/programming/blogging-in-haskell"]}
◊^[2]{◊<>["https://www.kamelasa.dev/programming/hakyll-on-devops-pipelines"]}
◊^[3]{◊<>["https://plausible.io/kamelasa.dev"]}
◊^[4]{◊<>["https://caddyserver.com/v2"]}
◊^[5]{◊<>["I should probably sort out proper HTTP caching though..."]}
◊^[6]{◊<>["https://pkg.kamelasa.dev"]}
◊^[7]{◊<>["https://manpages.debian.org/buster/apt-utils/apt-ftparchive.1.en.html"]}
◊^[8]{◊<>["https://sourcehut.org"]}
◊^[9]{◊<>["https://lists.sr.ht/~mrlee/kamelasa.dev-discuss"]}
\ No newline at end of file

D posts/using-ruby-c-in-ruby.md => posts/using-ruby-c-in-ruby.md +0 -91
@@ 1,91 0,0 @@
---
title: Using Ruby's C API inside Ruby
date: 2021-01-18
category: programming
status: published
---

A thought occurred to me in my mask-wearing, lockdown-addled brain last night: why the hell did I choose _now_ to stop drinking? It's for my own good, I told myself, and so my thoughts shifted further into the absurd with nary a mind-altering substance in sight to stop them.

One of those thoughts stuck out in particular, because of how ridiculous it sounded: could you optimise your Ruby code by using FFI with Ruby's C bindings? I'm not talking about making a native extension in pure C, I'm talking about making Ruby talk to itself through a foreign function interface using the ffi gem[^1].

Let's apply some method to this madness and set up some bindings, otherwise we're dead in the water. Let's be descriptive and call our FFI module `LibRuby`. No naming conflicts at all there, _no sirree_!

```ruby
require 'ffi'

module LibRuby
  extend FFI::Library

  ffi_lib 'ruby'

  typedef :pointer, :value
  typedef :pointer, :id

  attach_variable :rb_mKernel, :value
  attach_function :rb_const_get, [:value, :id], :value
  attach_function :rb_intern, [:string], :id
  attach_function :rb_funcall, [:value, :id, :int, :varargs], :value
  attach_function :rb_str_new_cstr, [:string], :value
end
```

If you look at the code in this module, you'll notice that I used `attach_variable` to get access to the Kernel module, and `attach_function` for the method calls. The `:id` and `:value` types are just aliases for `:pointer`, because `VALUE` and `ID` in the C API are themselves pointers. It's for the sake of documentation, so it's clearer what order you pass arguments in.

Ruby's built in modules and classes are already defined globally with a naming scheme. In this case, `Kernel` is a variable called `rb_mKernel`, where `rb` is a prefix that every C function has in common (so you know it's for Ruby as C doesn't have namespaces), and the letter `m` means `module`. If it was `c` instead it would mean `class`.
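
For instance, the `String` class lives in a global called `rb_cString`, which could be attached in exactly the same way (an illustrative extra binding, not part of the module above):

```ruby
module LibRuby
  # `c` for class, just as `m` means module: rb_cString is the String class.
  attach_variable :rb_cString, :value
end
```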

Anyway this boilerplate should give us enough to do a hello world using Ruby's C API but at runtime, in Ruby, so it's time to fire up `irb`.

<aside>It should go without saying that at this point, you're not just playing with fire, you're inviting it to burn down your house. Be careful lest the `segfault`s creep up on you.</aside>

Let's take it from the top and talk through this ungodly incantation. Go ahead and copy that little module into your console! If it fails, make sure you've got the `ffi` gem installed[^2].

Once you're done, you can save some keystrokes by importing that module.

```ruby
include LibRuby
```

In order to call `puts` in Ruby through the C API, we'll need to get a reference to the module it's defined in (`Kernel`), and also get the method name as a symbol (like you might normally do with `.to_sym`).

```ruby
kernel = LibRuby.rb_mKernel
puts_method = rb_intern('puts')
```

Oh, before we continue, better disable the garbage collector. This is a simple way to stop the oscillating turbine from splattering unpleasant substances around the room. (More on that later, but see if you can guess why.)

```ruby
GC.disable
```

We can't just pass in a normal string to `puts` without things going 💥, as everything is an object in Ruby and therefore we need to
get a pointer to a `String` instance (or in internal Ruby lingo, one of those `VALUE`s).

```ruby
str = rb_str_new_cstr('welcome, mortals')
```

Now we have all of the ingredients to make the actual call, which syntactically and aesthetically blows idiomatic Ruby out of the water. Delicately paste this into your console and you should see the string printed out. You'll also get a return value like `#<FFI::Pointer address=0x0000000000000008>`, which will refer to `Qnil`. `Qnil` is a pointer to Ruby's `nil` object.

```ruby
rb_funcall(kernel, puts_method, 1, :value, str)
```

Run it again a few times, and with different strings. If you're feeling experimental, attach more functions in `LibRuby` and see what else you can print out! Ruby's extension documentation should be a good place to start[^3].

### So, why disable the GC?

For every step in this post up to creating a `String` object, we've been using function bindings and global variables. Global variables and constants won't be garbage collected, because the global scope will always maintain a reference to them; besides which, it would be quite bad if your classes and modules suddenly disappeared after a GC pass.

The string object is different, however, as on the C side of things Ruby is taking a pointer to a C string (a `const char *`), allocating memory, and giving back a pointer to the new object. Eventually the GC will run and free up the memory at the pointer's address, and the string will no longer exist. You'll probably find something else at that address instead, or just garbage.

Disabling the GC in this instance is a **shitty hack** because it's a direct admission that the code is _not memory safe_. Hopefully you didn't need me to tell you that, though, and the quality of the code in this post was self-evident.

How would you fix it? Well, now we've found out that we _can_ write Ruby with itself we'll explore that next time. And there'll be benchmarks, too.

Until then, I'll see you further into the abyss.

[^1]: <https://github.com/ffi/ffi>
[^2]: `gem install ffi -- --enable-system-libffi`
[^3]: <https://ruby-doc.org/core-2.7.0/doc/extension_rdoc.html>

A posts/using-ruby-c-in-ruby.poly.pm => posts/using-ruby-c-in-ruby.poly.pm +93 -0
@@ 0,0 1,93 @@
#lang pollen

◊define-meta[title]{Using Ruby's C API inside Ruby}
◊define-meta[date]{2021-01-18}
◊define-meta[published #t]
◊define-meta[category]{programming}

A thought occurred to me in my mask-wearing, lockdown-addled brain last night: why the hell did I choose ◊em{now} to stop drinking? It's for my own good, I told myself, and so my thoughts shifted further into the absurd with nary a mind-altering substance in sight to stop them.

One of those thoughts stuck out in particular, because of how ridiculous it sounded: could you optimise your Ruby code by using FFI with Ruby's C bindings? I'm not talking about making a native extension in pure C, I'm talking about making Ruby talk to itself through a foreign function interface using the ffi gem◊^[1].

Let's apply some method to this madness and set up some bindings, otherwise we're dead in the water. Let's be descriptive and call our FFI module ◊code{LibRuby}. No naming conflicts at all there, ◊em{no sirree}!

◊codeblock['ruby]{
  require 'ffi'

  module LibRuby
    extend FFI::Library

    ffi_lib 'ruby'

    typedef :pointer, :value
    typedef :pointer, :id

    attach_variable :rb_mKernel, :value
    attach_function :rb_const_get, [:value, :id], :value
    attach_function :rb_intern, [:string], :id
    attach_function :rb_funcall, [:value, :id, :int, :varargs], :value
    attach_function :rb_str_new_cstr, [:string], :value
  end
}

If you look at the code in this module, you'll notice that I used ◊code{attach_variable} to get access to the ◊code{Kernel} module, and ◊code{attach_function} for the method calls. The ◊code{:id} and ◊code{:value} types are just aliases for ◊code{:pointer}, because ◊code{VALUE} and ◊code{ID} in the C API are pointer-sized handles (a ◊code{VALUE} is usually a tagged pointer to an object). The aliases are purely for documentation, so it's clearer what order you pass arguments in.

Ruby's built-in modules and classes are already defined globally with a naming scheme. In this case, ◊code{Kernel} is a variable called ◊code{rb_mKernel}, where ◊code{rb} is a prefix that every C function has in common (so you know it's for Ruby, as C doesn't have namespaces), and the letter ◊code{m} means ◊code{module}. If it were ◊code{c} instead, it would mean ◊code{class}.
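
That naming scheme covers far more than ◊code{Kernel}. Purely as an illustration (the walkthrough below doesn't use any of these), the same ◊code{attach_variable} trick exposes other well-known globals from the C API, and the ◊code{c}/◊code{m} prefix tells you what you're getting:

◊codeblock['ruby]{
  # Hypothetical additions to LibRuby, following the same naming scheme:
  # rb_c... is a class, rb_m... is a module.
  module LibRuby
    attach_variable :rb_cObject, :value      # the Object class
    attach_variable :rb_cString, :value      # the String class
    attach_variable :rb_mEnumerable, :value  # the Enumerable module
  end
}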

Anyway, this boilerplate should give us enough to do a hello world using Ruby's C API, but at runtime and in Ruby, so it's time to fire up ◊code{irb}.

◊aside{
  It should go without saying that at this point, you're not just playing with fire, you're inviting it to burn down your house. Be careful lest the ◊code{segfault}s creep up on you.
}

Let's take it from the top and talk through this ungodly incantation. Go ahead and copy that little module into your console! If it fails, make sure you've got the ◊code{ffi} gem installed◊^[2].

Once you're done, you can save some keystrokes by importing that module.

◊codeblock['ruby]{
  include LibRuby
}

In order to call ◊code{puts} in Ruby through the C API, we'll need to get a reference to the module it's defined in (◊code{Kernel}), and also get the method name as a symbol (like you might normally do with ◊code{.to_sym}).

◊codeblock['ruby]{
  kernel = LibRuby.rb_mKernel
  puts_method = rb_intern('puts')
}

Oh, before we continue, better disable the garbage collector. This is a simple way to stop the oscillating turbine from splattering unpleasant substances around the room. (More on that later, but see if you can guess why.)

◊codeblock['ruby]{
  GC.disable
}

We can't just pass in a normal string to ◊code{puts} without things going 💥: everything is an object in Ruby, so what we actually need is a pointer to a ◊code{String} instance (or, in internal Ruby lingo, one of those ◊code{VALUE}s).

◊codeblock['ruby]{
  str = rb_str_new_cstr('welcome, mortals')
}

Now we have all of the ingredients to make the actual call, which syntactically and aesthetically blows idiomatic Ruby out of the water. Delicately paste this into your console and you should see the string printed out. You'll also get a return value like ◊code{#<FFI::Pointer address=0x0000000000000008>}, which refers to ◊code{Qnil}, the special ◊code{VALUE} Ruby uses to represent ◊code{nil} (◊code{puts} always returns ◊code{nil}).

◊codeblock['ruby]{
  rb_funcall(kernel, puts_method, 1, :value, str)
}
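
Out of curiosity, you can pull the raw number out of that ◊code{FFI::Pointer} with ◊code{#address} and check it against the ◊code{0x08} above. Treat this as a sketch: the exact value behind ◊code{Qnil} depends on your Ruby build.

◊codeblock['ruby]{
  # Same call again, but keep the return value and inspect the raw address.
  result = rb_funcall(kernel, puts_method, 1, :value, str)
  result.address  # => 8 on the interpreter used here, i.e. the 0x08 above
}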

Run it again a few times, and with different strings. If you're feeling experimental, attach more functions in ◊code{LibRuby} and see what else you can print out! Ruby's extension documentation should be a good place to start◊^[3].
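
Even without attaching anything new, ◊code{rb_funcall} will happily call methods on the string ◊code{VALUE} itself. A minimal sketch, reusing only the bindings we already have (the method and variable names are just examples):

◊codeblock['ruby]{
  # Build a second String VALUE and glue it onto the first via String#+,
  # then print the result through Kernel#puts as before.
  tail = rb_str_new_cstr(', and stay a while')
  longer = rb_funcall(str, rb_intern('+'), 1, :value, tail)
  rb_funcall(kernel, puts_method, 1, :value, longer)
  # => welcome, mortals, and stay a while
}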

◊h3{So, why disable the GC?}

For every step in this post up to creating a ◊code{String} object, we've been using function bindings and global variables. Global variables and constants won't be garbage collected, because the global scope will always maintain a reference to them; besides which, it would be quite bad if your classes and modules suddenly disappeared after a GC pass.

The string object is different, however, as on the C side of things Ruby is taking a pointer to a C string (a ◊code{const char *}), allocating memory, and giving back a pointer to the new object. Eventually the GC will run and free up the memory at the pointer's address, and the string will no longer exist. You'll probably find something else at that address instead, or just garbage.
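
If you want to see that happen for yourself (in a throwaway ◊code{irb} session you don't mind crashing), a rough sketch is to let the collector loose and then use the stale pointer. The object may survive a pass or two if the conservative GC still spots its address somewhere, but sooner or later it goes away:

◊codeblock['ruby]{
  # Let the GC reclaim the String behind our pointer, then poke at it anyway.
  # Best case you get garbage output; worst case the whole session segfaults.
  GC.enable
  GC.start
  rb_funcall(kernel, puts_method, 1, :value, str)
}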

Disabling the GC in this instance is a ◊strong{shitty hack} because it's a direct admission that the code is ◊em{not memory safe}. Hopefully you didn't need me to tell you that, though, and the quality of the code in this post was self-evident.

How would you fix it? Well, now that we've found out we ◊em{can} write Ruby with itself, we'll explore that next time. And there'll be benchmarks, too.

Until then, I'll see you further into the abyss.

◊^[1]{◊<>["https://github.com/ffi/ffi"]}
◊^[2]{◊code{gem install ffi -- --enable-system-libffi}}
◊^[3]{◊<>["https://ruby-doc.org/core-2.7.0/doc/extension_rdoc.html"]}
\ No newline at end of file

A redirs.caddy => redirs.caddy +50 -0
@@ 0,0 1,50 @@
redir /personal/enough/ /posts/enough.html permanent
redir /personal/a-decade-of-work/ /posts/a-decade-of-work.html permanent
redir /personal/on-working-remotely/ /posts/on-working-remotely.html permanent
redir /personal/growing-up/ /posts/growing-up.html permanent
redir /personal/time-travel/ /posts/time-travel.html permanent
redir /personal/celebrate-each-other/ /posts/celebrate-each-other.html permanent
redir /personal/a-damn-good-listen/ /posts/a-damn-good-listen.html permanent
redir /personal/i-am-here/ /posts/i-am-here.html permanent
redir /personal/isolation-aloneness-and-loneliness/ /posts/isolation-aloneness-and-loneliness.html permanent
redir /personal/on-sharing-vulnerability/ /posts/on-sharing-vulnerability.html permanent
redir /culture/human-after-all/ /posts/human-after-all.html permanent
redir /web/floc-off/ /posts/floc-off.html permanent
redir /programming/gettin-ziggy-with-it-pi-zero/ /posts/gettin-ziggy-with-it-pi-zero.html permanent
redir /programming/to-simpler-times/ /posts/to-simpler-times.html permanent
redir /programming/my-favourite-thing-about-programming/ /posts/my-favourite-thing-about-programming.html permanent
redir /programming/devops/ /posts/devops.html permanent
redir /programming/blogging-in-haskell/ /posts/blogging-in-haskell.html permanent
redir /programming/things-ive-changed-my-mind-on/ /posts/things-ive-changed-my-mind-on.html permanent
redir /programming/past-mistakes/ /posts/past-mistakes.html permanent
redir /programming/agile-lipstick/ /posts/agile-lipstick.html permanent
redir /programming/hakyll-on-devops-pipelines/ /posts/hakyll-on-devops-pipelines.html permanent
redir /programming/do-you-really-need-those-microservices/ /posts/do-you-really-need-those-microservices.html permanent
redir /programming/can-you-crack-the-code/ /posts/can-you-crack-the-code.html permanent
redir /programming/using-ruby-c-in-ruby/ /posts/using-ruby-c-in-ruby.html permanent
redir /mental-health/permanent-solutions-to-temporary-problems/ /posts/permanent-solutions-to-temporary-problems.html permanent


























R templates/default.html => template.html.p +36 -18
@@ 4,7 4,7 @@
    <meta charset="utf-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1" />

    <title>kamelåså - $title$</title>
    <title>kamelåså - ◊post->title[here]</title>

    <link rel="alternate" type="application/rss+xml" href="/rss.xml" />
    <link rel="alternate" type="application/atom+xml" href="/atom.xml" />


@@ 12,21 12,6 @@
    <link rel="stylesheet" href="/css/main.css" />
    <link rel="stylesheet" href="/css/syntax.css" />

    <link
      rel="preload"
      as="font"
      href="/fonts/cascadia.woff2"
      type="font/woff2"
      crossorigin="anonymous"
    />
    <link
      rel="preload"
      as="font"
      href="/fonts/cascadia.woff"
      type="font/woff"
      crossorigin="anonymous"
    />

    <script
      async
      defer


@@ 97,12 82,45 @@
      </div>
    </div>

    <main class="container">$body$</main>
    <main class="container">
◊when/splice[(equal? 'posts (parent here))]{
  <article>
    <header class="header inverse-video">
      <h2 class="title">◊post->title[here]</h2>
      <span class="ert">~◊post->ert[here] min.read</span>
      <span class="post-date">◊post->date[here]</span>
    </header>

    <section>
      ◊(->html doc)
    </section>

    <hr />

    <section>
      <h3>Changelog</h3>
      <ul>
        ◊for/splice[([log (in-list (post->history here))])]{
          <li class="git-log">
            <a href="◊log->giturl[log]">◊log->commit[log]</a>
            <span class="commit-msg">◊log->message[log]</span>
            <time class="commit-time" datetime="◊log->date[log]">(◊log->date[log])</time>
          </li>
        }
      </ul>
    </section>
  </article>
}

◊when/splice[(not (equal? 'posts (parent here)))]{
  ◊(->html doc)
}
    </main>

    <footer class="container">
      <header>
        <p class="modeline inverse-video">
          -UUU:----F1&nbsp;&nbsp;$title$&nbsp;&nbsp;&nbsp;&nbsp;Bot
          -UUU:----F1&nbsp;&nbsp;◊|page-title|&nbsp;&nbsp;&nbsp;&nbsp;Bot
          L100%&nbsp;&nbsp;Git:main&nbsp;&nbsp;(HTML+) ----------
        </p>
      </header>

D templates/archive.html => templates/archive.html +0 -2
@@ 1,2 0,0 @@
Here you can find all my previous posts:
$partial("templates/post-list.html")$

D templates/git-log.html => templates/git-log.html +0 -9
@@ 1,9 0,0 @@
<ul>
    $for(gitlogs)$
    <li class="git-log">
      <a href="https://git.sr.ht/~mrlee/www.kamelasa.dev/commit/$commit$">$commit$</a>
      <span class="commit-msg">$message$</span>
      <time class="commit-time" datetime="$date$">($date$)</time>
    </li>
    $endfor$
</ul>

D templates/post-list.html => templates/post-list.html +0 -14
@@ 1,14 0,0 @@
<table class="borderless">
    <tbody>
        $for(posts)$
        <tr>
            <td class="file-permission">-rw-r--r--</td>
            <td class="user">mrlee</td>
            <td class="group">www</td>
            <td class="size">$size$</td>
            <td class="date">$date$</td>
            <td><a href="$url$">$title$</a></td>
        </tr>
        $endfor$
    </tbody>
</table>
\ No newline at end of file

D templates/post.html => templates/post.html +0 -15
@@ 1,15 0,0 @@
<article>
  <header class="header inverse-video">
    <h2 class="title">$title$</h2>
    <span class="ert">~$ert$ min. read</span>
  </header>

  <section>$body$</section>

  <hr />

  <section>
    <h3>Changelog</h3>
    $partial("templates/git-log.html")$
  </section>
</article>