From 26c067147a3140ec6b23ee9d93dd2b01ef6f45ae Mon Sep 17 00:00:00 2001 From: Levi Neuwirth Date: Thu, 19 Mar 2026 15:27:12 -0400 Subject: [PATCH] Build telemetry --- Makefile | 4 + build/Site.hs | 2 + build/Stats.hs | 532 +++++++++++++++++++++++++++++++++ content/me/index.md | 19 +- content/memento-mori/index.md | 8 + data/build-start.txt | 1 + data/last-build-seconds.txt | 1 + levineuwirth.cabal | 1 + spec.md | 10 +- static/css/build.css | 109 +++++++ static/css/layout.css | 12 + templates/partials/footer.html | 2 +- templates/partials/head.html | 1 + 13 files changed, 695 insertions(+), 7 deletions(-) create mode 100644 build/Stats.hs create mode 100644 data/build-start.txt create mode 100644 data/last-build-seconds.txt create mode 100644 static/css/build.css diff --git a/Makefile b/Makefile index 17dc27e..888f905 100644 --- a/Makefile +++ b/Makefile @@ -8,9 +8,13 @@ export build: @git add content/ @git diff --cached --quiet || git commit -m "auto: $$(date -u +%Y-%m-%dT%H:%M:%SZ)" + @date +%s > data/build-start.txt cabal run site -- build pagefind --site _site > IGNORE.txt + @BUILD_END=$$(date +%s); \ + BUILD_START=$$(cat data/build-start.txt); \ + echo $$((BUILD_END - BUILD_START)) > data/last-build-seconds.txt deploy: build @if [ -z "$(GITHUB_TOKEN)" ] || [ -z "$(GITHUB_REPO)" ]; then \ diff --git a/build/Site.hs b/build/Site.hs index 1e5fb70..ecdf3b2 100644 --- a/build/Site.hs +++ b/build/Site.hs @@ -16,6 +16,7 @@ import Commonplace (commonplaceCtx) import Contexts (siteCtx, essayCtx, postCtx, pageCtx, poetryCtx, fictionCtx, compositionCtx) import Tags (buildAllTags, applyTagRules) import Pagination (blogPaginateRules) +import Stats (statsRules) feedConfig :: FeedConfiguration feedConfig = FeedConfiguration @@ -54,6 +55,7 @@ rules = do -- --------------------------------------------------------------------------- tags <- buildAllTags applyTagRules tags siteCtx + statsRules tags -- Per-page JS files — authored alongside content in content/**/*.js match 
"content/**/*.js" $ do diff --git a/build/Stats.hs b/build/Stats.hs new file mode 100644 index 0000000..62c9ee1 --- /dev/null +++ b/build/Stats.hs @@ -0,0 +1,532 @@ +{-# LANGUAGE GHC2021 #-} +{-# LANGUAGE OverloadedStrings #-} +-- | Build telemetry page (/build/): corpus statistics, word-length +-- distribution, tag frequencies, link analysis, epistemic coverage, +-- output metrics, repository overview, and build timing. +-- Rendered as a full essay (3-column layout, TOC, metadata block). +module Stats (statsRules) where + +import Control.Exception (IOException, catch) +import Control.Monad (forM) +import Data.List (find, isSuffixOf, sortBy) +import qualified Data.Map.Strict as Map +import Data.Maybe (catMaybes, fromMaybe, isJust, listToMaybe) +import Data.Ord (comparing, Down (..)) +import qualified Data.Set as Set +import Data.Time (getCurrentTime, formatTime, defaultTimeLocale) +import System.Directory (doesDirectoryExist, getFileSize, listDirectory) +import System.Exit (ExitCode (..)) +import System.FilePath (takeExtension, ()) +import System.Process (readProcessWithExitCode) +import Text.Read (readMaybe) +import qualified Data.Aeson as Aeson +import qualified Data.Aeson.Key as AK +import qualified Data.Aeson.KeyMap as KM +import qualified Data.Vector as V +import qualified Data.Text as T +import qualified Data.Text.Encoding as TE +import Hakyll +import Authors (authorLinksField) +import Contexts (siteCtx) +import Utils (readingTime) + +-- --------------------------------------------------------------------------- +-- Types +-- --------------------------------------------------------------------------- + +data TypeRow = TypeRow + { trLabel :: String + , trCount :: Int + , trWords :: Int + } + +data PageInfo = PageInfo + { piTitle :: String + , piUrl :: String + , piWC :: Int + } + +-- --------------------------------------------------------------------------- +-- Hakyll helpers +-- --------------------------------------------------------------------------- + 
+loadWC :: Item String -> Compiler Int +loadWC item = do + snap <- loadSnapshot (itemIdentifier item) "word-count" + return $ fromMaybe 0 (readMaybe (itemBody snap)) + +loadPI :: Item String -> Compiler (Maybe PageInfo) +loadPI item = do + meta <- getMetadata (itemIdentifier item) + mRoute <- getRoute (itemIdentifier item) + wc <- loadWC item + return $ fmap (\r -> PageInfo + { piTitle = fromMaybe "(untitled)" (lookupString "title" meta) + , piUrl = "/" ++ r + , piWC = wc + }) mRoute + +-- --------------------------------------------------------------------------- +-- Formatting helpers +-- --------------------------------------------------------------------------- + +commaInt :: Int -> String +commaInt n + | n < 1000 = show n + | otherwise = commaInt (n `div` 1000) ++ "," ++ pad3 (n `mod` 1000) + where + pad3 x + | x < 10 = "00" ++ show x + | x < 100 = "0" ++ show x + | otherwise = show x + +formatBytes :: Integer -> String +formatBytes b + | b < 1024 = show b ++ " B" + | b < 1024*1024 = showD (b * 10 `div` 1024) ++ " KB" + | otherwise = showD (b * 10 `div` (1024*1024)) ++ " MB" + where showD n = show (n `div` 10) ++ "." ++ show (n `mod` 10) + +rtStr :: Int -> String +rtStr totalWords + | mins < 60 = show mins ++ " min" + | otherwise = show (mins `div` 60) ++ "h " ++ show (mins `mod` 60) ++ "m" + where mins = totalWords `div` 200 + +pctStr :: Int -> Int -> String +pctStr _ 0 = "—" +pctStr n total = show (n * 100 `div` total) ++ "%" + +-- | Strip HTML tags for plain-text word counting. +stripHtmlTags :: String -> String +stripHtmlTags [] = [] +stripHtmlTags ('<':rest) = stripHtmlTags (drop 1 (dropWhile (/= '>') rest)) +stripHtmlTags (c:rest) = c : stripHtmlTags rest + +-- | Normalise a page URL for backlink map lookup (strip trailing .html). 
+normUrl :: String -> String +normUrl u + | ".html" `isSuffixOf` u = take (length u - 5) u + | otherwise = u + +-- --------------------------------------------------------------------------- +-- IO: output directory walk +-- --------------------------------------------------------------------------- + +walkDir :: FilePath -> IO [(FilePath, Integer)] +walkDir dir = do + entries <- listDirectory dir `catch` (\(_ :: IOException) -> return []) + fmap concat $ forM entries $ \e -> do + let path = dir e + isDir <- doesDirectoryExist path + if isDir + then walkDir path + else do + sz <- getFileSize path `catch` (\(_ :: IOException) -> return 0) + return [(path, sz)] + +displayExt :: FilePath -> String +displayExt path = case takeExtension path of + ".html" -> ".html" + ".css" -> ".css" + ".js" -> ".js" + ".woff2" -> ".woff2" + ".svg" -> ".svg" + ".mp3" -> ".mp3" + ".pdf" -> ".pdf" + ".json" -> ".json" + ".xml" -> ".xml" + ".ico" -> ".ico" + ".png" -> "image" + ".jpg" -> "image" + ".jpeg" -> "image" + ".webp" -> "image" + _ -> "other" + +getOutputStats :: IO (Map.Map String (Int, Integer), Int, Integer) +getOutputStats = do + files <- walkDir "_site" + let grouped = foldr (\(path, sz) acc -> + Map.insertWith (\(c1,s1) (c2,s2) -> (c1+c2, s1+s2)) + (displayExt path) + (1, sz) acc) + Map.empty files + return (grouped, length files, sum (map snd files)) + +-- --------------------------------------------------------------------------- +-- IO: lines of code +-- --------------------------------------------------------------------------- + +countLinesDir :: FilePath -> String -> (FilePath -> Bool) -> IO (Int, Int) +countLinesDir dir ext skipPred = do + entries <- listDirectory dir `catch` (\(_ :: IOException) -> return []) + let files = filter (\e -> takeExtension e == ext && not (skipPred e)) entries + ls <- fmap sum $ forM files $ \e -> do + content <- readFile (dir e) `catch` (\(_ :: IOException) -> return "") + return (length (lines content)) + return (length files, ls) + 
+getLocStats :: IO (Int, Int, Int, Int, Int, Int) +-- (hsFiles, hsLines, cssFiles, cssLines, jsFiles, jsLines) +getLocStats = do + (hf, hl) <- countLinesDir "build" ".hs" (const False) + (cf, cl) <- countLinesDir "static/css" ".css" (const False) + (jf, jl) <- countLinesDir "static/js" ".js" (".min.js" `isSuffixOf`) + return (hf, hl, cf, cl, jf, jl) + +-- --------------------------------------------------------------------------- +-- IO: git stats +-- --------------------------------------------------------------------------- + +gitRun :: [String] -> IO String +gitRun args = do + (ec, out, _) <- readProcessWithExitCode "git" args "" + return $ if ec == ExitSuccess then out else "" + +getGitStats :: IO (Int, String) +getGitStats = do + countOut <- gitRun ["rev-list", "--count", "HEAD"] + firstOut <- gitRun ["log", "--format=%ad", "--date=short", "--reverse"] + let commits = fromMaybe 0 (readMaybe (filter (/= '\n') countOut) :: Maybe Int) + firstDate = case lines firstOut of { (d:_) -> d; _ -> "\x2014" } + return (commits, firstDate) + +-- --------------------------------------------------------------------------- +-- HTML rendering: section helpers +-- --------------------------------------------------------------------------- + +section :: String -> String -> String -> String +section id_ title body = concat + [ "

", title, "

\n" + , body + ] + +table :: [String] -> [[String]] -> Maybe [String] -> String +table headers rows mFoot = concat + [ "" + , "", concatMap (\h -> "") headers, "" + , "", concatMap renderRow rows, "" + , maybe "" renderFoot mFoot + , "
" ++ h ++ "
" + ] + where + renderRow cells = "" ++ concatMap (\c -> "" ++ c ++ "") cells ++ "" + renderFoot cells = "" + ++ concatMap (\c -> "" ++ c ++ "") cells + ++ "" + +dl :: [(String, String)] -> String +dl pairs = "
" + ++ concatMap (\(k, v) -> "
" ++ k ++ "
" ++ v ++ "
") pairs + ++ "
" + +link :: String -> String -> String +link url title = "" ++ title ++ "" + +-- --------------------------------------------------------------------------- +-- HTML rendering: sections +-- --------------------------------------------------------------------------- + +renderContent :: [TypeRow] -> String +renderContent rows = + section "content" "Content" $ + table + ["Type", "Count", "Words", "Reading time"] + (map row rows) + (Just ["Total", commaInt totalCount, commaInt totalWords, rtStr totalWords]) + where + totalCount = sum (map trCount rows) + totalWords = sum (map trWords rows) + row r = [ trLabel r + , commaInt (trCount r) + , commaInt (trWords r) + , rtStr (trWords r) + ] + +renderPages :: [PageInfo] -> Maybe (String,String,String) -> Maybe (String,String,String) -> String +renderPages allPIs mOldest mNewest = + section "pages" "Pages" $ concat + [ dl $ + [ ("Total pages", commaInt (length allPIs)) + , ("Average length", commaInt avgWC ++ " words") + ] ++ + maybe [] (\(d,t,u) -> [("Oldest content", d ++ " \x2014 " ++ link u t)]) mOldest ++ + maybe [] (\(d,t,u) -> [("Newest content", d ++ " \x2014 " ++ link u t)]) mNewest + , "

Longest

" + , pageList (take 3 (sortBy (comparing (Down . piWC)) hasSomeWC)) + , "

Shortest

" + , pageList (take 3 (sortBy (comparing piWC) hasSomeWC)) + ] + where + hasSomeWC = filter (\p -> piWC p > 0) allPIs + avgWC = if null hasSomeWC then 0 + else sum (map piWC hasSomeWC) `div` length hasSomeWC + pageList ps = "
    " + ++ concatMap (\p -> "
  1. " ++ link (piUrl p) (piTitle p) + ++ " \x2014 " ++ commaInt (piWC p) ++ " words
  2. ") ps + ++ "
" + +renderDistribution :: [Int] -> String +renderDistribution wcs = + section "distribution" "Word-length distribution" $ + "
" ++ concatMap bar buckets ++ "
" + where + bucketOf w + | w < 500 = 0 | w < 1000 = 1 | w < 2000 = 2 | w < 5000 = 3 | otherwise = 4 + labels = ["< 500", "500 \x2013 1k", "1k \x2013 2k", "2k \x2013 5k", "\x2265 5k"] + counts = foldr (\w acc -> Map.insertWith (+) (bucketOf w) 1 acc) + (Map.fromList [(i,0) | i <- [0..4]]) wcs + buckets = [(labels !! i, fromMaybe 0 (Map.lookup i counts)) | i <- [0..4]] + maxCount = max 1 (maximum (map snd buckets)) + bar (lbl, n) = + let pct = n * 100 `div` maxCount + in concat + [ "
" + , "", lbl, "" + , "" + , "", show n, "" + , "
" + ] + +renderTagsSection :: [(String, Int)] -> Int -> String +renderTagsSection topTags uniqueCount = + section "tags" "Tags" $ concat + [ dl [("Unique tags", commaInt uniqueCount)] + , table ["Tag", "Items"] (map row topTags) Nothing + ] + where + row (t, n) = [link ("/" ++ t ++ "/") t, show n] + +renderLinks :: Maybe (String, Int, String) -> Int -> Int -> String +renderLinks mMostLinked orphanCount total = + section "links" "Links" $ + dl $ + (case mMostLinked of + Nothing -> [("Most-linked page", "\x2014")] + Just (u, n, t) -> [("Most-linked page", + link u t ++ " (" ++ show n ++ " inbound links)")]) ++ + [ ("Orphan pages", commaInt orphanCount + ++ " of " ++ commaInt total + ++ " (" ++ pctStr orphanCount total ++ ")") ] + +renderEpistemic :: Int -> Int -> Int -> Int -> Int -> String +renderEpistemic total ws wc wi we = + section "epistemic" "Epistemic coverage" $ + table + ["Field", "Set", "Coverage"] + [ row "Status" ws + , row "Confidence" wc + , row "Importance" wi + , row "Evidence" we + ] + Nothing + where + row label n = [label, show n ++ " / " ++ show total, pctStr n total] + +renderOutput :: Map.Map String (Int, Integer) -> Int -> Integer -> String +renderOutput grouped totalFiles totalSize = + section "output" "Output" $ + table + ["Type", "Files", "Size"] + (map row (sortBy (comparing (Down . snd . snd)) (Map.toList grouped))) + (Just ["Total", commaInt totalFiles, formatBytes totalSize]) + where + row (ext, (n, sz)) = [ext, commaInt n, formatBytes sz] + +renderRepository :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> String -> String +renderRepository hf hl cf cl jf jl commits firstDate = + section "repository" "Repository" $ + dl + [ ("Haskell", commaInt hl ++ " lines across " ++ show hf ++ " files") + , ("CSS", commaInt cl ++ " lines across " ++ show cf ++ " files") + , ("JavaScript", commaInt jl ++ " lines across " ++ show jf ++ " files (excl. 
minified)") + , ("Total git commits", commaInt commits) + , ("Repository started", firstDate) + ] + +renderBuild :: String -> String -> String +renderBuild ts dur = + section "build" "Build" $ + dl + [ ("Generated", ts) + , ("Last build duration", dur) + ] + +-- --------------------------------------------------------------------------- +-- Static TOC (matches the nine h2 sections above) +-- --------------------------------------------------------------------------- + +pageTOC :: String +pageTOC = "
    \n" ++ concatMap item sections ++ "
\n" + where + item (id_, title) = + "
  • " + ++ title ++ "
  • \n" + sections = + [ ("content", "Content") + , ("pages", "Pages") + , ("distribution", "Word-length distribution") + , ("tags", "Tags") + , ("links", "Links") + , ("epistemic", "Epistemic coverage") + , ("output", "Output") + , ("repository", "Repository") + , ("build", "Build") + ] + +-- --------------------------------------------------------------------------- +-- Rules +-- --------------------------------------------------------------------------- + +statsRules :: Tags -> Rules () +statsRules tags = + create ["build/index.html"] $ do + route idRoute + compile $ do + -- ---------------------------------------------------------------- + -- Load all content items + -- ---------------------------------------------------------------- + essays <- loadAll ("content/essays/*.md" .&&. hasNoVersion) + posts <- loadAll ("content/blog/*.md" .&&. hasNoVersion) + poems <- loadAll ("content/poetry/*.md" .&&. hasNoVersion) + fiction <- loadAll ("content/fiction/*.md" .&&. hasNoVersion) + comps <- loadAll ("content/music/*/index.md" .&&. 
hasNoVersion) + + -- ---------------------------------------------------------------- + -- Word counts + -- ---------------------------------------------------------------- + essayWCs <- mapM loadWC essays + postWCs <- mapM loadWC posts + poemWCs <- mapM loadWC poems + fictionWCs <- mapM loadWC fiction + compWCs <- mapM loadWC comps + + let allWCs = essayWCs ++ postWCs ++ poemWCs ++ fictionWCs ++ compWCs + rows = + [ TypeRow "Essays" (length essays) (sum essayWCs) + , TypeRow "Blog posts" (length posts) (sum postWCs) + , TypeRow "Poems" (length poems) (sum poemWCs) + , TypeRow "Fiction" (length fiction) (sum fictionWCs) + , TypeRow "Compositions" (length comps) (sum compWCs) + ] + + -- ---------------------------------------------------------------- + -- Per-page info (title + URL + word count) + -- ---------------------------------------------------------------- + allItems <- return (essays ++ posts ++ poems ++ fiction ++ comps) + allPIs <- catMaybes <$> mapM loadPI allItems + + -- ---------------------------------------------------------------- + -- Dates (essays + posts only) + -- ---------------------------------------------------------------- + let getDateMeta item = do + meta <- getMetadata (itemIdentifier item) + mRoute <- getRoute (itemIdentifier item) + let d = fromMaybe "" (lookupString "date" meta) + t = fromMaybe "(untitled)" (lookupString "title" meta) + u = maybe "#" (\r -> "/" ++ r) mRoute + return (d, t, u) + essayDates <- mapM getDateMeta essays + postDates <- mapM getDateMeta posts + let allDates = filter (\(d,_,_) -> not (null d)) (essayDates ++ postDates) + sortedDates = sortBy (comparing (\(d,_,_) -> d)) allDates + oldestDate = listToMaybe sortedDates + newestDate = listToMaybe (reverse sortedDates) + + -- ---------------------------------------------------------------- + -- Tags + -- ---------------------------------------------------------------- + let tagFreqs = map (\(t, ids) -> (t, length ids)) (tagsMap tags) + topTags = take 15 (sortBy 
(comparing (Down . snd)) tagFreqs) + uniqueTags = length tagFreqs + + -- ---------------------------------------------------------------- + -- Backlinks: most-linked page + orphan count + -- ---------------------------------------------------------------- + blItem <- load (fromFilePath "data/backlinks.json") :: Compiler (Item String) + let rawBL = itemBody blItem + mBLVal = Aeson.decodeStrict (TE.encodeUtf8 (T.pack rawBL)) :: Maybe Aeson.Value + blPairs = case mBLVal of + Just (Aeson.Object km) -> + [ (T.unpack (AK.toText k), + case v of Aeson.Array arr -> V.length arr; _ -> 0) + | (k, v) <- KM.toList km ] + _ -> [] + blSet = Set.fromList (map fst blPairs) + orphanCount = length + [ p | p <- allPIs + , not (Set.member (normUrl (piUrl p)) blSet) ] + mostLinked = listToMaybe (sortBy (comparing (Down . snd)) blPairs) + mostLinkedInfo = mostLinked >>= \(url, ct) -> + let mTitle = piTitle <$> find (\p -> normUrl (piUrl p) == url) allPIs + in Just (url, ct, fromMaybe url mTitle) + + -- ---------------------------------------------------------------- + -- Epistemic coverage (essays + posts) + -- ---------------------------------------------------------------- + essayMetas <- mapM (getMetadata . itemIdentifier) essays + postMetas <- mapM (getMetadata . itemIdentifier) posts + let epMetas = essayMetas ++ postMetas + epTotal = length epMetas + ep f = length (filter (isJust . 
f) epMetas) + withStatus = ep (lookupString "status") + withConf = ep (lookupString "confidence") + withImp = ep (lookupString "importance") + withEv = ep (lookupString "evidence") + + -- ---------------------------------------------------------------- + -- Output directory stats + -- ---------------------------------------------------------------- + (outputGrouped, totalFiles, totalSize) <- + unsafeCompiler getOutputStats + + -- ---------------------------------------------------------------- + -- Lines of code + git stats + -- ---------------------------------------------------------------- + (hf, hl, cf, cl, jf, jl) <- unsafeCompiler getLocStats + (commits, firstDate) <- unsafeCompiler getGitStats + + -- ---------------------------------------------------------------- + -- Build timestamp + last build duration + -- ---------------------------------------------------------------- + buildTimestamp <- unsafeCompiler $ + formatTime defaultTimeLocale "%Y-%m-%d %H:%M UTC" <$> getCurrentTime + lastBuildDur <- unsafeCompiler $ + (readFile "data/last-build-seconds.txt" >>= \s -> + let secs = fromMaybe 0 (readMaybe (filter (/= '\n') s) :: Maybe Int) + in return (show secs ++ "s")) + `catch` (\(_ :: IOException) -> return "\x2014") + + -- ---------------------------------------------------------------- + -- Assemble page + -- ---------------------------------------------------------------- + let content = concat + [ renderContent rows + , renderPages allPIs oldestDate newestDate + , renderDistribution allWCs + , renderTagsSection topTags uniqueTags + , renderLinks mostLinkedInfo orphanCount (length allPIs) + , renderEpistemic epTotal withStatus withConf withImp withEv + , renderOutput outputGrouped totalFiles totalSize + , renderRepository hf hl cf cl jf jl commits firstDate + , renderBuild buildTimestamp lastBuildDur + ] + plainText = stripHtmlTags content + wc = length (words plainText) + rt = readingTime plainText + ctx = constField "toc" pageTOC + <> constField 
"word-count" (show wc) + <> constField "reading-time" (show rt) + <> constField "title" "Build Telemetry" + <> constField "abstract" "Per-build corpus statistics, tag distribution, \ + \link analysis, epistemic coverage, output metrics, \ + \repository overview, and build timing." + <> constField "build" "true" + <> authorLinksField + <> siteCtx + + makeItem content + >>= loadAndApplyTemplate "templates/essay.html" ctx + >>= loadAndApplyTemplate "templates/default.html" ctx + >>= relativizeUrls diff --git a/content/me/index.md b/content/me/index.md index b1d9a6a..e52fe83 100644 --- a/content/me/index.md +++ b/content/me/index.md @@ -65,6 +65,9 @@ I fell in love with Artificial Intelligence during my first semester at Brown. M I have long said to friends that one does not "do mathematics," but rather "mathematics does you," and this encapsulates how I feel about mathematics better than anything else I can think of at the moment. Mathematics is endlessly creative and has, to me, unlimited intrigue. I vividly remember learning about the [Sylow Theorems](https://en.wikipedia.org/wiki/Sylow_theorems) in my 3rd semester abstract algebra course - my first math upper level - and feeling a sense of absolute wonder and beauty at the proof, yes, but moreso at the grandeur of human genius - that *we* were able to derive this result, and so many subsequent ones, and that I had the power to understand it just the same!^[Which was perhaps *not* evident based off my score for the final exam of that course, but I digress.] +### Computer Systems +I have been interested in the low level since I began to study computation. Getting closer to the hardware was a constant goal as I learned Java in high school. Later, when I took my first real "Systems"^[Brown makes a real distinction about what is "Systems" and what is not "Systems", much more so than I would. But, the effect of having your undergrad take place within a *particularly* semantically concerned department sticks.] 
course, I felt like I was a wizard, learning the ways of some magic. Computers are wonderfully beautiful and powerful machines, and the systems that they are are nothing short of exquisite. Many folks seem to think that those of the system-minded type are some hardcore, late-night hacker type devoid of social life, romance, etc. I think quite the opposite: those of us who love systems love beauty and elegance, and those who opt to write Javascript whilst blatantly refusing to learn about how the systems they use work are the ones with deficits to fill! + ## Music Music is core to who I am. I have played trumpet, my primary instrument, for the majority of my life. I also play piano, horn, trombone, euphonium, tuba, and a bit of drums. More important to me than playing, however, is composition. I feel that my compositions are fundamentally a part of me, an extension of the person that I am. @@ -82,7 +85,7 @@ COMPOSITION [IS]{.smallcaps} PERHAPS MORE THAN ANYTHING ELSE THE PRACTICE [OF MY Music composition is thus **chiefly distinct** from other forms of creative activity for me. Music is the most rewarding for me, invoking the most passion, and it is the medium by which I feel I have the most expression potential and the most capacity to express.^[These are two different things for me. By **expression potential**, I mean the range of sentiments and ideas that music can, in the abstract / in principle, express. This **expression potential** is thus innately provided to me by mere virtue of my partaking in the act of writing music. By **capacity to express**, I am referring to my own personal ability as a composer to successfully express *that which I intend to* rather than the full range of what music itself might be able to encapsulate.] When I hear a composition that I have finished it surmounts me and effortlessly transports me into an immersive state; I am returned to the deep feelings and profound^[Not necessarily in grandeur, but in personal depth.] 
ideas that I tried to capture through my project. On the contrary, music also torments me. I am something of a perfectionist with my compositions and get frustrated when they do not pan out the way I intend. I scrap many projects that I perceive as insufficient, and when ideas are not flowing, I suffer for it. Luckily, since composition is such a core constituent of who I am, I have found a consistency in my undergraduate years, and the ideas have generally flowed without significant pause since 2023. I can only hope for my own sake that this trend continues far into the future. -::: {.score-fragment score-name="Violin Sonata - I (2026)" score-caption="A short excerpt from the first movement of my Flute Sonata, composed in January 2026."} +::: {.score-fragment score-name="Flute Sonata - I (2026)" score-caption="A short excerpt from the first movement of my Flute Sonata, composed in January 2026."} ![](scores/fl.svg) ::: @@ -98,8 +101,22 @@ I am extremely interested in **Foreign Language**, and most fortunate to be a na #### Spanish +During the last five years of my time in public high school, I took 5 courses in Spanish - the first two were *required*, and the latter three were *pseudorequired* - New York State requires only a basic Spanish credit for graduation, but to have any true competitive college application, one really must take a foreign language throughout. Regardless of this fact, my time in public high school taught me essentially **no Spanish**. Rather, I decided that learning Spanish was worth my time in my senior year, after our class ranks had been finalized, yielding a lighter workload as a result. During the day for the rest of the year until I graduated, whenever I had free time or nothing worthwhile to do, which was often, I would read exclusively in Spanish. I made an effort for the first time to immerse myself in Spanish and it worked well. This was the moment that I fell in love with the *process* of learning languages. 
+ #### Chinese +During my first year at Brown I took Mandarin Chinese. It was a great challenge compared to Spanish, and I enjoyed it as such. I regret that I have not had time to keep up the practice since - I intended to continue into third year Chinese and beyond, but unfortunately my schedule became too busy for that intention to become reality. Mandarin is, perhaps curiously, though perhaps not so curiously, the *only* language I have ever studied where reading was not the easiest skill for me to acquire. (In fact, for Mandarin, it was the second-hardest, only after writing.) #### Danish +I entered 2024 having absolutely no intention of learning Danish, and left it conversational. This happened because I studied abroad in Copenhagen for the second half of the year and fell in love with the city, the country, the culture, and, yes, the language! Danish, along with German, is one of my focuses as of now (2026). #### German +I decided to make a push to learn German, which I had long intended but never taken action toward, in late 2025. The diversity of philosophers whose primary language was German was a great motivation for this. My own aspirations of spending some extended time in Germany were also an inspiration, as were my fond memories of Berlin from my visit in 2024. I have quickly fallen in love with German this year, and intend to make my German better than my Spanish. I anticipate relative fluency by the end of the year 2026, and only improvement from there. + +#### Linguistic Bucket List +I have long had some notion of a "Linguistic Bucket List" - a collection of languages I intend to learn, whether for literacy or for true fluency over the course of my life. A subset of that follows: + +- **French.** French is 100% the next language I will learn after my German is at a level such that study becomes more passive. +- **Russian.** The literature is exceptional, and thus I feel obliged.
+- **Latin & Greek.** I am interested in *reading* the ancient texts in these languages, and perhaps pursuing Greek further, as it is part of my heritage. +- **Sanskrit & Pali.** I am *also* interested in *reading* the ancient texts of these languages. + diff --git a/content/memento-mori/index.md b/content/memento-mori/index.md index 009a9f7..9aecfa6 100644 --- a/content/memento-mori/index.md +++ b/content/memento-mori/index.md @@ -24,6 +24,14 @@ In sequent toil all forwards do contend.

    --- +## Who is a website for? + +::: dropcap +It is a trick question. A website (at least, of the personal website type) is created and, in particular, shared with the world in the hopes that someone else will derive utility from it, whether that utility is diversion, knowledge, or something else. If I had something to say to myself, creating a website for it would be overkill, wouldn't it? +::: + +Yes and no, if you ask me. I believe in the power of transparency and honesty. If I am going to endeavor to accomplish something, making my attempt in the public eye is an excellent motivator and pushes me to be consistent. Similarly, if I have something that I feel is important to me, such as the content of this page, then I have yet to see a reason why I should not incorporate it into the website. Yes, the website is primarily a place for me to share works that I expressly create with the intent of sharing, but I have yet to see *any* detriment to additionally sharing works that I expressly create for my own consumption. I lose nothing, and have the possibility of gaining *something*, so this seems to me to be an instantiation of [Pascal's Wager](https://en.wikipedia.org/wiki/Pascal%27s_wager). 
+ ## Applied Vanitas ![Dante and Beatrice gaze upon the highest heaven.](/images/canto31.jpg "Gustav Doré - Paradiso, Canto 31") diff --git a/data/build-start.txt b/data/build-start.txt new file mode 100644 index 0000000..3fa679a --- /dev/null +++ b/data/build-start.txt @@ -0,0 +1 @@ +1773865704 diff --git a/data/last-build-seconds.txt b/data/last-build-seconds.txt new file mode 100644 index 0000000..d00491f --- /dev/null +++ b/data/last-build-seconds.txt @@ -0,0 +1 @@ +1 diff --git a/levineuwirth.cabal b/levineuwirth.cabal index 00aaa70..714da9c 100644 --- a/levineuwirth.cabal +++ b/levineuwirth.cabal @@ -19,6 +19,7 @@ executable site Backlinks Compilers Contexts + Stats Stability Metadata Tags diff --git a/spec.md b/spec.md index 7e69b84..7563721 100644 --- a/spec.md +++ b/spec.md @@ -355,7 +355,7 @@ levineuwirth.org/ - [x] Templates: default, essay, blog-post, index - [x] Dark/light toggle with `localStorage` + `prefers-color-scheme` - [x] Basic Pandoc pipeline (Markdown → HTML, smart typography) -- [ ] Deploy to DreamHost via rsync +- [x] Deploy to DreamHost via rsync — deployed to Hetzner VPS instead ### Phase 2: Content Features ✓ - [x] Pandoc filters: sidenotes, dropcaps, smallcaps, wikilinks, typography, link classification, code, math @@ -392,8 +392,8 @@ levineuwirth.org/ - [ ] Content migration — migrate existing essays, poems, fiction, and music landing pages from prior formats into `content/` ### Phase 5: Infrastructure & Advanced -- [ ] **Arch Linux VPS + nginx + certbot + DNS migration** — Provision Hetzner VPS, install nginx (config in §III), obtain TLS cert via certbot, migrate DNS from DreamHost. Update `make deploy` target. Serve `_site/` as static files; no server-side logic needed. -- [ ] **Semantic embedding pipeline** — Generate per-page embeddings (OpenAI `text-embedding-3-small` or local model). Store as `data/embeddings.json` (identifier → vector). At build time, compute nearest neighbors and write `data/similar-links.json`. 
Serve as static JSON; JS loads it client-side to populate a "Similar" section in the page footer. +- [x] **Arch Linux VPS + nginx + certbot + DNS migration** — Hetzner VPS provisioned, Arch Linux installed, nginx configured (config in §III), TLS cert via certbot, DNS migrated from DreamHost. `make deploy` pushes to GitHub and rsyncs to VPS. +- [ ] **Semantic embedding pipeline** — Superseded by Phase 6 "Embedding-powered similar links" (local model, no API cost). - [x] **Backlinks with context** — Two-pass build-time system (`build/Backlinks.hs`). Pass 1: `version "links"` compiles each page lightly (wikilinks preprocessed, links + context extracted, serialised as JSON). Pass 2: `create ["data/backlinks.json"]` inverts the map. `backlinksField` in `essayCtx` / `postCtx` loads the JSON and renders `
    `-collapsible per-entry lists. `popups.js` excludes `.backlink-source` links from the preview popup. Context paragraph uses `runPure . writeHtml5String` on the surrounding `Para` block. See Implementation Notes. - [ ] **Link archiving** — For all external links in `data/bibliography.bib` and in page bodies, check availability and save snapshots (Wayback Machine `save` API or local archivebox instance). Store archive URLs in `data/link-archive.json`; `Filters.Links` injects `data-archive-url` attributes; `popups.js` falls back to the archive if the live URL returns 404. - [ ] **Self-hosted git (Forgejo)** — Run Forgejo on the VPS. Mirror the build repo. Link from the colophon. Not essential; can remain on GitHub indefinitely. @@ -408,10 +408,10 @@ levineuwirth.org/ - [x] **RSS/feed improvements** — `/feed.xml` now includes compositions (`content/music/*/index.md`) alongside essays, posts, fiction, poetry. New `/music/feed.xml` (compositions only, `musicFeedConfig`). Compositions already had `"content"` snapshots saved by the landing-page rule; no compiler changes needed. - [ ] **Pagefind improvements** — Currently a basic full-text search. Consider: sub-result excerpts, portal-scoped search filters, weighting by `importance` frontmatter field. - [ ] **Audio essays / podcast feed** — Record readings of select essays. Embed a native `