Build telemetry
This commit is contained in:
parent
885056fe28
commit
26c067147a
4
Makefile
4
Makefile
|
|
@ -8,9 +8,13 @@ export
|
|||
build:
|
||||
@git add content/
|
||||
@git diff --cached --quiet || git commit -m "auto: $$(date -u +%Y-%m-%dT%H:%M:%SZ)"
|
||||
@date +%s > data/build-start.txt
|
||||
cabal run site -- build
|
||||
pagefind --site _site
|
||||
> IGNORE.txt
|
||||
@BUILD_END=$$(date +%s); \
|
||||
BUILD_START=$$(cat data/build-start.txt); \
|
||||
echo $$((BUILD_END - BUILD_START)) > data/last-build-seconds.txt
|
||||
|
||||
deploy: build
|
||||
@if [ -z "$(GITHUB_TOKEN)" ] || [ -z "$(GITHUB_REPO)" ]; then \
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ import Commonplace (commonplaceCtx)
|
|||
import Contexts (siteCtx, essayCtx, postCtx, pageCtx, poetryCtx, fictionCtx, compositionCtx)
|
||||
import Tags (buildAllTags, applyTagRules)
|
||||
import Pagination (blogPaginateRules)
|
||||
import Stats (statsRules)
|
||||
|
||||
feedConfig :: FeedConfiguration
|
||||
feedConfig = FeedConfiguration
|
||||
|
|
@ -54,6 +55,7 @@ rules = do
|
|||
-- ---------------------------------------------------------------------------
|
||||
tags <- buildAllTags
|
||||
applyTagRules tags siteCtx
|
||||
statsRules tags
|
||||
|
||||
-- Per-page JS files — authored alongside content in content/**/*.js
|
||||
match "content/**/*.js" $ do
|
||||
|
|
|
|||
|
|
@ -0,0 +1,532 @@
|
|||
{-# LANGUAGE GHC2021 #-}
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
-- | Build telemetry page (/build/): corpus statistics, word-length
|
||||
-- distribution, tag frequencies, link analysis, epistemic coverage,
|
||||
-- output metrics, repository overview, and build timing.
|
||||
-- Rendered as a full essay (3-column layout, TOC, metadata block).
|
||||
module Stats (statsRules) where
|
||||
|
||||
import Control.Exception (IOException, catch)
|
||||
import Control.Monad (forM)
|
||||
import Data.List (find, isSuffixOf, sortBy)
|
||||
import qualified Data.Map.Strict as Map
|
||||
import Data.Maybe (catMaybes, fromMaybe, isJust, listToMaybe)
|
||||
import Data.Ord (comparing, Down (..))
|
||||
import qualified Data.Set as Set
|
||||
import Data.Time (getCurrentTime, formatTime, defaultTimeLocale)
|
||||
import System.Directory (doesDirectoryExist, getFileSize, listDirectory)
|
||||
import System.Exit (ExitCode (..))
|
||||
import System.FilePath (takeExtension, (</>))
|
||||
import System.Process (readProcessWithExitCode)
|
||||
import Text.Read (readMaybe)
|
||||
import qualified Data.Aeson as Aeson
|
||||
import qualified Data.Aeson.Key as AK
|
||||
import qualified Data.Aeson.KeyMap as KM
|
||||
import qualified Data.Vector as V
|
||||
import qualified Data.Text as T
|
||||
import qualified Data.Text.Encoding as TE
|
||||
import Hakyll
|
||||
import Authors (authorLinksField)
|
||||
import Contexts (siteCtx)
|
||||
import Utils (readingTime)
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- Types
|
||||
-- ---------------------------------------------------------------------------
|
||||
|
||||
-- | One row of the "Content" table: a content type with its item count
-- and aggregate word count.
data TypeRow = TypeRow
  { trLabel :: String -- ^ human-readable type name, e.g. "Essays"
  , trCount :: Int    -- ^ number of items of this type
  , trWords :: Int    -- ^ total words across those items
  }

-- | Per-page summary used by the longest/shortest lists and the link
-- analysis section.
data PageInfo = PageInfo
  { piTitle :: String -- ^ page title ("(untitled)" when metadata has none)
  , piUrl :: String   -- ^ site-absolute URL ("/" prepended to the route)
  , piWC :: Int       -- ^ word count read from the "word-count" snapshot
  }
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- Hakyll helpers
|
||||
-- ---------------------------------------------------------------------------
|
||||
|
||||
-- | Read the word count recorded in an item's "word-count" snapshot,
-- defaulting to 0 when the snapshot body does not parse as an 'Int'.
loadWC :: Item String -> Compiler Int
loadWC item =
  fromMaybe 0 . readMaybe . itemBody
    <$> loadSnapshot (itemIdentifier item) "word-count"
|
||||
|
||||
-- | Build a 'PageInfo' for an item: title from metadata, routed URL,
-- and snapshot word count.  Yields 'Nothing' when the item has no route.
loadPI :: Item String -> Compiler (Maybe PageInfo)
loadPI item = do
  let ident = itemIdentifier item
  meta <- getMetadata ident
  mRoute <- getRoute ident
  wc <- loadWC item
  let toInfo r = PageInfo
        { piTitle = fromMaybe "(untitled)" (lookupString "title" meta)
        , piUrl = "/" ++ r
        , piWC = wc
        }
  return (toInfo <$> mRoute)
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- Formatting helpers
|
||||
-- ---------------------------------------------------------------------------
|
||||
|
||||
-- | Format an 'Int' with thousands separators,
-- e.g. @1234567@ → @\"1,234,567\"@.
--
-- Negative values keep their sign and are grouped the same way
-- (@-1234@ → @\"-1,234\"@).  The original fell through to 'show' for any
-- negative number (since every negative is @< 1000@), so negatives of
-- magnitude >= 1000 lost their separators.
-- (Edge note: @commaInt minBound@ still overflows on 'negate'.)
commaInt :: Int -> String
commaInt n
  | n < 0 = '-' : commaInt (negate n)
  | n < 1000 = show n
  | otherwise = commaInt (n `div` 1000) ++ "," ++ pad3 (n `mod` 1000)
  where
    -- Zero-pad a group in [0, 999] to exactly three digits.
    pad3 x
      | x < 10 = "00" ++ show x
      | x < 100 = "0" ++ show x
      | otherwise = show x
|
||||
|
||||
-- | Human-readable byte size: exact below 1 KiB, otherwise one decimal
-- place in KB or MB (truncated, not rounded).
formatBytes :: Integer -> String
formatBytes bytes
  | bytes < kib = show bytes ++ " B"
  | bytes < kib * kib = oneDecimal (bytes * 10 `div` kib) ++ " KB"
  | otherwise = oneDecimal (bytes * 10 `div` (kib * kib)) ++ " MB"
  where
    kib = 1024
    -- Render a value expressed in tenths as "whole.tenth".
    oneDecimal tenths = show (tenths `div` 10) ++ "." ++ show (tenths `mod` 10)
|
||||
|
||||
-- | Estimated reading time at 200 words per minute, e.g. "12 min" or
-- "1h 30m" once past the hour mark.
rtStr :: Int -> String
rtStr totalWords =
  if minutes < 60
    then show minutes ++ " min"
    else show (minutes `div` 60) ++ "h " ++ show (minutes `mod` 60) ++ "m"
  where
    minutes = totalWords `div` 200
|
||||
|
||||
-- | Integer percentage of @n@ out of @total@ (truncating division);
-- an em dash when @total@ is zero.
pctStr :: Int -> Int -> String
pctStr n total = case total of
  0 -> "\x2014"
  _ -> show (n * 100 `div` total) ++ "%"
|
||||
|
||||
-- | Strip HTML tags for plain-text word counting.  Everything from a
-- @<@ up to and including the next @>@ is dropped; an unterminated tag
-- swallows the rest of the string.
stripHtmlTags :: String -> String
stripHtmlTags s = case break (== '<') s of
  (plain, []) -> plain
  (plain, _ : tag) -> plain ++ stripHtmlTags (drop 1 (dropWhile (/= '>') tag))
|
||||
|
||||
-- | Normalise a page URL for backlink map lookup (strip trailing .html).
normUrl :: String -> String
normUrl url =
  if ".html" `isSuffixOf` url
    then take (length url - length ".html") url
    else url
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- IO: output directory walk
|
||||
-- ---------------------------------------------------------------------------
|
||||
|
||||
-- | Recursively list every file under @dir@ paired with its size in
-- bytes.  Unreadable directories are treated as empty and unreadable
-- files as size 0, so a partially built @_site@ never aborts the page.
walkDir :: FilePath -> IO [(FilePath, Integer)]
walkDir dir = do
  entries <- listDirectory dir `catch` (\(_ :: IOException) -> return [])
  fmap concat $ forM entries $ \e -> do
    let path = dir </> e
    isDir <- doesDirectoryExist path
    if isDir
      then walkDir path
      else do
        -- Size failures (e.g. file deleted mid-walk) degrade to 0.
        sz <- getFileSize path `catch` (\(_ :: IOException) -> return 0)
        return [(path, sz)]
|
||||
|
||||
-- | Bucket a file path by extension for the output table: raster image
-- formats collapse to "image", a known list of extensions is kept
-- verbatim, and everything else becomes "other".  (Matching is
-- case-sensitive, as 'takeExtension' preserves case.)
displayExt :: FilePath -> String
displayExt path
  | ext `elem` imageExts = "image"
  | ext `elem` knownExts = ext
  | otherwise = "other"
  where
    ext = takeExtension path
    imageExts = [".png", ".jpg", ".jpeg", ".webp"]
    knownExts =
      [ ".html", ".css", ".js", ".woff2", ".svg"
      , ".mp3", ".pdf", ".json", ".xml", ".ico"
      ]
|
||||
|
||||
-- | Walk @_site@ and aggregate (file count, total bytes) per display
-- extension, together with the overall file count and total size.
getOutputStats :: IO (Map.Map String (Int, Integer), Int, Integer)
getOutputStats = do
  files <- walkDir "_site"
  let combine (c1, s1) (c2, s2) = (c1 + c2, s1 + s2)
      grouped =
        foldr
          (\(path, sz) -> Map.insertWith combine (displayExt path) (1, sz))
          Map.empty
          files
      totalBytes = sum (map snd files)
  return (grouped, length files, totalBytes)
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- IO: lines of code
|
||||
-- ---------------------------------------------------------------------------
|
||||
|
||||
-- | Count the files in @dir@ with extension @ext@ (excluding names
-- matching @skipPred@) and the total number of lines across them.
-- Returns @(fileCount, lineCount)@.  I/O errors degrade gracefully: an
-- unreadable directory counts as empty, an unreadable file as 0 lines.
countLinesDir :: FilePath -> String -> (FilePath -> Bool) -> IO (Int, Int)
countLinesDir dir ext skipPred = do
  entries <- listDirectory dir `catch` (\(_ :: IOException) -> return [])
  let files = filter (\e -> takeExtension e == ext && not (skipPred e)) entries
  ls <- fmap sum $ forM files $ \e ->
    -- Force the line count *inside* the catch: lazy 'readFile' defers
    -- read errors until the contents are demanded, so the original could
    -- throw after its handler had already returned (and held the handle
    -- open until the thunk was forced).
    ( do content <- readFile (dir </> e)
         let !n = length (lines content)
         return n
    ) `catch` (\(_ :: IOException) -> return 0)
  return (length files, ls)
|
||||
|
||||
-- | Source-tree line counts as
-- @(hsFiles, hsLines, cssFiles, cssLines, jsFiles, jsLines)@.
-- Minified JavaScript (*.min.js) is excluded from the JS totals.
getLocStats :: IO (Int, Int, Int, Int, Int, Int)
getLocStats = do
  (haskellFiles, haskellLines) <- countLinesDir "build" ".hs" (const False)
  (cssFiles, cssLines) <- countLinesDir "static/css" ".css" (const False)
  (jsFiles, jsLines) <- countLinesDir "static/js" ".js" (".min.js" `isSuffixOf`)
  return (haskellFiles, haskellLines, cssFiles, cssLines, jsFiles, jsLines)
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- IO: git stats
|
||||
-- ---------------------------------------------------------------------------
|
||||
|
||||
-- | Run @git@ with the given arguments, returning its stdout on success
-- and the empty string on any non-zero exit.
gitRun :: [String] -> IO String
gitRun args = do
  (code, stdoutText, _stderr) <- readProcessWithExitCode "git" args ""
  case code of
    ExitSuccess -> return stdoutText
    _ -> return ""
|
||||
|
||||
-- | Total commit count and the date (short format) of the first commit.
-- Falls back to 0 and an em dash when git output is empty/unavailable.
getGitStats :: IO (Int, String)
getGitStats = do
  countOut <- gitRun ["rev-list", "--count", "HEAD"]
  firstOut <- gitRun ["log", "--format=%ad", "--date=short", "--reverse"]
  let commitCount = fromMaybe 0 (readMaybe (filter (/= '\n') countOut) :: Maybe Int)
      firstDate = case lines firstOut of
        (d : _) -> d
        [] -> "\x2014"
  return (commitCount, firstDate)
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- HTML rendering: section helpers
|
||||
-- ---------------------------------------------------------------------------
|
||||
|
||||
-- | An @<h2>@ heading with the given id, followed by the section body.
section :: String -> String -> String -> String
section anchor heading body =
  "<h2 id=\"" ++ anchor ++ "\">" ++ heading ++ "</h2>\n" ++ body
|
||||
|
||||
-- | Render an HTML table: a header row, the body rows, and an optional
-- footer row (used for totals).
table :: [String] -> [[String]] -> Maybe [String] -> String
table headers rows mFoot =
  "<table class=\"build-table\">"
    ++ "<thead><tr>" ++ cells "th" headers ++ "</tr></thead>"
    ++ "<tbody>" ++ concatMap bodyRow rows ++ "</tbody>"
    ++ footer
    ++ "</table>"
  where
    -- Wrap each value in the given cell tag.
    cells tag = concatMap (\c -> "<" ++ tag ++ ">" ++ c ++ "</" ++ tag ++ ">")
    bodyRow r = "<tr>" ++ cells "td" r ++ "</tr>"
    footer = case mFoot of
      Nothing -> ""
      Just fs ->
        "<tfoot><tr class=\"build-total\">" ++ cells "td" fs ++ "</tr></tfoot>"
|
||||
|
||||
-- | Render key/value pairs as an HTML definition list.
dl :: [(String, String)] -> String
dl pairs = "<dl class=\"build-meta\">" ++ concatMap entry pairs ++ "</dl>"
  where
    entry (term, def) = "<dt>" ++ term ++ "</dt><dd>" ++ def ++ "</dd>"
|
||||
|
||||
-- | A plain anchor element.
link :: String -> String -> String
link url label = concat ["<a href=\"", url, "\">", label, "</a>"]
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- HTML rendering: sections
|
||||
-- ---------------------------------------------------------------------------
|
||||
|
||||
-- | "Content" section: one table row per content type (count, words,
-- reading time) with a totals footer summing all types.
renderContent :: [TypeRow] -> String
renderContent rows =
  section "content" "Content" $
    table
      ["Type", "Count", "Words", "Reading time"]
      (map row rows)
      (Just ["Total", commaInt totalCount, commaInt totalWords, rtStr totalWords])
  where
    totalCount = sum (map trCount rows)
    totalWords = sum (map trWords rows)
    row r = [ trLabel r
            , commaInt (trCount r)
            , commaInt (trWords r)
            , rtStr (trWords r)
            ]
|
||||
|
||||
-- | "Pages" section: totals, average length, oldest/newest dated content
-- (each as a (date, title, url) triple), and the three longest and three
-- shortest pages.
renderPages :: [PageInfo] -> Maybe (String,String,String) -> Maybe (String,String,String) -> String
renderPages allPIs mOldest mNewest =
  section "pages" "Pages" $ concat
    [ dl $
        [ ("Total pages", commaInt (length allPIs))
        , ("Average length", commaInt avgWC ++ " words")
        ] ++
        maybe [] (\(d,t,u) -> [("Oldest content", d ++ " \x2014 " ++ link u t)]) mOldest ++
        maybe [] (\(d,t,u) -> [("Newest content", d ++ " \x2014 " ++ link u t)]) mNewest
    , "<p><strong>Longest</strong></p>"
    , pageList (take 3 (sortBy (comparing (Down . piWC)) hasSomeWC))
    , "<p><strong>Shortest</strong></p>"
    , pageList (take 3 (sortBy (comparing piWC) hasSomeWC))
    ]
  where
    -- Zero-word pages (e.g. missing snapshot) would skew the average and
    -- always win "shortest", so they are excluded from the rankings.
    hasSomeWC = filter (\p -> piWC p > 0) allPIs
    avgWC = if null hasSomeWC then 0
            else sum (map piWC hasSomeWC) `div` length hasSomeWC
    pageList ps = "<ol class=\"build-page-list\">"
      ++ concatMap (\p -> "<li>" ++ link (piUrl p) (piTitle p)
                        ++ " \x2014 " ++ commaInt (piWC p) ++ " words</li>") ps
      ++ "</ol>"
|
||||
|
||||
-- | "Word-length distribution" section: a five-bucket horizontal bar
-- chart of per-page word counts.  Bar widths are scaled relative to the
-- fullest bucket.
renderDistribution :: [Int] -> String
renderDistribution wcs =
  section "distribution" "Word-length distribution" $
    "<div class=\"build-bars\">" ++ concatMap bar buckets ++ "</div>"
  where
    -- Bucket index for a word count (boundaries: 500, 1000, 2000, 5000).
    bucketOf w
      | w < 500 = 0 | w < 1000 = 1 | w < 2000 = 2 | w < 5000 = 3 | otherwise = 4
    labels = ["< 500", "500 \x2013 1k", "1k \x2013 2k", "2k \x2013 5k", "\x2265 5k"]
    -- Seed every bucket with 0 so empty buckets still render a row.
    counts = foldr (\w acc -> Map.insertWith (+) (bucketOf w) 1 acc)
               (Map.fromList [(i,0) | i <- [0..4]]) wcs
    buckets = [(labels !! i, fromMaybe 0 (Map.lookup i counts)) | i <- [0..4]]
    -- max 1 guards the width division when all buckets are empty.
    maxCount = max 1 (maximum (map snd buckets))
    bar (lbl, n) =
      let pct = n * 100 `div` maxCount
      in concat
           [ "<div class=\"build-bar-row\">"
           , "<span class=\"build-bar-label\">", lbl, "</span>"
           , "<span class=\"build-bar-wrap\"><span class=\"build-bar\" style=\"width:"
           , show pct, "%\"></span></span>"
           , "<span class=\"build-bar-count\">", show n, "</span>"
           , "</div>"
           ]
|
||||
|
||||
-- | "Tags" section: the unique-tag count followed by a table of the
-- most-used tags, each linked to its tag page at @/<tag>/@.
renderTagsSection :: [(String, Int)] -> Int -> String
renderTagsSection topTags uniqueCount =
  section "tags" "Tags" $
    dl [("Unique tags", commaInt uniqueCount)]
      ++ table ["Tag", "Items"] (map tagRow topTags) Nothing
  where
    tagRow (tag, count) = [link ("/" ++ tag ++ "/") tag, show count]
|
||||
|
||||
-- | "Links" section: the most-linked page (as (url, inbound count,
-- title), or an em dash when none) and the orphan-page count out of
-- @total@ pages.
renderLinks :: Maybe (String, Int, String) -> Int -> Int -> String
renderLinks mMostLinked orphanCount total =
  section "links" "Links" $
    dl $
      (case mMostLinked of
         Nothing -> [("Most-linked page", "\x2014")]
         Just (u, n, t) -> [("Most-linked page",
           link u t ++ " (" ++ show n ++ " inbound links)")]) ++
      [ ("Orphan pages", commaInt orphanCount
          ++ " of " ++ commaInt total
          ++ " (" ++ pctStr orphanCount total ++ ")") ]
|
||||
|
||||
-- | "Epistemic coverage" section: for each of the four metadata fields
-- (status, confidence, importance, evidence), how many of @total@ items
-- set it, as a count and percentage.
renderEpistemic :: Int -> Int -> Int -> Int -> Int -> String
renderEpistemic total ws wc wi we =
  section "epistemic" "Epistemic coverage" $
    table
      ["Field", "Set", "Coverage"]
      [ row "Status" ws
      , row "Confidence" wc
      , row "Importance" wi
      , row "Evidence" we
      ]
      Nothing
  where
    row label n = [label, show n ++ " / " ++ show total, pctStr n total]
|
||||
|
||||
-- | "Output" section: per-extension file counts and sizes of the
-- generated site, sorted by total size descending, with a totals footer.
renderOutput :: Map.Map String (Int, Integer) -> Int -> Integer -> String
renderOutput grouped totalFiles totalSize =
  section "output" "Output" $
    table
      ["Type", "Files", "Size"]
      -- snd . snd picks the byte total out of (ext, (count, bytes)).
      (map row (sortBy (comparing (Down . snd . snd)) (Map.toList grouped)))
      (Just ["Total", commaInt totalFiles, formatBytes totalSize])
  where
    row (ext, (n, sz)) = [ext, commaInt n, formatBytes sz]
|
||||
|
||||
-- | "Repository" section: lines-of-code figures per language (file
-- count + line count pairs) plus git commit count and first-commit date.
renderRepository :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> String -> String
renderRepository hf hl cf cl jf jl commits firstDate =
  section "repository" "Repository" $
    dl
      [ ("Haskell", commaInt hl ++ " lines across " ++ show hf ++ " files")
      , ("CSS", commaInt cl ++ " lines across " ++ show cf ++ " files")
      , ("JavaScript", commaInt jl ++ " lines across " ++ show jf ++ " files (excl. minified)")
      , ("Total git commits", commaInt commits)
      , ("Repository started", firstDate)
      ]
|
||||
|
||||
-- | "Build" section: generation timestamp and the previous build's
-- duration (both pre-formatted strings).
renderBuild :: String -> String -> String
renderBuild generatedAt duration =
  section "build" "Build" $
    dl
      [ ("Generated", generatedAt)
      , ("Last build duration", duration)
      ]
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- Static TOC (matches the nine h2 sections above)
|
||||
-- ---------------------------------------------------------------------------
|
||||
|
||||
-- | Static table of contents matching the nine h2 sections rendered
-- above (ids must stay in sync with the 'section' anchors).
pageTOC :: String
pageTOC = "<ol>\n" ++ concatMap entry tocEntries ++ "</ol>\n"
  where
    entry (anchor, title) =
      "<li><a href=\"#" ++ anchor ++ "\" data-target=\"" ++ anchor ++ "\">"
        ++ title ++ "</a></li>\n"
    tocEntries =
      [ ("content", "Content")
      , ("pages", "Pages")
      , ("distribution", "Word-length distribution")
      , ("tags", "Tags")
      , ("links", "Links")
      , ("epistemic", "Epistemic coverage")
      , ("output", "Output")
      , ("repository", "Repository")
      , ("build", "Build")
      ]
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- Rules
|
||||
-- ---------------------------------------------------------------------------
|
||||
|
||||
-- | Create the @/build/@ telemetry page as a single Hakyll compiler.
-- Corpus figures come from "word-count" snapshots; filesystem, LOC, git,
-- and timing figures are read via 'unsafeCompiler'.
--
-- NOTE(review): 'unsafeCompiler' results are invisible to Hakyll's
-- dependency tracking, so those figures only refresh when this page is
-- rebuilt for another reason — presumably acceptable here; confirm.
statsRules :: Tags -> Rules ()
statsRules tags =
  create ["build/index.html"] $ do
    route idRoute
    compile $ do
      -- ----------------------------------------------------------------
      -- Load all content items
      -- ----------------------------------------------------------------
      essays <- loadAll ("content/essays/*.md" .&&. hasNoVersion)
      posts <- loadAll ("content/blog/*.md" .&&. hasNoVersion)
      poems <- loadAll ("content/poetry/*.md" .&&. hasNoVersion)
      fiction <- loadAll ("content/fiction/*.md" .&&. hasNoVersion)
      comps <- loadAll ("content/music/*/index.md" .&&. hasNoVersion)

      -- ----------------------------------------------------------------
      -- Word counts
      -- ----------------------------------------------------------------
      essayWCs <- mapM loadWC essays
      postWCs <- mapM loadWC posts
      poemWCs <- mapM loadWC poems
      fictionWCs <- mapM loadWC fiction
      compWCs <- mapM loadWC comps

      let allWCs = essayWCs ++ postWCs ++ poemWCs ++ fictionWCs ++ compWCs
          rows =
            [ TypeRow "Essays" (length essays) (sum essayWCs)
            , TypeRow "Blog posts" (length posts) (sum postWCs)
            , TypeRow "Poems" (length poems) (sum poemWCs)
            , TypeRow "Fiction" (length fiction) (sum fictionWCs)
            , TypeRow "Compositions" (length comps) (sum compWCs)
            ]

      -- ----------------------------------------------------------------
      -- Per-page info (title + URL + word count)
      -- ----------------------------------------------------------------
      -- NOTE(review): `x <- return e` is just `let x = e` in disguise.
      allItems <- return (essays ++ posts ++ poems ++ fiction ++ comps)
      allPIs <- catMaybes <$> mapM loadPI allItems

      -- ----------------------------------------------------------------
      -- Dates (essays + posts only)
      -- ----------------------------------------------------------------
      let getDateMeta item = do
            meta <- getMetadata (itemIdentifier item)
            mRoute <- getRoute (itemIdentifier item)
            let d = fromMaybe "" (lookupString "date" meta)
                t = fromMaybe "(untitled)" (lookupString "title" meta)
                u = maybe "#" (\r -> "/" ++ r) mRoute
            return (d, t, u)
      essayDates <- mapM getDateMeta essays
      postDates <- mapM getDateMeta posts
      -- Lexicographic sort on the date string; works for ISO-style dates.
      let allDates = filter (\(d,_,_) -> not (null d)) (essayDates ++ postDates)
          sortedDates = sortBy (comparing (\(d,_,_) -> d)) allDates
          oldestDate = listToMaybe sortedDates
          newestDate = listToMaybe (reverse sortedDates)

      -- ----------------------------------------------------------------
      -- Tags
      -- ----------------------------------------------------------------
      let tagFreqs = map (\(t, ids) -> (t, length ids)) (tagsMap tags)
          topTags = take 15 (sortBy (comparing (Down . snd)) tagFreqs)
          uniqueTags = length tagFreqs

      -- ----------------------------------------------------------------
      -- Backlinks: most-linked page + orphan count
      -- ----------------------------------------------------------------
      -- The backlinks JSON is expected to be an object mapping page URL
      -- to an array of inbound links; anything else decodes to no pairs.
      blItem <- load (fromFilePath "data/backlinks.json") :: Compiler (Item String)
      let rawBL = itemBody blItem
          mBLVal = Aeson.decodeStrict (TE.encodeUtf8 (T.pack rawBL)) :: Maybe Aeson.Value
          blPairs = case mBLVal of
            Just (Aeson.Object km) ->
              [ (T.unpack (AK.toText k),
                 case v of Aeson.Array arr -> V.length arr; _ -> 0)
              | (k, v) <- KM.toList km ]
            _ -> []
          blSet = Set.fromList (map fst blPairs)
          -- A page is an orphan when its normalised URL never appears as
          -- a key in the backlink map.
          orphanCount = length
            [ p | p <- allPIs
                , not (Set.member (normUrl (piUrl p)) blSet) ]
          mostLinked = listToMaybe (sortBy (comparing (Down . snd)) blPairs)
          -- Fall back to the raw URL when no page title matches.
          mostLinkedInfo = mostLinked >>= \(url, ct) ->
            let mTitle = piTitle <$> find (\p -> normUrl (piUrl p) == url) allPIs
            in Just (url, ct, fromMaybe url mTitle)

      -- ----------------------------------------------------------------
      -- Epistemic coverage (essays + posts)
      -- ----------------------------------------------------------------
      essayMetas <- mapM (getMetadata . itemIdentifier) essays
      postMetas <- mapM (getMetadata . itemIdentifier) posts
      let epMetas = essayMetas ++ postMetas
          epTotal = length epMetas
          ep f = length (filter (isJust . f) epMetas)
          withStatus = ep (lookupString "status")
          withConf = ep (lookupString "confidence")
          withImp = ep (lookupString "importance")
          withEv = ep (lookupString "evidence")

      -- ----------------------------------------------------------------
      -- Output directory stats
      -- ----------------------------------------------------------------
      (outputGrouped, totalFiles, totalSize) <-
        unsafeCompiler getOutputStats

      -- ----------------------------------------------------------------
      -- Lines of code + git stats
      -- ----------------------------------------------------------------
      (hf, hl, cf, cl, jf, jl) <- unsafeCompiler getLocStats
      (commits, firstDate) <- unsafeCompiler getGitStats

      -- ----------------------------------------------------------------
      -- Build timestamp + last build duration
      -- ----------------------------------------------------------------
      buildTimestamp <- unsafeCompiler $
        formatTime defaultTimeLocale "%Y-%m-%d %H:%M UTC" <$> getCurrentTime
      -- data/last-build-seconds.txt is written by the Makefile; a
      -- missing/unreadable file renders as an em dash.
      lastBuildDur <- unsafeCompiler $
        (readFile "data/last-build-seconds.txt" >>= \s ->
           let secs = fromMaybe 0 (readMaybe (filter (/= '\n') s) :: Maybe Int)
           in return (show secs ++ "s"))
          `catch` (\(_ :: IOException) -> return "\x2014")

      -- ----------------------------------------------------------------
      -- Assemble page
      -- ----------------------------------------------------------------
      let content = concat
            [ renderContent rows
            , renderPages allPIs oldestDate newestDate
            , renderDistribution allWCs
            , renderTagsSection topTags uniqueTags
            , renderLinks mostLinkedInfo orphanCount (length allPIs)
            , renderEpistemic epTotal withStatus withConf withImp withEv
            , renderOutput outputGrouped totalFiles totalSize
            , renderRepository hf hl cf cl jf jl commits firstDate
            , renderBuild buildTimestamp lastBuildDur
            ]
          -- Word count / reading time for the page's own metadata block
          -- are computed from the tag-stripped rendering.
          plainText = stripHtmlTags content
          wc = length (words plainText)
          rt = readingTime plainText
          ctx = constField "toc" pageTOC
            <> constField "word-count" (show wc)
            <> constField "reading-time" (show rt)
            <> constField "title" "Build Telemetry"
            <> constField "abstract" "Per-build corpus statistics, tag distribution, \
               \link analysis, epistemic coverage, output metrics, \
               \repository overview, and build timing."
            <> constField "build" "true"
            <> authorLinksField
            <> siteCtx

      makeItem content
        >>= loadAndApplyTemplate "templates/essay.html" ctx
        >>= loadAndApplyTemplate "templates/default.html" ctx
        >>= relativizeUrls
|
||||
|
|
@ -65,6 +65,9 @@ I fell in love with Artificial Intelligence during my first semester at Brown. M
|
|||
|
||||
I have long said to friends that one does not "do mathematics," but rather "mathematics does you," and this encapsulates how I feel about mathematics better than anything else I can think of at the moment. Mathematics is endlessly creative and has, to me, unlimited intrigue. I vividly remember learning about the [Sylow Theorems](https://en.wikipedia.org/wiki/Sylow_theorems) in my 3rd semester abstract algebra course - my first math upper level - and feeling a sense of absolute wonder and beauty at the proof, yes, but more so at the grandeur of human genius - that *we* were able to derive this result, and so many subsequent ones, and that I had the power to understand it just the same!^[Which was perhaps *not* evident based off my score for the final exam of that course, but I digress.]
|
||||
|
||||
### Computer Systems
|
||||
I have been interested in the low level since I began to study computation. Getting closer to the hardware was a constant goal as I learned Java in high school. Later, when I took my first real "Systems"^[Brown makes a real distinction about what is "Systems" and what is not "Systems", much more so than I would. But, the effect of having your undergrad take place within a *particularly* semantically concerned department sticks.] course, I felt like I was a wizard, learning the ways of some magic. Computers are wonderfully beautiful and powerful machines, and the systems that they are are nothing short of exquisite. Many folks seem to think that those of the system-minded type are some hardcore, late-night hacker type devoid of social life, romance, etc. I think quite the opposite: those of us who love systems love beauty and elegance, and those who opt to write Javascript whilst blatantly refusing to learn about how the systems they use work are the ones with deficits to fill!
|
||||
|
||||
## Music
|
||||
|
||||
Music is core to who I am. I have played trumpet, my primary instrument, for the majority of my life. I also play piano, horn, trombone, euphonium, tuba, and a bit of drums. More important to me than playing, however, is composition. I feel that my compositions are fundamentally a part of me, an extension of the person that I am.
|
||||
|
|
@ -82,7 +85,7 @@ COMPOSITION [IS]{.smallcaps} PERHAPS MORE THAN ANYTHING ELSE THE PRACTICE [OF MY
|
|||
|
||||
Music composition is thus **chiefly distinct** from other forms of creative activity for me. Music is the most rewarding for me, invoking the most passion, and it is the medium by which I feel I have the most expression potential and the most capacity to express.^[These are two different things for me. By **expression potential**, I mean the range of sentiments and ideas that music can, in the abstract / in principle, express. This **expression potential** is thus innately provided to me by mere virtue of my partaking in the act of writing music. By **capacity to express**, I am referring to my own personal ability as a composer to successfully express *that which I intend to* rather than the full range of what music itself might be able to encapsulate.] When I hear a composition that I have finished it surmounts me and effortlessly transports me into an immersive state; I am returned to the deep feelings and profound^[Not necessarily in grandeur, but in personal depth.] ideas that I tried to capture through my project. On the contrary, music also torments me. I am something of a perfectionist with my compositions and get frustrated when they do not pan out the way I intend. I scrap many projects that I perceive as insufficient, and when ideas are not flowing, I suffer for it. Luckily, since composition is such a core constituent of who I am, I have found a consistency in my undergraduate years, and the ideas have generally flowed without significant pause since 2023. I can only hope for my own sake that this trend continues far into the future.
|
||||
|
||||
::: {.score-fragment score-name="Violin Sonata - I (2026)" score-caption="A short excerpt from the first movement of my Flute Sonata, composed in January 2026."}
|
||||
::: {.score-fragment score-name="Flute Sonata - I (2026)" score-caption="A short excerpt from the first movement of my Flute Sonata, composed in January 2026."}
|
||||

|
||||
:::
|
||||
|
||||
|
|
@ -98,8 +101,22 @@ I am extremely interested in **Foreign Language**, and most fortunate to be a na
|
|||
|
||||
#### Spanish
|
||||
|
||||
During the last five years of my time in public high school, I took 5 courses in Spanish - the first two were *required*, and the latter three were *pseudorequired* - New York State requires only a basic Spanish credit for graduation, but to have any true competitive college application, one really must take a foreign language throughout. Regardless of this fact, my time in public high school taught me essentially **no Spanish**. Rather, I decided that learning Spanish was worth my time in my senior year, after our class ranks had been finalized, yielding a lighter workload as a result. During the day for the rest of the year until I graduated, whenever I had free time or nothing worthwhile to do, which was often, I would read exclusively in Spanish. I made an effort for the first time to immerse myself in Spanish and it worked well. This was the moment that I fell in love with the *process* of learning languages.
|
||||
|
||||
#### Chinese
|
||||
During my first year at Brown I took Mandarin Chinese. It was a great challenge compared to Spanish, and I enjoyed it as such. I regret that I have not had time to keep up the practice since - I intended to continue into third year Chinese and beyond, but unfortunately my schedule became too busy for that intention to become reality. Mandarin is, perhaps curiously, though perhaps not so curiously, the *only* language I have ever studied where reading was not the easiest skill for me to acquire. (In fact, for Mandarin, it was the second-hardest, only after writing.)
|
||||
|
||||
#### Danish
|
||||
I entered 2024 having absolutely no intention of learning Danish, and left it conversational. This happened because I studied abroad in Copenhagen for the second half of the year and fell in love with the city, the country, the culture, and, yes, the language! Danish, along with German, is one of my focuses as of now (2026).
|
||||
|
||||
#### German
|
||||
I decided to make a push to learn German, which I had long intended but never taken action toward, in late 2025. The diversity of philosophers whose primary language was German was a great motivation for this. My own aspirations of spending some extended time in Germany were also an inspiration, as were my fond memories of Berlin from my visit in 2024. I have quickly fallen in love with German this year, and intend to make my German better than my Spanish. I anticipate relative fluency by the end of the year 2026, and only improvement from there.
|
||||
|
||||
#### Linguistic Bucket List
|
||||
I have long had some notion of a "Linguistic Bucket List" - a collection of languages I intend to learn, whether for literacy or for true fluency over the course of my life. A subset of that follows:
|
||||
|
||||
- **French.** French is 100% the next language I will learn after my German is at a level such that study becomes more passive.
|
||||
- **Russian.** The literature is exceptional, and thus I feel obliged.
|
||||
- **Latin & Greek.** I am interested in *reading* the ancient texts in these languages, and perhaps pursuing Greek further, as it is part of my heritage.
|
||||
- **Sanskrit & Pali.** I am *also* interested in *reading* the ancient texts of these languages.
|
||||
|
||||
|
|
|
|||
|
|
@ -24,6 +24,14 @@ In sequent toil all forwards do contend.</p>
|
|||
<div id="countdown-wrapper"></div>
|
||||
|
||||
---
|
||||
## Who is a website for?
|
||||
|
||||
::: dropcap
|
||||
It is a trick question. A website (at least, of the personal website type) is created and, in particular, shared with the world in the hopes that someone else will derive utility from it, whether that utility is diversion, knowledge, or something else. If I had something to say to myself, creating a website for it would be overkill, wouldn't it?
|
||||
:::
|
||||
|
||||
Yes and no, if you ask me. I believe in the power of transparency and honesty. If I am going to endeavor to accomplish something, making my attempt in the public eye is an excellent motivator and pushes me to be consistent. Similarly, if I have something that I feel is important to me, such as the content of this page, then I have yet to see a reason why I should not incorporate it into the website. Yes, the website is primarily a place for me to share works that I expressly create with the intent of sharing, but I have yet to see *any* detriment to additionally sharing works that I expressly create for my own consumption. I lose nothing, and have the possibility of gaining *something*, so this seems to me to be an instantiation of [Pascal's Wager](https://en.wikipedia.org/wiki/Pascal%27s_wager).
|
||||
|
||||
## Applied Vanitas
|
||||
|
||||

|
||||
|
|
|
|||
|
|
@ -0,0 +1 @@
|
|||
1773865704
|
||||
|
|
@ -0,0 +1 @@
|
|||
1
|
||||
|
|
@ -19,6 +19,7 @@ executable site
|
|||
Backlinks
|
||||
Compilers
|
||||
Contexts
|
||||
Stats
|
||||
Stability
|
||||
Metadata
|
||||
Tags
|
||||
|
|
|
|||
10
spec.md
10
spec.md
|
|
@ -355,7 +355,7 @@ levineuwirth.org/
|
|||
- [x] Templates: default, essay, blog-post, index
|
||||
- [x] Dark/light toggle with `localStorage` + `prefers-color-scheme`
|
||||
- [x] Basic Pandoc pipeline (Markdown → HTML, smart typography)
|
||||
- [ ] Deploy to DreamHost via rsync
|
||||
- [x] Deploy to DreamHost via rsync — deployed to Hetzner VPS instead
|
||||
|
||||
### Phase 2: Content Features ✓
|
||||
- [x] Pandoc filters: sidenotes, dropcaps, smallcaps, wikilinks, typography, link classification, code, math
|
||||
|
|
@ -392,8 +392,8 @@ levineuwirth.org/
|
|||
- [ ] Content migration — migrate existing essays, poems, fiction, and music landing pages from prior formats into `content/`
|
||||
|
||||
### Phase 5: Infrastructure & Advanced
|
||||
- [ ] **Arch Linux VPS + nginx + certbot + DNS migration** — Provision Hetzner VPS, install nginx (config in §III), obtain TLS cert via certbot, migrate DNS from DreamHost. Update `make deploy` target. Serve `_site/` as static files; no server-side logic needed.
|
||||
- [ ] **Semantic embedding pipeline** — Generate per-page embeddings (OpenAI `text-embedding-3-small` or local model). Store as `data/embeddings.json` (identifier → vector). At build time, compute nearest neighbors and write `data/similar-links.json`. Serve as static JSON; JS loads it client-side to populate a "Similar" section in the page footer.
|
||||
- [x] **Arch Linux VPS + nginx + certbot + DNS migration** — Hetzner VPS provisioned, Arch Linux installed, nginx configured (config in §III), TLS cert via certbot, DNS migrated from DreamHost. `make deploy` pushes to GitHub and rsyncs to VPS.
|
||||
- [ ] **Semantic embedding pipeline** — Superseded by Phase 6 "Embedding-powered similar links" (local model, no API cost).
|
||||
- [x] **Backlinks with context** — Two-pass build-time system (`build/Backlinks.hs`). Pass 1: `version "links"` compiles each page lightly (wikilinks preprocessed, links + context extracted, serialised as JSON). Pass 2: `create ["data/backlinks.json"]` inverts the map. `backlinksField` in `essayCtx` / `postCtx` loads the JSON and renders `<details>`-collapsible per-entry lists. `popups.js` excludes `.backlink-source` links from the preview popup. Context paragraph uses `runPure . writeHtml5String` on the surrounding `Para` block. See Implementation Notes.
|
||||
- [ ] **Link archiving** — For all external links in `data/bibliography.bib` and in page bodies, check availability and save snapshots (Wayback Machine `save` API or local archivebox instance). Store archive URLs in `data/link-archive.json`; `Filters.Links` injects `data-archive-url` attributes; `popups.js` falls back to the archive if the live URL returns 404.
|
||||
- [ ] **Self-hosted git (Forgejo)** — Run Forgejo on the VPS. Mirror the build repo. Link from the colophon. Not essential; can remain on GitHub indefinitely.
|
||||
|
|
@ -408,10 +408,10 @@ levineuwirth.org/
|
|||
- [x] **RSS/feed improvements** — `/feed.xml` now includes compositions (`content/music/*/index.md`) alongside essays, posts, fiction, poetry. New `/music/feed.xml` (compositions only, `musicFeedConfig`). Compositions already had `"content"` snapshots saved by the landing-page rule; no compiler changes needed.
|
||||
- [ ] **Pagefind improvements** — Currently a basic full-text search. Consider: sub-result excerpts, portal-scoped search filters, weighting by `importance` frontmatter field.
|
||||
- [ ] **Audio essays / podcast feed** — Record readings of select essays. Embed a native `<audio>` player at the top of the essay page, activated by an `audio` frontmatter key (path to MP3, relative to the content dir). Generate a separate `/podcast.xml` Atom feed with `<enclosure>` elements pointing to the MP3s so readers can subscribe in any podcast app. Stretch goal: a paragraph-sync mode where the player emits `timeupdate` events that highlight the paragraph currently being read — requires a `data/audio/{slug}-timestamps.json` file mapping paragraph indices to timestamps, authored manually or via forced-alignment tooling (e.g. `whisper` with word timestamps).
|
||||
- [ ] **Build telemetry page** — A `/build` page generated at build time showing infrastructure statistics: total build time (wall clock), number of pages compiled by type, total output size, Pandoc AST statistics aggregated across the whole corpus (paragraph count, heading count, code blocks, math blocks, inline citations, word count distribution). Could also include a dependency graph of which pages triggered rebuilds. A meta-page about the site's own construction — fits the "configuration is code" philosophy. Implementation: `unsafeCompiler` calls to gather stats during build, written to a `data/build-stats.json` snapshot, rendered via a dedicated template.
|
||||
- [x] **Build telemetry page** — `/build/` page generated at build time. `build/Stats.hs` loads all content items by type, reads `"word-count"` snapshots, aggregates counts/words/reading-time per type, computes word-length distribution (5 buckets), and reads top-15 tags from the `Tags` object. Makefile writes `date +%s` to `data/build-start.txt` before Hakyll runs; after pagefind, computes elapsed and writes `data/last-build-seconds.txt` (read on next build). CSS in `static/css/build.css` (flex bar chart, tabular-nums table, grid dl); loaded conditionally via `$if(build)$` in head.html.
|
||||
- [x] **Epistemic profile** — Replaces the old `certainty` / `importance` fields with a richer multi-axis system. **Compact** (always visible in footer): status chip · confidence % · importance dots · evidence dots. **Expanded** (`<details>`): stability (auto) · scope · novelty · practicality · last reviewed · confidence trend. Auto-calculation in `build/Stability.hs` via `git log --follow`; `IGNORE.txt` pins overrides. See Metadata section and Implementation Notes for full schema and vocabulary.
|
||||
- [ ] **Writing statistics dashboard** — A `/stats` page computed entirely at build time from the corpus. Contents: total word count across all content types, essay/post/poem count, words written per month rendered as a GitHub-style contribution heatmap (SVG generated by Haskell or a Python script), average and median essay length, longest essay, most-cited essay (by backlink count), tag distribution as a treemap, reading-time histogram, site growth over time (cumulative word count by date). All data collected during the Hakyll build from compiled items and their snapshots; serialized to `data/stats.json` and rendered into a dedicated `stats.html` template.
|
||||
- [ ] **Memento mori** — An interactive widget, likely placed on the homepage or `/me` page, that confronts the reader (and author) with time. Exact form TBD, but the spirit is: a live display of time elapsed and time statistically remaining, computed from a birthdate and actuarial life expectancy. Could manifest as a progress bar, a grid of weeks (in the style of Tim Urban's "Your Life in Weeks"), or a running clock. Interactive via JavaScript — requires support for **custom inline JavaScript** in Pandoc-compiled pages (a `RawBlock "html"` passthrough or a dedicated fenced-div filter that emits `<script>` tags). The inline JS requirement is a prerequisite; implement that first. No tracking, no external data — all computation client-side from a hardcoded birthdate.
|
||||
- [x] **Memento mori** — Implemented at `/memento-mori/` as a full standalone page. 90×52 grid of weeks anchored to birthday anniversaries (nested year/week loop via `setFullYear`; week 52 stretched to eve of next birthday to absorb 365th/366th days). Week popup shows dynamic day-count and locale-derived day names. Score fragment (bassoon, `content/memento-mori/scores/bsn.svg`) inlined via `Filters.Score`. Linked from footer (MM).
|
||||
- [ ] **Embedding-powered similar links** — Precompute dense vector embeddings for every page using a local model (e.g. `nomic-embed-text` or `gte-large` via `ollama` or `llama.cpp`) on personal hardware — no API dependency, no per-call cost. At build time, a Python script reads `_site/` HTML, embeds each page, computes top-N cosine neighbors, and writes `data/similar-links.json` (slug → [{slug, title, score}]). Hakyll injects this into each page's context (via `Metadata.hs` reading the JSON); template renders a "Related" section in the page footer. Analogous to gwern's `GenerateSimilar.hs` but model-agnostic and self-hosted. Note: supersedes the Phase 5 "Semantic embedding pipeline" stub — that stub should be replaced by this when implemented.
|
||||
- [x] **Bidirectional backlinks with context** — See Phase 5 above; implemented with full context-paragraph extraction. Merged with the Phase 5 stub.
|
||||
- [ ] **Signed pages / content integrity** — GPG-sign each HTML output file at build time using a detached ASCII-armored signature (`.sig` file per page). The signing step runs as a final Makefile target after Hakyll and Pagefind complete: `find _site -name '*.html' -exec gpg --batch --yes --detach-sign --armor {} \;`. Signatures are served alongside their pages (e.g. `/essays/my-essay.html.sig`). The page footer displays a verification block near the license: the signing key fingerprint, a link to `/gpg/` where the public key is published, and a link to the `.sig` file for that page — so readers can verify without hunting for the key. The public key is also available at the standard WKD location and published to keyservers. **Operational requirement:** a dedicated signing subkey (no passphrase) on the build machine; the master certifying key stays offline and passphrase-protected. A `tools/setup-signing.sh` script will walk through creating the signing subkey, exporting it, and configuring the build — so the setup is repeatable when moving between machines or provisioning the VPS. Philosophically consistent with the FOSS/privacy ethos and the "configuration is code" principle; extreme, but the site is already committed to doing things properly.
|
||||
|
|
|
|||
|
|
@ -0,0 +1,109 @@
|
|||
/* ============================================================
|
||||
Build telemetry page (/build/)
|
||||
============================================================ */
|
||||
|
||||
.build-section {
|
||||
margin: 2.5rem 0;
|
||||
}
|
||||
|
||||
/* Summary + tag tables */
|
||||
.build-table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
font-variant-numeric: tabular-nums;
|
||||
font-size: 0.95em;
|
||||
}
|
||||
|
||||
.build-table th,
|
||||
.build-table td {
|
||||
padding: 0.35rem 0.75rem;
|
||||
text-align: left;
|
||||
border-bottom: 1px solid var(--rule);
|
||||
}
|
||||
|
||||
.build-table th {
|
||||
font-family: var(--font-ui);
|
||||
font-size: 0.8em;
|
||||
letter-spacing: 0.04em;
|
||||
text-transform: uppercase;
|
||||
color: var(--text-muted);
|
||||
border-bottom: 2px solid var(--rule);
|
||||
}
|
||||
|
||||
.build-table td:not(:first-child) {
|
||||
font-variant-numeric: tabular-nums;
|
||||
}
|
||||
|
||||
.build-total td {
|
||||
font-weight: 600;
|
||||
border-top: 2px solid var(--rule);
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
/* Word-length distribution bars */
|
||||
.build-bars {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 0.5rem;
|
||||
max-width: 480px;
|
||||
}
|
||||
|
||||
.build-bar-row {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.75rem;
|
||||
font-size: 0.9em;
|
||||
}
|
||||
|
||||
.build-bar-label {
|
||||
width: 6.5rem;
|
||||
flex-shrink: 0;
|
||||
color: var(--text-muted);
|
||||
font-family: var(--font-ui);
|
||||
font-size: 0.85em;
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
.build-bar-wrap {
|
||||
flex: 1;
|
||||
background: var(--rule);
|
||||
height: 1rem;
|
||||
border-radius: 2px;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.build-bar {
|
||||
display: block;
|
||||
height: 100%;
|
||||
background: var(--text);
|
||||
border-radius: 2px;
|
||||
transition: width 0.2s ease;
|
||||
min-width: 2px;
|
||||
}
|
||||
|
||||
.build-bar-count {
|
||||
width: 2.5rem;
|
||||
flex-shrink: 0;
|
||||
font-variant-numeric: tabular-nums;
|
||||
font-size: 0.85em;
|
||||
color: var(--text-muted);
|
||||
}
|
||||
|
||||
/* Build info dl */
|
||||
.build-meta {
|
||||
display: grid;
|
||||
grid-template-columns: 12rem 1fr;
|
||||
gap: 0.25rem 1rem;
|
||||
font-size: 0.95em;
|
||||
}
|
||||
|
||||
.build-meta dt {
|
||||
color: var(--text-muted);
|
||||
font-family: var(--font-ui);
|
||||
font-size: 0.85em;
|
||||
}
|
||||
|
||||
.build-meta dd {
|
||||
margin: 0;
|
||||
font-variant-numeric: tabular-nums;
|
||||
}
|
||||
|
|
@ -170,6 +170,18 @@ body > footer {
|
|||
font-variant-numeric: tabular-nums;
|
||||
}
|
||||
|
||||
.footer-build-link {
|
||||
margin-left: 0.3em;
|
||||
font-size: 0.72rem;
|
||||
color: var(--text-faint);
|
||||
text-decoration: none;
|
||||
opacity: 0.6;
|
||||
}
|
||||
|
||||
.footer-build-link:hover {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
|
||||
/* ============================================================
|
||||
RESPONSIVE BREAKPOINTS
|
||||
|
|
|
|||
|
|
@ -7,6 +7,6 @@
|
|||
<span class="footer-license">CC BY-SA-NC 4.0 · <a href="https://github.com/levineuwirth/levineuwirth.org">MIT</a> · <a href="/memento-mori.html" class="footer-mm">MM</a></span>
|
||||
</div>
|
||||
<div class="footer-right">
|
||||
<span class="footer-build">build $build-time$</span>
|
||||
<span class="footer-build">build $build-time$</span><a href="/build/" class="footer-build-link" aria-label="Build telemetry">→</a>
|
||||
</div>
|
||||
</footer>
|
||||
|
|
|
|||
|
|
@ -17,6 +17,7 @@ $if(library)$<link rel="stylesheet" href="/css/library.css">$endif$
|
|||
$if(memento-mori)$<link rel="stylesheet" href="/css/memento-mori.css">$endif$
|
||||
$if(catalog)$<link rel="stylesheet" href="/css/catalog.css">$endif$
|
||||
$if(commonplace)$<link rel="stylesheet" href="/css/commonplace.css">$endif$
|
||||
$if(build)$<link rel="stylesheet" href="/css/build.css">$endif$
|
||||
$if(reading)$<link rel="stylesheet" href="/css/reading.css">$endif$
|
||||
$if(composition)$<link rel="stylesheet" href="/css/score-reader.css">$endif$
|
||||
<link rel="stylesheet" href="/css/print.css" media="print">
|
||||
|
|
|
|||
Loading…
Reference in New Issue