chore: Significantly restructure folder layout
This moves the various projects from "type-based" folders (such as "services" or "tools") into more appropriate semantic folders (such as "nix", "ops" or "web"). Deprecated projects (nixcon-demo & gotest) that existed only for testing/demonstration purposes have been removed. (Note: *all* builds are broken by this commit.)
parent e52eed3cd4
commit 03bfe08e1d
110 changed files with 1 addition and 998 deletions
web/tazblog/src/Blog.hs (new file, 141 lines)
@@ -0,0 +1,141 @@
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}

module Blog where

import BlogStore
import Data.Text (Text, pack)
import qualified Data.Text as T
import Data.Text.Lazy (fromStrict)
import Data.Time
import Text.Blaze.Html (preEscapedToHtml)
import Text.Hamlet
import Text.Markdown

blogTitle :: Text = "tazjin's blog"

repoURL :: Text = "https://bitbucket.org/tazjin/tazblog-haskell"

mailTo :: Text = "mailto:mail@tazj.in"

twitter :: Text = "https://twitter.com/tazjin"

replace :: Eq a => a -> a -> [a] -> [a]
replace x y = map (\z -> if z == x then y else z)

-- |After this date all entries are Markdown
markdownCutoff :: Day
markdownCutoff = fromGregorian 2013 04 28

blogTemplate :: Text -> Html -> Html
blogTemplate t_append body =
  [shamlet|
$doctype 5
<head>
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <meta name="description" content=#{blogTitle}#{t_append}>
  <link rel="stylesheet" type="text/css" href="/static/blog.css" media="all">
  <link rel="alternate" type="application/rss+xml" title="RSS-Feed" href="/rss.xml">
  <title>#{blogTitle}#{t_append}
<body>
  <header>
    <h1>
      <a href="/" .unstyled-link>#{blogTitle}
    <hr>
  ^{body}
  ^{showFooter}
|]

showFooter :: Html
showFooter =
  [shamlet|
<footer>
  <p .footer>Served without any dynamic languages.
  <p .footer>
    <a href=#{repoURL} .uncoloured-link>
    |
    <a href=#{twitter} .uncoloured-link>Twitter
    |
    <a href=#{mailTo} .uncoloured-link>Mail
  <p .lod>
    ಠ_ಠ
|]

isEntryMarkdown :: Entry -> Bool
isEntryMarkdown e = edate e > markdownCutoff

renderEntryMarkdown :: Text -> Html
renderEntryMarkdown = markdown def {msXssProtect = False} . fromStrict

renderEntries :: [Entry] -> Maybe Html -> Html
renderEntries entries pageLinks =
  [shamlet|
$forall entry <- entries
  <article>
    <h2 .inline>
      <a href=#{linkElems entry} .unstyled-link>
        #{title entry}
    <aside .date>
      #{pack $ formatTime defaultTimeLocale "%Y-%m-%d" $ edate entry}
    $if (isEntryMarkdown entry)
      ^{renderEntryMarkdown $ text entry}
    $else
      ^{preEscapedToHtml $ text entry}
    <hr>
$maybe links <- pageLinks
  ^{links}
|]
  where
    linkElems Entry {..} = "/" ++ show entryId

showLinks :: Maybe Int -> Html
showLinks (Just i) =
  [shamlet|
$if ((>) i 1)
  <div .navigation>
    <a href=#{nLink $ succ i} .uncoloured-link>Earlier
    |
    <a href=#{nLink $ pred i} .uncoloured-link>Later
$elseif ((<=) i 1)
  ^{showLinks Nothing}
|]
  where
    nLink page = T.concat ["/?page=", show' page]
showLinks Nothing =
  [shamlet|
<div .navigation>
  <a href="/?page=2" .uncoloured-link>Earlier
|]

renderEntry :: Entry -> Html
renderEntry e@Entry {..} =
  [shamlet|
<article>
  <h2 .inline>
    #{title}
  <aside .date>
    #{pack $ formatTime defaultTimeLocale "%Y-%m-%d" edate}
  $if (isEntryMarkdown e)
    ^{renderEntryMarkdown text}
  $else
    ^{preEscapedToHtml $ text}
  <hr>
|]

showError :: Text -> Text -> Html
showError title err =
  blogTemplate (": " <> title)
    [shamlet|
<p>:(
<p>#{err}
<hr>
|]
web/tazblog/src/BlogStore.hs (new file, 182 lines)
@@ -0,0 +1,182 @@
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}

-- |This module implements fetching of individual blog entries from
-- DNS. Yes, you read that correctly.
--
-- Each blog post is stored as a set of records in a designated DNS
-- zone. For the production blog, this zone is `blog.tazj.in.`.
--
-- A top-level record at `_posts` contains a list of all published
-- post IDs.
--
-- For each of these post IDs, there is a record at `_meta.$postID`
-- that contains the title and number of post chunks.
--
-- For each post chunk, there is a record at `_$chunkID.$postID` that
-- contains a base64-encoded post fragment.
--
-- This module implements logic for assembling a post out of these
-- fragments and caching it based on the TTL of its `_meta` record.
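--
-- As a purely illustrative sketch (hypothetical record contents, not
-- actual zone data): a two-chunk post with ID 1234 in the zone
-- `blog.tazj.in.` would be spread across TXT records roughly like
--
--   _posts.blog.tazj.in.       "[1234]"
--   _meta.1234.blog.tazj.in.   "{\"c\": 2, \"t\": \"Some title\", \"d\": \"2019-08-01\"}"
--   _0.1234.blog.tazj.in.      "<base64-encoded first fragment>"
--   _1.1234.blog.tazj.in.      "<base64-encoded second fragment>"
--
-- where the JSON keys correspond to the `FromJSON Meta` instance below.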
module BlogStore
  ( BlogCache,
    EntryId (..),
    Entry (..),
    withCache,
    listEntries,
    getEntry,
    show'
  )
where

import Control.Applicative ((<$>), (<*>))
import Control.Monad (mzero)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.Aeson ((.:), FromJSON (..), Value (Object), decodeStrict)
import Data.ByteString.Base64 (decodeLenient)
import Data.Either (fromRight)
import Data.List (sortBy)
import Data.Text as T (Text, concat, pack)
import Data.Text.Encoding (decodeUtf8', encodeUtf8)
import Data.Time (Day)
import Network.DNS (DNSError, lookupTXT)
import qualified Network.DNS.Resolver as R

newtype EntryId = EntryId {unEntryId :: Integer}
  deriving (Eq, Ord, FromJSON)

instance Show EntryId where

  show = show . unEntryId

data Entry
  = Entry
      { entryId :: EntryId,
        author :: Text,
        title :: Text,
        text :: Text,
        edate :: Day
      }
  deriving (Eq, Ord, Show)

-- | Wraps a DNS resolver with caching configured. For the initial
-- version of this, all caching of entries is done by the resolver
-- (i.e. no pre-assembled versions of entries are cached).
data BlogCache = BlogCache R.Resolver Text

data StoreError
  = PostNotFound EntryId
  | DNS DNSError
  | InvalidMetadata
  | InvalidChunk
  | InvalidPosts
  deriving (Show)

type Offset = Int

type Count = Int

withCache :: Text -> (BlogCache -> IO a) -> IO a
withCache zone f = do
  let conf =
        R.defaultResolvConf
          { R.resolvCache = Just R.defaultCacheConf,
            R.resolvConcurrent = True
          }
  seed <- R.makeResolvSeed conf
  R.withResolver seed (\r -> f $ BlogCache r zone)

listEntries :: MonadIO m => BlogCache -> Offset -> Count -> m [Entry]
listEntries cache offset count = liftIO $ do
  posts <- postList cache
  entries <- mapM (entryFromDNS cache) $ take count $ drop offset $ fromRight (error "no posts") posts
  -- TODO: maybe don't just drop broken entries
  return
    $ fromRight (error "no entries")
    $ sequence entries

getEntry :: MonadIO m => BlogCache -> EntryId -> m (Maybe Entry)
getEntry cache eid = liftIO $ entryFromDNS cache eid >>= \case
  Left _ -> return Nothing -- TODO: ??
  Right entry -> return $ Just entry

show' :: Show a => a -> Text
show' = pack . show

-- * DNS fetching implementation

type Chunk = Integer

-- | Represents the metadata stored for each post in the _meta record.
data Meta = Meta Integer Text Day
  deriving (Show)

instance FromJSON Meta where

  parseJSON (Object v) =
    Meta
      <$> v
      .: "c"
      <*> v
      .: "t"
      <*> v
      .: "d"
  parseJSON _ = mzero

entryMetadata :: BlogCache -> EntryId -> IO (Either StoreError Meta)
entryMetadata (BlogCache r z) (EntryId eid) =
  let domain = encodeUtf8 ("_meta." <> show' eid <> "." <> z)
      record = lookupTXT r domain
      toMeta rrdata = case decodeStrict $ decodeLenient rrdata of
        Nothing -> Left InvalidMetadata
        Just m -> Right m
   in record >>= \case
        (Left err) -> return $ Left $ DNS err
        (Right [bs]) -> return $ toMeta bs
        _ -> return $ Left InvalidMetadata

entryChunk :: BlogCache -> EntryId -> Chunk -> IO (Either StoreError Text)
entryChunk (BlogCache r z) (EntryId eid) c =
  let domain = encodeUtf8 ("_" <> show' c <> "." <> show' eid <> "." <> z)
      record = lookupTXT r domain
      toChunk rrdata = case decodeUtf8' $ decodeLenient rrdata of
        Left _ -> Left InvalidChunk
        Right chunk -> Right chunk
   in record >>= \case
        (Left err) -> return $ Left $ DNS err
        (Right [bs]) -> return $ toChunk bs
        _ -> return $ Left InvalidChunk

fetchAssembleChunks :: BlogCache -> EntryId -> Meta -> IO (Either StoreError Text)
fetchAssembleChunks cache eid (Meta n _ _) = do
  chunks <- mapM (entryChunk cache eid) [0 .. (n - 1)]
  return $ fmap T.concat $ sequence chunks

entryFromDNS :: BlogCache -> EntryId -> IO (Either StoreError Entry)
entryFromDNS cache eid = do
  meta <- entryMetadata cache eid
  case meta of
    Left err -> return $ Left err
    Right meta -> do
      chunks <- fetchAssembleChunks cache eid meta
      let (Meta _ t d) = meta
      return
        $ either Left
          ( \text -> Right $ Entry
              { entryId = eid,
                author = "tazjin",
                title = t,
                text = text,
                edate = d
              }
          )
          chunks

postList :: BlogCache -> IO (Either StoreError [EntryId])
postList (BlogCache r z) =
  let domain = encodeUtf8 ("_posts." <> z)
      record = lookupTXT r domain
      toPosts =
        fmap (sortBy (flip compare))
          . mapM (maybe (Left InvalidPosts) Right . decodeStrict)
   in either (Left . DNS) toPosts <$> record
web/tazblog/src/RSS.hs (new file, 48 lines)
@@ -0,0 +1,48 @@
{-# LANGUAGE RecordWildCards #-}

module RSS
  ( renderFeed
  )
where

import BlogStore
import Data.Maybe (fromJust)
import qualified Data.Text as T
import Data.Time (UTCTime (..), getCurrentTime, secondsToDiffTime)
import Network.URI (URI, parseURI)
import Text.RSS

createChannel :: UTCTime -> [ChannelElem]
createChannel now =
  [ Language "en",
    Copyright "Vincent Ambo",
    WebMaster "mail@tazj.in",
    ChannelPubDate now
  ]

createRSS :: UTCTime -> [Item] -> RSS
createRSS t =
  let link = fromJust $ parseURI "https://tazj.in"
   in RSS "tazjin's blog" link "tazjin's blog feed" (createChannel t)

createItem :: Entry -> Item
createItem Entry {..} =
  [ Title "tazjin's blog",
    Link $ entryLink entryId,
    Description $ T.unpack text,
    PubDate $ UTCTime edate $ secondsToDiffTime 0
  ]

entryLink :: EntryId -> URI
entryLink i =
  let url = "http://tazj.in/" ++ "/" ++ show i
   in fromJust $ parseURI url

createItems :: [Entry] -> [Item]
createItems = map createItem

createFeed :: [Entry] -> IO RSS
createFeed e = getCurrentTime >>= (\t -> return $ createRSS t $ createItems e)

renderFeed :: [Entry] -> IO String
renderFeed e = fmap (showXML . rssToXML) (createFeed e)
web/tazblog/src/Server.hs (new file, 81 lines)
@@ -0,0 +1,81 @@
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}

module Server where

import Blog
import BlogStore
import Control.Applicative (optional)
import Control.Monad (msum)
import Control.Monad.IO.Class (liftIO)
import Data.Maybe (maybe)
import qualified Data.Text as T
import Happstack.Server hiding (Session)
import RSS

pageSize :: Int
pageSize = 3

tmpPolicy :: BodyPolicy
tmpPolicy = defaultBodyPolicy "/tmp" 0 200000 1000

runBlog :: Int -> String -> IO ()
runBlog port respath =
  withCache "blog.tazj.in." $ \cache ->
    simpleHTTP nullConf {port = port} $ tazblog cache respath

tazblog :: BlogCache -> String -> ServerPart Response
tazblog cache resDir =
  msum
    [ -- legacy language-specific routes
      dir "de" $ blogHandler cache,
      dir "en" $ blogHandler cache,
      dir "static" $ staticHandler resDir,
      blogHandler cache,
      staticHandler resDir,
      notFound $ toResponse $ showError "Not found" "Page not found"
    ]

blogHandler :: BlogCache -> ServerPart Response
blogHandler cache =
  msum
    [ path $ \(eId :: Integer) -> showEntry cache $ EntryId eId,
      nullDir >> showIndex cache,
      dir "rss" $ nullDir >> showRSS cache,
      dir "rss.xml" $ nullDir >> showRSS cache
    ]

staticHandler :: String -> ServerPart Response
staticHandler resDir = do
  setHeaderM "cache-control" "max-age=630720000"
  setHeaderM "expires" "Tue, 20 Jan 2037 04:20:42 GMT"
  serveDirectory DisableBrowsing [] resDir

showEntry :: BlogCache -> EntryId -> ServerPart Response
showEntry cache eId = do
  entry <- getEntry cache eId
  tryEntry entry

tryEntry :: Maybe Entry -> ServerPart Response
tryEntry Nothing = notFound $ toResponse $ showError "Not found" "Blog entry not found"
tryEntry (Just entry) = ok $ toResponse $ blogTemplate eTitle $ renderEntry entry
  where
    eTitle = T.append ": " (title entry)

offset :: Maybe Int -> Int
offset = maybe 0 (pageSize *)

showIndex :: BlogCache -> ServerPart Response
showIndex cache = do
  (page :: Maybe Int) <- optional $ lookRead "page"
  entries <- listEntries cache (offset page) pageSize
  ok $ toResponse $ blogTemplate ""
    $ renderEntries entries (Just $ showLinks page)

showRSS :: BlogCache -> ServerPart Response
showRSS cache = do
  entries <- listEntries cache 0 4
  feed <- liftIO $ renderFeed entries
  setHeaderM "content-type" "text/xml"
  ok $ toResponse feed