feat(users/Profpatsch/whatcd-resolver): locally serve resources

Proxy the resources through our server instead of always requesting
them from the CDN, which leaks lots of usage data.

This prefetches the resources at server startup (instead of e.g.
adding them to the build environment statically), which I think is a
nice compromise. The integrity verification stays in place, but it is
still done in the client.

Adds `rel=preload` as well, so we start fetching asap.

No caching (yet). We could easily add a hash over the content, though,
so that the client never has to re-request the resources.
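
A minimal sketch of that content-hash idea, assuming the crypton/cryptonite
Crypto.Hash API were available; the helper names are illustrative and nothing
below is part of this change:

{-# LANGUAGE OverloadedStrings #-}

import Crypto.Hash (Digest, SHA256 (..), hashWith)
import Data.ByteArray.Encoding (Base (Base16), convertToBase)
import Data.ByteString (ByteString)
import Network.HTTP.Types (ResponseHeaders)

-- Hex-encoded SHA-256 of the prefetched resource body (hypothetical helper).
contentHash :: ByteString -> ByteString
contentHash body = convertToBase Base16 (hashWith SHA256 body :: Digest SHA256)

-- Headers the local resource handler could set so the client never has to
-- re-request: a strong ETag derived from the content plus an immutable Cache-Control.
cacheHeaders :: ByteString -> ResponseHeaders
cacheHeaders body =
  [ ("ETag", "\"" <> contentHash body <> "\""),
    ("Cache-Control", "public, max-age=31536000, immutable")
  ]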

Change-Id: I9aac80cfb1ded09e578ba2a70dcf982bf5322ff6
Reviewed-on: https://cl.tvl.fyi/c/depot/+/13215
Reviewed-by: Profpatsch <mail@profpatsch.de>
Tested-by: BuildkiteCI
Profpatsch, 2025-03-07 15:06:55 +01:00
commit 17f5b55066 (parent f5c7f9e666)
4 changed files with 400 additions and 240 deletions

View file

@@ -173,6 +173,7 @@ module MyPrelude
     HasCallStack,
     module Data.Error,
     symbolText,
+    unwrapErrorTree,
   )
 where
 
@@ -199,6 +200,7 @@ import Data.Char qualified
 import Data.Coerce (Coercible, coerce)
 import Data.Data (Proxy (Proxy))
 import Data.Error
+import Data.Error.Tree (ErrorTree, prettyErrorTree)
 import Data.Foldable (Foldable (foldMap', toList), fold, foldl', for_, sequenceA_, traverse_)
 import Data.Foldable qualified as Foldable
 import Data.Function ((&))
@@ -833,3 +835,7 @@ symbolText :: forall sym. (KnownSymbol sym) => Text
 symbolText = do
   symbolVal (Proxy :: Proxy sym)
     & stringToText
+
+-- | Like 'unwrapError', but for 'ErrorTree'. Will crash with an uncatchable exception if 'Left'.
+unwrapErrorTree :: (HasCallStack) => Either ErrorTree a -> a
+unwrapErrorTree e = e & first (newError . prettyErrorTree) & unwrapError

View file

@ -1,4 +1,5 @@
{-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskellQuotes #-}
module Parse where module Parse where
@ -14,6 +15,7 @@ import Data.Semigroupoid qualified as Semigroupoid
import Data.Text qualified as Text import Data.Text qualified as Text
import FieldParser (FieldParser) import FieldParser (FieldParser)
import FieldParser qualified as Field import FieldParser qualified as Field
import Language.Haskell.TH.Syntax qualified as TH
import PossehlAnalyticsPrelude import PossehlAnalyticsPrelude
import Validation (partitionValidations) import Validation (partitionValidations)
import Prelude hiding (init, maybe) import Prelude hiding (init, maybe)
@ -191,3 +193,18 @@ zipNonEmpty (x :| xs) (y :| ys) = (x, y) :| zip xs ys
zipIndex :: NonEmpty b -> NonEmpty (Natural, b) zipIndex :: NonEmpty b -> NonEmpty (Natural, b)
zipIndex = zipNonEmpty (1 :| [2 :: Natural ..]) zipIndex = zipNonEmpty (1 :| [2 :: Natural ..])
-- | Parse a literal value at compile time. This is used with Template Haskell, like so:
--
-- > $$("2023-07-27" & literal hyphenatedDay) :: Time.Day
--
-- You need the double @$$@!
--
-- ATTN: This needs an instance of the 'TH.Lift' class for the output type.
-- Many library types dont yet implement this class, so we have to provide the instances ourselves.
-- See NOTE: Lift for library types
literal :: forall from to. (TH.Lift to) => Parse from to -> from -> TH.Code TH.Q to
literal parser s = do
case runParse "Literal parse failed" parser s of
Right a -> [||a||]
Left err -> TH.liftCode (err & prettyErrorTree & textToString & fail)
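
A hedged call-site sketch for the splice above; it assumes a hypothetical
hyphenatedDay :: Parse Text Time.Day parser and a 'TH.Lift' instance for
'Time.Day', as the NOTE referenced in the comment requires:

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}

import Data.Function ((&))
import Data.Time.Calendar qualified as Time

-- The parse runs at compile time; a malformed date literal becomes a compile error.
releaseDay :: Time.Day
releaseDay = $$("2023-07-27" & literal hyphenatedDay)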

View file

@@ -1,7 +1,10 @@
 {-# LANGUAGE QuasiQuotes #-}
+{-# OPTIONS_GHC -Wno-orphans #-}
 
 module Http
-  ( doRequestJson,
+  ( textToURI,
+    uriToHttpClientRequest,
+    doRequestJson,
     RequestOptions (..),
     mkRequestOptions,
     httpJson,
@@ -19,6 +22,8 @@ module Http
 where
 
 import AppT
+import Control.Exception (Exception (..), SomeException)
+import Control.Monad.Catch.Pure (runCatch)
 import Data.Aeson qualified as Json
 import Data.Aeson.BetterErrors qualified as Json
 import Data.CaseInsensitive (CI (original))
@@ -29,6 +34,7 @@ import Data.List.NonEmpty qualified as NonEmpty
 import Data.Ord (clamp)
 import Data.Text qualified as Text
 import Data.Text.Punycode qualified as Punycode
+import FieldParser (FieldParser' (..))
 import FieldParser qualified as Field
 import Json qualified
 import Json.Enc qualified as Enc
@@ -38,12 +44,51 @@ import Network.HTTP.Client
 import Network.HTTP.Client qualified as Http
 import Network.HTTP.Simple qualified as Http
 import Network.HTTP.Types.Status (Status (..))
+import Network.URI (URI, parseURI)
 import Network.Wai.Parse qualified as Wai
 import Optional
+import Parse (Parse)
+import Parse qualified
 import Pretty
 import UnliftIO.Concurrent (threadDelay)
 import Prelude hiding (span)
 
+-- | Make sure we can parse the given Text into an URI.
+textToURI :: Parse Text URI
+textToURI =
+  Parse.fieldParser
+    ( FieldParser $ \text ->
+        text
+          & textToString
+          & Network.URI.parseURI
+          & annotate [fmt|Cannot parse this as a URL: "{text}"|]
+    )
+
+-- | Make sure we can parse the given URI into a Request.
+--
+-- This tries to work around the horrible, horrible interface in Http.Client.
+uriToHttpClientRequest :: Parse URI Http.Request
+uriToHttpClientRequest =
+  Parse.mkParseNoContext
+    ( \(ctx, url) ->
+        (url & Http.requestFromURI)
+          & runCatch
+          & first (checkException @Http.HttpException)
+          & \case
+            Left (Right (Http.InvalidUrlException urlText reason)) ->
+              Left [fmt|Unable to set the url "{urlText}" as request URL, reason: {reason}, at {Parse.showContext ctx}|]
+            Left (Right exc@(Http.HttpExceptionRequest _ _)) ->
+              Left [fmt|Weird! Should not get a HttpExceptionRequest when parsing an URL (bad library design), was {exc & displayException}, at {Parse.showContext ctx}|]
+            Left (Left someExc) ->
+              Left [fmt|Weird! Should not get anyhting but a HttpException when parsing an URL (bad library design), was {someExc & displayException}, at {Parse.showContext ctx}|]
+            Right req -> pure req
+    )
+  where
+    checkException :: (Exception b) => SomeException -> Either SomeException b
+    checkException some = case fromException some of
+      Nothing -> Left some
+      Just e -> Right e
+
 data RequestOptions = RequestOptions
   { method :: ByteString,
     host :: Text,
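
Taken together with unwrapErrorTree from the MyPrelude change, the intended
composition looks roughly like this, a sketch mirroring how
prefetchResourceIntegrity further down uses it (it assumes this module's
imports, (&) and (>>>), plus OverloadedStrings; the URL is one of the
resources this change proxies):

-- Any parse failure is pretty-printed as an ErrorTree and crashes via
-- unwrapErrorTree, which is acceptable for a request built once at startup.
htmxRequest :: Http.Request
htmxRequest =
  "https://unpkg.com/htmx.org@1.9.2"
    & Parse.runParse "Failed to parse URI" (textToURI >>> uriToHttpClientRequest)
    & unwrapErrorTree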

View file

@@ -10,12 +10,12 @@ import Comparison
 import Conduit (ConduitT)
 import Conduit qualified
 import Control.Category qualified as Cat
-import Control.Monad.Catch.Pure (runCatch)
 import Control.Monad.Logger.CallStack
 import Control.Monad.Reader
 import Data.Aeson qualified as Json
 import Data.Aeson.BetterErrors qualified as Json
 import Data.Aeson.KeyMap qualified as KeyMap
+import Data.ByteString qualified as ByteString
 import Data.CaseInsensitive (CI)
 import Data.Conduit ((.|))
 import Data.Error.Tree
@@ -28,9 +28,10 @@ import Data.Text qualified as Text
 import Database.PostgreSQL.Simple qualified as Postgres
 import Database.PostgreSQL.Simple.Types (PGArray (PGArray))
 import Database.Postgres.Temp qualified as TmpPg
-import FieldParser (FieldParser, FieldParser' (..))
+import FieldParser (FieldParser)
 import FieldParser qualified as Field
 import Html qualified
+import Http
 import IHP.HSX.QQ (hsx)
 import IHP.HSX.ToHtml (ToHtml)
 import Json qualified
@@ -45,11 +46,10 @@ import Network.HTTP.Client.Conduit qualified as Http
 import Network.HTTP.Simple qualified as Http
 import Network.HTTP.Types
 import Network.HTTP.Types qualified as Http
-import Network.URI (URI)
-import Network.URI qualified
 import Network.Wai (ResponseReceived)
 import Network.Wai qualified as Wai
 import Network.Wai.Handler.Warp qualified as Warp
+import Network.Wai.Parse (parseContentType)
 import OpenTelemetry.Attributes qualified as Otel
 import OpenTelemetry.Context.ThreadLocal qualified as Otel
 import OpenTelemetry.Trace qualified as Otel hiding (getTracer, inSpan, inSpan')
@@ -67,7 +67,9 @@ import System.Environment qualified as Env
 import System.FilePath ((</>))
 import Text.Blaze.Html (Html)
 import Text.Blaze.Html.Renderer.Utf8 qualified as Html
+import Text.Blaze.Html5 ((!))
 import Text.Blaze.Html5 qualified as Html
+import Text.Blaze.Html5.Attributes qualified as HtmlA
 import Tool (readTool, readTools)
 import Transmission
 import UnliftIO hiding (Handler)
@@ -98,6 +100,8 @@ htmlUi = do
         ()
         (Dec.fromField @Text)
 
+  ourHtmlIntegrities <- prefetchHtmlIntegrities
+
   (counterHtmlM, counterHandler, _counterAsync) <- testCounter (label @"endpoint" "counter")
 
   withRunInIO $ \runInIO -> Warp.run 9093 $ \req respondOrig -> do
@@ -129,199 +133,201 @@ htmlUi = do
       let handlers :: Handlers (AppT IO)
           handlers =
-            Map.fromList
-              [ ( "",
+            Map.fromList $
+              ourHtmlIntegrities.handlers
+                <> [ ( "",
                   Html $ \span -> do
                     counterHtml <- counterHtmlM
-                    mainHtml counterHtml uniqueRunId span
+                    mainHtml ourHtmlIntegrities counterHtml uniqueRunId span
                 ),
                 ( "redacted-search",
                   HtmlWithQueryArgs (label @"searchstr" <$> singleQueryArgument "searchstr" Cat.id) $
                     \dat _span -> do
                       t <- redactedSearchAndInsert [("searchstr", dat.queryArgs.searchstr)]
                       runTransaction $ do
                        res <- t
                        (table, settings) <-
                          concurrentlyTraced
                            ( getBestTorrentsTable
                                (label @"groupByReleaseType" True)
                                ( Just
                                    ( E21
                                        (label @"onlyTheseTorrents" res.newTorrents)
                                    ) ::
                                      Maybe
                                        ( E2
                                            "onlyTheseTorrents"
                                            [Label "torrentId" Int]
                                            "artistRedactedId"
                                            Natural
                                        )
                                )
                            )
                            (getSettings)
                        pure $
                          mainHtml'
+                           ourHtmlIntegrities
                            ( MainHtml
                                { pageTitle = [fmt|whatcd-resolver Search {dat.queryArgs.searchstr & bytesToTextUtf8Lenient}|],
                                  returnUrl = dat.returnUrl,
                                  counterHtml = "",
                                  mainContent =
                                    [hsx|<h1>Search results for <pre>{dat.queryArgs.searchstr}</pre></h1>{table}|],
                                  uniqueRunId,
                                  searchFieldContent = dat.queryArgs.searchstr & bytesToTextUtf8Lenient,
                                  settings
                                }
                            )
                 ),
                 ( "snips/redacted/torrentDataJson",
                   Html $ \span -> do
                     dat <- torrentIdMp span
                     Html.mkVal <$> (runTransaction $ getTorrentById dat)
                 ),
                 ( "snips/redacted/getTorrentFile",
                   HtmlOrReferer $ \span -> do
                     dat <- torrentIdMp span
                     runTransaction $ do
                       settings <- getSettings
                       inserted <- redactedGetTorrentFileAndInsert (T2 dat (getLabel @"useFreeleechTokens" settings))
                       running <-
                         lift @Transaction $
                           doTransmissionRequest' (transmissionRequestAddTorrent inserted)
                       updateTransmissionTorrentHashById
                         ( T2
                             (getLabel @"torrentHash" running)
                             (getLabel @"torrentId" dat)
                         )
                       pure $
                         everySecond
                           "snips/transmission/getTorrentState"
                           (Enc.object [("torrent-hash", Enc.text running.torrentHash)])
                           "Starting"
                 ),
                 -- TODO: this is bad duplication??
                 ( "snips/redacted/startTorrentFile",
                   Html $ \span -> do
                     dat <- torrentIdMp span
                     runTransaction $ do
                       file <-
                         getTorrentFileById dat
                           <&> annotate [fmt|No torrent file for torrentId "{dat.torrentId}"|]
                           >>= orAppThrow span
                       running <-
                         lift @Transaction $
                           doTransmissionRequest' (transmissionRequestAddTorrent file)
                       updateTransmissionTorrentHashById
                         ( T2
                             (getLabel @"torrentHash" running)
                             (getLabel @"torrentId" dat)
                         )
                       pure $
                         everySecond
                           "snips/transmission/getTorrentState"
                           (Enc.object [("torrent-hash", Enc.text running.torrentHash)])
                           "Starting"
                 ),
                 ( "snips/transmission/getTorrentState",
                   Html $ \span -> do
                     dat <- parseMultipartOrThrow span req $ label @"torrentHash" <$> Multipart.field "torrent-hash" Field.utf8
                     status <-
                       doTransmissionRequest'
                         ( transmissionRequestListOnlyTorrents
                             ( T2
                                 (label @"ids" [label @"torrentHash" dat.torrentHash])
                                 (label @"fields" ["hashString"])
                             )
                             (Json.keyLabel @"torrentHash" "hashString" Json.asText)
                         )
                         <&> List.find (\torrent -> torrent.torrentHash == dat.torrentHash)
                     pure $
                       case status of
                         Nothing -> [hsx|ERROR unknown|]
                         Just _torrent -> [hsx|Running|]
                 ),
                 ( "snips/jsonld/render",
                   do
                     HtmlWithQueryArgs
                       ( label @"target"
                           <$> ( (singleQueryArgument "target" Field.utf8 >>> textToURI)
                                   & Parse.andParse uriToHttpClientRequest
                               )
                       )
                       ( \dat _span -> do
                           jsonld <- httpGetJsonLd (dat.queryArgs.target)
                           pure $ renderJsonld jsonld
                       )
                 ),
                 ("counter", counterHandler),
                 ( "settings",
                   PostAndRedirect
                     ( do
                         settings <- runTransaction getSettings
                         pure $ do
                           returnTo <- Multipart.fieldLabel @"returnTo" "returnTo" Field.utf8
                           parsed <- label @"settings" <$> settingsMultipartParser settings
                           pure $ T2 returnTo parsed
                     )
                     $ \_span (s :: T2 "returnTo" Text "settings" Settings) -> do
                       let Settings {useFreeleechTokens} = s.settings
                       runTransaction $ do
                         _ <-
                           writeSettings
                             [ T2
                                 (label @"key" "useFreeleechTokens")
                                 (label @"val" $ Json.Bool useFreeleechTokens)
                             ]
                         pure $ label @"redirectTo" (s.returnTo & textToBytesUtf8)
                 ),
                 ( "artist",
                   do
                     HtmlWithQueryArgs
                       ( label @"artistRedactedId"
                           <$> (singleQueryArgument "redacted_id" (Field.utf8 >>> Field.decimalNatural))
                       )
                       $ \dat _span -> do
-                        artistPage (T2 dat.queryArgs (label @"uniqueRunId" uniqueRunId))
+                        artistPage ourHtmlIntegrities (T2 dat.queryArgs (label @"uniqueRunId" uniqueRunId))
                 ),
                 ( "artist/refresh",
                   HtmlOrRedirect $
                     \span -> do
                       dat <-
                         parseMultipartOrThrow
                           span
                           req
                           (label @"artistId" <$> Multipart.field "artist-id" Field.utf8)
                       t <- redactedRefreshArtist dat
                       runTransaction $ do
                         t
                         pure $ E22 (label @"redirectTo" [fmt|/artist?redacted_id={dat.artistId}|])
                 ),
                 ( "autorefresh",
                   Plain $ do
                     qry <-
                       parseQueryArgsNewSpan
                         "Autorefresh Query Parse"
                         ( label @"hasItBeenRestarted"
                             <$> singleQueryArgument "hasItBeenRestarted" Field.utf8
                         )
                     pure $
                       Wai.responseLBS
                         Http.ok200
                         ( [("Content-Type", "text/html")]
                             <> if uniqueRunId /= qry.hasItBeenRestarted
                               then -- cause the client side to refresh
                                 [("HX-Refresh", "true")]
                               else []
                         )
                         ""
                 )
               ]
       runInIO $
         runHandlers
           ( Html $ \span -> do
               counterHtml <- counterHtmlM
-              mainHtml counterHtml uniqueRunId span
+              mainHtml ourHtmlIntegrities counterHtml uniqueRunId span
           )
           handlers
           req
@@ -330,8 +336,8 @@ htmlUi = do
     everySecond :: Text -> Enc -> Html -> Html
     everySecond call extraData innerHtml = [hsx|<div hx-trigger="every 1s" hx-swap="outerHTML" hx-post={call} hx-vals={Enc.encToBytesUtf8 extraData}>{innerHtml}</div>|]
 
-    mainHtml :: Html -> Text -> Otel.Span -> AppT IO Html
-    mainHtml counterHtml uniqueRunId _span = runTransaction $ do
+    mainHtml :: OurHtmlIntegrities (AppT IO) -> Html -> Text -> Otel.Span -> AppT IO Html
+    mainHtml ourHtmlIntegrities counterHtml uniqueRunId _span = runTransaction $ do
       -- jsonld <-
       --   httpGetJsonLd
       --     ( URI.parseURI "https://musicbrainz.org/work/92000fd4-d304-406d-aeb4-6bdbeed318ec" & annotate "not an URI" & unwrapError,
@@ -345,6 +351,7 @@ htmlUi = do
       -- transmissionTorrentsTable <- lift @Transaction getTransmissionTorrentsTable
       pure $
         mainHtml'
+          ourHtmlIntegrities
          ( MainHtml
              { pageTitle = "whatcd-resolver",
                returnUrl = "/",
@@ -366,10 +373,11 @@ data MainHtml = MainHtml
     settings :: Settings
   }
 
-mainHtml' :: MainHtml -> Html
-mainHtml' dat = do
+mainHtml' :: OurHtmlIntegrities m -> MainHtml -> Html
+mainHtml' integrities dat = do
   -- transmissionTorrentsTable <- lift @Transaction getTransmissionTorrentsTable
   htmlPageChrome
+    integrities
     dat.pageTitle
     ( [hsx|
         {dat.counterHtml}
@@ -443,8 +451,8 @@ redirectOrFallback target responseFn req = do
     Just _ -> responseFn Http.ok200 ("Hx-Redirect", target)
     Nothing -> responseFn Http.seeOther303 ("Location", target)
 
-htmlPageChrome :: (ToHtml a) => Text -> a -> Html
-htmlPageChrome title body =
+htmlPageChrome :: (ToHtml a) => OurHtmlIntegrities m -> Text -> a -> Html
+htmlPageChrome integrities title body =
   Html.docTypeHtml $
     [hsx|
       <head>
@@ -458,9 +466,7 @@ htmlPageChrome title body =
        TODO: create favicon
        -->
        <link rel="icon" href="data:,">
-        <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-9ndCyUaIbzAi2FUVXJi0CjmCapSmO7SnpJef0486qhLnuZ2cdeRhO02iuK6FUUVM" crossorigin="anonymous">
-        <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js" integrity="sha384-geWF76RCwLtnZ8qwWowPQNguL3RmwHVBC9FhGdlKrxdiJJigb/j/68SIy3Te4Bkz" crossorigin="anonymous"></script>
-        <script src="https://unpkg.com/htmx.org@1.9.2" integrity="sha384-L6OqL9pRWyyFU3+/bjdSri+iIphTN/bvYyM37tICVyOJkWZLpP2vGn6VUEXgzg6h" crossorigin="anonymous"></script>
+        {integrities.html}
        <style>
          dl {
            margin: 1em;
@@ -474,6 +480,47 @@ htmlPageChrome title body =
      </body>
    |]
 
+data OurHtmlIntegrities m = OurHtmlIntegrities
+  { html :: Html,
+    handlers :: [(Text, HandlerResponse m)]
+  }
+
+prefetchHtmlIntegrities :: (MonadOtel m, MonadThrow m) => m (OurHtmlIntegrities m)
+prefetchHtmlIntegrities = do
+  let resources =
+        [ HtmlIntegrity
+            { integrityName = "Bootstrap CSS",
+              integrityUrl = "https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css",
+              integrityHash = "sha384-9ndCyUaIbzAi2FUVXJi0CjmCapSmO7SnpJef0486qhLnuZ2cdeRhO02iuK6FUUVM",
+              localPath = "resources/bootstrap.min.css",
+              isTag = E21 (label @"link" ())
+            },
+          HtmlIntegrity
+            { integrityName = "Bootstrap JS",
+              integrityUrl = "https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js",
+              integrityHash = "sha384-geWF76RCwLtnZ8qwWowPQNguL3RmwHVBC9FhGdlKrxdiJJigb/j/68SIy3Te4Bkz",
+              localPath = "resources/bootstrap.bundle.min.js",
+              isTag = E22 (label @"script" ())
+            },
+          HtmlIntegrity
+            { integrityName = "htmx",
+              integrityUrl = "https://unpkg.com/htmx.org@1.9.2",
+              integrityHash = "sha384-L6OqL9pRWyyFU3+/bjdSri+iIphTN/bvYyM37tICVyOJkWZLpP2vGn6VUEXgzg6h",
+              localPath = "resources/htmx.js",
+              isTag = E22 (label @"script" ())
+            }
+        ]
+  resources
+    & traverse
+      ( \r ->
+          prefetchResourceIntegrity r <&> \(html, handler) ->
+            ( html,
+              [(r.localPath, handler)]
+            )
+      )
+    <&> fold
+    <&> \(html, handlers) -> OurHtmlIntegrities {..}
+
 artistPage ::
   ( HasField "artistRedactedId" dat Natural,
     HasField "uniqueRunId" dat Text,
@@ -483,9 +530,10 @@ artistPage ::
    MonadThrow m,
    MonadTransmission m
  ) =>
+  OurHtmlIntegrities m ->
  dat ->
  m Html
-artistPage dat = runTransaction $ do
+artistPage ourHtmlIntegrities dat = runTransaction $ do
  (fresh, settings) <-
    concurrentlyTraced
      ( getBestTorrentsData
@@ -503,8 +551,6 @@ artistPage dat = runTransaction $ do
        Just a -> [fmt|{a} - Artist Page - whatcd-resolver|]
  let mainContent =
        [hsx|
-          <p>Artist ID: {dat.artistRedactedId}</p>
-
          <div id="artist-torrents">
            {torrents}
          </div>
@@ -522,6 +568,7 @@
      |]
  pure $
    mainHtml'
+      ourHtmlIntegrities
      ( MainHtml
          { pageTitle,
            returnUrl,
@@ -677,42 +724,6 @@ singleQueryArgumentMay field inner
    )
    >>> Parse.maybe (Parse.fieldParser inner)
 
--- | Make sure we can parse the given Text into an URI.
-textToURI :: Parse Text URI
-textToURI =
-  Parse.fieldParser
-    ( FieldParser $ \text ->
-        text
-          & textToString
-          & Network.URI.parseURI
-          & annotate [fmt|Cannot parse this as a URL: "{text}"|]
-    )
-
--- | Make sure we can parse the given URI into a Request.
---
--- This tries to work around the horrible, horrible interface in Http.Client.
-uriToHttpClientRequest :: Parse URI Http.Request
-uriToHttpClientRequest =
-  Parse.mkParseNoContext
-    ( \(ctx, url) ->
-        (url & Http.requestFromURI)
-          & runCatch
-          & first (checkException @Http.HttpException)
-          & \case
-            Left (Right (Http.InvalidUrlException urlText reason)) ->
-              Left [fmt|Unable to set the url "{urlText}" as request URL, reason: {reason}, at {Parse.showContext ctx}|]
-            Left (Right exc@(Http.HttpExceptionRequest _ _)) ->
-              Left [fmt|Weird! Should not get a HttpExceptionRequest when parsing an URL (bad library design), was {exc & displayException}, at {Parse.showContext ctx}|]
-            Left (Left someExc) ->
-              Left [fmt|Weird! Should not get anyhting but a HttpException when parsing an URL (bad library design), was {someExc & displayException}, at {Parse.showContext ctx}|]
-            Right req -> pure req
-    )
-
-checkException :: (Exception b) => SomeException -> Either SomeException b
-checkException some = case fromException some of
-  Nothing -> Left some
-  Just e -> Right e
-
 data ArtistFilter = ArtistFilter
   { onlyArtist :: Maybe (Label "artistId" Text)
   }
@@ -1286,3 +1297,84 @@ counterConduit =
        threadDelay 300_000
        Conduit.yield [hsx|<p>{i}</p>|]
    )
+
+data HtmlIntegrity = HtmlIntegrity
+  { -- | The name of the resource, for debugging purposes
+    integrityName :: Text,
+    -- | The URL of the resource content
+    integrityUrl :: Text,
+    -- | The integrity hash of the resource
+    integrityHash :: Text,
+    -- | The local url path to fetch the cached resource from the frontend
+    localPath :: Text,
+    -- | is @<link>@ or @<script>@ tag?
+    isTag :: E2 "link" () "script" ()
+  }
+
+-- | Fetch a resource, calculate its integrity hash, and return a html @<link>@ snippet and a handler to return the resource.
+prefetchResourceIntegrity :: (MonadOtel m, MonadThrow m) => HtmlIntegrity -> m (Html, HandlerResponse m)
+prefetchResourceIntegrity dat = inSpan' [fmt|prefetching resource {dat.integrityName}|] $ \span -> do
+  let x =
+        dat.integrityUrl
+          & Parse.runParse "Failed to parse URI" (textToURI >>> uriToHttpClientRequest)
+          & unwrapErrorTree
+  Http.httpBS x
+    >>= ( \resp -> do
+            let !statusCode = resp & Http.responseStatus & (.statusCode)
+            let !mContentType =
+                  resp
+                    & Http.responseHeaders
+                    & List.lookup "content-type"
+                    <&> parseContentType
+                    <&> (\(!ct, _mimeAttributes) -> ct)
+            let !bodyStrict = resp & Http.responseBody
+            let !bodyLength = bodyStrict & ByteString.length
+            if
+              | statusCode == 200 ->
+                  case dat.isTag of
+                    E21 l -> do
+                      let _ = l.link
+                      pure
+                        ( -- hsx does not understand the `as` attr
+                          ( Html.link
+                              ! HtmlA.rel "preload"
+                              ! HtmlA.href (Html.textValue dat.localPath)
+                              ! Html.customAttribute "as" "style"
+                          )
+                            <> [hsx|
+                                 <link rel="stylesheet" href={dat.localPath} integrity={dat.integrityHash} crossorigin="anonymous">
+                               |],
+                          Plain $
+                            pure $
+                              Wai.responseLBS
+                                Http.ok200
+                                [ ("Content-Type", mContentType & fromMaybe "text/css; charset=UTF-8"),
+                                  ("Content-Length", buildBytes intDecimalB bodyLength)
+                                ]
+                                (toLazyBytes $ bodyStrict)
+                        )
+                    E22 l -> do
+                      let _ = l.script
+                      pure
+                        ( ( -- hsx does not understand the `as` attr
+                            Html.link
+                              ! HtmlA.rel "preload"
+                              ! HtmlA.href (Html.textValue dat.localPath)
+                              ! Html.customAttribute "as" "script"
+                          )
+                            <> [hsx|
+                                 <script src={dat.localPath} integrity={dat.integrityHash} crossorigin="anonymous"></script>
+                               |],
+                          Plain $
+                            pure $
+                              Wai.responseLBS
+                                Http.ok200
+                                [ ("Content-Type", mContentType & fromMaybe "text/javascript; charset=UTF-8"),
+                                  ("Content-Length", buildBytes intDecimalB bodyLength)
+                                ]
+                                (toLazyBytes $ bodyStrict)
+                        )
+              | code <- statusCode -> appThrow span $ AppExceptionPretty [[fmt|Server returned an non-200 error code, code {code}:|], pretty resp]
+        )