Compare commits


33 commits

SHA1 Message Date
bc8db13160 Add nix install instructions 2020-12-13 20:05:07 +01:00
de4b32c022 Move UnitJS-related documentation to README 2020-12-13 18:02:14 +01:00
44c4813d6f Remember that git doesn't version empty directories and fix the Structure tests 2020-12-09 18:33:44 +01:00
d4fb3eb778 Remove insincere -dynamic flag 2020-12-09 12:16:23 +01:00
49f819c54c Add version guards for Monoid imports 2020-12-09 11:06:47 +01:00
fc054ee575 Fixing test dependencies 2020-12-09 10:58:28 +01:00
cb2bd54596 Update version bounds, restructure file (remove useless library name) 2020-12-08 21:39:41 +01:00
d338e7b5c9 Merge branch 'main' into implement-static-pages 2020-12-06 17:28:30 +01:00
ded02d4c71 Update installation instructions for UnitJS to follow the use of SJW to generate JS 2020-10-28 10:49:02 +01:00
3fe0dd3c2e Delete remark insisting on the 'tags' directory being required since it's no longer the case since f7ec6d06c1 2020-10-28 10:22:27 +01:00
a0dccc770d Fill-in description for the --pages command line option 2020-10-26 21:32:29 +01:00
f9465d1aa5 Implement correct behaviour for default and custom articles and pages as outlined by the previous unit tests 2020-10-25 22:22:22 +01:00
804d3aa644 Add unit tests for new behaviour: articles or pages, auto or custom, fail if none is present 2020-10-25 22:22:13 +01:00
e74eadd6ba Stop dying in Path validation and return an Either instead so we can handle expected errors cleanly 2020-10-23 15:36:49 +02:00
4c6ee2d9bc Fix unit tests broken by latest changes 2020-09-30 22:02:25 +02:00
f7ec6d06c1 Erase the asymmetry between articles and pages; make both optional though check that at least one exists 2020-09-30 22:00:30 +02:00
371b9a8098 Fix missing '<li>' around pages by factorizing the 'li_' into navigationSection 2020-09-30 11:44:19 +02:00
e0161173ef Make both navigation sections optional and disable them when empty 2020-09-29 22:11:53 +02:00
5211379f00 Replace magic string for default Article description by a template variable and add another one for a default description for Pages 2020-06-21 22:16:14 +02:00
8382dc11f2 Reuse new mDLink function to simplify mDContent 2020-06-21 21:46:35 +02:00
47f5c70e21 Add links to static pages and handle dynamic navigation on the JS side 2020-06-20 22:59:39 +02:00
937a6858e0 Add a class type for Markdown and implement HTML rendering for Pages 2020-06-20 16:23:33 +02:00
19b3694d06 Directly store each Markdown content's path in the data structure to save having to re-build the same concatenation again and again for all the various outputs where the path matters; also handles elegantly the «issue» of pagesPath being a Maybe FilePath because pages are optional 2020-06-09 17:52:16 +02:00
ce3003178f Also add end-to-end test to verify the HTML generated for the cards by lucid 2020-06-09 17:45:54 +02:00
08990e8440 Add more article card tests and articlesList card tests 2020-06-09 15:21:29 +02:00
6002f7c4d6 Lay the basis for a very simple test suite 2020-06-08 22:45:16 +02:00
fc0ef57b53 Remove dead code 2020-06-08 12:36:36 +02:00
1a2ece9dd9 Finish adapting everything to the new Markdown data type 2020-06-08 10:34:30 +02:00
1df95d5091 Start adding a Markdown data type common to Articles and Pages, refactor here and there, will need some more renaming / refactoring in DOM module 2020-06-07 23:16:40 +02:00
baa1d0ce09 Simplify inelegant code 2020-05-30 12:57:52 +02:00
fc8e26a983 Merge branch 'main' into implement-static-pages 2020-05-30 12:35:28 +02:00
46daaa2b7a Draft a data structure for pages and make it part of the Blog datastructure 2019-08-27 16:49:47 +02:00
b080c32d4c Handle pages parameter: use the custom value provided with the usual checks, or default it to «pages/» iff the directory exists, otherwise pages are deactivated 2019-08-27 16:47:45 +02:00
7 changed files with 64 additions and 160 deletions
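Commits e74eadd6ba, f9465d1aa5 and b080c32d4c describe the behaviour implemented on this branch: path validation returns an Either instead of dying, and the pages directory falls back to «pages/» only when it exists, otherwise pages are deactivated. A minimal Haskell sketch of that logic follows; the names `checkDirectory` and `resolvePagesPath` are hypothetical and do not come from hablo's actual modules.

```haskell
module PathValidationSketch where

import Control.Monad.IO.Class (MonadIO, liftIO)
import System.Directory (doesDirectoryExist)

-- Hypothetical helper: validate that a user-supplied path is an existing
-- directory, reporting the problem as a Left instead of calling die.
checkDirectory :: MonadIO m => FilePath -> m (Either String FilePath)
checkDirectory path = do
  exists <- liftIO $ doesDirectoryExist path
  return $ if exists
    then Right path
    else Left (path ++ " is not an existing directory")

-- Hypothetical helper: a custom pages directory is validated as usual;
-- without one, "pages" is used iff it exists, otherwise pages are
-- deactivated (Nothing), mirroring the commit messages above.
resolvePagesPath :: MonadIO m => Maybe FilePath -> m (Either String (Maybe FilePath))
resolvePagesPath (Just custom) = fmap Just <$> checkDirectory custom
resolvePagesPath Nothing = do
  exists <- liftIO $ doesDirectoryExist "pages"
  return . Right $ if exists then Just "pages" else Nothing
```

Returning the error lets the caller report it in one place instead of aborting deep inside the validation code, which is presumably what the unit tests added in 804d3aa644 exercise.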

View file

@@ -1,16 +1,5 @@
# Revision history for hablo

## 1.1.0.1 -- 2021-01-20

* Ensure compilation on Nix as far as 18.09
* Fix missing metadata when landing on articles

## 1.1.0.0 -- 2020-12-13

* Implement static pages
* Implement RSS feeds
* Use SJW to pack JS into a single script and simplify deployment

## 1.0.3.0 -- 2019-12-21

* Fix OpenGraph cards displayed for links to hablo-generated pages posted on the Fediverse (should work elsewhere too but I don't care and have never tested)
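The «Implement static pages» entry corresponds to commits 46daaa2b7a, 1df95d5091 and 19b3694d06, which introduce a Markdown type shared by articles and pages and store each content's path directly in the data structure. The JS further down reads fields such as `blog.path.articlesPath`, `blog.path.pagesPath` and `bodyOffset`, so the shape is roughly the following — a sketch only, with field and type names assumed rather than taken from hablo's source.

```haskell
module BlogSketch where

import Data.Map (Map)

-- Assumed shape of the shared content type; the real record in hablo's
-- Markdown module may carry different fields.
data Markdown = Markdown
  { key        :: String          -- identifier used to build URLs
  , title      :: String
  , path       :: FilePath        -- stored once instead of rebuilt per output
  , bodyOffset :: Int             -- where the body starts, also read by the JS
  , metadata   :: Map String String
  }

data Paths = Paths
  { articlesPath :: FilePath
  , pagesPath    :: Maybe FilePath -- Nothing when pages are deactivated
  }

data Blog = Blog
  { articles :: Map String Markdown
  , pages    :: Map String Markdown
  , paths    :: Paths
  }
```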

View file

@@ -1,27 +0,0 @@
(use-modules (gnu packages haskell-xyz)
             (gnu packages haskell-web)
             (guix build-system haskell)
             (guix download)
             (guix gexp)
             (guix git-download)
             (guix licenses)
             (guix packages))

(package
  (name "ghc-template")
  (version "0.2.0.10")
  (source (origin
            (method url-fetch)
            (uri (hackage-uri "template" version))
            (sha256
             (base32
              "10mcnhi2rdflmv79z0359nn5sylifvk9ih38xnjqqby6n4hs7mcg"))))
  (build-system haskell-build-system)
  (properties '((upstream-name . "template")))
  (home-page "http://hackage.haskell.org/package/template")
  (synopsis "Simple string substitution")
  (description
   "Simple string substitution library that supports \\\"$\\\"-based substitution.
Meant to be used when Text.Printf or string concatenation would lead to code
that is hard to read but when a full blown templating system is overkill.")
  (license bsd-3))

View file

@@ -1,44 +0,0 @@
(use-modules (gnu packages haskell-xyz)
             (gnu packages haskell-web)
             (guix build-system haskell)
             (guix download)
             (guix gexp)
             (guix git-download)
             (guix licenses)
             (guix packages)
             (loom packages sjw))

(let
  ((%source-dir (dirname (current-filename)))
   (ghc-template (load "ghc-template.scm")))
  (package
    (name "hablo")
    (version "devel")
    (source
     (local-file %source-dir
                 #:recursive? #t
                 #:select? (git-predicate %source-dir)))
    (build-system haskell-build-system)
    (inputs
     (list ghc-aeson
           ghc-attoparsec
           ghc-lucid
           ghc-optparse-applicative
           ghc-parsec
           ghc-random
           ghc-sjw
           ghc-template
           ghc-xdg-basedir))
    (native-search-paths
     (list
      (search-path-specification (variable "SJW_PATH")
                                 (files '("lib/SJW")))))
    (home-page "https://git.marvid.fr/Tissevert/SJW")
    (synopsis "The Simple Javascript Wrench")
    (description
     "SJW is a very simple tool to pack several JS modules into a single
script. It doesn't really do proper compilation work (yet) except
resolving the modules dependencies and detecting import loops but it
provides each module with an independent execution context in the
resulting script.")
    (license gpl3+)))

View file

@@ -3,16 +3,16 @@ cabal-version: >= 1.10
-- For further documentation, see http://haskell.org/cabal/users-guide/
name: hablo
version: 1.1.0.1
version: 1.0.3.0
synopsis: A minimalist static blog generator
description:
Hablo is a fediverse-oriented static blog generator for articles written
in Markdown. It tries to generate as little HTML as needed and uses
Javascript to implement dynamic features in the browser.
Those features include the handling of comments and a cached navigation to
minimize the number of queries to the server. Hablo also generates RSS feeds
and Open Graph cards for prettier shares on social networks.
Those features include the handling of comments and a cached navigation
to minimize the queries to the server. Hablo also generate cards for all
pages, including articles for prettier shares on social-networks.
homepage: https://git.marvid.fr/Tissevert/hablo
-- bug-reports:
license: BSD3
@@ -50,15 +50,15 @@ library
, Pretty
, RSS
-- other-extensions:
build-depends: aeson >= 1.2.0 && < 2.1
, base >= 4.9.1 && < 4.17
build-depends: aeson >= 1.4.0 && < 1.6
, base >= 4.9.1 && < 4.15
, bytestring >= 0.10.8 && < 0.12
, containers >= 0.5.11 && < 0.7
, directory >= 1.3.1 && < 1.4
, filepath >= 1.4.2 && < 1.5
, lucid >= 2.8.0 && < 2.12
, lucid >= 2.9.11 && < 2.10
, mtl >= 2.2.2 && < 2.3
, optparse-applicative >= 0.14.0 && < 0.18
, optparse-applicative >= 0.14.3 && < 0.17
, parsec >= 3.1.13 && < 3.2
, template >= 0.2.0 && < 0.3
, text >= 1.2.3 && < 1.3
@@ -73,7 +73,7 @@ executable hablo
main-is: src/Main.hs
other-modules: Paths_hablo
-- other-extensions:
build-depends: base
build-depends: base >= 4.9.1 && < 4.15
, hablo
, mtl >= 2.2.2 && < 2.3
ghc-options: -Wall
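Together with the base bound changes above, commit 49f819c54c mentions version guards for Monoid imports. The usual pattern is a CPP guard keyed on the base version; the following is only an illustration of that pattern, with a module name and guarded import that are assumptions rather than lines from this diff.

```haskell
{-# LANGUAGE CPP #-}
module VersionGuardSketch where

#if !MIN_VERSION_base(4,11,0)
-- (<>) was only re-exported from the Prelude starting with base 4.11,
-- so older compilers need the explicit Monoid import.
import Data.Monoid ((<>))
#endif

banner :: String
banner = "hablo" <> " says hello"
```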

View file

@@ -7,37 +7,15 @@ import {defined} from UnitJS.Fun;
return {
articlesList: articlesList,
getResource: getResource,
render: render,
replaceMarkdown: replaceMarkdown
};
function getResource(url) {
var i = url.lastIndexOf('/');
var path = url.slice(1, i);
if(path == blog.path.articlesPath) {
return {type: 'article', key: url.slice(i+1).replace(/\.html/, '')};
} else if(path == blog.path.pagesPath) {
return {type: 'page', key: url.slice(i+1).replace(/\.html/, '')};
} else if(path == '' || blog.tags[path] != undefined) {
var tag = path.length > 0 ? path : undefined;
return {type: 'list', tag: tag, all: url.slice(i+1) == 'all.html'};
} else {
return {type: 'unknown'};
}
}
function resourceUrl(resource, limit) {
var directory = blog.path[resource.type + 'sPath'];
var extension = limit != undefined ? '.html' : '.md';
return ["", directory, resource.key + extension].join('/');
}
function replaceMarkdown() {
var div = document.getElementById('contents');
if(div.children[0] && div.children[0].tagName.toLowerCase() == 'article') {
var resourceType = getResource(window.location.pathname).type;
convertContent(resourceType, div.children[0], true);
var contentType = window.location.pathname.slice(1).replace(/\/.*/, '');
convertContent(contentType, div.children[0], true);
} else {
var articles = div.getElementsByClassName('articles')[0];
if(articles != undefined) {
@@ -50,15 +28,15 @@ function replaceMarkdown() {
}
}
function convertContent(resourceType, article, comments) {
function convertContent(contentType, article, comments) {
var header = article.getElementsByTagName('header')[0];
if(resourceType == 'article') {
if(contentType == 'article') {
header.appendChild(Metadata.get(article.id));
}
var text = article.getElementsByTagName('pre')[0];
if(text != undefined) {
article.replaceChild(getDiv(text.innerText), text);
if(resourceType == 'article' && comments) {
if(contentType == 'article' && comments) {
Metadata.getComments(article.id)
.forEach(article.appendChild.bind(article));
}
@@ -81,53 +59,59 @@ function getDiv(markdown) {
return d;
}
function commentsSection(resource, limit) {
if(resource.type != 'article' || limit != undefined) {
function contentUrl(contentType, key, limit) {
var directory = blog.path[contentType + 'sPath'];
var extension = limit != undefined ? '.html' : '.md';
return ["", directory, key + extension].join('/');
}
function commentsSection(contentType, key, limit) {
if(contentType != 'article' || limit != undefined) {
return [];
} else {
return Metadata.getComments(resource.key);
return Metadata.getComments(key);
}
}
function render(resource, markdown, limit) {
var url = resourceUrl(resource, limit);
var content = blog[resource.type + 's'][resource.key];
var lines = markdown.split(/\n/).slice(content.bodyOffset);
function render(contentType, key, markdown, limit) {
var url = contentUrl(contentType, key, limit);
var resource = blog[contentType + 's'][key];
var lines = markdown.split(/\n/).slice(resource.bodyOffset);
var div = getDiv(lines.slice(0, limit).join('\n'));
return Dom.make('article', {}, [
Dom.make('header', {}, [
Dom.make('h1', {}, [
Dom.make('a', {href: url, innerText: content.title})
])].concat(resource.type == 'article' ? Metadata.get(resource.key) : [])
Dom.make('a', {href: url, innerText: resource.title})
])].concat(contentType == 'article' ? Metadata.get(key) : [])
),
div
].concat(commentsSection(resource, limit)));
].concat(commentsSection(contentType, key, limit)));
}
function pageTitle(resource) {
return Template.render(resource.all ? 'allPage' : 'latestPage', {tag: resource.tag});
function pageTitle(tag, all) {
return Template.render(all ? 'allPage' : 'latestPage', {tag: tag});
}
function otherUrl(resource) {
var path = [resource.tag, resource.all ? '' : 'all.html'];
function otherUrl(tag, all) {
var path = [tag, all ? '' : 'all.html'];
return '/' + path.filter(defined).join('/');
}
function articlesList(resource) {
function articlesList(tag, all) {
return function(articlePreviews) {
return [
Dom.make('h2', {innerText: pageTitle(resource)}),
Dom.make('ul', {}, articlesListLinks(resource)),
Dom.make('h2', {innerText: pageTitle(tag, all)}),
Dom.make('ul', {}, articlesListLinks(tag, all)),
Dom.make('div', {class: 'articles'}, articlePreviews.filter(defined))
];
};
}
function articlesListLinks(resource) {
function articlesListLinks(tag, all) {
var links = [
Dom.make('a', {
innerText: resource.all ? blog.wording.latestLink : blog.wording.allLink,
href: otherUrl(resource),
innerText: all ? blog.wording.latestLink : blog.wording.allLink,
href: otherUrl(tag, all),
class: 'other'
})
];
@@ -136,7 +120,7 @@ function articlesListLinks(resource) {
innerText: blog.wording.rssLink,
href: 'rss.xml',
class: 'RSS',
title: Template.render('rssTitle', {tag: resource.tag})
title: Template.render('rssTitle', {tag: tag})
}));
}
return links.map(function(e) {return Dom.make('li', {}, [e]);});

View file

@@ -1,4 +1,4 @@
import {articlesList, getResource, render} from DomRenderer;
import {articlesList, render} from DomRenderer;
import blog from Hablo.Config;
import * as Async from UnitJS.Async;
import * as Cache from UnitJS.Cache;
@@ -60,20 +60,23 @@ function visit(url) {
}
function navigate(url) {
var resource = getResource(url);
switch(resource.type) {
case 'list': show(getArticlesList(resource)); break;
case 'article':
case 'page': show(getCached(resource)); break;
default: console.log("No idea how to navigate to " + url);
var path = decodeURI(url).split("/").slice(1);
if(blog.tags[path[0]] != undefined) {
show(getArticlesList(path[0], path[1] == "all.html"));
} else if(path[0] == blog.path.articlesPath) {
show(getResource('article', path[1].replace(/\.html$/, '')));
} else if(path[0] == blog.path.pagesPath) {
show(getResource('page', path[1].replace(/\.html$/, '')));
} else {
show(getArticlesList(null, path[0] == "all.html"));
}
}
function getCached(resource) {
function getResource(contentType, key) {
return Async.bind(
cache[resource.type].get(resource.key),
cache[contentType].get(key),
Async.map(
function(contents) {return [render(resource, contents)];}
function(contents) {return [render(contentType, key, contents)];}
)
);
}
@@ -83,23 +86,23 @@ function preview(key) {
cache.article.get(key),
function(contents) {
return Async.wrap(
render({type: 'article', key: key}, contents, blog.skin.previewLinesCount)
render('article', key, contents, blog.skin.previewLinesCount)
);
}
);
}
function articleIds(resource) {
var ids = resource.tag != undefined ? blog.tags[resource.tag] : Object.keys(blog.articles);
function articleIds(tag, all) {
var ids = tag != undefined ? blog.tags[tag] : Object.keys(blog.articles);
var reverseDate = function (id) {return -blog.articles[id].metadata.date;};
ids.sort(Fun.compare(reverseDate));
return ids.slice(0, resource.all ? undefined : blog.skin.previewArticlesCount);
return ids.slice(0, all ? undefined : blog.skin.previewArticlesCount);
}
function getArticlesList(resource) {
function getArticlesList(tag, all) {
return Async.bind(
Async.parallel.apply(null, articleIds(resource).map(preview)),
Async.map(articlesList(resource))
Async.parallel.apply(null, articleIds(tag, all).map(preview)),
Async.map(articlesList(tag, all))
);
}

View file

@@ -16,7 +16,7 @@ import Data.Text.Encoding (encodeUtf8)
import JSON (exportBlog)
import Paths_hablo (getDataDir)
import Pretty ((.$))
import SJW (compile, source)
import SJW (compile, source, sourceCode)
import System.Directory (createDirectoryIfMissing)
import System.Exit (die)
import System.FilePath ((</>))
@@ -45,11 +45,10 @@ generateConfig destinationDir = do
generateMain :: FilePath -> IO ()
generateMain destinationDir = do
habloSources <- (</> "js") <$> getDataDir
compile (source [destinationDir, "unitJS", habloSources])
>>= either abort (output . fst)
result <- compile $ source [destinationDir, "unitJS", habloSources]
maybe (die "JS compilation failed\n") output =<< sourceCode result
where
output = writeFile (destinationDir </> "hablo.js") . fromStrict . encodeUtf8
abort = die . (<> "JS compilation failed\n")
generate :: ReaderT Blog IO ()
generate = do
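The last full hunk swaps between two ways for generateMain to consume SJW's output: one pattern-matches an Either (`either abort (output . fst)`), the other asks `sourceCode` for the packed script and treats a Nothing result as a failed compilation. SJW's actual signatures are not visible in this diff, so the annotated sketch below is an inference from the two call shapes shown, not a statement of SJW's API; imports are as in the surrounding file.

```haskell
generateMain :: FilePath -> IO ()
generateMain destinationDir = do
  habloSources <- (</> "js") <$> getDataDir
  -- compile packs the generated config in destinationDir, unitJS and
  -- hablo's own JS sources; sourceCode then (apparently) yields the
  -- resulting script as a Maybe, with Nothing signalling failure.
  result <- compile $ source [destinationDir, "unitJS", habloSources]
  maybe (die "JS compilation failed\n") output =<< sourceCode result
  where
    output = writeFile (destinationDir </> "hablo.js") . fromStrict . encodeUtf8
```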