Draft
Basic unit renamed from Note to Text. New modular text-parser, internal to Logarion, for generic notation parsing. The default input format is now a much plainer text. Eliminated Meta module and generally lib/ modules. New Store interface, with additional information from Store. For example the converter can now check filesystem dates. Changed to filesystem hardlinks for tracking publications & indexing, instead of categories. New commands `publish [-i]` and `deindex [-u]`. Categories are ignored now. Logarion created texts have part of the UUID instead of a counter in their filename. New -i, --interactive flag for interactive creation & publication. Logarion's index re-written in MessagePack format. Removed `indices` command. They are generated during `convert`.
This commit is contained in:
parent
3d92789cdb
commit
6c27c7c6fb
@ -1,83 +0,0 @@
|
|||||||
# Contributing to Logarion
|
|
||||||
|
|
||||||
Logarion's primary aim is to create a note system which doesn't waste resources.
|
|
||||||
The secondary aim is to provide an exemplary OCaml project to demonstrate and promote the language (as it happens with many other "Blogging" systems written in other languages).
|
|
||||||
|
|
||||||
As part of the secondary aim, the source code needs to be written in a way that encourages the language's adoption and participation in the OCaml developer community.
|
|
||||||
|
|
||||||
## Starting with OCaml
|
|
||||||
|
|
||||||
_"OCaml is an industrial strength programming language supporting functional, imperative and object-oriented styles"_ -- https://ocaml.org/
|
|
||||||
|
|
||||||
OCaml simply rocks.
|
|
||||||
|
|
||||||
If you are unfamiliar with OCaml, consider starting with these resources:
|
|
||||||
|
|
||||||
- Install OCaml: https://ocaml.org/docs/install.html
|
|
||||||
- Read about OCaml: https://ocaml.org/learn/books.html
|
|
||||||
- Ask questions & join the community:
|
|
||||||
- Mailing lists: https://ocaml.org/community/
|
|
||||||
- IRC: irc://irc.freenode.net/#ocaml (Web client: https://riot.im/app/#/room/#freenode_#ocaml:matrix.org )
|
|
||||||
- Reddit: http://www.reddit.com/r/ocaml/
|
|
||||||
- Discourse: https://discuss.ocaml.org/
|
|
||||||
- .. other: https://ocaml.org/community/
|
|
||||||
|
|
||||||
## Design principles
|
|
||||||
|
|
||||||
[Unix philosophy](https://en.wikipedia.org/wiki/Unix_philosophy#Do_One_Thing_and_Do_It_Well)
|
|
||||||
|
|
||||||
1. System simplicity & interoperability.
|
|
||||||
2. Output quality.
|
|
||||||
3. Distributed interactivity, like sharing with friends.
|
|
||||||
|
|
||||||
## Developing & contributing
|
|
||||||
|
|
||||||
### Clone
|
|
||||||
|
|
||||||
```
|
|
||||||
git clone https://cgit.orbitalfox.eu/logarion/
|
|
||||||
```
|
|
||||||
|
|
||||||
Install dependencies:
|
|
||||||
|
|
||||||
```
|
|
||||||
cd logarion
|
|
||||||
opam pin add logarion . -n
|
|
||||||
opam depext --install logarion
|
|
||||||
```
|
|
||||||
|
|
||||||
Build the project:
|
|
||||||
|
|
||||||
```
|
|
||||||
dune build src/logarion.exe
|
|
||||||
```
|
|
||||||
|
|
||||||
This will create `_build/default/src/logarion.exe` (the command line interface).
|
|
||||||
|
|
||||||
### Project structure
|
|
||||||
|
|
||||||
There are three layers:
|
|
||||||
|
|
||||||
- notes
|
|
||||||
- archive
|
|
||||||
- interfaces & intermediate formats
|
|
||||||
|
|
||||||
### Core
|
|
||||||
|
|
||||||
- `logarion.ml`: repository related functions (listing, adding/removing, etc). ([src/logarion.ml](https://gitlab.com/orbifx/logarion/blob/master/src/logarion.ml))
|
|
||||||
- `note.ml`: parsing from and to note files. ([src/note.ml](https://gitlab.com/orbifx/logarion/blob/master/src/note.ml))
|
|
||||||
|
|
||||||
### Intermediate formats
|
|
||||||
|
|
||||||
Converters:
|
|
||||||
|
|
||||||
- `html.ml`: archive to HTML pages.
|
|
||||||
- `atom.ml`: archive to Atom feeds.
|
|
||||||
|
|
||||||
### Servers & utilities
|
|
||||||
|
|
||||||
Logarion's archives can be served over various protocols using servers.
|
|
||||||
Find related software here:
|
|
||||||
|
|
||||||
- https://logarion.orbitalfox.eu/
|
|
||||||
- https://cgit.orbitalfox.eu/
|
|
13
Makefile
13
Makefile
@ -1,18 +1,13 @@
|
|||||||
all: cli
|
all: cli
|
||||||
|
|
||||||
cli:
|
cli:
|
||||||
dune build src/logarion_cli.exe
|
dune build app/logarion_cli.exe
|
||||||
|
|
||||||
clean:
|
clean:
|
||||||
dune clean
|
dune clean
|
||||||
|
|
||||||
theme-dark:
|
|
||||||
sassc share/sass/main-dark.sass > share/static/main.css
|
|
||||||
|
|
||||||
theme-light:
|
|
||||||
sassc share/sass/main-light.sass > share/static/main.css
|
|
||||||
|
|
||||||
tgz:
|
tgz:
|
||||||
cp _build/default/src/logarion_cli.exe logarion
|
cp _build/default/app/logarion_cli.exe logarion
|
||||||
strip logarion
|
strip logarion
|
||||||
tar czvf "logarion-$(shell ./logarion --version)-$(shell uname -s)-$(shell uname -m)-$(shell git rev-parse --short HEAD).tar.gz" share logarion
|
tar czvf "logarion-$(shell date -r _build/default/src/logarion_cli.exe "+%y-%m-%d")-$(shell uname -s)-$(shell uname -m)-$(shell git rev-parse --short HEAD).tar.gz" share logarion
|
||||||
|
rm logarion
|
||||||
|
29
README
Normal file
29
README
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
Logarion is a free and open-source text archive system. A blog-wiki hybrid.
|
||||||
|
|
||||||
|
Download: <https://logarion.orbitalfox.eu/downloads/>
|
||||||
|
EUPL licence: <https://joinup.ec.europa.eu/software/page/eupl>
|
||||||
|
|
||||||
|
Start
|
||||||
|
|
||||||
|
Create a folder and run `logarion init` from within it to produce `.logarion/config` configuration file.
|
||||||
|
Run `logarion --help` for more options.
|
||||||
|
|
||||||
|
Community & support
|
||||||
|
|
||||||
|
* Website: <https://logarion.orbitalfox.eu>
|
||||||
|
* Report an issue: <mailto:logarion@lists.orbitalfox.eu?subject=Issue:>
|
||||||
|
* Discussion: <https://lists.orbitalfox.eu/listinfo/logarion>
|
||||||
|
or join via <mailto:logarion-join@lists.orbitalfox.eu>
|
||||||
|
|
||||||
|
|
||||||
|
Install stable from source
|
||||||
|
|
||||||
|
opam repo add orbifx https://opam.orbitalfox.eu
|
||||||
|
opam install logarion
|
||||||
|
|
||||||
|
|
||||||
|
Install development version
|
||||||
|
|
||||||
|
opam pin add text git://orbitalfox.eu/text-parse-ml
|
||||||
|
opam pin add logarion git://orbitalfox.eu/logarion
|
||||||
|
opam install logarion
|
50
README.md
50
README.md
@ -1,50 +0,0 @@
|
|||||||
# Logarion
|
|
||||||
|
|
||||||
Logarion is a [free and open-source][Licence] personal note taking, journaling and publication system; a blog-wiki hybrid.
|
|
||||||
|
|
||||||
## Features
|
|
||||||
|
|
||||||
- Plain file system store, where each note is a file.
|
|
||||||
- Command line & web interfaces.
|
|
||||||
- Atom feeds
|
|
||||||
- Static (conversion to files for uploading) & dynamic serving (HTTP, Gopher, ..).
|
|
||||||
|
|
||||||
|
|
||||||
## Community & support
|
|
||||||
|
|
||||||
- Website: <https://logarion.orbitalfox.eu>
|
|
||||||
- Mailing list: <https://lists.orbitalfox.eu/listinfo/logarion>
|
|
||||||
- Matrix (chat): `#logarion:matrix.org`. Via Riot web-app: <https://riot.im/app/#/room/#logarion:matrix.org>
|
|
||||||
- For issues preferably email [mailto:logarion@lists.orbitalfox.eu](mailto:logarion@lists.orbitalfox.eu?subject=[Issue] summary-here).
|
|
||||||
Alternatively <https://gitlab.com/orbifx/logarion/issues>
|
|
||||||
|
|
||||||
|
|
||||||
## Install
|
|
||||||
|
|
||||||
The following instructions are the quickest way to install Logarion (in the absence of binary releases).
|
|
||||||
|
|
||||||
```
|
|
||||||
opam pin add logarion git://orbitalfox.eu/logarion
|
|
||||||
opam install logarion
|
|
||||||
```
|
|
||||||
|
|
||||||
Once installed you will have `logarion` for command line control of the repository.
|
|
||||||
|
|
||||||
## Archives
|
|
||||||
|
|
||||||
### Command line
|
|
||||||
|
|
||||||
Create a folder and run `logarion init` from within it to produce `.logarion/config.toml`, which is the core configuration file.
|
|
||||||
The archive options are under the `[archive]` section.
|
|
||||||
|
|
||||||
Run `logarion --help` for more options.
|
|
||||||
|
|
||||||
|
|
||||||
#### Theme
|
|
||||||
|
|
||||||
Optionally install a [Sass](http://sass-lang.com/) compiler, like [sassc](http://sass-lang.com/libsass#sassc), and then run `make theme-dark` or `make theme-light`, to generate a stylesheet as `share/static/main.css`, using the respective Sass files in `share/sass/`.
|
|
||||||
|
|
||||||
## See also
|
|
||||||
|
|
||||||
- [CONTRIBUTING.md](CONTRIBUTING.md)
|
|
||||||
- [Licence](https://joinup.ec.europa.eu/software/page/eupl)
|
|
37
app/atom.ml
Normal file
37
app/atom.ml
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
let esc = Converter.Html.esc
|
||||||
|
|
||||||
|
let element tag content = "<" ^ tag ^ ">" ^ content ^ "</" ^ tag ^ ">"
|
||||||
|
|
||||||
|
let opt_element tag_name content =
|
||||||
|
if content <> ""
|
||||||
|
then element tag_name content
|
||||||
|
else ""
|
||||||
|
|
||||||
|
module P = Parsers.Plain_text.Make (Converter.Html)
|
||||||
|
|
||||||
|
let entry base_url text =
|
||||||
|
let open Logarion in
|
||||||
|
let u = Text.alias text in
|
||||||
|
let authors elt a =
|
||||||
|
a ^ "<author>"
|
||||||
|
^ (opt_element "name" @@ esc elt.Person.name)
|
||||||
|
^ (List.fold_left (fun acc addr -> acc ^ element "uri" (Uri.to_string addr)) "" elt.Person.addresses)
|
||||||
|
^ "</author>"
|
||||||
|
in
|
||||||
|
"<entry><title>" ^ esc text.title ^ "</title><id>urn:uuid:" ^ Id.to_string text.uuid ^ "</id><link rel=\"alternate\" href=\""
|
||||||
|
^ base_url ^ "/" ^ u ^ ".html\" /><updated>"
|
||||||
|
^ Date.(text.date |> listing |> rfc_string) ^ "</updated>"
|
||||||
|
^ Person.Set.fold authors text.authors ""
|
||||||
|
^ (opt_element "summary" @@ esc @@ Text.str "abstract" text)
|
||||||
|
^ String_set.fold (fun elt a -> a ^ "<category term=\"" ^ elt ^ "\"/>") (Text.set "topics" text) ""
|
||||||
|
^ "<content type=\"xhtml\"><div xmlns=\"http://www.w3.org/1999/xhtml\">"
|
||||||
|
^ P.of_string text.body ""
|
||||||
|
^ "</div></content></entry>"
|
||||||
|
|
||||||
|
let feed title archive_id base_url texts =
|
||||||
|
{|<?xml version="1.0" encoding="utf-8"?><feed xmlns="http://www.w3.org/2005/Atom" xml:base="|} ^ base_url ^ {|"><title>|}
|
||||||
|
^ title ^ {|</title><link rel="alternate" type="text/html" href="|}
|
||||||
|
^ base_url ^ {|/"/><link rel="self" type="application/atom+xml" href="|}
|
||||||
|
^ base_url ^ {|/feed.atom" /><id>urn:uuid:|} ^ Logarion.Id.to_string archive_id ^ "</id><updated>"
|
||||||
|
^ Ptime.to_rfc3339 (Ptime_clock.now ()) ^ "</updated>"
|
||||||
|
^ List.fold_left (fun acc t -> acc ^ entry base_url t) "" texts ^ "</feed>"
|
80
app/convert.ml
Normal file
80
app/convert.ml
Normal file
@ -0,0 +1,80 @@
|
|||||||
|
let version = "%%VERSION%%"
|
||||||
|
|
||||||
|
open Logarion
|
||||||
|
module A = Archive.Make (Logarion.File_store)
|
||||||
|
|
||||||
|
let file_when_changed source dest fn title text =
|
||||||
|
if (try Unix.((stat dest).st_mtime < (stat source).st_mtime) with _-> true) then
|
||||||
|
File_store.file dest (fn title text)
|
||||||
|
|
||||||
|
let writer dir name acc (text,store_item) = (* todo: single_parser -> [files] *)
|
||||||
|
let open Logarion in
|
||||||
|
match Text.(str "Content-Type" text) with
|
||||||
|
| "" | "text/plain" ->
|
||||||
|
let filename = dir ^ "/" ^ Text.alias text in
|
||||||
|
let idfilename = dir ^ "/" ^ Id.to_string text.Text.uuid ^ ".txt" in
|
||||||
|
file_when_changed store_item idfilename (fun _title -> Text.to_string) text.title text;
|
||||||
|
file_when_changed store_item (filename ^ ".html") (Html.page "") name text;
|
||||||
|
file_when_changed store_item (filename ^ ".gmi") Gemini.page name text;
|
||||||
|
(acc + 1)
|
||||||
|
| x -> prerr_endline ("No converter for Content-Type: " ^ x ^ ", for " ^ text.Text.title); acc
|
||||||
|
|
||||||
|
let index_pack archive indexed_texts =
|
||||||
|
let persons ps = List.rev @@ Person.Set.fold (fun x a -> Msgpck.String (Person.to_string x) :: a) ps [] in
|
||||||
|
let to_pack a t =
|
||||||
|
let open Text in
|
||||||
|
let of_set field t = List.rev @@ String_set.fold (fun x a -> Msgpck.String x :: a) (set field t) [] in
|
||||||
|
Msgpck.(List [Bytes (Id.to_bytes t.uuid); String t.title; List (persons t.authors); List (of_set "topics" t); List (of_set "keywords" t)])
|
||||||
|
:: a
|
||||||
|
in
|
||||||
|
let header_pack = Msgpck.List List.(fold_left to_pack [] indexed_texts) in
|
||||||
|
let columns = Msgpck.(List [String "id"; String "title"; String "authors"; String "topics"; String "keywords"]) in
|
||||||
|
let archive = Msgpck.(List [Int 0; String archive.A.name; List (persons archive.A.archivists)]) in
|
||||||
|
Bytes.to_string @@ Msgpck.Bytes.to_string (List [archive; columns; header_pack])
|
||||||
|
|
||||||
|
let index_writer dir archive topic_roots topic_map indexed_texts =
|
||||||
|
let name = archive.A.name in
|
||||||
|
let file path = File_store.file (dir ^ path) in
|
||||||
|
file "/index.pck" (index_pack archive indexed_texts);
|
||||||
|
file "/index.html" (Html.topic_main_index name topic_roots indexed_texts);
|
||||||
|
List.iter
|
||||||
|
(fun topic -> file ("/index." ^ topic ^ ".htm") (Html.topic_sub_index name topic_map topic indexed_texts))
|
||||||
|
topic_roots;
|
||||||
|
file "/index.date.html" (Html.date_index name indexed_texts);
|
||||||
|
let base_url = try Store.KV.find "HTTP-URL" archive.File_store.kv
|
||||||
|
with Not_found -> prerr_endline "Missing `HTTP-URL:` in config"; ""
|
||||||
|
in
|
||||||
|
file "/feed.atom" (Atom.feed archive.A.name archive.A.id base_url indexed_texts);
|
||||||
|
file "/index.date.gmi" (Gemini.date_index name indexed_texts)
|
||||||
|
|
||||||
|
let convert_all dir archive =
|
||||||
|
let name = archive.A.name in
|
||||||
|
let published_count = A.(fold ~order:newest (writer dir name) 0 (published archive)) in
|
||||||
|
let topic_map, indexed_metas =
|
||||||
|
let topic_map = Logarion.Topic_set.Map.empty in
|
||||||
|
let fn (ts,ls) (elt,_) = Logarion.(Topic_set.to_map ts (Text.set "topics" elt)), elt::ls in
|
||||||
|
A.(fold ~order:newest fn (topic_map,[]) (indexed archive)) in
|
||||||
|
let topic_roots = Logarion.Topic_set.roots topic_map in
|
||||||
|
index_writer dir archive topic_roots topic_map indexed_metas;
|
||||||
|
print_endline @@ "Converted: " ^ string_of_int (published_count) ^ "\nIndexed: " ^ string_of_int (List.length indexed_metas);
|
||||||
|
Ok ()
|
||||||
|
|
||||||
|
let convert_dir cmd_dir =
|
||||||
|
let (>>=) = Result.bind in
|
||||||
|
let config_dir archive = try Ok (Store.KV.find "Export-Dir" archive.File_store.kv) with Not_found -> Error "unspecified export dir" in
|
||||||
|
let init dir =
|
||||||
|
Result.map_error (function `Msg m -> m) Logarion.File_store.Directory.(directory dir |> print ~descr:"export" dir) in
|
||||||
|
(A.of_path "."
|
||||||
|
>>= fun archive -> (match cmd_dir with "" -> config_dir archive | x -> Ok x)
|
||||||
|
>>= fun dir -> init dir
|
||||||
|
>>= fun _ -> convert_all dir archive)
|
||||||
|
|> function Ok () -> () | Error x -> prerr_endline x
|
||||||
|
|
||||||
|
open Cmdliner
|
||||||
|
|
||||||
|
let term =
|
||||||
|
let directory = Arg.(value & pos 0 string "" & info [] ~docv:"target directory" ~doc:"Directory to convert into") in
|
||||||
|
Term.(const convert_dir $ directory),
|
||||||
|
Term.info
|
||||||
|
"convert" ~doc:"convert archive"
|
||||||
|
~man:[ `S "DESCRIPTION"; `P "Convert Logarion archive" ]
|
5
app/dune
Normal file
5
app/dune
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
(executable
|
||||||
|
(name logarion_cli)
|
||||||
|
(public_name logarion)
|
||||||
|
(modules logarion_cli convert html atom gemini)
|
||||||
|
(libraries logarion re.str cmdliner bos ptime ptime.clock.os text_parse.converter text_parse.parsers msgpck))
|
14
app/gemini.ml
Normal file
14
app/gemini.ml
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
let page _archive_title text =
|
||||||
|
let open Logarion.Text in
|
||||||
|
"# " ^ text.title
|
||||||
|
^ "\nAuthors: " ^ Logarion.Person.Set.to_string text.authors
|
||||||
|
^ "\nDated: " ^ Logarion.Date.(pretty_date @@ listing text.date)
|
||||||
|
^ let module T = Parsers.Plain_text.Make (Converter.Gemini) in
|
||||||
|
"\n" ^ T.of_string text.body ""
|
||||||
|
|
||||||
|
let date_index title meta_list =
|
||||||
|
List.fold_left
|
||||||
|
(fun a m ->
|
||||||
|
a ^ "=> " ^ Logarion.Text.alias m ^ ".gmi " ^
|
||||||
|
Logarion.(Date.(pretty_date (listing m.date)) ^ " " ^ m.title) ^ "\n")
|
||||||
|
("# " ^ title ^ "\n") meta_list
|
117
app/html.ml
Normal file
117
app/html.ml
Normal file
@ -0,0 +1,117 @@
|
|||||||
|
let wrap ?(keywords="") (title:string) (subtitle:string) body =
|
||||||
|
{|<!DOCTYPE html><html xmlns="http://www.w3.org/1999/xhtml"><head><title>|}
|
||||||
|
^ subtitle ^ " | " ^ title
|
||||||
|
^ {|</title><link rel="stylesheet" href="main.css" media/><link rel="alternate" href="feed.atom" type="application/atom+xml"/><meta charset="utf-8"/> <meta name="keywords" content="|}
|
||||||
|
^ keywords ^ {|"></head><body><header><h1><a href=".">|} ^ title
|
||||||
|
^ "</a></h1></header>" ^ body
|
||||||
|
^ {|<footer><a href="feed.atom" id="feed">Subscribe to feed 📰</a></footer></body></html>|}
|
||||||
|
|
||||||
|
let topic_link root topic =
|
||||||
|
{|<a href="index.|} ^ root ^ {|.htm#|} ^ topic ^ {|">|} ^ String.capitalize_ascii topic ^ "</a>"
|
||||||
|
|
||||||
|
let page _topic_roots archive_title text =
|
||||||
|
let open Logarion in
|
||||||
|
let open Text in
|
||||||
|
let module T = Parsers.Plain_text.Make (Converter.Html) in
|
||||||
|
let sep_append ?(sep=", ") a x = match a,x with "",_ -> x | _, "" -> a | _ -> a ^ sep ^ x in
|
||||||
|
let opt_kv key value = if String.length value > 0 then "<dt>" ^ key ^ "<dd>" ^ value else "" in
|
||||||
|
(* let author acc auth = sep_append acc Person.(auth.name ^ " ") in*)
|
||||||
|
let authors = T.of_string (Person.Set.to_string text.authors ^ " ") "" in
|
||||||
|
let keywords = str_set "keywords" text in
|
||||||
|
let header =
|
||||||
|
let time x = {|<time datetime="|} ^ x ^ {|">|} ^ x ^ "</time>" in
|
||||||
|
let topic_links x =
|
||||||
|
let to_linked t a =
|
||||||
|
let ts = Topic_set.of_string t in
|
||||||
|
sep_append a (List.fold_left (fun a t -> sep_append ~sep:" > " a (topic_link (List.hd ts) t)) "" ts) in
|
||||||
|
String_set.fold to_linked x "" in
|
||||||
|
"<article><header><dl>"
|
||||||
|
^ opt_kv "Title:" text.title
|
||||||
|
^ opt_kv "Authors:" authors
|
||||||
|
^ opt_kv "Date: " (time (Date.(pretty_date @@ listing text.date)))
|
||||||
|
^ opt_kv "Series: " (str_set "series" text)
|
||||||
|
^ opt_kv "Topics: " (topic_links (set "topics" text))
|
||||||
|
^ opt_kv "Keywords: " keywords
|
||||||
|
^ opt_kv "Id: " (Id.to_string text.uuid)
|
||||||
|
^ {|</dl></header><pre style="white-space:pre-wrap">|} in
|
||||||
|
wrap ~keywords archive_title text.title ((T.of_string text.body header) ^ "</pre></article>")
|
||||||
|
|
||||||
|
let to_dated_links ?(limit) meta_list =
|
||||||
|
let meta_list = match limit with
|
||||||
|
| None -> meta_list
|
||||||
|
| Some limit->
|
||||||
|
let rec reduced acc i = function
|
||||||
|
| [] -> acc
|
||||||
|
| h::t -> if i < limit then reduced (h::acc) (i+1) t else acc in
|
||||||
|
List.rev @@ reduced [] 0 meta_list
|
||||||
|
in
|
||||||
|
List.fold_left
|
||||||
|
(fun a m ->
|
||||||
|
a ^ Logarion.(Date.(pretty_date (listing m.Text.date)) ^ " ")
|
||||||
|
^ {|<a href="|} ^ Logarion.Text.alias m ^ {|.html">|} ^ m.Logarion.Text.title ^ "</a><br/>")
|
||||||
|
"" meta_list
|
||||||
|
|
||||||
|
let date_index ?(limit) title meta_list =
|
||||||
|
match limit with
|
||||||
|
| Some limit -> wrap title "Index" (to_dated_links ~limit meta_list)
|
||||||
|
| None -> wrap title "Index" (to_dated_links meta_list)
|
||||||
|
|
||||||
|
let fold_topic_roots topic_roots =
|
||||||
|
let list_item root t = "<li>" ^ topic_link root t in
|
||||||
|
"<nav><h2>Main topics</h2>"
|
||||||
|
^ List.fold_left (fun a x -> a ^ list_item x x) "<ul>" (List.rev topic_roots)
|
||||||
|
^ "</ul></nav>"
|
||||||
|
|
||||||
|
let fold_topics topic_map topic_roots metas =
|
||||||
|
let open Logarion in
|
||||||
|
let rec unordered_list root topic =
|
||||||
|
List.fold_left (fun a x -> a ^ list_item root x) "<ul>" topic ^ "</ul>"
|
||||||
|
and sub_items root topic = match Topic_set.Map.find_opt topic topic_map with
|
||||||
|
| None -> ""
|
||||||
|
| Some (_, subtopics) -> unordered_list root (String_set.elements subtopics)
|
||||||
|
and list_item root t =
|
||||||
|
let item =
|
||||||
|
if List.exists (fun x -> String_set.mem t (String_set.map Topic_set.topic (Text.set "topics" x))) metas
|
||||||
|
then topic_link root t else t
|
||||||
|
in
|
||||||
|
"<li>" ^ item ^ sub_items root t
|
||||||
|
in
|
||||||
|
"<nav><h2>Topics</h2>"
|
||||||
|
^ List.fold_left (fun a x -> a ^ list_item x x) "<ul>" (List.rev topic_roots)
|
||||||
|
^ "</ul></nav>"
|
||||||
|
|
||||||
|
let text_item path meta =
|
||||||
|
let open Logarion in
|
||||||
|
{|<tr><td><a href="|} ^ path ^ Text.alias meta ^ {|.html">|} ^ meta.Text.title
|
||||||
|
^ "</a><td><time>" ^ Date.(pretty_date (listing meta.Text.date)) ^ "</time>"
|
||||||
|
|
||||||
|
let listing_index topic_map topic_roots path metas =
|
||||||
|
let rec item_group topics =
|
||||||
|
List.fold_left (fun acc topic -> acc ^ sub_groups topic ^ "<tbody>" ^ items topic) "" topics
|
||||||
|
and sub_groups topic = match Logarion.Topic_set.Map.find_opt topic topic_map with
|
||||||
|
| None -> ""
|
||||||
|
| Some (_, subtopics) -> item_group (Logarion.String_set.elements subtopics)
|
||||||
|
and items topic =
|
||||||
|
let items =
|
||||||
|
let open Logarion in
|
||||||
|
List.fold_left
|
||||||
|
(fun a e ->
|
||||||
|
if String_set.mem topic (String_set.map (Logarion.Topic_set.topic) (Text.set "Topics" e))
|
||||||
|
then text_item path e ^ a else a) "" metas in
|
||||||
|
match items with
|
||||||
|
| "" -> ""
|
||||||
|
| x -> {|<tr id="|} ^ topic ^ {|"><th colspan="3">|} ^ topic ^ "</th></tr>" ^ x
|
||||||
|
in
|
||||||
|
"<nav><h2>Texts</h2><table>" ^ item_group topic_roots ^ "</table></nav>"
|
||||||
|
|
||||||
|
let topic_main_index title topic_roots metas =
|
||||||
|
wrap title "Topics"
|
||||||
|
(fold_topic_roots topic_roots
|
||||||
|
^ "<nav><h2>Recent</h2>" ^ to_dated_links ~limit:5 metas
|
||||||
|
^ {|<br/><a href="index.date.html">More by date</a></nav>|} )
|
||||||
|
|
||||||
|
let topic_sub_index title topic_map topic_root metas =
|
||||||
|
wrap title topic_root
|
||||||
|
(fold_topics topic_map [topic_root] metas
|
||||||
|
^ {|<a href="feed.atom" id="feed">Subscribe to |}^ topic_root ^{| feed 📰</a>|}
|
||||||
|
^ listing_index topic_map [topic_root] "" metas)
|
111
app/logarion_cli.ml
Normal file
111
app/logarion_cli.ml
Normal file
@ -0,0 +1,111 @@
|
|||||||
|
let version = "%%VERSION%%"
|
||||||
|
|
||||||
|
open Cmdliner
|
||||||
|
open Logarion
|
||||||
|
module A = Logarion.Archive.Make(File_store)
|
||||||
|
|
||||||
|
(* TODO: merge in lib/ so other modules can use (.e.g HTTP pull) *)
|
||||||
|
let text_list field_opt authors_opt keywords_opt topics_opt =
|
||||||
|
match A.of_path "." with
|
||||||
|
| Error msg -> prerr_endline msg
|
||||||
|
| Ok archive ->
|
||||||
|
let predicates =
|
||||||
|
A.predicate A.authored authors_opt
|
||||||
|
@ A.predicate A.keyworded keywords_opt
|
||||||
|
@ A.predicate A.topics topics_opt
|
||||||
|
in
|
||||||
|
let predicate text = List.fold_left (fun a e -> a && e text) true predicates in
|
||||||
|
let order = A.newest in
|
||||||
|
let print_fold ~predicate fn =
|
||||||
|
let ts = A.fold ~predicate ~order fn String_set.empty archive in
|
||||||
|
String_set.iter (print_endline) ts
|
||||||
|
in
|
||||||
|
match field_opt with
|
||||||
|
| None -> A.iter ~predicate ~order (fun (n,_) -> print_endline n.Text.title) archive
|
||||||
|
| Some "keywords"-> print_fold ~predicate (fun a (e,_) -> (String_set.union a (Text.set "keywords" e)))
|
||||||
|
| Some "topics" -> print_fold ~predicate (fun a (e,_) -> (String_set.union a (Text.set "topics" e)))
|
||||||
|
| Some "authors" ->
|
||||||
|
let s = A.fold ~predicate ~order (fun a (e,_) -> Person.Set.union a e.Text.authors) Person.Set.empty archive in
|
||||||
|
print_endline @@ Person.Set.to_string s
|
||||||
|
| Some x -> prerr_endline @@ "Unrecognised field: " ^ x
|
||||||
|
|
||||||
|
let list_term =
|
||||||
|
let field = Arg.(value & opt (some string) None & info ["f"; "field"] ~docv:"FIELD" ~doc:"what to list") in
|
||||||
|
let authors = Arg.(value & opt (some string) None & info ["a"; "authors"] ~docv:"AUTHORS" ~doc:"texts with authors") in
|
||||||
|
let keywords= Arg.(value & opt (some string) None & info ["k"; "keywords"] ~docv:"KEYWORDS" ~doc:"texts with keywords") in
|
||||||
|
let topics = Arg.(value & opt (some string) None & info ["t"; "topics"] ~docv:"TOPICS" ~doc:"texts with topics") in
|
||||||
|
Term.(const text_list $ field $ authors $ keywords $ topics),
|
||||||
|
Term.info "list" ~doc:"list texts" ~man:[ `S "DESCRIPTION"; `P "List texts in Logarion archive" ]
|
||||||
|
|
||||||
|
let publish and_index files = match A.of_path "." with
|
||||||
|
| Error msg -> prerr_endline msg
|
||||||
|
| Ok archive -> File_store.(list_iter (if and_index then index else publish) archive files)
|
||||||
|
|
||||||
|
let publish_term =
|
||||||
|
let files = Arg.(value & pos_all string [] & info [] ~doc:"filenames") in
|
||||||
|
let index = Arg.(value & flag & info ["i"; "index"] ~doc:"Also index") in
|
||||||
|
let doc = "publish notes; it will become available in exports and for distribution" in
|
||||||
|
let man = [ `S "DESCRIPTION"; `P doc ] in
|
||||||
|
Term.(const publish $ index $ files), Term.info "publish" ~doc ~man
|
||||||
|
|
||||||
|
let deindex and_unpub files = match A.of_path "." with
|
||||||
|
| Error msg -> prerr_endline msg
|
||||||
|
| Ok archive -> File_store.(list_iter (if and_unpub then unpublish else deindex) archive files)
|
||||||
|
|
||||||
|
let deindex_term =
|
||||||
|
let files = Arg.(value & pos_all string [] & info [] ~doc:"filenames") in
|
||||||
|
let unpub = Arg.(value & flag & info ["u"; "unpublish"] ~doc:"Also unpublish") in
|
||||||
|
let doc = "deindex; it will disappear from indices" in
|
||||||
|
let man = [ `S "DESCRIPTION"; `P doc ] in
|
||||||
|
Term.(const deindex $ unpub $ files), Term.info "deindex" ~doc ~man
|
||||||
|
|
||||||
|
let init _force = File_store.init ()
|
||||||
|
|
||||||
|
let init_term =
|
||||||
|
let force = Arg.(value & flag & info ["f"; "force"] ~doc:"Initialise even if directory is not empty") in
|
||||||
|
let doc = "initialise a logarion repository in present directory" in
|
||||||
|
let man = [ `S "DESCRIPTION"; `P "Create a repository in current directory" ] in
|
||||||
|
Term.(const init $ force), Term.info "init" ~doc ~man
|
||||||
|
|
||||||
|
let create_term =
|
||||||
|
let f title topics_opt interactive =
|
||||||
|
match A.of_path "." with
|
||||||
|
| Error m -> prerr_endline m
|
||||||
|
| Ok archive ->
|
||||||
|
let t = match title with "" -> "Draft" | _ -> title in
|
||||||
|
let authors = archive.archivists in
|
||||||
|
let date = Date.({ created = Some (Ptime_clock.now ()); edited = None }) in
|
||||||
|
let text = { (Text.blank ()) with title = t; authors; date } in
|
||||||
|
let text = try Text.with_str_set text "Topics" (Option.get topics_opt) with _ -> text in
|
||||||
|
match File_store.with_text archive text with
|
||||||
|
| Error s -> prerr_endline s
|
||||||
|
| Ok (filepath, _note) ->
|
||||||
|
match interactive with false -> print_endline filepath
|
||||||
|
| true ->
|
||||||
|
print_endline @@ "Created: " ^ filepath;
|
||||||
|
let rec yn () = match read_line () with "y" -> true | "n" -> false
|
||||||
|
| _ -> print_string "y or n? "; flush stdout; yn () in
|
||||||
|
let _code = Sys.command ("$EDITOR " ^ filepath) in
|
||||||
|
print_string "Publish? [yn]: ";
|
||||||
|
match yn () with false -> ()
|
||||||
|
| true ->
|
||||||
|
print_string "Index? [yn]: ";
|
||||||
|
let and_index = yn () in
|
||||||
|
File_store.(list_iter (if and_index then index else publish) archive [filepath])
|
||||||
|
in
|
||||||
|
let title = Arg.(value & pos 0 string "" & info [] ~docv:"TITLE" ~doc:"Title for new article") in
|
||||||
|
let topics= Arg.(value & opt (some string) None & info ["t"; "topics"] ~docv:"TOPICS" ~doc:"Topics for new article") in
|
||||||
|
let inter = Arg.(value & flag & info ["i"; "interactive"] ~doc:"Prompts through the steps of creation and publication") in
|
||||||
|
let man = [ `S "DESCRIPTION"; `P "Create a new article, with title 'Draft' when none provided"] in
|
||||||
|
Term.(const f $ title $ topics $ inter), Term.info "create" ~doc:"create a new article" ~man
|
||||||
|
|
||||||
|
let default_cmd =
|
||||||
|
let doc = "text archival & publishing" in
|
||||||
|
let man = [ `S "BUGS"; `P "Submit bugs <mailto:logarion@lists.orbitalfox.eu?subject=[Issue] summary-here>" ] in
|
||||||
|
Term.(ret (const (`Help (`Pager, None)))), Term.info "logarion" ~version ~doc ~man
|
||||||
|
|
||||||
|
let cmds = [ init_term; create_term; publish_term; deindex_term; list_term; Convert.term ]
|
||||||
|
|
||||||
|
let () =
|
||||||
|
Random.self_init();
|
||||||
|
match Term.eval_choice default_cmd cmds with `Error _ -> exit 1 | _ -> exit 0
|
@ -1,3 +0,0 @@
|
|||||||
Logarion
|
|
||||||
Ymd
|
|
||||||
Web
|
|
16
dune-project
Normal file
16
dune-project
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
(lang dune 2.0)
|
||||||
|
(name logarion)
|
||||||
|
(homepage "https://logarion.orbitalfox.eu")
|
||||||
|
|
||||||
|
(source (uri git://orbitalfox.eu/logarion))
|
||||||
|
(license EUPL)
|
||||||
|
(authors "orbifx")
|
||||||
|
(maintainers "fox@orbitalfox.eu")
|
||||||
|
(bug_reports "mailto:logarion@lists.orbitalfox.eu?subject=Issue:")
|
||||||
|
|
||||||
|
(generate_opam_files true)
|
||||||
|
|
||||||
|
(package
|
||||||
|
(name logarion)
|
||||||
|
(synopsis "A texts system")
|
||||||
|
(depends re cmdliner bos ptime uuidm uri text_parse msgpck))
|
33
lib/archive.ml
Normal file
33
lib/archive.ml
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
(*let module S = Set.Make (Text) in*)
|
||||||
|
(*let module M = Map.Make (String) in*)
|
||||||
|
(*let module I = Map.Make (Id) in*)
|
||||||
|
(*let aggr = I.empty, M.empty, M.empty, M.empty in*)
|
||||||
|
(*let fn (id, a, t, k) (n,_) =*)
|
||||||
|
(* let id = I.add n.Text.uuid n id in*)
|
||||||
|
(* let a =*)
|
||||||
|
(* let f e a = M.update (e.Person.name) (function Some v -> Some (S.add n v) | None -> Some (S.singleton n)) a in*)
|
||||||
|
(* Person.Set.fold f n.Text.authors a in*)
|
||||||
|
(* let t =*)
|
||||||
|
(* let f e a = M.update e (function Some v -> Some (S.add n v) | None -> Some (S.singleton n)) a in*)
|
||||||
|
(* String_set.fold f (Text.set "Topics" n) t in*)
|
||||||
|
(* let k =*)
|
||||||
|
(* let f e a = M.update e (function Some v -> Some (S.add n v) | None -> Some (S.singleton n)) a in*)
|
||||||
|
(* String_set.fold f (Text.set "Keywords" n) k in*)
|
||||||
|
(* (id, a, t, k)*)
|
||||||
|
|
||||||
|
module Make (Store : Store.T) = struct
|
||||||
|
include Store
|
||||||
|
let predicate fn opt = Option.(to_list @@ map fn opt)
|
||||||
|
|
||||||
|
let authored query_string =
|
||||||
|
let q = Person.Set.of_query @@ String_set.query query_string in
|
||||||
|
fun n -> Person.Set.predicate q n.Text.authors
|
||||||
|
|
||||||
|
let keyworded query_string =
|
||||||
|
let q = String_set.query query_string in
|
||||||
|
fun n -> String_set.(predicate q (Text.set "Keywords" n))
|
||||||
|
|
||||||
|
let topics query_string =
|
||||||
|
let q = String_set.query query_string in
|
||||||
|
fun n -> String_set.(predicate q (Text.set "Topics" n))
|
||||||
|
end
|
22
lib/category.ml
Normal file
22
lib/category.ml
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
module Category = struct
|
||||||
|
type t = Unlisted | Published | Invalid | Custom of string
|
||||||
|
let compare = Stdlib.compare
|
||||||
|
let of_string = function "unlisted" | "published" -> Invalid | c -> Custom c
|
||||||
|
let to_string = function Custom c -> c | _ -> ""
|
||||||
|
end
|
||||||
|
|
||||||
|
include Category
|
||||||
|
|
||||||
|
module CategorySet = struct
|
||||||
|
include Set.Make (Category)
|
||||||
|
let of_stringset s = String_set.fold (fun e a -> add (Category.of_string e) a) s empty
|
||||||
|
let of_query q = of_stringset (fst q), of_stringset (snd q)
|
||||||
|
let predicate (inc, exl) set = not (disjoint inc set) && disjoint exl set
|
||||||
|
let of_string x = of_stringset (String_set.of_string x)
|
||||||
|
let to_string set =
|
||||||
|
let f elt a =
|
||||||
|
let s = Category.to_string elt in
|
||||||
|
if a <> "" then a ^ ", " ^ s else s
|
||||||
|
in
|
||||||
|
fold f set ""
|
||||||
|
end
|
8
lib/date.ml
Normal file
8
lib/date.ml
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
type t = { created: Ptime.t option; edited: Ptime.t option }
|
||||||
|
let compare = compare
|
||||||
|
let rfc_string date = match date with Some t -> Ptime.to_rfc3339 t | None -> ""
|
||||||
|
let of_string (rfc : string) = match Ptime.of_rfc3339 rfc with Ok (t,_,_) -> Some t | Error _ -> None
|
||||||
|
let listing date = if Option.is_some date.edited then date.edited else date.created
|
||||||
|
let pretty_date = function
|
||||||
|
| Some t -> Ptime.to_date t |> fun (y, m, d) -> Printf.sprintf "%04d-%02d-%02d" y m d
|
||||||
|
| None -> ""
|
4
lib/dune
Normal file
4
lib/dune
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
(library
|
||||||
|
(name logarion)
|
||||||
|
(public_name logarion)
|
||||||
|
(libraries ptime uuidm uri re.str bos text_parse text_parse.parsers))
|
183
lib/file_store.ml
Normal file
183
lib/file_store.ml
Normal file
@ -0,0 +1,183 @@
|
|||||||
|
type t = string
|
||||||
|
type item_t = string
|
||||||
|
type archive_t = { name: string; archivists: Person.Set.t; id: Id.t;
|
||||||
|
kv: string Store.KV.t; store: t }
|
||||||
|
type record_t = Text.t * item_t
|
||||||
|
|
||||||
|
let extensions = [ ".txt"; ".gmi"; ".md"; ".org" ]
|
||||||
|
let pubdir = "/.logarion/published/"
|
||||||
|
let idxdir = "/.logarion/indexed/"
|
||||||
|
|
||||||
|
let to_string f =
|
||||||
|
let ic = open_in f in
|
||||||
|
let n = in_channel_length ic in
|
||||||
|
let s = Bytes.create n in
|
||||||
|
really_input ic s 0 n;
|
||||||
|
close_in ic;
|
||||||
|
Bytes.to_string s
|
||||||
|
|
||||||
|
let (//) a b = a ^ "/" ^ b
|
||||||
|
|
||||||
|
let to_text dir filename = let f = dir // filename in
|
||||||
|
to_string f |> Text.of_string |> Result.map_error (fun m -> f^": "^m)
|
||||||
|
let text_filetype dir name =
|
||||||
|
try Fpath.(let v = v name in mem_ext extensions v || (not (exists_ext v) && not (Sys.is_directory @@ dir^"/"^name)))
|
||||||
|
with Invalid_argument str -> prerr_endline ("file: " ^ name ^ " invalid (" ^ str ^ ")"); false
|
||||||
|
|
||||||
|
let clean_unlinked dir filepath =
|
||||||
|
let file = dir ^ filepath in
|
||||||
|
match (Unix.lstat file).st_nlink with 1 -> Unix.unlink file | _ -> ()
|
||||||
|
| exception Unix.Unix_error(Unix.ENOENT,_,_) -> ()
|
||||||
|
|
||||||
|
let purged_substore archive subdir =
|
||||||
|
let store = archive.store ^ subdir in
|
||||||
|
Array.iter (clean_unlinked store) @@ Sys.readdir store;
|
||||||
|
{ archive with store }
|
||||||
|
|
||||||
|
let published archive = purged_substore archive pubdir
|
||||||
|
let indexed archive = purged_substore archive idxdir
|
||||||
|
|
||||||
|
let publish dir text file =
|
||||||
|
try Unix.link file (dir ^ pubdir ^ Id.to_string text.Text.uuid)
|
||||||
|
with Unix.Unix_error(Unix.EEXIST,_,_) -> ()
|
||||||
|
let deindex dir text _file =
|
||||||
|
try Unix.unlink (dir ^ idxdir ^ Id.to_string text.Text.uuid)
|
||||||
|
with Unix.Unix_error(Unix.ENOENT,_,_) -> ()
|
||||||
|
let unpublish dir text file =
|
||||||
|
deindex dir text file;
|
||||||
|
try Unix.unlink (dir ^ pubdir ^ Id.to_string text.Text.uuid)
|
||||||
|
with Unix.Unix_error(Unix.ENOENT,_,_) -> ()
|
||||||
|
let index dir text file =
|
||||||
|
publish dir text file;
|
||||||
|
try Unix.link file (dir ^ idxdir ^ Id.to_string text.Text.uuid)
|
||||||
|
with Unix.Unix_error(Unix.EEXIST,_,_) -> ()
|
||||||
|
|
||||||
|
let newest (a,_pa) (b,_pb) = Text.newest a b
|
||||||
|
let oldest (a,_pa) (b,_pb) = Text.oldest a b
|
||||||
|
|
||||||
|
let list_iter fn {store;_} paths =
|
||||||
|
let link f = match to_text store f with Ok t -> fn store t f | Error s -> prerr_endline s in
|
||||||
|
List.iter link paths
|
||||||
|
|
||||||
|
let iter_valid_text dir pred fn p =
|
||||||
|
if text_filetype dir p then
|
||||||
|
match to_text dir p with Error x -> prerr_endline x | Ok t -> if pred t then fn (t, dir^p)
|
||||||
|
|
||||||
|
let fold_valid_text dir pred fn acc p =
|
||||||
|
if not (text_filetype dir p) then acc else
|
||||||
|
match to_text dir p with
|
||||||
|
Error x -> prerr_endline x; acc | Ok t -> if pred t then fn acc (t, dir^p) else acc
|
||||||
|
|
||||||
|
let iter ?(predicate=fun _ -> true) ?order fn {store;_} =
|
||||||
|
match order with
|
||||||
|
| None -> Array.iter (iter_valid_text store predicate fn) @@ Sys.readdir store
|
||||||
|
| Some comp ->
|
||||||
|
List.iter fn @@ List.fast_sort comp
|
||||||
|
@@ Array.fold_left (fold_valid_text store predicate (fun a e -> List.cons e a)) []
|
||||||
|
@@ Sys.readdir store
|
||||||
|
|
||||||
|
let fold ?(predicate=fun _ -> true) ?order fn acc {store;_} =
|
||||||
|
match order with
|
||||||
|
| None -> Array.fold_left (fold_valid_text store predicate fn) acc @@ Sys.readdir store
|
||||||
|
| Some comp ->
|
||||||
|
List.fold_left fn acc @@ List.fast_sort comp
|
||||||
|
@@ Array.fold_left (fold_valid_text store predicate (fun a e -> List.cons e a)) []
|
||||||
|
@@ Sys.readdir store
|
||||||
|
|
||||||
|
let with_id { store; _ } id =
|
||||||
|
let matched acc path =
|
||||||
|
if not (text_filetype store path) then acc
|
||||||
|
else
|
||||||
|
match to_text store path with
|
||||||
|
| Error x -> prerr_endline x; acc
|
||||||
|
| Ok text ->
|
||||||
|
if text.Text.uuid <> id then acc
|
||||||
|
else
|
||||||
|
match acc with
|
||||||
|
| Ok None -> Ok (Some text)
|
||||||
|
| Ok (Some prev) -> if prev = text then acc else Error [text; prev]
|
||||||
|
| Error x -> Error (text :: x)
|
||||||
|
in
|
||||||
|
Array.fold_left matched (Ok None) @@ Sys.readdir store
|
||||||
|
|
||||||
|
module Directory = struct
|
||||||
|
let print ?(descr="") dir result =
|
||||||
|
let () = match result with
|
||||||
|
| Ok true -> print_endline ("Created " ^ descr ^ " directory " ^ dir)
|
||||||
|
| Ok false -> print_endline ("Using existing " ^ descr ^ " directory " ^ dir)
|
||||||
|
| Error (`Msg msg) -> prerr_endline @@ "Failed making " ^ descr ^ ". " ^ msg
|
||||||
|
in
|
||||||
|
result
|
||||||
|
|
||||||
|
let directory dir = Result.bind (Fpath.of_string dir) Bos.OS.Dir.create
|
||||||
|
|
||||||
|
let rec directories = function
|
||||||
|
| [] -> Ok ()
|
||||||
|
| (d, descr)::tl ->
|
||||||
|
match directory d |> print ~descr d with
|
||||||
|
| Ok _ -> directories tl
|
||||||
|
| Error _ -> Error (d, descr)
|
||||||
|
end
|
||||||
|
|
||||||
|
let copy ?(recursive = false) src dst =
|
||||||
|
Bos.OS.Cmd.run (Bos.Cmd.(v "cp" %% (on recursive @@ v "-r") % src % dst))
|
||||||
|
|
||||||
|
let versioned_basename_of_title ?(version=0) repo extension (title : string) =
|
||||||
|
let basename = Text.string_alias title in
|
||||||
|
let rec next version =
|
||||||
|
let candidate = repo // basename ^ "." ^ string_of_int version ^ extension in
|
||||||
|
if Sys.file_exists candidate then next (succ version) else candidate
|
||||||
|
in
|
||||||
|
next version
|
||||||
|
|
||||||
|
let uuid_filename repo extension text =
|
||||||
|
let basename = Text.alias text in
|
||||||
|
let candidate = repo // String.sub (Id.to_string text.uuid) 0 6 ^ "." ^ basename ^ extension in
|
||||||
|
if Sys.file_exists candidate then Error "Name clash, try again" else Ok candidate
|
||||||
|
|
||||||
|
let file path content = let out = open_out path in output_string out content; close_out out
|
||||||
|
|
||||||
|
let with_text {store;_} new_text =
|
||||||
|
let extension = List.hd extensions in
|
||||||
|
Result.bind (uuid_filename store extension new_text) @@
|
||||||
|
fun path ->
|
||||||
|
try file path (Text.to_string new_text); Ok (path, new_text) with Sys_error s -> Error s
|
||||||
|
|
||||||
|
let basic_toml_bytes =
|
||||||
|
"Archive-Name: "
|
||||||
|
^ "\nArchive-ID: " ^ Id.(generate () |> to_string)
|
||||||
|
^ "\nArchivists: " ^ Bos.OS.Env.opt_var "USER" ~absent:""
|
||||||
|
|> Bytes.of_string
|
||||||
|
|
||||||
|
let init ?(dotdir=".logarion/") () =
|
||||||
|
match Directory.directories [dotdir, "dotdir"; dotdir//"published", "linkdir"; dotdir//"indexed", "link dir"] with
|
||||||
|
| Error (_dir, _desc) -> ()
|
||||||
|
| Ok () ->
|
||||||
|
let config_file =
|
||||||
|
open_out_gen [Open_creat; Open_excl; Open_wronly]
|
||||||
|
0o700 (dotdir // "config") in
|
||||||
|
output_bytes config_file basic_toml_bytes;
|
||||||
|
close_out config_file
|
||||||
|
|
||||||
|
module Config = struct
|
||||||
|
type t = archive_t
|
||||||
|
let key_value k v a = match k with
|
||||||
|
| "Archive-Name" -> { a with name = String.trim v }
|
||||||
|
| "Archive-ID" -> { a with id = Option.get (Id.of_string (String.trim v)) }
|
||||||
|
| "Archivists" -> { a with archivists = Person.Set.of_string v }
|
||||||
|
| _ -> { a with kv = Store.KV.add k (String.trim v) a.kv }
|
||||||
|
end
|
||||||
|
|
||||||
|
let of_path store =
|
||||||
|
let open Text_parse in
|
||||||
|
let subsyntaxes = [| (module Parsers.Key_value.Make (Config) : Parser.S with type t = Config.t); (module Parsers.Key_value.Make (Config)); |] in
|
||||||
|
let of_string text acc = Parser.parse subsyntaxes { text; pos = 0; right_boundary = String.length text - 1 } acc in
|
||||||
|
Ok (of_string (to_string @@ store ^ "/.logarion/config")
|
||||||
|
{
|
||||||
|
name = "";
|
||||||
|
archivists = Person.Set.empty;
|
||||||
|
id = Id.nil;
|
||||||
|
kv = Store.KV.empty;
|
||||||
|
store = Bos.OS.Env.opt_var "LOGARION_DIR" ~absent:"."
|
||||||
|
}
|
||||||
|
)
|
9
lib/id.ml
Normal file
9
lib/id.ml
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
let random_state = Random.State.make_self_init ()
|
||||||
|
type t = Uuidm.t
|
||||||
|
let compare = Uuidm.compare
|
||||||
|
let to_string = Uuidm.to_string
|
||||||
|
let of_string = Uuidm.of_string
|
||||||
|
let to_bytes = Uuidm.to_bytes
|
||||||
|
let of_bytes = Uuidm.of_bytes
|
||||||
|
let generate ?(random_state=random_state) = Uuidm.v4_gen random_state
|
||||||
|
let nil = Uuidm.nil
|
31
lib/person.ml
Normal file
31
lib/person.ml
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
module Person = struct
|
||||||
|
type name_t = string
|
||||||
|
type address_t = Uri.t
|
||||||
|
type t = { name: name_t; addresses: address_t list }
|
||||||
|
let empty = { name = ""; addresses = [] }
|
||||||
|
let compare = Stdlib.compare
|
||||||
|
let to_string p = List.fold_left (fun a e -> a^" <"^Uri.to_string e^">") p.name p.addresses
|
||||||
|
let of_string s = match String.trim s with "" -> empty | s ->
|
||||||
|
match Re.Str.(split (regexp " *< *") s) with
|
||||||
|
| [] -> empty
|
||||||
|
| [n] -> let name = String.trim n in { empty with name }
|
||||||
|
| n::adds ->
|
||||||
|
let name = String.trim n in
|
||||||
|
let addresses = List.map (fun f -> Uri.of_string @@ String.(sub f 0 (length f -1))) adds in
|
||||||
|
{ name; addresses }
|
||||||
|
end
|
||||||
|
|
||||||
|
include Person
|
||||||
|
|
||||||
|
module Set = struct
|
||||||
|
include Set.Make(Person)
|
||||||
|
let to_string ?(pre="") ?(sep=", ") s =
|
||||||
|
let str = Person.to_string in
|
||||||
|
let j x a = match a, x with "",_ -> str x | _,x when x = Person.empty -> a | _ -> a^sep^str x in
|
||||||
|
fold j s pre
|
||||||
|
let of_string s = of_list (List.map Person.of_string (String_set.list_of_csv s))
|
||||||
|
|
||||||
|
let of_stringset s = String_set.fold (fun e a -> union (of_string e) a) s empty
|
||||||
|
let of_query q = of_stringset (fst q), of_stringset (snd q)
|
||||||
|
let predicate (inc, exl) set = not (disjoint inc set) && disjoint exl set
|
||||||
|
end
|
17
lib/store.ml
Normal file
17
lib/store.ml
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
module KV = Map.Make (String)
|
||||||
|
|
||||||
|
module type T = sig
|
||||||
|
type t
|
||||||
|
type item_t
|
||||||
|
type archive_t = { name : string; archivists : Person.Set.t; id : Id.t; kv : string KV.t; store : t }
|
||||||
|
type record_t = Text.t * item_t
|
||||||
|
val of_path: string -> (archive_t, string) result
|
||||||
|
val published: archive_t -> archive_t
|
||||||
|
val indexed: archive_t -> archive_t
|
||||||
|
val newest: record_t -> record_t -> int
|
||||||
|
val oldest: record_t -> record_t -> int
|
||||||
|
val with_id: archive_t -> Id.t -> (Text.t option, Text.t list) result
|
||||||
|
val with_text: archive_t -> Text.t -> (string * Text.t, string) result
|
||||||
|
val iter: ?predicate:(Text.t -> bool) -> ?order:(record_t -> record_t -> int) -> (record_t -> unit) -> archive_t -> unit
|
||||||
|
val fold: ?predicate:(Text.t -> bool) -> ?order:(record_t -> record_t -> int) -> ('a -> record_t -> 'a) -> 'a -> archive_t -> 'a
|
||||||
|
end
|
15
lib/string_set.ml
Normal file
15
lib/string_set.ml
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
include Set.Make(String)
|
||||||
|
|
||||||
|
let list_of_csv x = Re.Str.(split (regexp " *, *")) (String.trim x)
|
||||||
|
let of_string x = of_list (list_of_csv x)
|
||||||
|
let to_string ?(pre="") ?(sep=", ") s =
|
||||||
|
let j a x = match a, x with "", _ -> x | _, "" -> a | _ -> a ^ sep ^ x in
|
||||||
|
fold (fun x acc -> j acc x) s pre
|
||||||
|
|
||||||
|
let query string =
|
||||||
|
let partition (include_set, exclude_set) elt =
|
||||||
|
if String.get elt 0 = '!' then (include_set, add String.(sub elt 1 (length elt - 1)) exclude_set)
|
||||||
|
else (add elt include_set, exclude_set) in
|
||||||
|
List.fold_left partition (empty, empty) @@ list_of_csv string
|
||||||
|
|
||||||
|
let predicate (inc, exl) set = not (disjoint inc set) && disjoint exl set
|
101
lib/text.ml
Normal file
101
lib/text.ml
Normal file
@ -0,0 +1,101 @@
|
|||||||
|
module String_map = Map.Make (String)
|
||||||
|
type t = {
|
||||||
|
title: string;
|
||||||
|
uuid: Id.t;
|
||||||
|
authors: Person.Set.t;
|
||||||
|
date: Date.t;
|
||||||
|
string_map: string String_map.t;
|
||||||
|
stringset_map: String_set.t String_map.t;
|
||||||
|
body: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
let blank ?(uuid=(Id.generate ())) () = {
|
||||||
|
title = "";
|
||||||
|
uuid;
|
||||||
|
authors = Person.Set.empty;
|
||||||
|
date = Date.({ created = None; edited = None});
|
||||||
|
string_map = String_map.empty;
|
||||||
|
stringset_map = String_map.empty;
|
||||||
|
body = "";
|
||||||
|
}
|
||||||
|
|
||||||
|
let compare = Stdlib.compare
|
||||||
|
let newest a b = Date.(compare a.date b.date)
|
||||||
|
let oldest a b = Date.(compare b.date a.date)
|
||||||
|
let str key m = try String_map.find (String.lowercase_ascii key) m.string_map with Not_found -> ""
|
||||||
|
let set key m = try String_map.find (String.lowercase_ascii key) m.stringset_map with Not_found -> String_set.empty
|
||||||
|
let str_set key m = String_set.to_string @@ set key m
|
||||||
|
let with_str_set m key str = { m with stringset_map = String_map.add (String.lowercase_ascii key) (String_set.of_string str) m.stringset_map }
|
||||||
|
|
||||||
|
let with_kv x (k,v) =
|
||||||
|
let trim = String.trim in
|
||||||
|
match String.lowercase_ascii k with
|
||||||
|
| "body" -> { x with body = String.trim v }
|
||||||
|
| "title"-> { x with title = trim v }
|
||||||
|
| "id" -> (match Id.of_string v with Some id -> { x with uuid = id } | None -> x)
|
||||||
|
| "author"
|
||||||
|
| "authors" -> { x with authors = Person.Set.of_string (trim v)}
|
||||||
|
| "date" -> { x with date = Date.{ x.date with created = Date.of_string v }}
|
||||||
|
| "date-edited"-> { x with date = Date.{ x.date with edited = Date.of_string v }}
|
||||||
|
| "licences" | "topics" | "keywords" | "series" as k -> with_str_set x k v
|
||||||
|
| k -> { x with string_map = String_map.add k (trim v) x.string_map }
|
||||||
|
|
||||||
|
let kv_of_string line = match Re.Str.(bounded_split (regexp ": *")) line 2 with
|
||||||
|
| [ key; value ] -> Re.Str.(replace_first (regexp "^#\\+") "" key), value
|
||||||
|
| [ key ] -> Re.Str.(replace_first (regexp "^#\\+") "" key), ""
|
||||||
|
| _ -> "",""
|
||||||
|
|
||||||
|
let of_header front_matter =
|
||||||
|
let fields = List.map kv_of_string (Re.Str.(split (regexp "\n")) front_matter) in
|
||||||
|
List.fold_left with_kv (blank ~uuid:Id.nil ()) fields
|
||||||
|
|
||||||
|
let front_matter_body_split s =
|
||||||
|
if Re.Str.(string_match (regexp ".*:.*")) s 0
|
||||||
|
then match Re.Str.(bounded_split (regexp "^$")) s 2 with
|
||||||
|
| front::body::[] -> (front, body)
|
||||||
|
| _ -> ("", s)
|
||||||
|
else ("", s)
|
||||||
|
|
||||||
|
let of_string s =
|
||||||
|
let front_matter, body = front_matter_body_split s in
|
||||||
|
try
|
||||||
|
let note = { (of_header front_matter) with body } in
|
||||||
|
if note.uuid <> Id.nil then Ok note else Error "Missing ID header"
|
||||||
|
with _ -> Error ("Failed parsing" ^ s)
|
||||||
|
|
||||||
|
let to_string x =
|
||||||
|
let has_len v = String.length v > 0 in
|
||||||
|
let s field value = if has_len value then field ^ ": " ^ value ^ "\n" else "" in
|
||||||
|
let a value = if Person.Set.is_empty value then "" else "Authors: " ^ Person.Set.to_string value ^ "\n" in
|
||||||
|
let d field value = match value with Some _ -> field ^ ": " ^ Date.rfc_string value ^ "\n" | None -> "" in
|
||||||
|
let rows =
|
||||||
|
[ s "Title" x.title;
|
||||||
|
a x.authors;
|
||||||
|
d "Date" x.date.Date.created;
|
||||||
|
d "Edited" x.date.Date.edited;
|
||||||
|
s "Licences" (str_set "licences" x);
|
||||||
|
s "Topics" (str_set "topics" x);
|
||||||
|
s "Keywords" (str_set "keywords" x);
|
||||||
|
s "Series" (str_set "series" x);
|
||||||
|
s "Abstract" (str "abstract" x);
|
||||||
|
s "ID" (Uuidm.to_string x.uuid);
|
||||||
|
s "Alias" (str "Alias" x) ]
|
||||||
|
in
|
||||||
|
String.concat "" rows ^ "\n" ^ x.body
|
||||||
|
|
||||||
|
let string_alias t =
|
||||||
|
let is_reserved = function
|
||||||
|
| '!' | '*' | '\'' | '(' | ')' | ';' | ':' | '@' | '&' | '=' | '+' | '$'
|
||||||
|
| ',' | '/' | '?' | '#' | '[' | ']' | ' ' | '\t' | '\x00' -> true
|
||||||
|
| _ -> false
|
||||||
|
in
|
||||||
|
let b = Buffer.create (String.length t) in
|
||||||
|
let filter char =
|
||||||
|
let open Buffer in
|
||||||
|
if is_reserved char then (try (if nth b (pred (length b)) <> '-' then add_char b '-') with Invalid_argument _ -> prerr_endline "reserved")
|
||||||
|
else add_char b char
|
||||||
|
in
|
||||||
|
String.(iter filter (lowercase_ascii t));
|
||||||
|
Buffer.contents b
|
||||||
|
|
||||||
|
let alias t = match str "alias" t with "" -> string_alias t.title | x -> x
|
35
lib/topic_set.ml
Normal file
35
lib/topic_set.ml
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
let of_string x = Re.Str.(split (regexp " *> *")) (String.trim x)
|
||||||
|
|
||||||
|
let topic x =
|
||||||
|
let path = of_string x in
|
||||||
|
try List.nth path (List.length path - 1) with _ -> ""
|
||||||
|
|
||||||
|
module Map = Map.Make(String)
|
||||||
|
|
||||||
|
let edges x map = try Map.find x map with Not_found -> (String_set.empty, String_set.empty)
|
||||||
|
|
||||||
|
let edges_with_context context (contexts, subtopics) = (String_set.add context contexts, subtopics)
|
||||||
|
let edges_with_subtopic subtopic (contexts, subtopics) = (contexts, String_set.add subtopic subtopics)
|
||||||
|
|
||||||
|
let rec list_to_map map = function
|
||||||
|
| [] -> map
|
||||||
|
| [topic] ->
|
||||||
|
let edges = edges topic map in
|
||||||
|
Map.add topic edges map
|
||||||
|
| context :: topic :: tail ->
|
||||||
|
let context_edges = edges context map in
|
||||||
|
let topic_edges = edges topic map in
|
||||||
|
let map =
|
||||||
|
map
|
||||||
|
|> Map.add context (edges_with_subtopic topic context_edges)
|
||||||
|
|> Map.add topic (edges_with_context context topic_edges)
|
||||||
|
in
|
||||||
|
list_to_map map (topic :: tail)
|
||||||
|
|
||||||
|
let to_map map set =
|
||||||
|
List.fold_left (fun acc elt -> list_to_map acc (of_string elt)) map @@ String_set.elements set
|
||||||
|
|
||||||
|
let roots map =
|
||||||
|
let root_keys acc (key, (contexts, _topics)) = if String_set.is_empty contexts then key :: acc else acc in
|
||||||
|
List.fold_left root_keys [] @@ Map.bindings map
|
||||||
|
|
@ -1,29 +1,34 @@
|
|||||||
opam-version: "1.2"
|
# This file is generated by dune, edit dune-project instead
|
||||||
name: "logarion"
|
opam-version: "2.0"
|
||||||
version: "0.5.0"
|
synopsis: "A texts system"
|
||||||
homepage: "https://logarion.orbitalfox.eu"
|
maintainer: ["fox@orbitalfox.eu"]
|
||||||
dev-repo: "git://orbitalfox.eu/logarion"
|
authors: ["orbifx"]
|
||||||
bug-reports: "mailto:logarion@lists.orbitalfox.eu?subject=[Issue]"
|
|
||||||
maintainer: "Stavros Polymenis <sp@orbitalfox.eu>"
|
|
||||||
authors: "Stavros Polymenis <sp@orbitalfox.eu>"
|
|
||||||
license: "EUPL"
|
license: "EUPL"
|
||||||
build: [
|
homepage: "https://logarion.orbitalfox.eu"
|
||||||
["dune" "build" "--root" "." "-j" jobs "@install"]
|
bug-reports: "mailto:logarion@lists.orbitalfox.eu?subject=Issue:"
|
||||||
]
|
|
||||||
depends: [
|
depends: [
|
||||||
"dune" {build}
|
"dune" {>= "2.0"}
|
||||||
|
"re"
|
||||||
|
"cmdliner"
|
||||||
|
"bos"
|
||||||
"ptime"
|
"ptime"
|
||||||
"uuidm"
|
"uuidm"
|
||||||
"uri"
|
"uri"
|
||||||
"re"
|
"text_parse"
|
||||||
"emile"
|
"msgpck"
|
||||||
"omd"
|
|
||||||
"lwt"
|
|
||||||
"mustache"
|
|
||||||
"tyxml"
|
|
||||||
|
|
||||||
"cmdliner"
|
|
||||||
"bos"
|
|
||||||
"toml"
|
|
||||||
"fpath"
|
|
||||||
]
|
]
|
||||||
|
build: [
|
||||||
|
["dune" "subst"] {pinned}
|
||||||
|
[
|
||||||
|
"dune"
|
||||||
|
"build"
|
||||||
|
"-p"
|
||||||
|
name
|
||||||
|
"-j"
|
||||||
|
jobs
|
||||||
|
"@install"
|
||||||
|
"@runtest" {with-test}
|
||||||
|
"@doc" {with-doc}
|
||||||
|
]
|
||||||
|
]
|
||||||
|
dev-repo: "git://orbitalfox.eu/logarion"
|
||||||
|
@ -1,16 +0,0 @@
|
|||||||
#This is an exemplar config file. Use `logarion_cli init` to have one generated.
|
|
||||||
|
|
||||||
[archive]
|
|
||||||
title = "Logarion"
|
|
||||||
owner = "Name"
|
|
||||||
email = "name@example.com"
|
|
||||||
uuid = "" # Generate UUID using `uuidgen` or https://www.uuidgenerator.net/
|
|
||||||
|
|
||||||
[web]
|
|
||||||
static_dir = ".logarion/static"
|
|
||||||
stylesheets = ["main.css"]
|
|
||||||
url = "http://localhost:3666"
|
|
||||||
|
|
||||||
[gopher]
|
|
||||||
static_dir = ".logarion/static"
|
|
||||||
url = "gopher://localhost"
|
|
@ -1,15 +0,0 @@
|
|||||||
@font-face
|
|
||||||
font-family: "Orbitron Medium"
|
|
||||||
src: url('#{$font-url}/orbitron/orbitron-medium.otf')
|
|
||||||
|
|
||||||
@font-face
|
|
||||||
font-family: "Orbitron Light"
|
|
||||||
src: url('#{$font-url}/orbitron/orbitron-light.otf')
|
|
||||||
|
|
||||||
@font-face
|
|
||||||
font-family: "Orbitron Bold"
|
|
||||||
src: url('#{$font-url}/orbitron/orbitron-bold.otf')
|
|
||||||
|
|
||||||
@font-face
|
|
||||||
font-family: "Orbitron Black"
|
|
||||||
src: url('#{$font-url}/orbitron/orbitron-black.otf')
|
|
@ -1,99 +0,0 @@
|
|||||||
$font-url: "fonts"
|
|
||||||
|
|
||||||
@import fonts/orbitron.sass
|
|
||||||
|
|
||||||
$font-face: "DejaVu Sans"
|
|
||||||
|
|
||||||
body
|
|
||||||
font-family: $font-face
|
|
||||||
font-weight: 400
|
|
||||||
|
|
||||||
main, article
|
|
||||||
margin: auto
|
|
||||||
padding: 2pt
|
|
||||||
|
|
||||||
main, article, p, img, h1, h2, h3, h4, h5
|
|
||||||
max-width: 75ch
|
|
||||||
|
|
||||||
pre
|
|
||||||
display: block
|
|
||||||
overflow: auto
|
|
||||||
padding-left: 1ch
|
|
||||||
|
|
||||||
blockquote
|
|
||||||
font-style: italic
|
|
||||||
|
|
||||||
article > .meta
|
|
||||||
margin: auto 2ch
|
|
||||||
|
|
||||||
article > h1
|
|
||||||
text-align: center
|
|
||||||
|
|
||||||
header > h1
|
|
||||||
font-family: "Orbitron Light"
|
|
||||||
|
|
||||||
header, footer
|
|
||||||
text-align: center
|
|
||||||
|
|
||||||
li a, header a, header a:hover
|
|
||||||
text-decoration: none
|
|
||||||
|
|
||||||
a:hover
|
|
||||||
text-decoration: underline
|
|
||||||
|
|
||||||
h1, h2, h3, h4, h5
|
|
||||||
font-family: "Orbitron Medium"
|
|
||||||
|
|
||||||
footer
|
|
||||||
clear: both
|
|
||||||
margin-top: 2em
|
|
||||||
border-top: 1px dotted
|
|
||||||
padding: 1em 0
|
|
||||||
|
|
||||||
fieldset
|
|
||||||
border: .5mm dashed
|
|
||||||
|
|
||||||
fieldset > p
|
|
||||||
margin: .5em auto
|
|
||||||
padding: .5em
|
|
||||||
float: left
|
|
||||||
|
|
||||||
label
|
|
||||||
margin: .2em
|
|
||||||
display: block
|
|
||||||
|
|
||||||
input, textarea
|
|
||||||
display: block
|
|
||||||
border: none
|
|
||||||
border-bottom: .5mm solid
|
|
||||||
min-width: 100%
|
|
||||||
|
|
||||||
textarea
|
|
||||||
border: .5mm solid
|
|
||||||
width: 80ch
|
|
||||||
height: 40ch
|
|
||||||
display: block-inline
|
|
||||||
clear: both
|
|
||||||
|
|
||||||
button
|
|
||||||
clear: both
|
|
||||||
display: block
|
|
||||||
margin: 1em auto
|
|
||||||
border: .5mm solid
|
|
||||||
|
|
||||||
.topics > li
|
|
||||||
list-style-type: none
|
|
||||||
text-transform: capitalize
|
|
||||||
|
|
||||||
ul.listing
|
|
||||||
padding: 0 1ch
|
|
||||||
|
|
||||||
.listing > li
|
|
||||||
list-style-type: none
|
|
||||||
text-transform: none
|
|
||||||
padding: 4px
|
|
||||||
margin-bottom: .5em
|
|
||||||
|
|
||||||
.listing p
|
|
||||||
padding: 0
|
|
||||||
margin: 0
|
|
@ -1,23 +0,0 @@
|
|||||||
@import layout.sass
|
|
||||||
|
|
||||||
body
|
|
||||||
background-color: #191b22
|
|
||||||
|
|
||||||
body, a, header a:visited
|
|
||||||
color: #f2f2f2
|
|
||||||
|
|
||||||
pre
|
|
||||||
border-left: 1mm solid #f2f2f233
|
|
||||||
|
|
||||||
a
|
|
||||||
color: PaleTurquoise
|
|
||||||
|
|
||||||
.abstract, .meta
|
|
||||||
color: #909090
|
|
||||||
|
|
||||||
article, .listing > li
|
|
||||||
background-color: rgba(100,100,100,.1)
|
|
||||||
border: 1px solid rgba(100,100,100,.2)
|
|
||||||
|
|
||||||
.pipe
|
|
||||||
opacity: .3
|
|
@ -1,23 +0,0 @@
|
|||||||
@import layout.sass
|
|
||||||
|
|
||||||
body
|
|
||||||
background-color: WhiteSmoke
|
|
||||||
|
|
||||||
body, a, header a:visited
|
|
||||||
color: #191B22
|
|
||||||
|
|
||||||
pre
|
|
||||||
border-left: 1mm solid #191B22
|
|
||||||
|
|
||||||
a
|
|
||||||
color: SteelBlue
|
|
||||||
|
|
||||||
.abstract, .meta
|
|
||||||
color: #909090
|
|
||||||
|
|
||||||
article, .listing > li
|
|
||||||
background-color: rgba(100,100,100,.1)
|
|
||||||
border: 1px solid rgba(100,100,100,.2)
|
|
||||||
|
|
||||||
.pipe
|
|
||||||
opacity: .3
|
|
@ -1,131 +0,0 @@
|
|||||||
@font-face {
|
|
||||||
font-family: "Orbitron Medium";
|
|
||||||
src: url("fonts/orbitron/orbitron-medium.otf"); }
|
|
||||||
|
|
||||||
@font-face {
|
|
||||||
font-family: "Orbitron Light";
|
|
||||||
src: url("fonts/orbitron/orbitron-light.otf"); }
|
|
||||||
|
|
||||||
@font-face {
|
|
||||||
font-family: "Orbitron Bold";
|
|
||||||
src: url("fonts/orbitron/orbitron-bold.otf"); }
|
|
||||||
|
|
||||||
@font-face {
|
|
||||||
font-family: "Orbitron Black";
|
|
||||||
src: url("fonts/orbitron/orbitron-black.otf"); }
|
|
||||||
|
|
||||||
body {
|
|
||||||
font-family: "DejaVu Sans";
|
|
||||||
font-weight: 400; }
|
|
||||||
|
|
||||||
main, article {
|
|
||||||
margin: auto;
|
|
||||||
padding: 2pt; }
|
|
||||||
|
|
||||||
main, article, p, img, h1, h2, h3, h4, h5 {
|
|
||||||
max-width: 75ch; }
|
|
||||||
|
|
||||||
pre {
|
|
||||||
display: block;
|
|
||||||
overflow: auto;
|
|
||||||
padding-left: 1ch; }
|
|
||||||
|
|
||||||
blockquote {
|
|
||||||
font-style: italic; }
|
|
||||||
|
|
||||||
article > .meta {
|
|
||||||
margin: auto 2ch; }
|
|
||||||
|
|
||||||
article > h1 {
|
|
||||||
text-align: center; }
|
|
||||||
|
|
||||||
header > h1 {
|
|
||||||
font-family: "Orbitron Light"; }
|
|
||||||
|
|
||||||
header, footer {
|
|
||||||
text-align: center; }
|
|
||||||
|
|
||||||
li a, header a, header a:hover {
|
|
||||||
text-decoration: none; }
|
|
||||||
|
|
||||||
a:hover {
|
|
||||||
text-decoration: underline; }
|
|
||||||
|
|
||||||
h1, h2, h3, h4, h5 {
|
|
||||||
font-family: "Orbitron Medium"; }
|
|
||||||
|
|
||||||
footer {
|
|
||||||
clear: both;
|
|
||||||
margin-top: 2em;
|
|
||||||
border-top: 1px dotted;
|
|
||||||
padding: 1em 0; }
|
|
||||||
|
|
||||||
fieldset {
|
|
||||||
border: .5mm dashed; }
|
|
||||||
|
|
||||||
fieldset > p {
|
|
||||||
margin: .5em auto;
|
|
||||||
padding: .5em;
|
|
||||||
float: left; }
|
|
||||||
|
|
||||||
label {
|
|
||||||
margin: .2em;
|
|
||||||
display: block; }
|
|
||||||
|
|
||||||
input, textarea {
|
|
||||||
display: block;
|
|
||||||
border: none;
|
|
||||||
border-bottom: .5mm solid;
|
|
||||||
min-width: 100%; }
|
|
||||||
|
|
||||||
textarea {
|
|
||||||
border: .5mm solid;
|
|
||||||
width: 80ch;
|
|
||||||
height: 40ch;
|
|
||||||
display: block-inline;
|
|
||||||
clear: both; }
|
|
||||||
|
|
||||||
button {
|
|
||||||
clear: both;
|
|
||||||
display: block;
|
|
||||||
margin: 1em auto;
|
|
||||||
border: .5mm solid; }
|
|
||||||
|
|
||||||
.topics > li {
|
|
||||||
list-style-type: none;
|
|
||||||
text-transform: capitalize; }
|
|
||||||
|
|
||||||
ul.listing {
|
|
||||||
padding: 0 1ch; }
|
|
||||||
|
|
||||||
.listing > li {
|
|
||||||
list-style-type: none;
|
|
||||||
text-transform: none;
|
|
||||||
padding: 4px;
|
|
||||||
margin-bottom: .5em; }
|
|
||||||
|
|
||||||
.listing p {
|
|
||||||
padding: 0;
|
|
||||||
margin: 0; }
|
|
||||||
|
|
||||||
body {
|
|
||||||
background-color: #191b22; }
|
|
||||||
|
|
||||||
body, a, header a:visited {
|
|
||||||
color: #f2f2f2; }
|
|
||||||
|
|
||||||
pre {
|
|
||||||
border-left: 1mm solid #f2f2f233; }
|
|
||||||
|
|
||||||
a {
|
|
||||||
color: PaleTurquoise; }
|
|
||||||
|
|
||||||
.abstract, .meta {
|
|
||||||
color: #909090; }
|
|
||||||
|
|
||||||
article, .listing > li {
|
|
||||||
background-color: rgba(100, 100, 100, 0.1);
|
|
||||||
border: 1px solid rgba(100, 100, 100, 0.2); }
|
|
||||||
|
|
||||||
.pipe {
|
|
||||||
opacity: .3; }
|
|
@ -1,3 +0,0 @@
|
|||||||
## Articles
|
|
||||||
|
|
||||||
{{recent_texts_listing}}
|
|
@ -1 +0,0 @@
|
|||||||
{{title}}
|
|
@ -1,3 +0,0 @@
|
|||||||
{{date_human}}
|
|
||||||
{{link}}
|
|
||||||
{{abstract}}
|
|
@ -1,7 +0,0 @@
|
|||||||
### Topics
|
|
||||||
|
|
||||||
{{topics}}
|
|
||||||
|
|
||||||
### Recent articles
|
|
||||||
|
|
||||||
{{recent_texts_listing}}
|
|
@ -1,5 +0,0 @@
|
|||||||
# {{title}}
|
|
||||||
|
|
||||||
{{details}}
|
|
||||||
|
|
||||||
{{body}}
|
|
@ -1,82 +0,0 @@
|
|||||||
(* Accumulating validation: a report is a list of error messages,
   empty when everything checked out. *)
module Validation = struct
  (* A report with no errors. *)
  let empty = []

  (* Append an optional error message to a report. *)
  let (&>) report = function None -> report | Some msg -> msg :: report

  (* Merge a list of error messages into a report. *)
  let (&&>) report = function [] -> report | msgs -> msgs @ report

  (* [check ok msg] is [None] when [ok] holds, [Some msg] otherwise. *)
  let check ok msg = if ok then None else Some msg

  (* Check that [file] exists under [parent_dir]. *)
  let file_exists ?(msg = fun s -> s ^ " is not a file") ?(parent_dir = Fpath.v ".") file =
    let name = Fpath.(to_string (parent_dir // file)) in
    check (Sys.file_exists name) (msg name)

  (* Check that [dir] exists and is a directory. *)
  let is_directory ?(msg = fun s -> s ^ " is not a directory") dir =
    let name = Fpath.to_string dir in
    check (Sys.file_exists name && Sys.is_directory name) (msg name)

  (* Check that every file of [files] exists under [parent_dir]. *)
  let files_exist ?(msg = fun s -> s ^ " is not a file") ?(parent_dir = Fpath.v ".") files =
    List.fold_left (fun report file -> report &> file_exists ~msg ~parent_dir file) empty files

  (* Exit with status 1 when the report is non-empty, printing the
     messages (first-recorded first) unless [print_error] is false. *)
  let terminate_when_invalid ?(print_error = true) = function
    | [] -> ()
    | msgs ->
      let print i msg = prerr_endline ("Error " ^ string_of_int i ^ ": " ^ msg) in
      if print_error then List.iteri print (List.rev msgs);
      exit 1
end
|
|
||||||
|
|
||||||
(* Filesystem path helpers for locating configuration files. *)
module Path = struct
  (* Parse [str] as a path, requiring it to exist on disk. *)
  let of_string str =
    if not (Sys.file_exists str) then Error (str ^ " not found")
    else
      match Fpath.v str with
      | path -> Ok path
      | exception Invalid_argument msg -> Error ("Invalid path " ^ msg)

  (* Does [p] exist on the filesystem? *)
  let path_exists p = Sys.file_exists (Fpath.to_string p)

  (* Directories searched for configuration, most specific first.
     The $HOME entry is skipped when HOME is unset. *)
  let conventional_paths =
    let candidates =
      match Sys.getenv "HOME" with
      | home -> [ ".logarion"; home ^ "/.config/logarion"; "/etc/logarion" ]
      | exception Not_found -> [ ".logarion"; "/etc/logarion" ]
    in
    List.map Fpath.v candidates

  (* Locate [config_file] in the first conventional directory that
     contains it; [Error] lists the directories searched. *)
  let with_file ?(conventional_paths = conventional_paths) config_file =
    let (//) = Fpath.(//) in
    let basepath = Fpath.v config_file in
    match List.find (fun dir -> path_exists (dir // basepath)) conventional_paths with
    | dir -> Ok (dir // basepath)
    | exception Not_found ->
      let searched = String.concat ", " (List.map Fpath.to_string conventional_paths) in
      Error (config_file ^ " not found in: " ^ searched)
end
|
|
||||||
|
|
||||||
(* Value of an optional setting, falling back to [default]. *)
let with_default default = function
  | Some x -> x
  | None -> default

(* As [with_default], converting string settings into [Fpath.t]s. *)
let with_default_paths default = function
  | Some strings -> List.map Fpath.v strings
  | None -> default

(* Value of a setting that must be present; fails otherwise. *)
let mandatory = function
  | Some x -> x
  | None -> failwith "undefined mandatory setting"

(* Result bind: apply [b] to the payload of [Ok], propagate [Error]. *)
let (&>) a b = match a with Ok x -> b x | Error e -> Error e
|
||||||
(* Minimal interface a configuration store must provide. *)
module type Store = sig
  type t
  val from_path : Fpath.t -> (t, string) result
end

(* Convenience operations over a configuration [Store]. *)
module Make (S : Store) = struct
  include S

  (* Load a store from [path]. *)
  let of_path path = S.from_path path

  (* Re-export of the result bind defined above. *)
  let (&>) = (&>)

  (* Convert a loaded store into a configuration record. *)
  let to_record converter = function
    | Ok store -> converter store
    | Error s -> Error s

  (* As [to_record], but terminate the process on load or validation
     failure (printing the error unless [print_error] is false). *)
  let to_record_or_exit ?(print_error = true) ?(validator = fun _cfg -> []) converter store_result =
    match to_record converter store_result with
    | Ok cfg -> Validation.terminate_when_invalid (validator cfg); cfg
    | Error msg -> if print_error then prerr_endline msg; exit 1
end
|
|
@ -1,23 +0,0 @@
|
|||||||
(* TOML-backed configuration store. *)
type t = TomlTypes.table

(* Parse the TOML file at [path]. *)
let from_path path =
  match Toml.Parser.from_filename (Fpath.to_string path) with
  | `Ok toml -> Ok toml
  | `Error (str, _loc) -> Error str

open TomlLenses

(* Lens into key [b] of table [a]. *)
let (/) a b = key a |-- table |-- key b
(* Lens into sub-table [b] of table [a]. *)
let (//) a b = key a |-- table |-- key b |-- table

(* Typed getters; each [int]/[float]/... on the right-hand side is the
   TomlLenses lens of the same name (these lets are non-recursive). *)
let int toml lens = get toml (lens |-- int)

let float toml lens = get toml (lens |-- float)

let string toml lens = get toml (lens |-- string)

let strings toml lens = get toml (lens |-- array |-- strings)

(* String settings interpreted as filesystem paths. *)
let path toml lens =
  match string toml lens with
  | Some s -> Some (Fpath.v s)
  | None -> None

let paths toml lens =
  match strings toml lens with
  | Some ss -> Some (List.map Fpath.v ss)
  | None -> None
|
@ -1,7 +0,0 @@
|
|||||||
; Build definition for the Confix configuration library.
(jbuild_version 1)

(library
 ((name confix)
  (public_name logarion.confix)
  (libraries (fpath toml))))
|
|
@ -1,50 +0,0 @@
|
|||||||
(* Escape a string for inclusion in XML. *)
let esc = Xml_print.encode_unsafe_char

(* Atom feed header for the archive [config], served at [url]. *)
let header config url =
  let open Logarion.Meta in
  let open Logarion.Archive.Configuration in
  String.concat "" [
    "<title>"; config.title; "</title>";
    (* TODO: "<subtitle>A subtitle.</subtitle>" *)
    "<link rel=\"alternate\" type=\"text/html\" href=\""; url; "\"/>";
    "<link rel=\"self\" type=\"application/atom+xml\" href=\""; url; "/feed.atom\" />";
    "<id>urn:uuid:"; Id.to_string config.id; "</id>";
    "<updated>"; Ptime.to_rfc3339 (Ptime_clock.now ()); "</updated>";
  ]
|
|
||||||
|
|
||||||
(* [opt_element tag_name content] renders the XML element only when
   [content] is non-empty; otherwise it is the empty string. *)
let opt_element tag_name content =
  if content = "" then ""
  else "<" ^ tag_name ^ ">" ^ content ^ "</" ^ tag_name ^ ">"
|
|
||||||
|
|
||||||
(* One Atom <entry> element for [note], linked under [url]. *)
let entry url note =
  let open Logarion in
  let open Meta in
  let meta = note.Note.meta in
  let author_xml elt acc =
    acc ^ "<author>"
    ^ (opt_element "name" @@ esc elt.Author.name)
    ^ (opt_element "uri" @@ esc (Uri.to_string elt.Author.address))
    ^ "</author>"
  in
  let category_xml elt acc = acc ^ "<category term=\"" ^ elt ^ "\"/>" in
  "<entry>"
  ^ "<title>" ^ meta.title ^ "</title>"
  ^ "<id>urn:uuid:" ^ Meta.Id.to_string meta.uuid ^ "</id>"
  ^ "<link rel=\"alternate\" href=\"" ^ url ^ "/" ^ ("note/" ^ Meta.alias meta) ^ "\" />"
  ^ "<updated>" ^ Date.(meta.date |> listing |> rfc_string) ^ "</updated>"
  ^ Meta.AuthorSet.fold author_xml meta.authors ""
  ^ (opt_element "summary" @@ esc meta.abstract)
  ^ Meta.StringSet.fold category_xml meta.topics ""
  ^ "<content type=\"xhtml\"><div xmlns=\"http://www.w3.org/1999/xhtml\">"
  ^ (Omd.to_html @@ Omd.of_string @@ esc note.Note.body)
  ^ "</div></content>"
  ^ "</entry>"
|
|
||||||
|
|
||||||
(* Full Atom feed: header for [config] plus one entry per article whose
   note can still be retrieved via [note_fn]. *)
let feed config url note_fn articles =
  let add acc m =
    match note_fn m.Logarion.Meta.uuid with
    | Some note -> acc ^ "\n" ^ entry url note
    | None -> acc
  in
  "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<feed xmlns=\"http://www.w3.org/2005/Atom\">\n"
  ^ header config url
  ^ List.fold_left add "" articles
  ^ "</feed>"
|
|
@ -1,133 +0,0 @@
|
|||||||
open Tyxml.Html
open Logarion

(* Render a Tyxml document to a string. *)
let to_string tyxml = Format.asprintf "%a" (Tyxml.Html.pp ()) tyxml

(* <head> element: title, stylesheet, Atom feed link, charset. *)
let head ~style linker t =
  head (title (pcdata t)) [
    link ~rel:[`Stylesheet] ~href:(linker style) ();
    link ~rel:[`Alternate] ~href:(linker "/feed.atom") ~a:[a_mime_type "application/atom+xml"] ();
    meta ~a:[a_charset "utf-8"] ();
  ]

let default_style = "/static/main.css"

(* Complete page: head plus a body of [header] followed by [main]. *)
let page ?(style = default_style) linker head_title header main =
  html (head ~style linker head_title) (body [ header; main ])

(* Hyperlink to [url]. *)
let anchor url content = a ~a:[ a_href (uri_of_string url) ] content

(* <div>, classed with [style_class] when non-empty. *)
let div ?(style_class = "") content =
  let attrs = if style_class = "" then [] else [ a_class [style_class] ] in
  div ~a:attrs content

let main = main

(* Raw (unescaped) and escaped text nodes, plus element aliases used
   throughout the converters. *)
let unescaped_data = Unsafe.data
let data = pcdata
let title = h1
let header = header

(* Visual separator between metadata fields. *)
let pipe = span ~a:[a_class ["pipe"]] [pcdata " | "]
|
|
||||||
|
|
||||||
(* Metadata block shown under an article: abstract, authors, date,
   series/topics/keywords (when present) and the UUID. *)
let meta ~abstract ~authors ~date ~series ~topics ~keywords ~uuid =
  let opt_span label value =
    if String.length value > 0 then span [pipe; pcdata (label ^ value)] else pcdata ""
  in
  let author_links =
    List.fold_left (fun acc name -> a ~a:[a_rel [`Author]] [pcdata name] :: acc) [] authors
  in
  [ p ~a:[a_class ["abstract"]] [Unsafe.data abstract]; ]
  @ author_links
  @ [
    pipe;
    time ~a:[a_datetime date] [pcdata date];
    pipe;
    opt_span "series: " series;
    opt_span "topics: " topics;
    opt_span "keywords: " keywords;
    div [pcdata ("id: " ^ uuid)];
  ]
  |> div ~style_class:"meta"
|
|
||||||
|
|
||||||
let note = article

(* One table row for a text: title link, keywords, listing date. *)
let text_item path meta =
  let module Meta = Logarion.Meta in
  tr [
    td [ a ~a:[a_class ["title"]; a_href (path ^ Meta.alias meta ^ ".html")] [data meta.Meta.title] ];
    td [ span [pcdata Meta.(stringset_csv meta.keywords)] ];
    td [ time @@ [unescaped_data Meta.Date.(pretty_date (listing meta.Meta.date))] ];
  ]

(* Flat table of all [metas]. *)
let listing_texts path metas =
  table (List.map (text_item path) metas)

(* Per-topic sections: a heading plus a table for every topic that
   occurs in [metas]. *)
let listing_index path metas =
  let rows_for topic =
    List.fold_left
      Meta.(fun acc m -> if StringSet.mem topic m.topics then text_item path m :: acc else acc)
      [] metas
  in
  let section topic = [ h3 ~a:[a_id topic] [pcdata topic]; table (rows_for topic) ] in
  let all_topics =
    List.fold_left Meta.(fun acc m -> unique_topics acc m) Meta.StringSet.empty metas
    |> Meta.StringSet.elements
  in
  List.fold_left (fun acc topic -> acc @ section topic) [] all_topics
|
|
||||||
|
|
||||||
(* Tyxml renderers plugged into Mustache template folds. *)
module Renderer = struct
  (* Render a metadata tag for [meta]; date-like tags get a <time>
     wrapper, anything else is looked up by name. *)
  let meta meta e =
    let tag = List.hd e in
    match tag with
    | "urn_name" -> [unescaped_data @@ "/note/" ^ Logarion.Meta.alias meta]
    | "date" | "date_created" | "date_edited" | "date_published" | "date_human" ->
      [time @@ [unescaped_data @@ Logarion.Meta.value_with_name meta tag]]
    | _ -> [unescaped_data @@ Logarion.Meta.value_with_name meta tag]

  (* Render a note tag: "body" is the Markdown-rendered body, the rest
     falls through to [meta]. *)
  let note note e =
    match List.hd e with
    | "body" -> [unescaped_data @@ Omd.to_html @@ Omd.of_string note.Logarion.Note.body]
    | _ -> meta note.Logarion.Note.meta e

  (* Render an archive tag; unknown tags warn and render nothing. *)
  let archive archive e =
    match List.hd e with
    | "title" -> [h1 [anchor ("index.html") [data archive.Logarion.Archive.Configuration.title]]]
    | tag -> prerr_endline ("unknown tag: " ^ tag); [unescaped_data ""]
end
|
|
||||||
|
|
||||||
(* HTML form for editing a note, posting to /post.note.
   (The inner [form]/[input] names resolve to Tyxml's combinators:
   these lets are non-recursive.) *)
let form ymd =
  let open Note in
  let open Meta in
  let field caption control = p [ label [ pcdata caption; control ] ] in
  let text name value = input ~a:[a_name name; a_value value] () in
  let article_form =
    [
      input ~a:[a_name "uuid"; a_value (Id.to_string ymd.meta.uuid); a_input_type `Hidden] ();
      field "Title" (input ~a:[a_name "title"; a_value ymd.meta.title; a_required ()] ());
      field "Authors" (text "authors" (AuthorSet.to_string ymd.meta.authors));
      field "Topics" (text "topics" (stringset_csv ymd.meta.topics));
      field "Categories" (text "categories" (CategorySet.to_csv ymd.meta.categories));
      field "Keywords" (text "keywords" (stringset_csv ymd.meta.keywords));
      field "Series" (text "series" (stringset_csv ymd.meta.series));
      field "Abstract" (text "abstract" ymd.meta.abstract);
      field "Text" (textarea ~a:[a_name "body"] (pcdata ymd.body));
      p [ button ~a:[a_button_type `Submit] [pcdata "Submit"] ];
    ]
  in
  div
    [ form
        ~a:[a_method `Post; a_action (uri_of_string "/post.note"); a_accept_charset ["utf-8"]]
        [ fieldset ~legend:(legend [pcdata "Article"]) article_form ]
    ]
|
|
@ -1,5 +0,0 @@
|
|||||||
; Converters: HTML / Atom / Mustache output back-ends.
(library
 ((name converters)
  (public_name logarion.converters)
  (libraries (logarion logarion.file mustache tyxml ptime ptime.clock.os))))
|
|
@ -1,81 +0,0 @@
|
|||||||
(* Mustache template wrapper. *)
type t = Mustache.t

let of_string = Mustache.of_string

(* Load and parse a template file. *)
let of_file f = of_string (File.load f)

(* Fold callbacks shared by the converters below; unsupported Mustache
   features degrade to empty output with a stderr warning. *)
let string s = [Html.data s]
let section ~inverted:_ _name _contents = prerr_endline "Mustache sections unsupported"; []
let unescaped _elts = prerr_endline "Mustache unescaped not supported; used escaped instead"; []
let partial ?indent:_ _name _ _ = prerr_endline "Mustache sections unsupported"; []
let comment _ = [Html.data ""]
let concat = List.concat
|
|
||||||
|
|
||||||
(* Index tag renderer: currently a placeholder; see the sketch below
   for the intended behaviour. *)
let escaped_index ~from:_ ~n:_ _metas _e = [Html.data "temp"]
(* Intended implementation sketch:
   match List.hd e with
   | "topics" ->
     let topics =
       ListLabels.fold_left
         ~init:(Logarion.Meta.StringSet.empty)
         ~f:(fun a e -> Logarion.Meta.unique_topics a e) metas
     in
     Logarion.Meta.StringSet.fold
       (fun e a -> a ^ "<li><a href=\"/topic/" ^ e ^ "\">" ^ e ^ "</a></li>") topics "" *)
|
|
||||||
|
|
||||||
(* Page header rendered from a custom Mustache [template]. *)
let header_custom template _linker archive =
  Mustache.fold ~string ~section ~escaped:(Html.Renderer.archive archive)
    ~unescaped ~partial ~comment ~concat template
  |> Html.header

(* Fallback header: the archive title linking back home. *)
let header_default linker archive =
  Html.(header [title [anchor (linker "/") [data archive.Logarion.Archive.Configuration.title]]])
|
|
||||||
|
|
||||||
(* Render a note's metadata block via [Html.meta]. *)
let meta meta =
  let open Logarion.Meta in
  Html.meta
    ~abstract:meta.abstract
    ~authors:(List.map (fun elt -> elt.Author.name) @@ AuthorSet.elements meta.authors)
    ~date:Date.(pretty_date @@ listing meta.date)
    ~series:(stringset_csv meta.series)
    ~topics:(stringset_csv meta.topics)
    ~keywords:(stringset_csv meta.keywords)
    ~uuid:(Id.to_string meta.uuid)
|
|
||||||
|
|
||||||
(* Note body rendered from a custom Mustache [template]. *)
let body_custom template note =
  Mustache.fold ~string ~section ~escaped:(Html.Renderer.note note)
    ~unescaped ~partial ~comment ~concat template
  |> Html.note

(* Fallback body: title, metadata block, Markdown-rendered content. *)
let body_default note =
  Html.note [
    (* Don't add title if body contains one *)
    Html.title [Html.unescaped_data note.Logarion.Note.meta.Logarion.Meta.title];
    meta note.meta;
    Html.unescaped_data @@ Omd.to_html @@ Omd.of_string note.Logarion.Note.body;
  ]
|
|
||||||
|
|
||||||
(* Render a complete page to a string. *)
let page ~style linker title header body =
  Html.to_string @@ Html.page ~style linker title header body

(* Configured template path for key [k] of the [templates] section. *)
let of_config config k =
  match config with
  | Error msg -> prerr_endline ("Couldn't load [templates] section;" ^ msg); None
  | Ok c ->
    let open Confix.ConfixToml in
    path c ("templates" / k)

(* Use [custom] with the configured template when it exists on disk,
   otherwise fall back to [default]. *)
let converter default custom = function
  | None -> default
  | Some p ->
    if Confix.Config.Path.path_exists p then custom @@ of_file p
    else (prerr_endline @@ "Couldn't find: " ^ Fpath.to_string p; default)

let header_converter config = converter header_default header_custom @@ of_config config "header"
let body_converter config = converter body_default body_custom @@ of_config config "body"

let default_style = Html.default_style

(* Page constructors for the index, the log listing, a single note and
   an arbitrary message page. *)
let page_of_index ~style linker header archive metas =
  page ~style linker ("Index | " ^ archive.Logarion.Archive.Configuration.title)
    (header linker archive) (Html.main (Html.listing_index "" metas))

let page_of_log ~style linker header archive metas =
  page ~style linker ("Log | " ^ archive.Logarion.Archive.Configuration.title)
    (header linker archive) (Html.main [Html.listing_texts "" metas])

let page_of_note ~style linker header body archive note =
  page ~style linker note.Logarion.Note.meta.Logarion.Meta.title
    (header linker archive) (body note)

let page_of_msg ~style linker header archive title msg =
  page ~style linker title (header linker archive) (Html.div [Html.data msg])
|
|
@ -1,89 +0,0 @@
|
|||||||
module Id = Meta.Id

(* URL-friendly alias of a text. *)
type alias_t = string

(* Archive-wide configuration, read from the [archive] TOML section. *)
module Configuration = struct
  type t = {
    repository : Lpath.repo_t;  (* root directory of the note repository *)
    title : string;
    owner : string;
    email : string;
    id : Id.t;                  (* archive UUID *)
  }

  (* Build a configuration from a parsed config store; any [Failure]
     raised while reading a field becomes an [Error]. *)
  let of_config config =
    let open Confix in
    let open Confix.Config in
    let str k = ConfixToml.(string config ("archive" / k)) in
    try
      let repository =
        try Lpath.repo_of_string (str "repository" |> with_default ".")
        with
        | Invalid_argument s -> failwith ("Invalid repository: " ^ s)
        | Failure s -> failwith ("Missing repository value: " ^ s)
      in
      let id =
        match Id.of_string (str "uuid" |> mandatory) with
        | Some id -> id
        | None -> failwith "Invalid UUID in config"
      in
      Ok {
        repository;
        title = str "title" |> with_default "";
        owner = str "owner" |> with_default "";
        email = str "email" |> with_default "";
        id;
      }
    with Failure str -> Error str

  (* The configured repository directory must exist. *)
  let validity config =
    let repo = Lpath.fpath_of_repo config.repository in
    let open Confix.Config.Validation in
    empty &> is_directory repo
end
|
|
||||||
|
|
||||||
module AliasMap = Meta.AliasMap

(* Archive operations over a concrete note store. *)
module Make (Store : Store.T) = struct
  type t = {
    config : Configuration.t;
    store : Store.t;
  }

  let note_lens note = note
  let meta_lens note = note.Note.meta

  (* Order metas by their date record. *)
  let recency_order a b = Meta.(Date.compare a.date b.date)

  (* All metas in recency order. *)
  let latest archive =
    Store.to_list ~order:recency_order meta_lens archive.store

  (* Metas not categorised as unlisted. *)
  let listed archive =
    Store.to_list meta_lens archive.store
    |> List.filter Meta.(fun e -> CategorySet.listed e.categories)

  (* Metas categorised as published. *)
  let published archive =
    Store.to_list meta_lens archive.store
    |> List.filter Meta.(fun e -> CategorySet.published e.categories)

  (* Listed metas in recency order. *)
  let latest_listed archive =
    Store.to_list ~order:recency_order meta_lens archive.store
    |> List.filter Meta.(fun e -> CategorySet.listed e.categories)

  (* Metas tagged with [topic], in recency order. *)
  let with_topic archive topic =
    Store.to_list ~order:recency_order meta_lens archive.store
    |> List.filter Meta.(fun e -> StringSet.exists (fun t -> t = topic) e.topics)

  (* Union of all topics across the archive. *)
  let topics archive =
    Store.to_list meta_lens archive.store
    |> List.fold_left Meta.(fun a e -> unique_topics a e) Meta.StringSet.empty

  (* First meta (in recency order) whose title matches [fragment]
     (regex match anchored at position 0). *)
  let latest_entry archive fragment =
    let metas = Store.to_list ~order:recency_order meta_lens archive.store in
    let matches e = Re.Str.(string_match (regexp fragment)) e.Meta.title 0 in
    match List.find matches metas with
    | m -> Some m
    | exception Not_found -> None

  let note_with_id archive id = Store.note_with_id archive.store id
  let note_with_alias archive alias = Store.note_with_alias archive.store alias

  let with_note archive note = Store.with_note archive.store note

  (* Elements of [list] whose index lies in [from]..[n].
     NOTE(review): the accumulator conses, so the result is in reverse
     order, and [n] acts as an end index rather than a count — behaviour
     preserved as-is; confirm against callers. *)
  let sublist ~from ~n list =
    let step (i, acc) e = (succ i, if i >= from && i <= n then e :: acc else acc) in
    snd (List.fold_left step (0, []) list)
end
|
|
@ -1,5 +0,0 @@
|
|||||||
; Core Logarion library.
(library
 ((name logarion)
  (public_name logarion)
  (libraries (confix omd ptime lwt uuidm uri re.str emile))))
|
|
@ -1,25 +0,0 @@
|
|||||||
open Fpath

(* Paths distinguished by role: a repository root, and a note file
   located relative to a repository. *)
type repo_t = Repo of t
type note_t = Note of { repo: repo_t; basename: t }

let fpath_of_repo (Repo p) = p
let string_of_repo r = to_string (fpath_of_repo r)
let repo_of_string s = Repo (v s)

let fpath_of_note (Note n) = fpath_of_repo n.repo // n.basename
let string_of_note n = to_string (fpath_of_note n)
let note_of_basename repo s = Note { repo; basename = v s }

(* Alias: the note basename without its extension. *)
let alias_of_note (Note n) = to_string (rem_ext n.basename)
let note_of_alias repo extension alias = note_of_basename repo (alias ^ extension)

(* First non-existing "<alias>.<version>.<extension>" under [repo],
   counting up from [version]. *)
let versioned_basename_of_title ?(version = 0) repo extension (title : string) =
  let repo_path = fpath_of_repo repo in
  let base = v @@ Meta.string_alias title in
  let rec next version =
    let candidate = base |> add_ext (string_of_int version) |> add_ext extension in
    if Sys.file_exists (to_string (repo_path // candidate))
    then next (succ version)
    else note_of_basename repo (to_string candidate)
  in
  next version
|
|
222
src/core/meta.ml
222
src/core/meta.ml
@ -1,222 +0,0 @@
|
|||||||
(* Creation / publication / edit timestamps of a text; each is optional. *)
module Date = struct
  type t = {
    created: Ptime.t option;
    published: Ptime.t option;
    edited: Ptime.t option;
  } [@@deriving lens { submodule = true }]

  (* RFC 3339 rendering; empty string when absent. *)
  let rfc_string = function Some t -> Ptime.to_rfc3339 t | None -> ""

  let of_string (rfc : string) =
    match Ptime.of_rfc3339 rfc with
    | Ok (t, _, _) -> Some t
    | Error _ -> None

  (* Date shown in listings: publication date first, then creation. *)
  let listing date =
    match date.published, date.created with
    | Some _, _ -> date.published
    | None, Some _ -> date.created
    | None, None -> None

  (* NOTE(review): polymorphic comparison over the whole record,
     preserved as-is. *)
  let compare = compare

  (* "YYYY-MM-DD", or the empty string when absent. *)
  let pretty_date = function
    | Some t -> Ptime.to_date t |> fun (y, m, d) -> Printf.sprintf "%04d-%02d-%02d" y m d
    | None -> ""
end
|
|
||||||
|
|
||||||
(* UUID-based text identifiers. *)
module Id = struct
  (* Shared seed for UUID generation. *)
  let random_state = Random.State.make_self_init ()

  type t = Uuidm.t

  let compare = Uuidm.compare
  let to_string = Uuidm.to_string
  let of_string = Uuidm.of_string

  (* Generator of fresh random (v4) UUIDs. *)
  let generate ?(random_state = random_state) = Uuidm.v4_gen random_state
end
|
|
||||||
|
|
||||||
(* A text author: display name plus contact URI. *)
module Author = struct
  type name_t = string
  type address_t = Uri.t

  type t = {
    name: name_t;
    address: address_t;
  } [@@deriving lens { submodule = true } ]

  let empty = { name = ""; address = Uri.empty }

  let compare = compare
end
|
|
||||||
|
|
||||||
(* Sets of authors, with conversions to and from address-list strings. *)
module AuthorSet = struct
  include Set.Make(Author)

  (* "Name <uri>, Name <uri>, ..." *)
  let to_string authors =
    let render a = a.Author.name ^ " <" ^ Uri.to_string a.Author.address ^ ">" in
    (* [render] always yields at least "<...>", so the length guard only
       skips the separator on the first element. *)
    let join elt acc = if String.length acc > 1 then acc ^ ", " ^ render elt else render elt in
    fold join authors ""

  (* Parse an address list with Emile; unparseable input yields the
     empty set with a warning on stderr. *)
  let of_string s =
    match Emile.List.of_string s with
    | Error _ -> prerr_endline @@ "Error parsing: " ^ s; empty
    | Ok emails ->
      let to_author =
        let module L = List in
        let open Emile in
        function
        | `Group _ -> prerr_endline @@ "Can't deal with groups in author: " ^ s; Author.empty
        | `Mailbox { name; local; _ } ->
          (* Flatten a display-name phrase into a plain string. *)
          let s_of_phrase = function
            | `Dot -> ""
            | `Word w -> (match w with `Atom a -> a | `String s -> s)
            | `Encoded _ -> ""
          in
          let name =
            match name with
            | None -> ""
            | Some phrase -> L.fold_left (fun a e -> a ^ s_of_phrase e) "" phrase
          in
          let address =
            (* TODO: Author address unimplemented — the domain part is dropped. *)
            L.fold_left (fun a e -> a ^ match e with `Atom a -> a | `String s -> s) "" local ^ "@"
          in
          Author.{ name; address = Uri.of_string address }
      in
      of_list @@ List.map to_author emails
end
|
|
||||||
|
|
||||||
(* Publication category of a text; unknown names become [Custom]. *)
module Category = struct
  type t = Draft | Unlisted | Published | Custom of string

  let compare = compare

  let of_string = function
    | "draft" -> Draft
    | "unlisted" -> Unlisted
    | "published" -> Published
    | c -> Custom c

  let to_string = function
    | Draft -> "draft"
    | Unlisted -> "unlisted"
    | Published -> "published"
    | Custom c -> c
end

(* Category sets plus the publication predicates derived from them. *)
module CategorySet = struct
  include Set.Make(Category)

  (* Comma-separated rendering, in set order. *)
  let to_csv set =
    let join elt acc =
      let s = Category.to_string elt in
      if acc <> "" then acc ^ ", " ^ s else s
    in
    fold join set ""

  (* Does [cs] contain all of [categs]? *)
  let categorised categs cs = subset (of_list categs) cs

  let published = categorised [Category.Published]
  let listed cs = not (categorised [Category.Unlisted] cs)
end
|
|
||||||
|
|
||||||
module StringSet = Set.Make(String)

(* Comma-separated rendering of a string set, in set order. *)
let stringset_csv set =
  let join elt acc = if acc <> "" then acc ^ ", " ^ elt else elt in
  StringSet.fold join set ""

(* URL-friendly alias from a title: lowercased, with runs of
   URI-reserved characters and whitespace collapsed to single dashes. *)
let string_alias t =
  let is_reserved = function
    | '!' | '*' | '\'' | '(' | ')' | ';' | ':' | '@' | '&' | '=' | '+' | '$'
    | ',' | '/' | '?' | '#' | '[' | ']' | ' ' | '\t' | '\x00' -> true
    | _ -> false
  in
  let b = Buffer.create (String.length t) in
  let push char =
    if not (is_reserved char) then Buffer.add_char b char
    else
      (* Only emit a dash after a non-dash character; a leading reserved
         character produces no output (Buffer.nth raises on empty). *)
      try if Buffer.nth b (Buffer.length b - 1) <> '-' then Buffer.add_char b '-'
      with Invalid_argument _ -> prerr_endline "reserved"
  in
  String.iter push (String.lowercase_ascii t);
  Buffer.contents b
|
|
||||||
|
|
||||||
(* Metadata of a text. *)
type t = {
  title: string;
  authors: AuthorSet.t;
  date: Date.t;
  categories: CategorySet.t;
  topics: StringSet.t;
  keywords: StringSet.t;
  series: StringSet.t;
  abstract: string;
  uuid: Id.t;
  alias: string;    (* explicit alias; "" means derive it from the title *)
} [@@deriving lens { submodule = true }]

(* Empty metadata carrying a fresh (or supplied) UUID. *)
let blank ?(uuid = Id.generate ()) () = {
  title = "";
  authors = AuthorSet.empty;
  date = Date.({ created = None; edited = None; published = None });
  categories = CategorySet.empty;
  topics = StringSet.empty;
  keywords = StringSet.empty;
  series = StringSet.empty;
  abstract = "";
  uuid;
  alias = "";
}
|
|
||||||
|
|
||||||
let listed e = CategorySet.listed e.categories
let published e = CategorySet.published e.categories

(* Accumulate the topics of [x] into [ts]. *)
let unique_topics ts x = StringSet.union ts x.topics

module AliasMap = Map.Make(String)
module IdMap = Map.Make(Id)

(* Effective alias: the explicit one, or one derived from the title. *)
let alias meta = if meta.alias = "" then string_alias meta.title else meta.alias

(* Field value by capitalised field name; raises [Invalid_argument] on
   unknown names. *)
let value_with_name (_meta as m) = function
  | "Title" -> m.title
  | "Abstract" -> m.abstract
  | "Authors" -> AuthorSet.to_string m.authors
  | "Date" -> Date.(rfc_string m.date.created)
  | "Edited" -> Date.(rfc_string m.date.edited)
  | "Published" -> Date.(rfc_string m.date.published)
  | "Human" -> Date.(pretty_date @@ listing m.date)
  | "Topics" -> stringset_csv m.topics
  | "Categories" -> CategorySet.to_csv m.categories
  | "Keywords" -> stringset_csv m.keywords
  | "Series" -> stringset_csv m.series
  | "ID" -> Id.to_string m.uuid
  | "Alias" -> alias m
  | e -> invalid_arg e
|
|
||||||
|
|
||||||
(* Fold one parsed "Key: value" pair into [meta]; unknown keys warn on
   stderr and leave the record unchanged. *)
let with_kv meta (k, v) =
  let list_of_csv = Re.Str.(split (regexp " *, *")) in
  let trim = String.trim in
  let set_of_csv v = trim v |> list_of_csv |> StringSet.of_list in
  match k with
  | "Title" -> { meta with title = trim v }
  | "Author"
  | "Authors" -> { meta with authors = AuthorSet.of_string (trim v) }
  | "Abstract" -> { meta with abstract = trim v }
  | "Date" -> { meta with date = Date.{ meta.date with created = Date.of_string v } }
  | "Published" -> { meta with date = Date.{ meta.date with published = Date.of_string v } }
  | "Edited" -> { meta with date = Date.{ meta.date with edited = Date.of_string v } }
  | "Topics" -> { meta with topics = set_of_csv v }
  | "Keywords" -> { meta with keywords = set_of_csv v }
  | "Categories" ->
    let categories =
      trim v |> list_of_csv |> List.map Category.of_string |> CategorySet.of_list
    in
    { meta with categories }
  | "Series" -> { meta with series = set_of_csv v }
  | "ID" -> (match Id.of_string v with Some id -> { meta with uuid = id } | None -> meta)
  | "Alias" -> { meta with alias = v }
  | k -> prerr_endline ("Unknown key: " ^ k ^ ", with value: " ^ v ); meta
|
|
||||||
|
|
||||||
(* Serialise metadata back to "Key: value" lines; empty fields are
   omitted entirely. *)
let to_string (_meta as m) =
  (* One "Field: value\n" line, or nothing when [value] is empty. *)
  let s field value = if String.length value > 0 then field ^ ": " ^ value ^ "\n" else "" in
  let a value =
    if AuthorSet.is_empty value then "" else "Authors: " ^ AuthorSet.to_string value ^ "\n"
  in
  let d field value =
    match value with
    | Some _ -> field ^ ": " ^ Date.rfc_string value ^ "\n"
    | None -> ""
  in
  String.concat "" [
    s "Title" m.title;
    a m.authors;
    d "Date" m.date.Date.created;
    d "Edited" m.date.Date.edited;
    d "Published" m.date.Date.published;
    s "Topics" (stringset_csv m.topics);
    s "Categories" (CategorySet.to_csv m.categories);
    s "Keywords" (stringset_csv m.keywords);
    s "Series" (stringset_csv m.series);
    s "Abstract" m.abstract;
    s "ID" (Uuidm.to_string m.uuid);
    s "Alias" m.alias;
  ]
|
|
@ -1,47 +0,0 @@
|
|||||||
(* A text: front-matter metadata plus a Markdown body. *)
type t = {
  meta: Meta.t;
  body: string;
} [@@deriving lens { submodule = true }]

(* Empty note carrying a fresh (or supplied) UUID. *)
let blank ?(uuid = Meta.Id.generate ()) () = { meta = Meta.blank ~uuid (); body = "" }

(* Title from metadata, falling back to the body's first H1 heading. *)
let title ymd =
  let meta_title = ymd.meta.Meta.title in
  if String.length meta_title > 0 then meta_title
  else
    let open Omd in
    match List.find (function H1 _ -> true | _ -> false) (Omd.of_string ymd.body) with
    | H1 h -> to_text h
    | _ -> ""
    | exception Not_found -> ""

let categorised categs ymd = Meta.CategorySet.categorised categs ymd.meta.Meta.categories

(* Fold a "key: value" pair into the note; "body" sets the body,
   anything else is metadata. *)
let with_kv ymd (k, v) =
  match k with
  | "body" -> { ymd with body = String.trim v }
  | _ -> { ymd with meta = Meta.with_kv ymd.meta (k, v) }
|
|
||||||
|
|
||||||
let meta_pair_of_string line = match Re.Str.(bounded_split (regexp ": *")) line 2 with
|
|
||||||
| [ key; value ] -> Re.Str.(replace_first (regexp "^#\\+") "" key), value
|
|
||||||
| [ key ] -> Re.Str.(replace_first (regexp "^#\\+") "" key), ""
|
|
||||||
| _ -> prerr_endline line; ("","")
|
|
||||||
|
|
||||||
let meta_of_string front_matter =
|
|
||||||
let fields = List.map meta_pair_of_string (String.split_on_char '\n' front_matter) in
|
|
||||||
List.fold_left Meta.with_kv (Meta.blank ()) fields
|
|
||||||
|
|
||||||
exception Syntax_error of string
|
|
||||||
|
|
||||||
let front_matter_body_split s =
|
|
||||||
if Re.Str.(string_match (regexp ".*:.*")) s 0
|
|
||||||
then match Re.Str.(bounded_split (regexp "\n\n")) s 2 with
|
|
||||||
| front::body::[] -> (front, body)
|
|
||||||
| _ -> ("", s)
|
|
||||||
else ("", s)
|
|
||||||
|
|
||||||
let of_string s =
|
|
||||||
let (front_matter, body) = front_matter_body_split s in
|
|
||||||
try
|
|
||||||
let note = { meta = meta_of_string front_matter; body } in
|
|
||||||
{ note with meta = { note.meta with title = title note } }
|
|
||||||
with _ -> prerr_endline ("Failed parsing" ^ s); blank ()
|
|
||||||
|
|
||||||
let to_string ymd = Meta.to_string ymd.meta ^ "\n" ^ ymd.body
|
|
@ -1,7 +0,0 @@
|
|||||||
module type T = sig
|
|
||||||
type t
|
|
||||||
val to_list: ?order:('a -> 'a -> int) -> (Note.t -> 'a) -> t -> 'a list
|
|
||||||
val note_with_id: t -> Meta.Id.t -> Note.t option
|
|
||||||
val note_with_alias: t -> string -> Note.t option
|
|
||||||
val with_note: t -> Note.t -> Note.t Lwt.t
|
|
||||||
end
|
|
16
src/jbuild
16
src/jbuild
@ -1,16 +0,0 @@
|
|||||||
(executable
|
|
||||||
((name logarion_cli)
|
|
||||||
(public_name logarion_cli)
|
|
||||||
(modules logarion_cli)
|
|
||||||
(libraries (logarion logarion.confix logarion.converters logarion.file re.str cmdliner bos))))
|
|
||||||
|
|
||||||
(install
|
|
||||||
((section share)
|
|
||||||
(files (
|
|
||||||
(../share/config.toml as config.toml)
|
|
||||||
(../share/template/frontpage.mustache as template/frontpage.mustache)
|
|
||||||
(../share/template/header.mustache as template/header.mustache)
|
|
||||||
(../share/template/item.mustache as template/item.mustache)
|
|
||||||
(../share/template/list.mustache as template/list.mustache)
|
|
||||||
(../share/template/note.mustache as template/note.mustache)
|
|
||||||
))))
|
|
@ -1,176 +0,0 @@
|
|||||||
let version = "0.5"
|
|
||||||
open Cmdliner
|
|
||||||
open Logarion
|
|
||||||
module C = Archive.Configuration
|
|
||||||
module Lpath = Logarion.Lpath
|
|
||||||
|
|
||||||
let conf () =
|
|
||||||
let module Config = Confix.Config.Make (Confix.ConfixToml) in
|
|
||||||
let archive_res =
|
|
||||||
let open Confix.Config in
|
|
||||||
Confix.Config.Path.with_file "config.toml"
|
|
||||||
&> Config.from_path
|
|
||||||
|> Config.to_record C.of_config
|
|
||||||
in
|
|
||||||
match archive_res with
|
|
||||||
| Ok config -> config
|
|
||||||
| Error str -> prerr_endline str; exit 1
|
|
||||||
|
|
||||||
let create_dir dir = Bos.OS.Dir.create (Fpath.v dir)
|
|
||||||
|
|
||||||
let create_dir_msg ?(descr="") dir res =
|
|
||||||
let () = match res with
|
|
||||||
| Ok true -> print_endline ("Created " ^ descr ^ " directory " ^ dir)
|
|
||||||
| Ok false -> print_endline ("Reinitialise existing " ^ descr ^ " directory " ^ dir)
|
|
||||||
| Error (`Msg msg) -> prerr_endline @@ "Failed making " ^ descr ^ ". " ^ msg
|
|
||||||
in
|
|
||||||
res
|
|
||||||
|
|
||||||
let copy ?(recursive = false) src dst =
|
|
||||||
Bos.OS.Cmd.run (Bos.Cmd.(v "cp" %% (on recursive @@ v "-r") % src % dst))
|
|
||||||
|
|
||||||
let init _force =
|
|
||||||
let rec create_dirs = function
|
|
||||||
| [] -> ()
|
|
||||||
| (dir,descr)::tl ->
|
|
||||||
match create_dir dir |> create_dir_msg ~descr dir with
|
|
||||||
| Ok _ -> create_dirs tl
|
|
||||||
| Error _ -> ()
|
|
||||||
in
|
|
||||||
let dirs = [
|
|
||||||
".logarion", "Logarion";
|
|
||||||
".logarion/static", "static files";
|
|
||||||
".logarion/html-templates", "templates";
|
|
||||||
]
|
|
||||||
in
|
|
||||||
let toml_data =
|
|
||||||
let open Toml in
|
|
||||||
let open TomlTypes in
|
|
||||||
of_key_values [
|
|
||||||
key "archive",
|
|
||||||
TTable (
|
|
||||||
of_key_values [
|
|
||||||
key "title", TString "";
|
|
||||||
key "owner", TString (Bos.OS.Env.opt_var "USER" ~absent:"");
|
|
||||||
key "email", TString (Bos.OS.Env.opt_var "EMAIL" ~absent:"");
|
|
||||||
key "uuid", TString (Meta.Id.(generate () |> to_string));
|
|
||||||
]);
|
|
||||||
key "web",
|
|
||||||
TTable (
|
|
||||||
of_key_values [
|
|
||||||
key "url", TString "http://localhost:3666";
|
|
||||||
key "stylesheets", TArray ( NodeString ["main.css"] );
|
|
||||||
key "static_dir", TString ".logarion/static";
|
|
||||||
]);
|
|
||||||
key "templates", TTable (of_key_values []);
|
|
||||||
]
|
|
||||||
in
|
|
||||||
create_dirs dirs;
|
|
||||||
let config_file = open_out "config.toml" in
|
|
||||||
output_bytes config_file (Toml.Printer.string_of_table toml_data |> Bytes.of_string);
|
|
||||||
close_out config_file
|
|
||||||
|
|
||||||
let init_term =
|
|
||||||
let force =
|
|
||||||
let doc = "Initialise repository even if directory is non empty" in
|
|
||||||
Arg.(value & flag & info ["f"; "force"] ~doc)
|
|
||||||
in
|
|
||||||
Term.(const init $ force),
|
|
||||||
Term.info
|
|
||||||
"init" ~doc:"initialise a logarion repository in present directory"
|
|
||||||
~man:[ `S "DESCRIPTION"; `P "Create a repository in current directory" ]
|
|
||||||
|
|
||||||
let create_term =
|
|
||||||
let title =
|
|
||||||
Arg.(value & pos 0 string "" & info [] ~docv:"TITLE" ~doc:"Title for new article")
|
|
||||||
in
|
|
||||||
let f title =
|
|
||||||
let conf = conf () in
|
|
||||||
let t = match title with "" -> "Draft" | _ -> title in
|
|
||||||
let note =
|
|
||||||
let meta =
|
|
||||||
let open Meta in
|
|
||||||
let authors = AuthorSet.singleton Author.({ name = conf.C.owner; address = Uri.of_string conf.C.email }) in
|
|
||||||
let date = Date.({ created = Some (Ptime_clock.now ()); published = None; edited = None }) in
|
|
||||||
{ (blank ()) with title = t; authors; date }
|
|
||||||
in
|
|
||||||
Note.({ (blank ()) with meta })
|
|
||||||
in
|
|
||||||
File.Lwt.with_note (File.store conf.C.repository) note
|
|
||||||
|> Lwt_main.run
|
|
||||||
|> ignore
|
|
||||||
in
|
|
||||||
Term.(const f $ title),
|
|
||||||
Term.info "create"
|
|
||||||
~doc:"create a new article"
|
|
||||||
~man:[ `S "DESCRIPTION"; `P "Create a new article, with title 'Draft' when none provided"]
|
|
||||||
|
|
||||||
let convert directory =
|
|
||||||
let module Config = Confix.Config.Make (Confix.ConfixToml) in
|
|
||||||
|
|
||||||
let toml_config =
|
|
||||||
let open Confix.Config in
|
|
||||||
Path.with_file "config.toml"
|
|
||||||
|> function Ok cfg -> Config.from_path cfg | Error str -> prerr_endline str; exit 1
|
|
||||||
in
|
|
||||||
let config = Config.to_record_or_exit Logarion.Archive.Configuration.of_config toml_config in
|
|
||||||
|
|
||||||
let module L = Logarion.Archive.Make(File) in
|
|
||||||
let store = File.store config.repository in
|
|
||||||
let archive = L.{ config; store } in
|
|
||||||
let notes =
|
|
||||||
List.filter Meta.(fun n -> CategorySet.published n.Note.meta.categories)
|
|
||||||
@@ File.to_list L.note_lens archive.store
|
|
||||||
in
|
|
||||||
let metas =
|
|
||||||
List.filter Meta.(fun m -> CategorySet.published m.categories && CategorySet.listed m.categories)
|
|
||||||
@@ File.to_list ~order:(L.recency_order) L.meta_lens archive.store
|
|
||||||
in
|
|
||||||
|
|
||||||
let template_config = toml_config in
|
|
||||||
let module T = Converters.Template in
|
|
||||||
let header = T.header_converter template_config in
|
|
||||||
let body = T.body_converter template_config in
|
|
||||||
let style = T.default_style in
|
|
||||||
let linker x = match Fpath.(relativize ~root:(v "/") (v x)) with Some l -> Fpath.to_string l | None -> "" in
|
|
||||||
let page_of_log metas = T.page_of_log linker header config metas in
|
|
||||||
let page_of_index metas = T.page_of_index linker header config metas in
|
|
||||||
let page_of_note note = T.page_of_note linker header body config note in
|
|
||||||
let path_of_note note = directory ^ "/" ^ Meta.alias note.Note.meta ^ ".html" in
|
|
||||||
let file_creation path content =
|
|
||||||
let out = open_out path in
|
|
||||||
output_string out content;
|
|
||||||
close_out out
|
|
||||||
in
|
|
||||||
match create_dir directory |> create_dir_msg ~descr:"export" directory with
|
|
||||||
| Error _ -> ()
|
|
||||||
| Ok _ ->
|
|
||||||
match copy ~recursive:true ".logarion/static" (directory) with
|
|
||||||
| Ok _ ->
|
|
||||||
let note_write note = file_creation (path_of_note note) (page_of_note ~style note) in
|
|
||||||
List.iter note_write notes;
|
|
||||||
file_creation (directory ^ "/log.html") (page_of_log ~style metas);
|
|
||||||
file_creation (directory ^ "/index.html") (page_of_index ~style metas);
|
|
||||||
file_creation (directory ^ "/feed.atom") (Converters.Atom.feed config "/" (L.note_with_id archive) metas)
|
|
||||||
| Error (`Msg m) -> prerr_endline m
|
|
||||||
|
|
||||||
let convert_term =
|
|
||||||
let directory =
|
|
||||||
Arg.(value & pos 0 string "html-conversion" & info [] ~docv:"Directory" ~doc:"Directory to convert to")
|
|
||||||
in
|
|
||||||
Term.(const convert $ directory),
|
|
||||||
Term.info
|
|
||||||
"convert" ~doc:"convert archive to HTML"
|
|
||||||
~man:[ `S "DESCRIPTION"; `P "Create a repository in current directory" ]
|
|
||||||
|
|
||||||
let default_cmd =
|
|
||||||
Term.(ret (const (`Help (`Pager, None)))),
|
|
||||||
Term.info "logarion" ~version ~doc:"an article collection & publishing system"
|
|
||||||
~man:[ `S "BUGS";
|
|
||||||
`P "Submit bugs <mailto:logarion@lists.orbitalfox.eu?subject=[Issue] summary-here>"; ]
|
|
||||||
|
|
||||||
let cmds = [ init_term; create_term; convert_term ]
|
|
||||||
|
|
||||||
let () =
|
|
||||||
Random.self_init();
|
|
||||||
match Term.eval_choice default_cmd cmds with `Error _ -> exit 1 | _ -> exit 0
|
|
@ -1,112 +0,0 @@
|
|||||||
let extensions = [ ".md"; ".org" ]
|
|
||||||
|
|
||||||
open Logarion
|
|
||||||
let load f =
|
|
||||||
let ic = open_in (Fpath.to_string f) in
|
|
||||||
let n = in_channel_length ic in
|
|
||||||
let s = Bytes.create n in
|
|
||||||
really_input ic s 0 n;
|
|
||||||
close_in ic;
|
|
||||||
Bytes.to_string s
|
|
||||||
|
|
||||||
let note path = Lpath.fpath_of_note path |> load |> Note.of_string
|
|
||||||
|
|
||||||
type t = { repo_path : Lpath.repo_t }
|
|
||||||
|
|
||||||
let note_filetype name =
|
|
||||||
try Fpath.(mem_ext extensions @@ v name) with
|
|
||||||
| Invalid_argument _ -> false
|
|
||||||
|
|
||||||
let to_list ?(order) lens_fn store =
|
|
||||||
let repo_path = store.repo_path in
|
|
||||||
let cons_valid_meta list path =
|
|
||||||
try
|
|
||||||
let note = note (Lpath.note_of_basename repo_path path) in
|
|
||||||
lens_fn note :: list
|
|
||||||
with Note.Syntax_error str -> prerr_endline str; list
|
|
||||||
in
|
|
||||||
Lpath.string_of_repo repo_path
|
|
||||||
|> Sys.readdir
|
|
||||||
|> Array.to_list
|
|
||||||
|> List.filter note_filetype
|
|
||||||
|> List.fold_left cons_valid_meta []
|
|
||||||
|> match order with
|
|
||||||
| Some fn -> List.fast_sort fn
|
|
||||||
| None -> (fun x -> x)
|
|
||||||
|
|
||||||
let note_with_id store id =
|
|
||||||
let repo_path = store.repo_path in
|
|
||||||
let note_of_path path = note (Lpath.note_of_basename repo_path path) in
|
|
||||||
let with_id path =
|
|
||||||
try
|
|
||||||
let note = note_of_path path in
|
|
||||||
note.Note.meta.Meta.uuid = id
|
|
||||||
with Note.Syntax_error str -> prerr_endline str; false
|
|
||||||
in
|
|
||||||
let notes =
|
|
||||||
Lpath.string_of_repo repo_path
|
|
||||||
|> Sys.readdir
|
|
||||||
|> Array.to_list
|
|
||||||
|> List.filter note_filetype
|
|
||||||
in
|
|
||||||
try Some (note_of_path (List.find with_id notes))
|
|
||||||
with Not_found -> None
|
|
||||||
|
|
||||||
let note_with_alias store alias =
|
|
||||||
let repo_path = store.repo_path in
|
|
||||||
let cons_valid_meta list path =
|
|
||||||
try (note (Lpath.note_of_basename repo_path path)) :: list
|
|
||||||
with Note.Syntax_error str -> prerr_endline str; list
|
|
||||||
in
|
|
||||||
let recency_order a b = Meta.(Date.compare b.date a.date) in
|
|
||||||
let notes =
|
|
||||||
Lpath.string_of_repo repo_path
|
|
||||||
|> Sys.readdir
|
|
||||||
|> Array.to_list
|
|
||||||
|> List.filter note_filetype
|
|
||||||
|> List.fold_left cons_valid_meta []
|
|
||||||
|> List.filter (fun note -> Meta.alias note.Note.meta = alias)
|
|
||||||
|> List.fast_sort (fun a b -> recency_order a.Note.meta b.Note.meta)
|
|
||||||
in
|
|
||||||
try Some (List.hd notes)
|
|
||||||
with Failure _ -> None
|
|
||||||
|
|
||||||
let notepath_with_id _store _id = None
|
|
||||||
|
|
||||||
let store repo_path = { repo_path }
|
|
||||||
|
|
||||||
module Lwt = struct
|
|
||||||
let of_filename f =
|
|
||||||
let open Lwt in
|
|
||||||
Lwt_io.(open_file ~mode:(Input) f >|= read_lines)
|
|
||||||
>|= (fun stream -> Lwt_stream.fold (^) stream "")
|
|
||||||
|
|
||||||
let with_note store new_note =
|
|
||||||
let extension = List.hd extensions in
|
|
||||||
let open Lwt in
|
|
||||||
let open Lwt.Infix in
|
|
||||||
let store =
|
|
||||||
let write_note out = Lwt_io.write out (Note.to_string new_note) in
|
|
||||||
match notepath_with_id store new_note.Note.meta.Meta.uuid with
|
|
||||||
| Some previous_path ->
|
|
||||||
let filepath =
|
|
||||||
let open Note in
|
|
||||||
let open Meta in
|
|
||||||
if (note previous_path).meta.title <> new_note.meta.title
|
|
||||||
then Lpath.versioned_basename_of_title store.repo_path extension new_note.meta.title
|
|
||||||
else previous_path
|
|
||||||
in
|
|
||||||
Lwt_io.with_file ~mode:Lwt_io.output (Lpath.string_of_note filepath) write_note
|
|
||||||
>>= (fun () ->
|
|
||||||
if previous_path <> filepath
|
|
||||||
then Lwt_unix.unlink @@ Lpath.string_of_note previous_path
|
|
||||||
else Lwt.return_unit
|
|
||||||
)
|
|
||||||
| None ->
|
|
||||||
let filepath = Lpath.versioned_basename_of_title store.repo_path extension new_note.meta.title in
|
|
||||||
Lwt_io.with_file ~mode:Lwt_io.output (Lpath.string_of_note filepath) write_note
|
|
||||||
in
|
|
||||||
store >>= (fun () -> return new_note);
|
|
||||||
end
|
|
||||||
|
|
||||||
let with_note = Lwt.with_note
|
|
@ -1,7 +0,0 @@
|
|||||||
(jbuild_version 1)
|
|
||||||
|
|
||||||
(library
|
|
||||||
((name file)
|
|
||||||
(public_name logarion.file)
|
|
||||||
(libraries (logarion lwt lwt.unix))
|
|
||||||
))
|
|
Loading…
x
Reference in New Issue
Block a user