initial simple example with omd
This commit is contained in: commit 3d92789cdb
.gitignore (vendored, new file, 9 lines)
@@ -0,0 +1,9 @@
.merlin
.logarion
*.ymd
\#*\#
.\#*1
*~
*.o
*.native
_build
CONTRIBUTING.md (new file, 83 lines)
@@ -0,0 +1,83 @@
# Contributing to Logarion

Logarion's primary aim is to create a note system that doesn't waste resources.
The secondary aim is to provide an exemplary OCaml project, to demonstrate and promote the language (as many "blogging" systems written in other languages do for theirs).

As part of the secondary aim, the source code needs to be written in a way that encourages the language's adoption and participation in the OCaml developer community.

## Starting with OCaml

_"OCaml is an industrial strength programming language supporting functional, imperative and object-oriented styles"_ -- https://ocaml.org/

OCaml simply rocks.

If you are unfamiliar with OCaml, consider starting with these resources:

- Install OCaml: https://ocaml.org/docs/install.html
- Read about OCaml: https://ocaml.org/learn/books.html
- Ask questions & join the community:
  - Mailing lists: https://ocaml.org/community/
  - IRC: irc://irc.freenode.net/#ocaml (Web client: https://riot.im/app/#/room/#freenode_#ocaml:matrix.org )
  - Reddit: http://www.reddit.com/r/ocaml/
  - Discourse: https://discuss.ocaml.org/
  - Other: https://ocaml.org/community/

## Design principles

[Unix philosophy](https://en.wikipedia.org/wiki/Unix_philosophy#Do_One_Thing_and_Do_It_Well)

1. System simplicity & interoperability.
2. Output quality.
3. Distributed interactivity, like sharing with friends.

## Developing & contributing

### Clone

```
git clone https://cgit.orbitalfox.eu/logarion/
```

Install dependencies:

```
cd logarion
opam pin add logarion . -n
opam depext --install logarion
```

Build the project:

```
dune build src/logarion_cli.exe
```

This will create `_build/default/src/logarion_cli.exe` (the command line interface).

### Project structure

There are three layers:

- notes
- archive
- interfaces & intermediate formats

### Core

- `logarion.ml`: repository-related functions (listing, adding/removing, etc). ([src/logarion.ml](https://gitlab.com/orbifx/logarion/blob/master/src/logarion.ml))
- `note.ml`: parsing from and to note files. ([src/note.ml](https://gitlab.com/orbifx/logarion/blob/master/src/note.ml))

### Intermediate formats

Converters:

- `html.ml`: archive to HTML pages.
- `atom.ml`: archive to Atom feeds.

### Servers & utilities

Logarion's archives can be served over various protocols using servers.
Find related software here:

- https://logarion.orbitalfox.eu/
- https://cgit.orbitalfox.eu/
Makefile (new file, 18 lines)
@@ -0,0 +1,18 @@
all: cli

cli:
	dune build src/logarion_cli.exe

clean:
	dune clean

theme-dark:
	sassc share/sass/main-dark.sass > share/static/main.css

theme-light:
	sassc share/sass/main-light.sass > share/static/main.css

tgz:
	cp _build/default/src/logarion_cli.exe logarion
	strip logarion
	tar czvf "logarion-$(shell ./logarion --version)-$(shell uname -s)-$(shell uname -m)-$(shell git rev-parse --short HEAD).tar.gz" share logarion
README.md (new file, 50 lines)
@@ -0,0 +1,50 @@
# Logarion

Logarion is a [free and open-source][Licence] personal note-taking, journaling and publication system; a blog-wiki hybrid.

## Features

- Plain file system store, where each note is a file.
- Command line & web interfaces.
- Atom feeds.
- Static (conversion to files for uploading) & dynamic serving (HTTP, Gopher, ..).

## Community & support

- Website: <https://logarion.orbitalfox.eu>
- Mailing list: <https://lists.orbitalfox.eu/listinfo/logarion>
- Matrix (chat): `#logarion:matrix.org`. Via Riot web-app: <https://riot.im/app/#/room/#logarion:matrix.org>
- For issues, preferably email [logarion@lists.orbitalfox.eu](mailto:logarion@lists.orbitalfox.eu?subject=[Issue] summary-here).
  Alternatively: <https://gitlab.com/orbifx/logarion/issues>

## Install

The following instructions are the quickest way to install Logarion (in the absence of binary releases).

```
opam pin add logarion git://orbitalfox.eu/logarion
opam install logarion
```

Once installed, you will have `logarion` for command-line control of the repository.

## Archives

### Command line

Create a folder and run `logarion init` from within it to produce `.logarion/config.toml`, which is the core configuration file.
The archive options are under the `[archive]` section.

Run `logarion --help` for more options.

#### Theme

Optionally install a [Sass](http://sass-lang.com/) compiler, like [sassc](http://sass-lang.com/libsass#sassc), then run `make theme-dark` or `make theme-light` to generate a stylesheet at `share/static/main.css` from the respective Sass files in `share/sass/`.

## See also

- [CONTRIBUTING.md](CONTRIBUTING.md)
- [Licence](https://joinup.ec.europa.eu/software/page/eupl)
doc/logarion.odocl (new file, 3 lines)
@@ -0,0 +1,3 @@
Logarion
Ymd
Web
logarion.opam (new file, 29 lines)
@@ -0,0 +1,29 @@
opam-version: "1.2"
name: "logarion"
version: "0.5.0"
homepage: "https://logarion.orbitalfox.eu"
dev-repo: "git://orbitalfox.eu/logarion"
bug-reports: "mailto:logarion@lists.orbitalfox.eu?subject=[Issue]"
maintainer: "Stavros Polymenis <sp@orbitalfox.eu>"
authors: "Stavros Polymenis <sp@orbitalfox.eu>"
license: "EUPL"
build: [
  ["dune" "build" "--root" "." "-j" jobs "@install"]
]
depends: [
  "dune" {build}
  "ptime"
  "uuidm"
  "uri"
  "re"
  "emile"
  "omd"
  "lwt"
  "mustache"
  "tyxml"

  "cmdliner"
  "bos"
  "toml"
  "fpath"
]
share/config.toml (new file, 16 lines)
@@ -0,0 +1,16 @@
# This is an exemplar config file. Use `logarion_cli init` to have one generated.

[archive]
title = "Logarion"
owner = "Name"
email = "name@example.com"
uuid = "" # Generate UUID using `uuidgen` or https://www.uuidgenerator.net/

[web]
static_dir = ".logarion/static"
stylesheets = ["main.css"]
url = "http://localhost:3666"

[gopher]
static_dir = ".logarion/static"
url = "gopher://localhost"
share/sass/fonts/orbitron.sass (new file, 15 lines)
@@ -0,0 +1,15 @@
@font-face
  font-family: "Orbitron Medium"
  src: url('#{$font-url}/orbitron/orbitron-medium.otf')

@font-face
  font-family: "Orbitron Light"
  src: url('#{$font-url}/orbitron/orbitron-light.otf')

@font-face
  font-family: "Orbitron Bold"
  src: url('#{$font-url}/orbitron/orbitron-bold.otf')

@font-face
  font-family: "Orbitron Black"
  src: url('#{$font-url}/orbitron/orbitron-black.otf')
share/sass/layout.sass (new file, 99 lines)
@@ -0,0 +1,99 @@
$font-url: "fonts"

@import fonts/orbitron.sass

$font-face: "DejaVu Sans"

body
  font-family: $font-face
  font-weight: 400

main, article
  margin: auto
  padding: 2pt

main, article, p, img, h1, h2, h3, h4, h5
  max-width: 75ch

pre
  display: block
  overflow: auto
  padding-left: 1ch

blockquote
  font-style: italic

article > .meta
  margin: auto 2ch

article > h1
  text-align: center

header > h1
  font-family: "Orbitron Light"

header, footer
  text-align: center

li a, header a, header a:hover
  text-decoration: none

a:hover
  text-decoration: underline

h1, h2, h3, h4, h5
  font-family: "Orbitron Medium"

footer
  clear: both
  margin-top: 2em
  border-top: 1px dotted
  padding: 1em 0

fieldset
  border: .5mm dashed

fieldset > p
  margin: .5em auto
  padding: .5em
  float: left

label
  margin: .2em
  display: block

input, textarea
  display: block
  border: none
  border-bottom: .5mm solid
  min-width: 100%

textarea
  border: .5mm solid
  width: 80ch
  height: 40ch
  display: block-inline
  clear: both

button
  clear: both
  display: block
  margin: 1em auto
  border: .5mm solid

.topics > li
  list-style-type: none
  text-transform: capitalize

ul.listing
  padding: 0 1ch

.listing > li
  list-style-type: none
  text-transform: none
  padding: 4px
  margin-bottom: .5em

.listing p
  padding: 0
  margin: 0
share/sass/main-dark.sass (new file, 23 lines)
@@ -0,0 +1,23 @@
@import layout.sass

body
  background-color: #191b22

body, a, header a:visited
  color: #f2f2f2

pre
  border-left: 1mm solid #f2f2f233

a
  color: PaleTurquoise

.abstract, .meta
  color: #909090

article, .listing > li
  background-color: rgba(100,100,100,.1)
  border: 1px solid rgba(100,100,100,.2)

.pipe
  opacity: .3
share/sass/main-light.sass (new file, 23 lines)
@@ -0,0 +1,23 @@
@import layout.sass

body
  background-color: WhiteSmoke

body, a, header a:visited
  color: #191B22

pre
  border-left: 1mm solid #191B22

a
  color: SteelBlue

.abstract, .meta
  color: #909090

article, .listing > li
  background-color: rgba(100,100,100,.1)
  border: 1px solid rgba(100,100,100,.2)

.pipe
  opacity: .3
share/static/main.css (new file, 131 lines)
@@ -0,0 +1,131 @@
@font-face {
  font-family: "Orbitron Medium";
  src: url("fonts/orbitron/orbitron-medium.otf"); }

@font-face {
  font-family: "Orbitron Light";
  src: url("fonts/orbitron/orbitron-light.otf"); }

@font-face {
  font-family: "Orbitron Bold";
  src: url("fonts/orbitron/orbitron-bold.otf"); }

@font-face {
  font-family: "Orbitron Black";
  src: url("fonts/orbitron/orbitron-black.otf"); }

body {
  font-family: "DejaVu Sans";
  font-weight: 400; }

main, article {
  margin: auto;
  padding: 2pt; }

main, article, p, img, h1, h2, h3, h4, h5 {
  max-width: 75ch; }

pre {
  display: block;
  overflow: auto;
  padding-left: 1ch; }

blockquote {
  font-style: italic; }

article > .meta {
  margin: auto 2ch; }

article > h1 {
  text-align: center; }

header > h1 {
  font-family: "Orbitron Light"; }

header, footer {
  text-align: center; }

li a, header a, header a:hover {
  text-decoration: none; }

a:hover {
  text-decoration: underline; }

h1, h2, h3, h4, h5 {
  font-family: "Orbitron Medium"; }

footer {
  clear: both;
  margin-top: 2em;
  border-top: 1px dotted;
  padding: 1em 0; }

fieldset {
  border: .5mm dashed; }

fieldset > p {
  margin: .5em auto;
  padding: .5em;
  float: left; }

label {
  margin: .2em;
  display: block; }

input, textarea {
  display: block;
  border: none;
  border-bottom: .5mm solid;
  min-width: 100%; }

textarea {
  border: .5mm solid;
  width: 80ch;
  height: 40ch;
  display: block-inline;
  clear: both; }

button {
  clear: both;
  display: block;
  margin: 1em auto;
  border: .5mm solid; }

.topics > li {
  list-style-type: none;
  text-transform: capitalize; }

ul.listing {
  padding: 0 1ch; }

.listing > li {
  list-style-type: none;
  text-transform: none;
  padding: 4px;
  margin-bottom: .5em; }

.listing p {
  padding: 0;
  margin: 0; }

body {
  background-color: #191b22; }

body, a, header a:visited {
  color: #f2f2f2; }

pre {
  border-left: 1mm solid #f2f2f233; }

a {
  color: PaleTurquoise; }

.abstract, .meta {
  color: #909090; }

article, .listing > li {
  background-color: rgba(100, 100, 100, 0.1);
  border: 1px solid rgba(100, 100, 100, 0.2); }

.pipe {
  opacity: .3; }
share/template/frontpage.mustache (new file, 3 lines)
@@ -0,0 +1,3 @@
## Articles

{{recent_texts_listing}}
share/template/header.mustache (new file, 1 line)
@@ -0,0 +1 @@
{{title}}
share/template/item.mustache (new file, 3 lines)
@@ -0,0 +1,3 @@
{{date_human}}
{{link}}
{{abstract}}
share/template/list.mustache (new file, 7 lines)
@@ -0,0 +1,7 @@
### Topics

{{topics}}

### Recent articles

{{recent_texts_listing}}
share/template/note.mustache (new file, 5 lines)
@@ -0,0 +1,5 @@
# {{title}}

{{details}}

{{body}}
src/confix/config.ml (new file, 82 lines)
@@ -0,0 +1,82 @@
module Validation = struct
  let empty = []

  let (&>) report = function None -> report | Some msg -> msg :: report
  let (&&>) report = function [] -> report | msgs -> msgs @ report

  let check ok msg = if ok then None else Some msg

  let file_exists ?(msg=(fun s -> (s ^ " is not a file"))) ?(parent_dir=Fpath.v ".") file =
    let str = Fpath.(to_string (parent_dir // file)) in
    check (Sys.file_exists str) (msg str)

  let is_directory ?(msg=(fun s -> (s ^ " is not a directory"))) dir =
    let str = Fpath.to_string dir in
    check (Sys.file_exists str && Sys.is_directory str) (msg str)

  let files_exist ?(msg=(fun s -> (s ^ " is not a file"))) ?(parent_dir=Fpath.v ".") files =
    let f report file = report &> file_exists ~msg ~parent_dir file in
    List.fold_left f empty files

  let terminate_when_invalid ?(print_error=true) =
    let error i msg = prerr_endline ("Error " ^ string_of_int i ^ ": " ^ msg) in
    function
    | [] -> ()
    | msgs -> if print_error then List.iteri error (List.rev msgs); exit 1
end

module Path = struct
  let of_string str =
    if Sys.file_exists str then
      match Fpath.v str with
      | path -> Ok path
      | exception (Invalid_argument msg) -> Error ("Invalid path " ^ msg)
    else Error (str ^ " not found")

  let path_exists x = Fpath.to_string x |> Sys.file_exists

  let conventional_paths =
    let paths =
      try [ ".logarion"; Sys.getenv "HOME" ^ "/.config/logarion"; "/etc/logarion" ]
      with Not_found -> [ ".logarion"; "/etc/logarion" ]
    in
    List.map Fpath.v paths

  let with_file ?(conventional_paths=conventional_paths) config_file =
    let (//) = Fpath.(//) in
    let basepath = Fpath.v config_file in
    let existing dir = path_exists (dir // basepath) in
    try Ok (List.find existing conventional_paths // basepath)
    with Not_found -> Error (config_file ^ " not found in: " ^ String.concat ", " (List.map Fpath.to_string conventional_paths))
end

let with_default default = function Some x -> x | None -> default

let with_default_paths default =
  function Some ss -> List.map Fpath.v ss | None -> default

let mandatory = function Some x -> x | None -> failwith "undefined mandatory setting"

let (&>) a b = match a with Ok x -> b x | Error e -> Error e

module type Store = sig
  type t
  val from_path : Fpath.t -> (t, string) result
end

module Make (S : Store) = struct
  include S

  let of_path path = S.from_path path

  let (&>) = (&>)

  let to_record converter = function
    | Ok store -> converter store
    | Error s -> Error s

  let to_record_or_exit ?(print_error=true) ?(validator=(fun _cfg -> [])) converter store_result =
    match to_record converter store_result with
    | Ok cfg -> Validation.terminate_when_invalid (validator cfg); cfg
    | Error s -> if print_error then prerr_endline s; exit 1
end
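A minimal sketch of how the `Validation` combinators above compose, assuming the library is linked as `Confix` (as the CLI later in this commit does); the checked paths are hypothetical examples, not project requirements:

```ocaml
(* Accumulate validation messages with (&>), then fail fast if any remain. *)
let () =
  let open Confix.Config.Validation in
  let report =
    empty
    &> is_directory (Fpath.v ".logarion")                            (* hypothetical repository dir *)
    &> file_exists ~parent_dir:(Fpath.v ".logarion") (Fpath.v "config.toml")
  in
  terminate_when_invalid report  (* prints numbered errors and exits 1 when the report is non-empty *)
```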
src/confix/confixToml.ml (new file, 23 lines)
@@ -0,0 +1,23 @@
type t = TomlTypes.table

let from_path path =
  match Toml.Parser.from_filename (Fpath.to_string path) with
  | `Error (str, _loc) -> Error str
  | `Ok toml -> Ok toml

open TomlLenses
let (/) a b = (key a |-- table |-- key b)
let (//) a b = (key a |-- table |-- key b |-- table)

let int toml path = get toml (path |-- int)

let float toml path = get toml (path |-- float)

let string toml path = get toml (path |-- string)

let strings toml path = get toml (path |-- array |-- strings)

let path toml path = match string toml path with Some s -> Some (Fpath.v s) | None -> None

let paths toml path = match strings toml path with
    Some ss -> Some (List.map Fpath.v ss) | None -> None
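A sketch of reading values with the lens helpers above, assuming a TOML file shaped like `share/config.toml` sits in the working directory:

```ocaml
(* Parse a TOML file and pull typed values out with the ("section" / "key") lenses. *)
let () =
  match Confix.ConfixToml.from_path (Fpath.v "config.toml") with
  | Error msg -> prerr_endline msg
  | Ok toml ->
    let open Confix.ConfixToml in
    let title = string toml ("archive" / "title") in       (* Some "Logarion" for the shipped example *)
    let sheets = strings toml ("web" / "stylesheets") in    (* Some ["main.css"] *)
    print_endline (match title with Some t -> t | None -> "untitled");
    ignore sheets
```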
src/confix/jbuild (new file, 7 lines)
@@ -0,0 +1,7 @@
(jbuild_version 1)

(library
 ((name confix)
  (public_name logarion.confix)
  (libraries (fpath toml))
  ))
src/converters/atom.ml (new file, 50 lines)
@@ -0,0 +1,50 @@
let esc = Xml_print.encode_unsafe_char

let header config url =
  let open Logarion.Meta in
  let open Logarion.Archive.Configuration in
  "<title>" ^ config.title ^ "</title>"
  (* TODO: ^ "<subtitle>A subtitle.</subtitle>"*)
  ^ "<link rel=\"alternate\" type=\"text/html\" href=\"" ^ url ^ "\"/>"
  ^ "<link rel=\"self\" type=\"application/atom+xml\" href=\"" ^ url ^ "/feed.atom\" />"
  ^ "<id>urn:uuid:" ^ Id.to_string config.id ^ "</id>"
  ^ "<updated>" ^ Ptime.to_rfc3339 (Ptime_clock.now ()) ^ "</updated>"

let opt_element tag_name content =
  if content <> ""
  then "<" ^ tag_name ^ ">" ^ content ^ "</" ^ tag_name ^ ">"
  else ""

let entry url note =
  let open Logarion in
  let meta = note.Note.meta in
  let u = "note/" ^ Meta.alias meta in
  let open Meta in
  let authors elt a =
    a ^ "<author>"
    ^ (opt_element "name" @@ esc elt.Author.name)
    ^ (opt_element "uri" @@ esc (Uri.to_string elt.Author.address))
    ^ "</author>"
  in
  ("<entry>"
   ^ "<title>" ^ meta.title ^ "</title>"
   ^ "<id>urn:uuid:" ^ Meta.Id.to_string meta.uuid ^ "</id>"
   ^ "<link rel=\"alternate\" href=\"" ^ url ^ "/" ^ u ^ "\" />"
   ^ "<updated>" ^ Date.(meta.date |> listing |> rfc_string) ^ "</updated>"
   ^ Meta.AuthorSet.fold authors meta.authors ""
   ^ opt_element "summary" @@ esc meta.abstract)
  ^ Meta.StringSet.fold (fun elt a -> a ^ "<category term=\"" ^ elt ^ "\"/>") meta.topics ""
  ^ "<content type=\"xhtml\"><div xmlns=\"http://www.w3.org/1999/xhtml\">"
  ^ (Omd.to_html @@ Omd.of_string @@ esc note.Note.body)
  ^ "</div></content>"
  ^ "</entry>"

let feed config url note_fn articles =
  let fold_valid feed m = match note_fn m.Logarion.Meta.uuid with
    | Some note -> feed ^ "\n" ^ entry url note
    | None -> feed
  in
  "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<feed xmlns=\"http://www.w3.org/2005/Atom\">\n"
  ^ header config url
  ^ List.fold_left fold_valid "" articles
  ^ "</feed>"
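A tiny sketch of the `opt_element` helper defined above, which only emits a tag when the content is non-empty; it assumes the converters library is linked as `Converters`:

```ocaml
(* opt_element wraps non-empty content in a tag and drops empty content entirely. *)
let () =
  assert (Converters.Atom.opt_element "summary" "A short abstract" = "<summary>A short abstract</summary>");
  assert (Converters.Atom.opt_element "summary" "" = "")
```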
src/converters/html.ml (new file, 133 lines)
@@ -0,0 +1,133 @@
open Tyxml.Html
open Logarion

let to_string tyxml = Format.asprintf "%a" (Tyxml.Html.pp ()) tyxml

let head ~style linker t =
  head (title (pcdata t)) [
    link ~rel:[`Stylesheet] ~href:(linker style) ();
    link ~rel:[`Alternate] ~href:(linker "/feed.atom") ~a:[a_mime_type "application/atom+xml"] ();
    meta ~a:[a_charset "utf-8"] ();
  ]

let default_style = "/static/main.css"

let page ?(style=default_style) linker head_title header main =
  html (head ~style linker head_title) (body [ header; main ])

let anchor url content = a ~a:[ a_href (uri_of_string url) ] content

let div ?(style_class="") content =
  let a = if style_class <> "" then [a_class [style_class]] else [] in
  div ~a content

let main = main

let unescaped_data = Unsafe.data
let data = pcdata
let title = h1
let header = header

let pipe = span ~a:[a_class ["pipe"]] [pcdata " | "]

let meta ~abstract ~authors ~date ~series ~topics ~keywords ~uuid =
  let opt_span name value = if String.length value > 0 then (span [pipe; pcdata (name ^ value)]) else pcdata "" in
  let authors = List.fold_left (fun acc x -> a ~a:[a_rel [`Author]] [pcdata x] :: acc) [] authors in
  [ p ~a:[a_class ["abstract"]] [Unsafe.data abstract]; ]
  @ authors
  @ [
    pipe;
    time ~a:[a_datetime date] [pcdata date];
    pipe;
    opt_span "series: " series;
    opt_span "topics: " topics;
    opt_span "keywords: " keywords;
    div [pcdata ("id: " ^ uuid)];
  ]
  |> div ~style_class:"meta"

let note = article

let text_item path meta =
  let module Meta = Logarion.Meta in
  tr [
    td [ a ~a:[a_class ["title"]; a_href (path ^ Meta.alias meta ^ ".html")] [data meta.Meta.title] ];
    td [ span [pcdata Meta.(stringset_csv meta.keywords)] ];
    td [ time @@ [unescaped_data Meta.Date.(pretty_date (listing meta.Meta.date))] ];
  ]

let listing_texts path metas =
  let item meta = text_item path meta in
  table @@ List.map item metas

let listing_index path metas =
  let items topic =
    List.fold_left Meta.(fun a e -> if StringSet.mem topic e.topics then text_item path e :: a else a)
      [] metas
  in
  let item topic =
    let module Meta = Logarion.Meta in
    [ h3 ~a:[a_id topic] [pcdata topic]; table (items topic)]
  in
  List.fold_left (fun a e -> a @ item e) []
  @@ Meta.StringSet.elements
  @@ List.fold_left Meta.(fun a e -> unique_topics a e) Meta.StringSet.empty metas

module Renderer = struct
  let meta meta e =
    let e = List.hd e in
    match e with
    | "urn_name" -> [unescaped_data @@ "/note/" ^ Logarion.Meta.alias meta]
    | "date" | "date_created" | "date_edited" | "date_published" | "date_human" ->
      [time @@ [unescaped_data @@ Logarion.Meta.value_with_name meta e]]
    | tag -> [unescaped_data @@ Logarion.Meta.value_with_name meta tag]

  let note note e = match List.hd e with
    | "body" -> [unescaped_data @@ Omd.to_html @@ Omd.of_string note.Logarion.Note.body]
    | _ -> meta note.Logarion.Note.meta e

  let archive archive e = match List.hd e with
    | "title" -> [h1 [anchor ("index.html") [data archive.Logarion.Archive.Configuration.title]]]
    | tag -> prerr_endline ("unknown tag: " ^ tag); [unescaped_data ""]
end

let form ymd =
  let article_form =
    let input_set title input = p [ label [ pcdata title; input ] ] in
    let open Note in
    let open Meta in
    let authors = AuthorSet.to_string ymd.meta.authors in
    [
      input ~a:[a_name "uuid"; a_value (Id.to_string ymd.meta.uuid); a_input_type `Hidden] ();
      input_set
        "Title"
        (input ~a:[a_name "title"; a_value ymd.meta.title; a_required ()] ());
      input_set
        "Authors"
        (input ~a:[a_name "authors"; a_value authors] ());
      input_set
        "Topics"
        (input ~a:[a_name "topics"; a_value (stringset_csv ymd.meta.topics)] ());
      input_set
        "Categories"
        (input ~a:[a_name "categories"; a_value (CategorySet.to_csv ymd.meta.categories)] ());
      input_set
        "Keywords"
        (input ~a:[a_name "keywords"; a_value (stringset_csv ymd.meta.keywords)] ());
      input_set
        "Series"
        (input ~a:[a_name "series"; a_value (stringset_csv ymd.meta.series)] ());
      input_set
        "Abstract"
        (input ~a:[a_name "abstract"; a_value ymd.meta.abstract] ());
      input_set
        "Text"
        (textarea ~a:[a_name "body"] (pcdata ymd.body));
      p [ button ~a:[a_button_type `Submit] [pcdata "Submit"] ];
    ]
  in
  div
    [ form
        ~a:[a_method `Post; a_action (uri_of_string "/post.note"); a_accept_charset ["utf-8"]]
        [ fieldset ~legend:(legend [pcdata "Article"]) article_form ]
    ]
src/converters/jbuild (new file, 5 lines)
@@ -0,0 +1,5 @@
(library
 ((name converters)
  (public_name logarion.converters)
  (libraries (logarion logarion.file mustache tyxml ptime ptime.clock.os))
  ))
src/converters/template.ml (new file, 81 lines)
@@ -0,0 +1,81 @@
type t = Mustache.t

let of_string = Mustache.of_string
let of_file f = File.load f |> of_string

let string s = [Html.data s]
let section ~inverted:_ _name _contents = prerr_endline "Mustache sections unsupported"; []
let unescaped _elts = prerr_endline "Mustache unescaped not supported; used escaped instead"; []
let partial ?indent:_ _name _ _ = prerr_endline "Mustache sections unsupported"; []
let comment _ = [Html.data ""]
let concat = List.concat

let escaped_index ~from:_ ~n:_ _metas _e = [Html.data "temp"]
(* match List.hd e with *)
(* | "topics" -> *)
(*    let topics = *)
(*      ListLabels.fold_left *)
(*        ~init:(Logarion.Meta.StringSet.empty) *)
(*        ~f:(fun a e -> Logarion.Meta.unique_topics a e ) metas *)
(*    in *)
(*    Logarion.Meta.StringSet.fold (fun e a -> a ^ "<li><a href=\"/topic/" ^ e ^ "\">" ^ e ^ "</a></li>") topics "" *)

let header_custom template _linker archive =
  Mustache.fold ~string ~section ~escaped:(Html.Renderer.archive archive) ~unescaped ~partial ~comment ~concat template
  |> Html.header

let header_default linker archive =
  Html.(header [title [anchor (linker "/") [data archive.Logarion.Archive.Configuration.title]]])

let meta meta =
  let open Logarion.Meta in
  let abstract = meta.abstract in
  let authors = List.map (fun elt -> elt.Author.name) @@ AuthorSet.elements meta.authors in
  let date = Date.(pretty_date @@ listing meta.date) in
  let series = stringset_csv meta.series in
  let topics = stringset_csv meta.topics in
  let keywords = stringset_csv meta.keywords in
  let uuid = Id.to_string meta.uuid in
  Html.meta ~abstract ~authors ~date ~series ~topics ~keywords ~uuid

let body_custom template note =
  Mustache.fold ~string ~section ~escaped:(Html.Renderer.note note) ~unescaped ~partial ~comment ~concat template
  |> Html.note

let body_default note =
  Html.note
    [ Html.title [Html.unescaped_data note.Logarion.Note.meta.Logarion.Meta.title]; (* Don't add title if body contains one *)
      meta note.meta;
      Html.unescaped_data @@ Omd.to_html @@ Omd.of_string note.Logarion.Note.body ]

let page ~style linker title header body =
  Html.to_string @@ Html.page ~style linker title header body

let of_config config k = match config with
  | Error msg -> prerr_endline ("Couldn't load [templates] section;" ^ msg); None
  | Ok c ->
    let open Confix.ConfixToml in
    path c ("templates" / k)

let converter default custom = function
  | Some p ->
    if Confix.Config.Path.path_exists p then custom @@ of_file p
    else (prerr_endline @@ "Couldn't find: " ^ Fpath.to_string p; default)
  | None -> default

let header_converter config = converter header_default header_custom @@ of_config config "header"
let body_converter config = converter body_default body_custom @@ of_config config "body"

let default_style = Html.default_style

let page_of_index ~style linker header archive metas =
  page ~style linker ("Index | " ^ archive.Logarion.Archive.Configuration.title) (header linker archive) (Html.main (Html.listing_index "" metas))

let page_of_log ~style linker header archive metas =
  page ~style linker ("Log | " ^ archive.Logarion.Archive.Configuration.title) (header linker archive) (Html.main [Html.listing_texts "" metas])

let page_of_note ~style linker header body archive note =
  page ~style linker note.Logarion.Note.meta.Logarion.Meta.title (header linker archive) (body note)

let page_of_msg ~style linker header archive title msg =
  page ~style linker title (header linker archive) (Html.div [Html.data msg])
src/core/archive.ml (new file, 89 lines)
@@ -0,0 +1,89 @@
module Id = Meta.Id
type alias_t = string

module Configuration = struct
  type t = {
    repository : Lpath.repo_t;
    title : string;
    owner : string;
    email : string;
    id : Id.t;
  }

  let of_config config =
    let open Confix in
    let open Confix.Config in
    let str k = ConfixToml.(string config ("archive" / k)) in
    try
      Ok {
        repository =
          (try Lpath.repo_of_string (str "repository" |> with_default ".")
           with
           | Invalid_argument s -> failwith ("Invalid repository: " ^ s)
           | Failure s -> failwith ("Missing repository value: " ^ s));
        title = str "title" |> with_default "";
        owner = str "owner" |> with_default "";
        email = str "email" |> with_default "";
        id = match Id.of_string (str "uuid" |> mandatory) with Some id -> id | None -> failwith "Invalid UUID in config";
      }
    with Failure str -> Error str

  let validity config =
    let repo = Lpath.fpath_of_repo config.repository in
    let open Confix.Config.Validation in
    empty
    &> is_directory repo
end

module AliasMap = Meta.AliasMap

module Make (Store : Store.T) = struct
  type t = {
    config : Configuration.t;
    store : Store.t;
  }

  let note_lens note = note
  let meta_lens note = note.Note.meta

  let recency_order a b = Meta.(Date.compare a.date b.date)

  let latest archive =
    Store.to_list ~order:recency_order meta_lens archive.store

  let listed archive =
    let notes = Store.to_list meta_lens archive.store in
    List.filter Meta.(fun e -> CategorySet.listed e.categories) notes

  let published archive =
    let notes = Store.to_list meta_lens archive.store in
    List.filter Meta.(fun e -> CategorySet.published e.categories) notes

  let latest_listed archive =
    let notes = Store.to_list ~order:recency_order meta_lens archive.store in
    List.filter Meta.(fun e -> CategorySet.listed e.categories) notes

  let with_topic archive topic =
    let notes = Store.to_list ~order:recency_order meta_lens archive.store in
    List.filter Meta.(fun e -> StringSet.exists (fun t -> t = topic) e.topics) notes

  let topics archive =
    let notes = Store.to_list meta_lens archive.store in
    List.fold_left Meta.(fun a e -> unique_topics a e) Meta.StringSet.empty notes

  let latest_entry archive fragment =
    let notes = Store.to_list ~order:recency_order meta_lens archive.store in
    let containing_fragment e = Re.Str.(string_match (regexp fragment)) (e.Meta.title) 0 in
    try Some (List.find containing_fragment notes)
    with Not_found -> None

  let note_with_id archive id = Store.note_with_id archive.store id
  let note_with_alias archive alias = Store.note_with_alias archive.store alias

  let with_note archive note = Store.with_note archive.store note

  let sublist ~from ~n list =
    let aggregate_subrange (i, elms) e = succ i, if i >= from && i <= n then e::elms else elms in
    List.fold_left aggregate_subrange (0, []) list |> snd

end
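A sketch of wiring the `Make` functor above to a store and printing the aliases of listed notes, newest first. `File` is assumed to be the file-system store module used elsewhere in the repository (it does not appear in this commit), and `config` a parsed configuration:

```ocaml
(* Instantiate the archive functor over a store and walk the listed notes. *)
module C = Logarion.Archive.Configuration
module A = Logarion.Archive.Make (File)

let print_listed (config : C.t) =
  let archive = A.{ config; store = File.store config.C.repository } in
  A.latest_listed archive
  |> List.iter (fun meta -> print_endline (Logarion.Meta.alias meta))
```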
src/core/jbuild (new file, 5 lines)
@@ -0,0 +1,5 @@
(library
 ((name logarion)
  (public_name logarion)
  (libraries (confix omd ptime lwt uuidm uri re.str emile))
  ))
src/core/lpath.ml (new file, 25 lines)
@@ -0,0 +1,25 @@
open Fpath
type repo_t = Repo of t
type note_t = Note of { repo: repo_t; basename: t }

let fpath_of_repo = function Repo p -> p
let string_of_repo r = fpath_of_repo r |> to_string
let repo_of_string s = Repo (v s)

let fpath_of_note = function Note n -> (fpath_of_repo n.repo // n.basename)
let string_of_note n = fpath_of_note n |> to_string
let note_of_basename repo s = Note { repo; basename = v s }

let alias_of_note = function Note n -> n.basename |> rem_ext |> to_string
let note_of_alias repo extension alias = note_of_basename repo (alias ^ extension)

let versioned_basename_of_title ?(version=0) repo extension (title : string) =
  let notes_fpath = fpath_of_repo repo in
  let basename = v @@ Meta.string_alias title in
  let rec next version =
    let candidate = basename |> add_ext (string_of_int version) |> add_ext extension in
    if Sys.file_exists (to_string (notes_fpath // candidate))
    then next (succ version)
    else note_of_basename repo (to_string candidate)
  in
  next version
src/core/meta.ml (new file, 222 lines)
@@ -0,0 +1,222 @@
module Date = struct
  type t = {
    created: Ptime.t option;
    published: Ptime.t option;
    edited: Ptime.t option;
  } [@@deriving lens { submodule = true }]

  let rfc_string date = match date with Some t -> Ptime.to_rfc3339 t | None -> ""

  let of_string (rfc : string) = match Ptime.of_rfc3339 rfc with
      Ok (t,_,_) -> Some t | Error _ -> None

  let listing date = match date.published, date.created with
    | Some _, _ -> date.published
    | None, Some _ -> date.created
    | None, None -> None

  let compare = compare

  let pretty_date = function
    | Some t -> Ptime.to_date t |> fun (y, m, d) -> Printf.sprintf "%04d-%02d-%02d" y m d
    | None -> ""
end

module Id = struct
  let random_state = Random.State.make_self_init ()
  type t = Uuidm.t
  let compare = Uuidm.compare
  let to_string = Uuidm.to_string
  let of_string = Uuidm.of_string
  let generate ?(random_state=random_state) = Uuidm.v4_gen random_state
end

module Author = struct
  type name_t = string
  type address_t = Uri.t
  type t = {
    name: name_t;
    address: address_t;
  } [@@deriving lens { submodule = true } ]

  let empty = { name = ""; address = Uri.empty }

  let compare = Pervasives.compare

end

module AuthorSet = struct
  include Set.Make(Author)

  let to_string authors =
    let to_string a = a.Author.name ^ " <" ^ Uri.to_string a.Author.address ^ ">" in
    let f elt acc = if String.length acc > 1 then acc ^ ", " ^ to_string elt else to_string elt in
    fold f authors ""

  let of_string s =
    match Emile.List.of_string s with
    | Error _ -> prerr_endline @@ "Error parsing: " ^ s; empty
    | Ok emails ->
      let to_author =
        let module L = List in
        let open Emile in
        function
        | `Group _ -> prerr_endline @@ "Can't deal with groups in author: " ^ s; Author.empty
        | `Mailbox { name; local; _ } ->
          let s_of_phrase = function `Dot -> "" | `Word w -> (match w with `Atom a -> a | `String s -> s) | `Encoded _ -> "" in
          let name = match name with None -> "" | Some phrase -> L.fold_left (fun a e -> a ^ s_of_phrase e) "" phrase in
          let address =
            L.fold_left (fun a e -> a ^ match e with `Atom a -> a | `String s -> s) "" local ^ "@" (* TODO: Author address unimplemented *)
          in
          Author.{ name; address = Uri.of_string address }
      in
      of_list @@ List.map to_author emails
end

module Category = struct
  type t = Draft | Unlisted | Published | Custom of string

  let compare = Pervasives.compare

  let of_string = function
    | "draft" -> Draft
    | "unlisted" -> Unlisted
    | "published" -> Published
    | c -> Custom c

  let to_string = function
    | Draft -> "draft"
    | Unlisted -> "unlisted"
    | Published -> "published"
    | Custom c -> c
end

module CategorySet = struct
  include Set.Make(Category)
  let to_csv set =
    let f elt a =
      let s = Category.to_string elt in
      if a <> "" then a ^ ", " ^ s else s
    in
    fold f set ""
  let categorised categs cs = of_list categs |> (fun s -> subset s cs)
  let published = categorised [Category.Published]
  let listed cs = not @@ categorised [Category.Unlisted] cs
end

module StringSet = Set.Make(String)

let stringset_csv set =
  let f elt a = if a <> "" then a ^ ", " ^ elt else elt in
  StringSet.fold f set ""

let string_alias t =
  let is_reserved = function
    | '!' | '*' | '\'' | '(' | ')' | ';' | ':' | '@' | '&' | '=' | '+' | '$'
    | ',' | '/' | '?' | '#' | '[' | ']' | ' ' | '\t' | '\x00' -> true
    | _ -> false
  in
  let b = Buffer.create (String.length t) in
  let filter char =
    let open Buffer in
    if is_reserved char then (try (if nth b (pred (length b)) <> '-' then add_char b '-') with Invalid_argument _ -> prerr_endline "reserved")
    else add_char b char
  in
  String.(iter filter (lowercase_ascii t));
  Buffer.contents b

type t = {
  title: string;
  authors: AuthorSet.t;
  date: Date.t;
  categories: CategorySet.t;
  topics: StringSet.t;
  keywords: StringSet.t;
  series: StringSet.t;
  abstract: string;
  uuid: Id.t;
  alias: string;
} [@@deriving lens { submodule = true }]

let blank ?(uuid=(Id.generate ())) () = {
  title = "";
  authors = AuthorSet.empty;
  date = Date.({ created = None; edited = None; published = None });
  categories = CategorySet.empty;
  topics = StringSet.empty;
  keywords = StringSet.empty;
  series = StringSet.empty;
  abstract = "";
  uuid;
  alias = "";
}

let listed e = CategorySet.listed e.categories
let published e = CategorySet.published e.categories
let unique_topics ts x = StringSet.union ts x.topics

module AliasMap = Map.Make(String)
module IdMap = Map.Make(Id)

let alias meta = if meta.alias = "" then string_alias meta.title else meta.alias

let value_with_name (_meta as m) = function
  | "Title" -> m.title
  | "Abstract" -> m.abstract
  | "Authors" -> AuthorSet.to_string m.authors
  | "Date" -> Date.(rfc_string m.date.created)
  | "Edited" -> Date.(rfc_string m.date.edited)
  | "Published"-> Date.(rfc_string m.date.published)
  | "Human" -> Date.(pretty_date @@ listing m.date)
  | "Topics" -> stringset_csv m.topics;
  | "Categories" -> CategorySet.to_csv m.categories;
  | "Keywords" -> stringset_csv m.keywords;
  | "Series" -> stringset_csv m.series;
  | "ID" -> Id.to_string m.uuid
  | "Alias" -> alias m
  | e -> invalid_arg e

let with_kv meta (k,v) =
  let list_of_csv = Re.Str.(split (regexp " *, *")) in
  let trim = String.trim in
  match k with
  | "Title" -> { meta with title = trim v }
  | "Author"
  | "Authors" -> { meta with authors = AuthorSet.of_string (trim v)}
  | "Abstract" -> { meta with abstract = trim v }
  | "Date" -> { meta with date = Date.{ meta.date with created = Date.of_string v }}
  | "Published" -> { meta with date = Date.{ meta.date with published = Date.of_string v }}
  | "Edited" -> { meta with date = Date.{ meta.date with edited = Date.of_string v }}
  | "Topics" -> { meta with topics = trim v |> list_of_csv |> StringSet.of_list }
  | "Keywords" -> { meta with keywords = trim v |> list_of_csv |> StringSet.of_list }
  | "Categories"->
    let categories = trim v |> list_of_csv |> List.map Category.of_string |> CategorySet.of_list in
    { meta with categories }
  | "Series" -> { meta with series = trim v |> list_of_csv |> StringSet.of_list }
  | "ID" -> (match Id.of_string v with Some id -> { meta with uuid = id } | None -> meta)
  | "Alias" -> { meta with alias = v }
  | k -> prerr_endline ("Unknown key: " ^ k ^ ", with value: " ^ v ); meta

let to_string (_meta as m) =
  let has_len v = String.length v > 0 in
  let s field value = if has_len value then field ^ ": " ^ value ^ "\n" else "" in
  let a value = if AuthorSet.is_empty value then "" else "Authors: " ^ AuthorSet.to_string value ^ "\n" in
  let d field value = match value with
    | Some _ -> field ^ ": " ^ Date.rfc_string value ^ "\n" | None -> ""
  in
  let rows =
    [ s "Title" m.title;
      a m.authors;
      d "Date" m.date.Date.created;
      d "Edited" m.date.Date.edited;
      d "Published" m.date.Date.published;
      s "Topics" (stringset_csv m.topics);
      s "Categories" (CategorySet.to_csv m.categories);
      s "Keywords" (stringset_csv m.keywords);
      s "Series" (stringset_csv m.series);
      s "Abstract" m.abstract;
      s "ID" (Uuidm.to_string m.uuid);
      s "Alias" m.alias
    ]
  in
  String.concat "" rows
src/core/note.ml (new file, 47 lines)
@@ -0,0 +1,47 @@
type t = {
  meta: Meta.t;
  body: string;
} [@@deriving lens { submodule = true }]

let blank ?(uuid=(Meta.Id.generate ())) () = { meta = Meta.blank ~uuid (); body = "" }

let title ymd =
  let mtitle = ymd.meta.Meta.title in
  if String.length mtitle > 0 then mtitle else
    let open Omd in
    try List.find (function H1 _ -> true | _ -> false) (Omd.of_string ymd.body)
        |> function H1 h -> to_text h | _ -> ""
    with Not_found -> ""

let categorised categs ymd = Meta.CategorySet.categorised categs ymd.meta.Meta.categories

let with_kv ymd (k,v) = match k with
  | "body" -> { ymd with body = String.trim v }
  | _ -> { ymd with meta = Meta.with_kv ymd.meta (k,v) }

let meta_pair_of_string line = match Re.Str.(bounded_split (regexp ": *")) line 2 with
  | [ key; value ] -> Re.Str.(replace_first (regexp "^#\\+") "" key), value
  | [ key ] -> Re.Str.(replace_first (regexp "^#\\+") "" key), ""
  | _ -> prerr_endline line; ("","")

let meta_of_string front_matter =
  let fields = List.map meta_pair_of_string (String.split_on_char '\n' front_matter) in
  List.fold_left Meta.with_kv (Meta.blank ()) fields

exception Syntax_error of string

let front_matter_body_split s =
  if Re.Str.(string_match (regexp ".*:.*")) s 0
  then match Re.Str.(bounded_split (regexp "\n\n")) s 2 with
    | front::body::[] -> (front, body)
    | _ -> ("", s)
  else ("", s)

let of_string s =
  let (front_matter, body) = front_matter_body_split s in
  try
    let note = { meta = meta_of_string front_matter; body } in
    { note with meta = { note.meta with title = title note } }
  with _ -> prerr_endline ("Failed parsing" ^ s); blank ()

let to_string ymd = Meta.to_string ymd.meta ^ "\n" ^ ymd.body
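A small sketch of the note format these functions parse: a block of `Key: value` front matter, a blank line, then a Markdown body. The note text below is a hypothetical example, and the core library is assumed to be linked as `Logarion`:

```ocaml
(* Round-trip a note string through of_string/to_string. *)
let () =
  let raw =
    "Title: Hello world\nAuthors: Name <name@example.com>\nCategories: published\n\n# Hello world\n\nFirst note body."
  in
  let note = Logarion.Note.of_string raw in
  print_endline note.Logarion.Note.meta.Logarion.Meta.title;  (* prints "Hello world" *)
  print_endline (Logarion.Note.to_string note)
```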
src/core/store.ml (new file, 7 lines)
@@ -0,0 +1,7 @@
module type T = sig
  type t
  val to_list: ?order:('a -> 'a -> int) -> (Note.t -> 'a) -> t -> 'a list
  val note_with_id: t -> Meta.Id.t -> Note.t option
  val note_with_alias: t -> string -> Note.t option
  val with_note: t -> Note.t -> Note.t Lwt.t
end
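To illustrate what the signature asks of a backend, here is a minimal in-memory store satisfying it; this is purely a sketch under the assumption that the core library is linked as `Logarion`, not the file-based store the project actually uses:

```ocaml
(* The smallest possible Store.T: notes kept in a plain list. *)
module Memory : Logarion.Store.T with type t = Logarion.Note.t list = struct
  type t = Logarion.Note.t list
  let to_list ?order lens notes =
    let xs = List.map lens notes in
    match order with Some cmp -> List.sort cmp xs | None -> xs
  let note_with_id notes id =
    try Some (List.find (fun n -> Logarion.Meta.Id.compare n.Logarion.Note.meta.Logarion.Meta.uuid id = 0) notes)
    with Not_found -> None
  let note_with_alias notes alias =
    try Some (List.find (fun n -> Logarion.Meta.alias n.Logarion.Note.meta = alias) notes)
    with Not_found -> None
  let with_note _notes note = Lwt.return note  (* no persistence in this toy store *)
end
```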
src/jbuild (new file, 16 lines)
@@ -0,0 +1,16 @@
(executable
 ((name logarion_cli)
  (public_name logarion_cli)
  (modules logarion_cli)
  (libraries (logarion logarion.confix logarion.converters logarion.file re.str cmdliner bos))))

(install
 ((section share)
  (files (
   (../share/config.toml as config.toml)
   (../share/template/frontpage.mustache as template/frontpage.mustache)
   (../share/template/header.mustache as template/header.mustache)
   (../share/template/item.mustache as template/item.mustache)
   (../share/template/list.mustache as template/list.mustache)
   (../share/template/note.mustache as template/note.mustache)
   ))))
176
src/logarion_cli.ml
Normal file
176
src/logarion_cli.ml
Normal file
@ -0,0 +1,176 @@
|
|||||||
|
let version = "0.5"
|
||||||
|
open Cmdliner
|
||||||
|
open Logarion
|
||||||
|
module C = Archive.Configuration
|
||||||
|
module Lpath = Logarion.Lpath
|
||||||
|
|
||||||
|
let conf () =
|
||||||
|
let module Config = Confix.Config.Make (Confix.ConfixToml) in
|
||||||
|
let archive_res =
|
||||||
|
let open Confix.Config in
|
||||||
|
Confix.Config.Path.with_file "config.toml"
|
||||||
|
&> Config.from_path
|
||||||
|
|> Config.to_record C.of_config
|
||||||
|
in
|
||||||
|
match archive_res with
|
||||||
|
| Ok config -> config
|
||||||
|
| Error str -> prerr_endline str; exit 1
|
||||||
|
|
||||||
|
let create_dir dir = Bos.OS.Dir.create (Fpath.v dir)
|
||||||
|
|
||||||
|
let create_dir_msg ?(descr="") dir res =
|
||||||
|
let () = match res with
|
||||||
|
| Ok true -> print_endline ("Created " ^ descr ^ " directory " ^ dir)
|
||||||
|
| Ok false -> print_endline ("Reinitialise existing " ^ descr ^ " directory " ^ dir)
|
||||||
|
| Error (`Msg msg) -> prerr_endline @@ "Failed making " ^ descr ^ ". " ^ msg
|
||||||
|
in
|
||||||
|
res
|
||||||
|
|
||||||
|
let copy ?(recursive = false) src dst =
|
||||||
|
Bos.OS.Cmd.run (Bos.Cmd.(v "cp" %% (on recursive @@ v "-r") % src % dst))
|
||||||
|
|
||||||
|
let init _force =
|
||||||
|
let rec create_dirs = function
|
||||||
|
| [] -> ()
|
||||||
|
| (dir,descr)::tl ->
|
||||||
|
match create_dir dir |> create_dir_msg ~descr dir with
|
||||||
|
| Ok _ -> create_dirs tl
|
||||||
|
| Error _ -> ()
|
||||||
|
in
|
||||||
|
let dirs = [
|
||||||
|
".logarion", "Logarion";
|
||||||
|
".logarion/static", "static files";
|
||||||
|
".logarion/html-templates", "templates";
|
||||||
|
]
|
||||||
|
in
|
||||||
|
let toml_data =
|
||||||
|
let open Toml in
|
||||||
|
let open TomlTypes in
|
||||||
|
of_key_values [
|
||||||
|
key "archive",
|
||||||
|
TTable (
|
||||||
|
of_key_values [
|
||||||
|
key "title", TString "";
|
||||||
|
key "owner", TString (Bos.OS.Env.opt_var "USER" ~absent:"");
|
||||||
|
key "email", TString (Bos.OS.Env.opt_var "EMAIL" ~absent:"");
|
||||||
|
key "uuid", TString (Meta.Id.(generate () |> to_string));
|
||||||
|
]);
|
||||||
|
key "web",
|
||||||
|
TTable (
|
||||||
|
of_key_values [
|
||||||
|
key "url", TString "http://localhost:3666";
|
||||||
|
key "stylesheets", TArray ( NodeString ["main.css"] );
|
||||||
|
key "static_dir", TString ".logarion/static";
|
||||||
|
]);
|
||||||
|
key "templates", TTable (of_key_values []);
|
||||||
|
]
|
||||||
|
in
|
||||||
|
create_dirs dirs;
|
||||||
|
let config_file = open_out "config.toml" in
|
||||||
|
output_bytes config_file (Toml.Printer.string_of_table toml_data |> Bytes.of_string);
|
||||||
|
close_out config_file
|
||||||
|
|
||||||
|
let init_term =
|
||||||
|
let force =
|
||||||
|
let doc = "Initialise repository even if directory is non empty" in
|
||||||
|
Arg.(value & flag & info ["f"; "force"] ~doc)
|
||||||
|
in
|
||||||
|
Term.(const init $ force),
|
||||||
|
Term.info
|
||||||
|
"init" ~doc:"initialise a logarion repository in present directory"
|
||||||
|
~man:[ `S "DESCRIPTION"; `P "Create a repository in current directory" ]
|
||||||
|
|
||||||
|
let create_term =
  let title =
    Arg.(value & pos 0 string "" & info [] ~docv:"TITLE" ~doc:"Title for new article")
  in
  let f title =
    let conf = conf () in
    let t = match title with "" -> "Draft" | _ -> title in
    let note =
      let meta =
        let open Meta in
        let authors = AuthorSet.singleton Author.({ name = conf.C.owner; address = Uri.of_string conf.C.email }) in
        let date = Date.({ created = Some (Ptime_clock.now ()); published = None; edited = None }) in
        { (blank ()) with title = t; authors; date }
      in
      Note.({ (blank ()) with meta })
    in
    File.Lwt.with_note (File.store conf.C.repository) note
    |> Lwt_main.run
    |> ignore
  in
  Term.(const f $ title),
  Term.info "create"
    ~doc:"create a new article"
    ~man:[ `S "DESCRIPTION"; `P "Create a new article, with title 'Draft' when none provided"]

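(* Export the archive as static files: each published note becomes an HTML
   page, the listed metadata drives log.html, index.html and feed.atom, and
   the contents of .logarion/static are copied into the target directory. *)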
let convert directory =
  let module Config = Confix.Config.Make (Confix.ConfixToml) in

  let toml_config =
    let open Confix.Config in
    Path.with_file "config.toml"
    |> function Ok cfg -> Config.from_path cfg | Error str -> prerr_endline str; exit 1
  in
  let config = Config.to_record_or_exit Logarion.Archive.Configuration.of_config toml_config in

  let module L = Logarion.Archive.Make(File) in
  let store = File.store config.repository in
  let archive = L.{ config; store } in
  let notes =
    List.filter Meta.(fun n -> CategorySet.published n.Note.meta.categories)
    @@ File.to_list L.note_lens archive.store
  in
  let metas =
    List.filter Meta.(fun m -> CategorySet.published m.categories && CategorySet.listed m.categories)
    @@ File.to_list ~order:(L.recency_order) L.meta_lens archive.store
  in

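  (* Template converters render the page header, body and full pages; linker
     relativises root-anchored paths against "/" (e.g. "/main.css" becomes
     "main.css"), falling back to the empty string. *)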
  let template_config = toml_config in
  let module T = Converters.Template in
  let header = T.header_converter template_config in
  let body = T.body_converter template_config in
  let style = T.default_style in
  let linker x = match Fpath.(relativize ~root:(v "/") (v x)) with Some l -> Fpath.to_string l | None -> "" in
  let page_of_log metas = T.page_of_log linker header config metas in
  let page_of_index metas = T.page_of_index linker header config metas in
  let page_of_note note = T.page_of_note linker header body config note in
  let path_of_note note = directory ^ "/" ^ Meta.alias note.Note.meta ^ ".html" in
  let file_creation path content =
    let out = open_out path in
    output_string out content;
    close_out out
  in
  match create_dir directory |> create_dir_msg ~descr:"export" directory with
  | Error _ -> ()
  | Ok _ ->
    match copy ~recursive:true ".logarion/static" directory with
    | Ok _ ->
      let note_write note = file_creation (path_of_note note) (page_of_note ~style note) in
      List.iter note_write notes;
      file_creation (directory ^ "/log.html") (page_of_log ~style metas);
      file_creation (directory ^ "/index.html") (page_of_index ~style metas);
      file_creation (directory ^ "/feed.atom") (Converters.Atom.feed config "/" (L.note_with_id archive) metas)
    | Error (`Msg m) -> prerr_endline m

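(* Cmdliner term for `logarion convert [DIRECTORY]`, defaulting to ./html-conversion. *)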
let convert_term =
  let directory =
    Arg.(value & pos 0 string "html-conversion" & info [] ~docv:"DIRECTORY" ~doc:"Directory to convert to")
  in
  Term.(const convert $ directory),
  Term.info
    "convert" ~doc:"convert archive to HTML"
    ~man:[ `S "DESCRIPTION"; `P "Convert the archive into HTML pages in the given directory" ]

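(* Running logarion without a sub-command shows the manual; init, create and
   convert are the available sub-commands. *)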
let default_cmd =
  Term.(ret (const (`Help (`Pager, None)))),
  Term.info "logarion" ~version ~doc:"an article collection & publishing system"
    ~man:[ `S "BUGS";
           `P "Submit bugs <mailto:logarion@lists.orbitalfox.eu?subject=[Issue] summary-here>"; ]

let cmds = [ init_term; create_term; convert_term ]

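(* Entry point: seed the global PRNG (presumably for Meta.Id.generate) and
   dispatch on the chosen sub-command. *)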
let () =
  Random.self_init ();
  match Term.eval_choice default_cmd cmds with `Error _ -> exit 1 | _ -> exit 0
112
src/store/file.ml
Normal file
@ -0,0 +1,112 @@
let extensions = [ ".md"; ".org" ]

open Logarion
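(* Read a whole file into a string and parse it into a note. *)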
let load f =
  let ic = open_in (Fpath.to_string f) in
  let n = in_channel_length ic in
  let s = Bytes.create n in
  really_input ic s 0 n;
  close_in ic;
  Bytes.to_string s

let note path = Lpath.fpath_of_note path |> load |> Note.of_string

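(* A store is simply the repository directory path; note files are recognised
   by their extension. *)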
type t = { repo_path : Lpath.repo_t }

let note_filetype name =
  try Fpath.(mem_ext extensions @@ v name) with
  | Invalid_argument _ -> false

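(* Fold over every note file in the repository, applying lens_fn to each note
   that parses; files with syntax errors are reported on stderr and skipped.
   The optional order sorts the resulting list. *)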
let to_list ?(order) lens_fn store =
  let repo_path = store.repo_path in
  let cons_valid_meta list path =
    try
      let note = note (Lpath.note_of_basename repo_path path) in
      lens_fn note :: list
    with Note.Syntax_error str -> prerr_endline str; list
  in
  Lpath.string_of_repo repo_path
  |> Sys.readdir
  |> Array.to_list
  |> List.filter note_filetype
  |> List.fold_left cons_valid_meta []
  |> match order with
     | Some fn -> List.fast_sort fn
     | None -> (fun x -> x)

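(* Linear scan for the note whose metadata carries the given UUID. *)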
let note_with_id store id =
  let repo_path = store.repo_path in
  let note_of_path path = note (Lpath.note_of_basename repo_path path) in
  let with_id path =
    try
      let note = note_of_path path in
      note.Note.meta.Meta.uuid = id
    with Note.Syntax_error str -> prerr_endline str; false
  in
  let notes =
    Lpath.string_of_repo repo_path
    |> Sys.readdir
    |> Array.to_list
    |> List.filter note_filetype
  in
  try Some (note_of_path (List.find with_id notes))
  with Not_found -> None

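(* Of all notes sharing an alias, return the most recently dated one. *)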
let note_with_alias store alias =
  let repo_path = store.repo_path in
  let cons_valid_meta list path =
    try (note (Lpath.note_of_basename repo_path path)) :: list
    with Note.Syntax_error str -> prerr_endline str; list
  in
  let recency_order a b = Meta.(Date.compare b.date a.date) in
  let notes =
    Lpath.string_of_repo repo_path
    |> Sys.readdir
    |> Array.to_list
    |> List.filter note_filetype
    |> List.fold_left cons_valid_meta []
    |> List.filter (fun note -> Meta.alias note.Note.meta = alias)
    |> List.fast_sort (fun a b -> recency_order a.Note.meta b.Note.meta)
  in
  try Some (List.hd notes)
  with Failure _ -> None

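(* Not implemented yet: the path of an existing note cannot be recovered from
   its UUID, so Lwt.with_note below always takes the fresh-file branch. *)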
let notepath_with_id _store _id = None

let store repo_path = { repo_path }

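(* Lwt-based persistence: with_note serialises the note to a filename versioned
   on its title; if a note with the same UUID were found under a different
   title, the old file would be unlinked after the new one is written. *)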
module Lwt = struct
  let of_filename f =
    let open Lwt in
    Lwt_io.(open_file ~mode:(Input) f >|= read_lines)
    >|= (fun stream -> Lwt_stream.fold (^) stream "")

  let with_note store new_note =
    let extension = List.hd extensions in
    let open Lwt in
    let open Lwt.Infix in
    let store =
      let write_note out = Lwt_io.write out (Note.to_string new_note) in
      match notepath_with_id store new_note.Note.meta.Meta.uuid with
      | Some previous_path ->
        let filepath =
          let open Note in
          let open Meta in
          if (note previous_path).meta.title <> new_note.meta.title
          then Lpath.versioned_basename_of_title store.repo_path extension new_note.meta.title
          else previous_path
        in
        Lwt_io.with_file ~mode:Lwt_io.output (Lpath.string_of_note filepath) write_note
        >>= (fun () ->
            if previous_path <> filepath
            then Lwt_unix.unlink @@ Lpath.string_of_note previous_path
            else Lwt.return_unit
          )
      | None ->
        let filepath = Lpath.versioned_basename_of_title store.repo_path extension new_note.meta.title in
        Lwt_io.with_file ~mode:Lwt_io.output (Lpath.string_of_note filepath) write_note
    in
    store >>= (fun () -> return new_note)
end

let with_note = Lwt.with_note
7
src/store/jbuild
Normal file
@ -0,0 +1,7 @@
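; The file store builds as the public library logarion.file; it depends on the
; core logarion library and on lwt / lwt.unix for the Lwt-based writes.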
(jbuild_version 1)

(library
 ((name file)
  (public_name logarion.file)
  (libraries (logarion lwt lwt.unix))
  ))