" ^ esc text.Logarion.Text.title ^ ""
let authors text =
- let u acc addr = acc ^ element "uri" (Uri.to_string addr) in
+ let u acc addr = acc ^ element "uri" addr in
let open Logarion in
let fn txt a =
a ^ "" ^ (opt_element "name" @@ esc txt.Person.name)
@@ -51,7 +51,7 @@ let feed title archive_id base_url alternate_type texts =
{||}
^ title ^ {|urn:uuid:|} ^ Logarion.Id.to_string archive_id ^ ""
- ^ Ptime.to_rfc3339 (Ptime_clock.now ()) ^ "\n"
+ ^ self ^ {|" />urn:uuid:|} ^ archive_id ^ ""
+ ^ Logarion.Date.now () ^ "\n"
^ List.fold_left (fun acc t -> acc ^ entry base_url t) "" texts
^ ""
diff --git a/cli/authors.ml b/cli/authors.ml
new file mode 100644
index 0000000..32adcf4
--- /dev/null
+++ b/cli/authors.ml
@@ -0,0 +1,17 @@
+open Logarion
+let authors r topics_opt =
+ let predicates = Archive.(predicate topics topics_opt) in
+ let predicate text = List.fold_left (fun a e -> a && e text) true predicates in
+ let author_union a (e, _) = Person.Set.union a e.Text.authors in
+ let s = File_store.fold ~r ~predicate author_union Person.Set.empty in
+ Person.Set.iter (fun x -> print_endline (Person.to_string x)) s
+
+open Cmdliner
+let term =
+ let recurse = Arg.(value & flag & info ["R"]
+ ~doc:"include texts in subdirectories too") in
+ let topics = Arg.(value & opt (some string) None & info ["topics"]
+ ~docv:"TOPICS" ~doc:"display authors who have written on topics") in
+ Term.(const authors $ recurse $ topics),
+ Term.info "authors" ~doc:"list authors"
+ ~man:[ `S "DESCRIPTION"; `P "List author names" ]
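
The filter built here follows the same predicate pattern used by listing.ml and topics.ml below. A sketch of composing both kinds of filters, with illustrative query strings:

    (* Illustrative: combine author and topic filters into one predicate. *)
    open Logarion
    let keep =
      let predicates = Archive.(predicate authored (Some "orbifx")
                                @ predicate topics (Some "ocaml,unix")) in
      fun text -> List.fold_left (fun a p -> a && p text) true predicates
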
diff --git a/cli/cli.ml b/cli/cli.ml
deleted file mode 100644
index ba85022..0000000
--- a/cli/cli.ml
+++ /dev/null
@@ -1,142 +0,0 @@
-let version = "%%VERSION%%"
-
-open Cmdliner
-open Logarion
-module A = Logarion.Archive.Make(File_store)
-
-(* TODO: merge in lib/ so other modules can use (.e.g HTTP pull) *)
-let text_list order_opt reverse_opt number_opt values_opt authors_opt topics_opt =
- match A.of_path (Sys.getcwd ()) with
- | Error msg -> prerr_endline msg
- | Ok archive ->
- let predicates = A.predicate A.authored authors_opt @ A.predicate A.topics topics_opt in
- let predicate text = List.fold_left (fun a e -> a && e text) true predicates in
- let print_fold ~predicate fn =
- let ts = A.fold ~predicate fn String_set.empty archive in
- String_set.iter (print_endline) ts
- in
- let list_text (t, fname) = print_endline (Text.short_id t ^ " " ^ t.Text.title ^ "\t" ^ fname) in
- match values_opt with
- | Some "topics" -> print_fold ~predicate (fun a (e,_) -> (String_set.union a (Text.set "topics" e)))
- | Some "authors" ->
- let s = A.fold ~predicate (fun a (e,_) -> Person.Set.union a e.Text.authors) Person.Set.empty archive in
- print_endline @@ Person.Set.to_string s
- | Some x -> prerr_endline @@ "Unrecognised field: " ^ x
- | None -> match order_opt with
- | false -> A.iter ~predicate list_text archive
- | true ->
- let order = match reverse_opt with true -> A.newest | false -> A.oldest in
- match number_opt with
- | Some number -> A.iter ~predicate ~order ~number list_text archive
- | None -> A.iter ~predicate ~order list_text archive
-
-let list_term =
- let reverse = Arg.(value & flag & info ["r"] ~doc:"reverse order") in
- let time = Arg.(value & flag & info ["t"] ~doc:"Sort by time, newest first") in
- let number = Arg.(value & opt (some int) None & info ["n"] ~docv:"NUMBER" ~doc:"number of entries to list") in
- let values = Arg.(value & opt (some string) None & info ["values"] ~docv:"HEADER-FIELD" ~doc:"unique values for header field") in
- let authed = Arg.(value & opt (some string) None & info ["authored"] ~docv:"AUTHORS" ~doc:"texts by authors") in
- let topics = Arg.(value & opt (some string) None & info ["topics"] ~docv:"TOPICS" ~doc:"texts with topics") in
- Term.(const text_list $ time $ reverse $ number $ values $ authed $ topics),
- Term.info "list" ~doc:"list texts" ~man:[ `S "DESCRIPTION"; `P "List texts" ]
-
-let print_last search_mine =
- let last a ((t,_) as pair) = match a with None -> Some pair
- | Some (t', _) as pair' -> if Text.newest t t' > 0 then Some pair else pair' in
- match A.of_path (Sys.getcwd ()) with
- | Error msg -> prerr_endline msg
- | Ok archive ->
- let last_mine a ((t,_) as pair) =
- let open Text in
- match a with None ->
- if Person.Set.subset archive.A.archivists t.authors then Some pair else None
- | Some (t', _) as pair' ->
- if Text.newest t t' > 0 && Person.Set.subset archive.A.archivists t'.authors
- then Some pair else pair'
- in
- match A.fold (if search_mine then last_mine else last) None archive with
- | Some (_,f) -> print_endline f | None -> ()
-
-let last_term =
- let mine = Arg.(value & flag & info ["mine"] ~doc:"last text authored by me") in
- Term.(const print_last $ mine),
- Term.info "last" ~doc:"most recent test" ~man:[ `S "DESCRIPTION"; `P "Print the filename of most recent text" ]
-
-let split_filetypes files =
- let acc (dirs, files) x = if Sys.is_directory x then (x::dirs, files) else (dirs, x::files) in
- List.fold_left acc ([],[]) files
-
-let file files = match A.of_path "." with
- | Error msg -> prerr_endline msg
- | Ok _archive ->
- let dirs, files = split_filetypes files in
- let _link_as_named dir file = Unix.link file (dir ^"/"^ file) in
- let link_with_id dir file =
- match File_store.to_text file with Error s -> prerr_endline s
- | Ok t -> Unix.link file (dir ^"/"^ String.sub (Id.to_string (t.Text.uuid)) 0 8 ^".txt")
- in
- let link = link_with_id in
- List.iter (fun d -> List.iter (link d) files) dirs
-
-let file_term =
- let files = Arg.(value & pos_all string [] & info [] ~doc:"filenames") in
- let doc = "file texts in directories" in
- let man = [ `S "DESCRIPTION"; `P doc ] in
- Term.(const file $ files), Term.info "file" ~doc ~man
-
-let unfile files = match A.of_path "." with
- | Error msg -> prerr_endline msg
- | Ok _archive ->
- let dirs, files = split_filetypes files in
- let unlink dir file = try Unix.unlink (dir ^"/"^ file) with Unix.(Unix_error(ENOENT,_,_))-> () in
- List.iter (fun d -> List.iter (unlink d) files) dirs
-
-let unfile_term =
- let files = Arg.(value & pos_all string [] & info [] ~doc:"filenames") in
- let doc = "unfile texts from directories" in
- let man = [ `S "DESCRIPTION"; `P doc ] in
- Term.(const unfile $ files), Term.info "unfile" ~doc ~man
-
-let init _force = File_store.init ()
-
-let init_term =
- let force = Arg.(value & flag & info ["f"; "force"] ~doc:"Initialise even if directory is not empty") in
- let doc = "initialise a text repository in present directory" in
- let man = [ `S "DESCRIPTION"; `P "Start an archive in current directory" ] in
- Term.(const init $ force), Term.info "init" ~doc ~man
-
-let new_term =
- let f title topics_opt interactive =
- match A.of_path "." with
- | Error m -> prerr_endline m
- | Ok archive ->
- let t = match title with "" -> "Draft" | _ -> title in
- let authors = archive.archivists in
- let date = Date.({ created = Some (Ptime_clock.now ()); edited = None }) in
- let text = { (Text.blank ()) with title = t; authors; date } in
- let text = try Text.with_str_set text "Topics" (Option.get topics_opt) with _ -> text in
- match File_store.with_text archive text with
- | Error s -> prerr_endline s
- | Ok (filepath, _note) ->
- match interactive with false -> print_endline filepath
- | true ->
- print_endline @@ "Created: " ^ filepath;
- let _code = Sys.command ("$EDITOR " ^ filepath) in
- ()
- in
- let title = Arg.(value & pos 0 string "" & info [] ~docv:"TITLE" ~doc:"Title for new article") in
- let topics= Arg.(value & opt (some string) None & info ["t"; "topics"] ~docv:"TOPICS" ~doc:"Topics for new article") in
- let inter = Arg.(value & flag & info ["i"; "interactive"] ~doc:"Prompts through the steps of creation and publication") in
- let man = [ `S "DESCRIPTION"; `P "Create a new article, with title 'Draft' when none provided"] in
- Term.(const f $ title $ topics $ inter), Term.info "new" ~doc:"create a new article" ~man
-
-let default_cmd =
- let doc = "text archival & publishing" in
- let man = [ `S "BUGS"; `P "Submit bugs exit 1 | _ -> exit 0
diff --git a/cli/conversion.ml b/cli/conversion.ml
new file mode 100644
index 0000000..a69122f
--- /dev/null
+++ b/cli/conversion.ml
@@ -0,0 +1,14 @@
+open Logarion
+type t = {
+ id: string; dir: string;
+ kv: string Store.KV.t;
+ topic_roots: string list;
+ topics: (String_set.t * String_set.t) Topic_set.Map.t;
+ texts: Text.t list
+}
+
+type fn_t = {
+ ext: string;
+ page: t -> Logarion.Text.t -> string;
+ indices: t -> unit;
+}
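
A sketch of a converter record matching Conversion.fn_t above; the plain-text behaviour is illustrative and not part of this patch (html.ml and gemini.ml provide the real instances):

    let txt_converter = Conversion.{
      ext = ".txt";                                            (* appended to the text's short id *)
      page = (fun _repo text -> Logarion.Text.to_string text); (* one output file per text *)
      indices = (fun _repo -> ());                             (* writes no index files *)
    }
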
diff --git a/cli/convert.ml b/cli/convert.ml
index 2eaa077..a2fc899 100644
--- a/cli/convert.ml
+++ b/cli/convert.ml
@@ -1,90 +1,67 @@
open Logarion
-module A = Archive.Make (Logarion.File_store)
-let convert_modified source dest fn title text =
- if (try Unix.((stat dest).st_mtime < (stat source).st_mtime) with _-> true)
- then (File_store.file dest (fn title text); true) else false
+let is_older source dest = try
+ Unix.((stat dest).st_mtime < (stat source).st_mtime) with _-> true
-let word_fname dir text = dir ^ "/" ^ Text.alias text
-let id_fname dir text = dir ^ "/" ^ Text.short_id text
+let convert cs r (text, files) = match Text.str "Content-Type" text with
+ | "" | "text/plain" ->
+ let source = List.hd files in
+ let dest = Filename.concat r.Conversion.dir (Text.short_id text) in
+ List.fold_left
+ (fun a f ->
+ let dest = dest ^ f.Conversion.ext in
+ if is_older source dest then (File_store.file dest (f.Conversion.page r text); true) else false
+ || a)
+ false cs
+ | x -> Printf.eprintf "Can't convert Content-Type: %s file: %s\n" x text.Text.title; false
-let writer types dir name (text,store_item) = (* todo: single_parser -> [files] *)
-(* convert_modified store_item idfilename (fun _title -> Text.to_string) text.title text;*)
- let h = if "htm" = types || "all" = types then
- convert_modified store_item (id_fname dir text ^ ".htm") Html.page name text
- else false in
- let g = if "gmi" = types || "all" = types then
- convert_modified store_item (id_fname dir text ^ ".gmi") Gemini.page name text
- else false in
- h || g
+let converters types kv =
+ let t = [] in
+ let t = if ("htm" = types || "all" = types) then
+ (let htm = Html.init kv in
+ Conversion.{ ext = Html.ext; page = Html.page htm; indices = Html.indices htm })::t
+ else t in
+ let t = if ("gmi" = types || "all" = types) then
+ Conversion.{ ext = Gemini.ext; page = Gemini.page; indices = Gemini.indices}::t else t in
+ t
-let index_writer types noindex dir archive topic_roots topic_map texts =
- let name = archive.A.name in
- let file path = File_store.file (dir ^ path) in
- file "/index.pck" (Header_pack.pack archive texts);
- if not noindex && ("htm" = types || "all" = types) then (
- let index_name = try Store.KV.find "HTML-index" archive.File_store.kv
- with Not_found -> "index.html" in
- if index_name <> "" then
- file ("/"^index_name) (Html.topic_main_index name topic_roots texts);
- file "/index.date.htm" (Html.date_index name texts);
- List.iter
- (fun topic -> file ("/index." ^ topic ^ ".htm")
- (Html.topic_sub_index name topic_map topic texts))
- topic_roots;
- let base_url = try Store.KV.find "HTTP-URL" archive.File_store.kv
- with Not_found -> prerr_endline "Missing `HTTP-URL:` in config"; "" in
- file "/feed.atom" (Atom.feed archive.A.name archive.A.id base_url "text/html" texts)
- );
- if not noindex && ("gmi" = types || "all" = types) then (
- let index_name = try Store.KV.find "Gemini-index" archive.File_store.kv
- with Not_found -> "index.gmi" in
- if index_name <> "" then
- file ("/"^index_name) (Gemini.topic_main_index name topic_roots texts);
- file "/index.date.gmi" (Gemini.date_index name texts);
- List.iter
- (fun topic -> file ("/index." ^ topic ^ ".gmi")
- (Gemini.topic_sub_index name topic_map topic texts))
- topic_roots;
- let base_url = try Store.KV.find "GEMINI-URL" archive.File_store.kv
- with Not_found -> prerr_endline "Missing `GEMINI-URL:` in config"; "" in
- file "/gmi.atom" (Atom.feed archive.A.name archive.A.id base_url "text/gemini" texts)
- )
-
-let txt_writer types dir name ((text, _store_item) as r) =
- match Text.str "Content-Type" text with
- | "" | "text/plain" -> writer types dir name r
- | x -> prerr_endline ("Can't convert Content-Type: "^x^" file: " ^text.Text.title); false
-
-let convert_all types noindex dir archive =
- let name = archive.A.name in
- let fn (ts,ls,acc) ((elt,_) as r) =
- (Topic_set.to_map ts (Text.set "topics" elt)),
- elt::ls, if txt_writer types dir name r then acc+1 else acc in
+let convert_all converters noindex dir id kv =
let empty = Topic_set.Map.empty in
- let topic_map, texts, count = A.(fold ~order:newest fn (empty,[],0) archive) in
- let topic_roots = Topic_set.roots topic_map in
- index_writer types noindex dir archive topic_roots topic_map texts;
- print_endline @@ "Converted: " ^ string_of_int (count)
- ^ "\nIndexed: " ^ string_of_int (List.length texts);
- Ok ()
+ let repo = Conversion.{ id; dir; kv; topic_roots = []; topics = empty; texts = [] } in
+ let fn (ts,ls,acc) ((elt,_) as r) =
+ (Topic_set.to_map ts (Text.set "topics" elt)), elt::ls,
+ if convert converters repo r then acc+1 else acc in
+ let topics, texts, count = File_store.(fold ~dir ~order:newest fn (empty,[],0)) in
+ let topic_roots = try List.rev @@ String_set.list_of_csv (Store.KV.find "Topics" kv)
+ with Not_found -> Topic_set.roots topics in
+ let repo = Conversion.{ repo with topic_roots; topics; texts } in
+ if not noindex then List.iter (fun c -> c.Conversion.indices repo) converters;
+ Printf.printf "Converted: %d Indexed: %d\n" count (List.length texts)
-let convert_dir types noindex cmd_dir =
- let (>>=) = Result.bind in
- let with_dir dir =
- Result.map_error (function `Msg m -> m)
- Logarion.File_store.Directory.(directory dir |> print ~descr:"export" dir) in
- (A.of_path "."
- >>= fun archive -> (match cmd_dir with "" -> Error "unspecified export dir" | x -> Ok x)
- >>= fun dir -> with_dir dir
- >>= fun _ -> convert_all types noindex dir { archive with store = dir })
- |> function Ok () -> () | Error x -> prerr_endline x
+let convert_dir types noindex dir =
+ match dir with "" -> prerr_endline "unspecified dir"
+ | dir ->
+ let fname = Filename.concat dir "index.pck" in
+ match Header_pack.of_string @@ File_store.to_string fname with
+ | Error s -> prerr_endline s
+ | Ok { info; _ } ->
+ let kv = let f = Filename.concat dir ".convert.conf" in (* TODO: better place to store convert conf? *)
+ if Sys.file_exists f then File_store.of_kv_file f else Store.KV.empty in
+ let kv = if Store.KV.mem "Title" kv then kv
+ else Store.KV.add "Title" info.Header_pack.title kv in
+ let kv = Store.KV.add "Locations" (String.concat ";\n" info.Header_pack.locations) kv in
+ let cs = converters types kv in
+ convert_all cs noindex dir info.Header_pack.id kv
open Cmdliner
-
let term =
- let directory = Arg.(value & pos 0 string "" & info [] ~docv:"target directory" ~doc:"Directory to convert into") in
- let types = Arg.(value & opt string "all" & info ["t"; "type"] ~docv:"TYPES" ~doc:"Convert to type") in
- let noindex = Arg.(value & flag & info ["noindex"] ~doc:"don't write an index when converting") in
+ let directory = Arg.(value & pos 0 string "" & info [] ~docv:"target directory"
+ ~doc:"Directory to convert") in
+ let types = Arg.(value & opt string "all" & info ["t"; "type"] ~docv:"TYPES"
+ ~doc:"Convert to type") in
+ let noindex = Arg.(value & flag & info ["noindex"]
+ ~doc:"don't create indices in target format") in
Term.(const convert_dir $ types $ noindex $ directory),
- Term.info "convert" ~doc:"convert archive" ~man:[ `S "DESCRIPTION"; `P "Convert Logarion archive" ]
+ Term.info "convert" ~doc:"convert txts"
+ ~man:[ `S "DESCRIPTION"; `P "Convert texts within a directory to another format.
+ Directory must contain an index.pck. Run `txt index` first." ]
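
A sketch of the key-value configuration the converters consult, built in memory rather than read from .convert.conf; the values are illustrative. convert_dir fills in "Title" and "Locations" from index.pck when they are absent.

    open Logarion
    let kv = Store.KV.(empty
      |> add "Title" "Example archive"
      |> add "Locations" "https://example.org/txt;\ngemini://example.org/txt"
      |> add "HTM-index" "index.html"      (* "" disables the HTML index page *)
      |> add "Gemini-index" "index.gmi")
    let both = converters "all" kv         (* builds the HTML and Gemini converters above *)
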
diff --git a/cli/dune b/cli/dune
index 3d79834..4e28cd5 100644
--- a/cli/dune
+++ b/cli/dune
@@ -1,5 +1,5 @@
(executable
- (name cli)
+ (name txt)
(public_name txt)
- (modules cli convert html atom gemini)
- (libraries logarion logarion.http re.str cmdliner bos ptime ptime.clock.os text_parse.converter text_parse.parsers msgpck))
+ (modules txt authors convert conversion file index last listing new topics html atom gemini pull)
+ (libraries text_parse.converter text_parse.parsers logarion msgpck curl str cmdliner))
diff --git a/cli/file.ml b/cli/file.ml
new file mode 100644
index 0000000..dcc1845
--- /dev/null
+++ b/cli/file.ml
@@ -0,0 +1,39 @@
+let split_filetypes files =
+ let acc (dirs, files) x = if Sys.is_directory x
+ then (x::dirs, files) else (dirs, x::files) in
+ List.fold_left acc ([],[]) files
+
+open Logarion
+let file files =
+ let dirs, files = split_filetypes files in
+ let _link_as_named dir file = Unix.link file (Filename.concat dir file) in
+ let link_with_id dir file =
+ match File_store.to_text file with Error s -> prerr_endline s
+ | Ok t -> Unix.link file (Filename.concat dir (Text.short_id t^".txt")) in
+ let link = link_with_id in
+ List.iter (fun d -> List.iter (link d) files) dirs
+
+let unfile files =
+ let dirs, files = split_filetypes files in
+ let unlink dir file = try Unix.unlink (Filename.concat dir file)
+ with Unix.(Unix_error(ENOENT,_,_))-> () in
+ List.iter (fun d -> List.iter (unlink d) files) dirs
+
+open Cmdliner
+let term =
+ let files = Arg.(value & pos_all string [] & info []
+ ~docv:"text filenames and subdirectories") in
+ Term.(const file $ files), Term.info "file"
+ ~doc:"file texts in subdirectories"
+ ~man:[ `S "DESCRIPTION"; `P "Files all texts in parameter in every
+ directory in parameter, using hardlinks.
+
+ Use it to create sub-repositories for sharing or converting" ]
+
+let unfile_term =
+ let files = Arg.(value & pos_all string [] & info []
+ ~docv:"text filenames and subdirectories") in
+ Term.(const unfile $ files), Term.info "unfile"
+ ~doc:"unfile texts from subdirectories"
+ ~man:[ `S "DESCRIPTION"; `P "unfile texts in parameter from
+ directories in parameter, by removing hardlinks" ]
diff --git a/cli/gemini.ml b/cli/gemini.ml
index 02bde59..f43fcb0 100644
--- a/cli/gemini.ml
+++ b/cli/gemini.ml
@@ -1,9 +1,17 @@
-let page _archive_title text =
+let ext = ".gmi"
+
+module GeminiConverter = struct
+ include Converter.Gemini
+ let angled_uri u a = if String.sub u 0 10 <> "urn:txtid:" then
+ angled_uri u a else angled_uri (String.(sub u 10 (length u - 10)) ^ ext) a
+end
+
+let page _conversion text =
let open Logarion.Text in
"# " ^ text.title
^ "\nAuthors: " ^ Logarion.Person.Set.to_string text.authors
^ "\nDate: " ^ Logarion.Date.(pretty_date @@ listing text.date)
- ^ let module T = Parsers.Plain_text.Make (Converter.Gemini) in
+ ^ let module T = Parsers.Plain_text.Make (GeminiConverter) in
"\n" ^ T.of_string text.body ""
let date_index title meta_list =
@@ -30,8 +38,9 @@ let to_dated_links ?(limit) meta_list =
^ m.Logarion.Text.title ^ "\n")
"" meta_list
-let topic_link root topic =
- "=> index." ^ root ^ ".gmi " ^ String.capitalize_ascii topic ^ "\n"
+let topic_link root topic =
+ let replaced_space = String.map (function ' '->'+' | x->x) in
+ "=> index." ^ replaced_space root ^ ".gmi " ^ String.capitalize_ascii topic ^ "\n"
let text_item path meta =
let open Logarion in
@@ -71,3 +80,25 @@ let topic_main_index title topic_roots metas =
let topic_sub_index title topic_map topic_root metas =
"# " ^ title ^ "\n\n"
^ listing_index topic_map [topic_root] "" metas
+
+let indices r =
+ let open Logarion in
+ let file name = File_store.file (Filename.concat r.Conversion.dir name) in
+ let index_name = try Store.KV.find "Gemini-index" r.kv with Not_found -> "index.gmi" in
+ let title = try Store.KV.find "Title" r.Conversion.kv with Not_found -> "" in
+
+ if index_name <> "" then
+ file index_name (topic_main_index title r.topic_roots r.texts);
+
+ file "index.date.gmi" (date_index title r.texts);
+
+ List.iter
+ (fun topic -> file ("index." ^ topic ^ ".gmi")
+ (topic_sub_index title r.topics topic r.texts))
+ r.topic_roots;
+
+ let base_url = try
+ let _i = Str.(search_forward (regexp "gemini?://[^;]*") (Store.KV.find "Locations" r.kv) 0) in
+ Str.(matched_string (Store.KV.find "Locations" r.kv))
+ with Not_found -> prerr_endline "Missing location for Gemini"; "" in
+ file "gmi.atom" (Atom.feed title r.id base_url "text/gemini" r.texts)
diff --git a/cli/html.ml b/cli/html.ml
index fb616b8..880dbbe 100644
--- a/cli/html.ml
+++ b/cli/html.ml
@@ -1,26 +1,57 @@
-let wrap (title:string) (subtitle:string) body =
- {||}
- ^ {||}
- ^ subtitle ^ " | " ^ title
- ^ {||}
- ^ {||}
- ^ {||}
- ^ {||}
- ^ {||} ^ title
- ^ {||} ^ body
- ^ ""
+type templates_t = { header: string option; footer: string option }
+type t = { templates : templates_t }
+
+let ext = ".htm"
+let empty_templates = { header = None; footer = None }
+let default_opts = { templates = empty_templates }
+
+let init kv =
+ let open Logarion in
+ let header = match Store.KV.find "HTM-header" kv with
+ | fname -> Some (File_store.to_string fname)
+ | exception Not_found -> None in
+ let footer = match Store.KV.find "HTM-footer" kv with
+ | fname -> Some (File_store.to_string fname)
+ | exception Not_found -> None in
+ { templates = { header; footer} }
+
+let wrap c htm text_title body =
+ let site_title = try Logarion.Store.KV.find "Title" c.Conversion.kv
+ with Not_found -> "" in
+ let replace x = let open Str in
+ global_replace (regexp "{{archive-title}}") site_title x
+ |> global_replace (regexp "{{text-title}}") text_title
+ in
+ let header = match htm.templates.header with
+ | Some x -> replace x
+ | None -> "" ^ site_title ^
+ ""
+ in
+ let footer = match htm.templates.footer with None -> "" | Some x -> replace x in
+ "" ^ text_title ^ " ⢠" ^ site_title ^ "\n\
+ \
+ \
+ \
+ \n" ^ header ^ body ^ footer ^ ""
let topic_link root topic =
let replaced_space = String.map (function ' '->'+' | x->x) in
- {||}
+ ""
^ String.capitalize_ascii topic ^ ""
-let page archive_title text =
+module HtmlConverter = struct
+ include Converter.Html
+ let angled_uri u a = if String.sub u 0 10 <> "urn:txtid:" then
+ angled_uri u a else angled_uri (String.(sub u 10 (length u - 10)) ^ ext) a
+end
+
+let page htm conversion text =
let open Logarion in
let open Text in
- let module T = Parsers.Plain_text.Make (Converter.Html) in
+ let module T = Parsers.Plain_text.Make (HtmlConverter) in
let sep_append ?(sep=", ") a x = match a,x with "",_ -> x | _, "" -> a | _ -> a ^ sep ^ x in
- let opt_kv key value = if String.length value > 0 then "
" ^ key ^ "
" ^ value else "" in
+ let opt_kv key value = if String.length value > 0
+ then "
" ^ key ^ "
" ^ value else "" in
(* let author acc auth = sep_append acc Person.(auth.name ^ " ") in*)
let authors = (Person.Set.to_string text.authors ^ " ") in
let keywords = str_set "keywords" text in
@@ -38,9 +69,9 @@ let page archive_title text =
^ opt_kv "Series: " (str_set "series" text)
^ opt_kv "Topics: " (topic_links (set "topics" text))
^ opt_kv "Keywords: " keywords
- ^ opt_kv "Id: " (Id.to_string text.uuid)
+ ^ opt_kv "Id: " text.id
^ {|
")
+ wrap conversion htm text.title ((T.of_string text.body header) ^ "")
let to_dated_links ?(limit) meta_list =
let meta_list = match limit with
@@ -57,10 +88,10 @@ let to_dated_links ?(limit) meta_list =
^ {||} ^ m.Logarion.Text.title ^ " ")
"" meta_list
-let date_index ?(limit) title meta_list =
+let date_index ?(limit) conv htm meta_list =
match limit with
- | Some limit -> wrap title "Index" (to_dated_links ~limit meta_list)
- | None -> wrap title "Index" (to_dated_links meta_list)
+ | Some limit -> wrap conv htm "Index" (to_dated_links ~limit meta_list)
+ | None -> wrap conv htm "Index" (to_dated_links meta_list)
let fold_topic_roots topic_roots =
let list_item root t = "
" ^ topic_link root t in
@@ -112,14 +143,35 @@ let listing_index topic_map topic_roots path metas =
in
""
-let topic_main_index title topic_roots metas =
- wrap title "Topics"
+let topic_main_index conv htm topic_roots metas =
+ wrap conv htm "Topics"
(fold_topic_roots topic_roots
^ "|} )
-let topic_sub_index title topic_map topic_root metas =
- wrap title topic_root
+let topic_sub_index conv htm topic_map topic_root metas =
+ wrap conv htm topic_root
(fold_topics topic_map [topic_root] metas
(* ^ {||}^ String.capitalize_ascii topic_root ^{| feed |}*)
^ listing_index topic_map [topic_root] "" metas)
+
+open Logarion
+let indices htm c =
+ let file name = Logarion.File_store.file (Filename.concat c.Conversion.dir name) in
+ let index_name = try Store.KV.find "HTM-index" c.Conversion.kv with Not_found -> "index.html" in
+ let title = try Store.KV.find "Title" c.Conversion.kv with Not_found -> "" in
+
+ if index_name <> "" then
+ file index_name (topic_main_index c htm c.topic_roots c.texts);
+
+ file "index.date.htm" (date_index c htm c.texts);
+
+ List.iter
+ (fun root -> file ("index." ^ root ^ ".htm") (topic_sub_index c htm c.topics root c.texts))
+ c.topic_roots;
+
+ let base_url = try
+ let _i = Str.(search_forward (regexp "https?://[^;]*") (Store.KV.find "Locations" c.kv) 0) in
+ Str.(matched_string (Store.KV.find "Locations" c.kv))
+ with Not_found -> prerr_endline "Missing location for HTTP(S)"; "" in
+ file "feed.atom" (Atom.feed title c.id base_url "text/html" c.texts)
diff --git a/cli/index.ml b/cli/index.ml
new file mode 100644
index 0000000..e25c0a2
--- /dev/null
+++ b/cli/index.ml
@@ -0,0 +1,59 @@
+open Logarion
+
+let index print title authors locations peers dir =
+ let fname = Filename.concat dir "index.pck" in
+ let pck = match Header_pack.of_string @@ File_store.to_string fname with
+ | Error s -> failwith s
+ | Ok pck -> let info = Header_pack.{ pck.info with
+ title = if title <> "" then title else pck.info.title;
+ people = if authors <> ""
+ then (String_set.list_of_csv authors) else pck.info.people;
+ locations = if locations <> ""
+ then (String_set.list_of_csv locations) else pck.info.locations;
+ } in
+ Header_pack.{ info; fields;
+ texts = of_text_list @@ File_store.fold ~dir
+ (fun a (t,_) -> of_text a t) [];
+ peers = if peers <> ""
+ then (str_list @@ String_set.list_of_csv peers) else pck.peers;
+ }
+ | exception (Sys_error _) -> Header_pack.{
+ info = {
+ version = version; id = Id.generate (); title;
+ people = String_set.list_of_csv authors;
+ locations = String_set.list_of_csv locations };
+ fields;
+ texts = of_text_list @@ File_store.fold ~dir
+ (fun a (t,_) -> of_text a t) [];
+ peers = str_list @@ String_set.list_of_csv peers;
+ } in
+ File_store.file fname (Header_pack.string pck);
+ let open Header_pack in
+ let s ss = String.concat "\n\t" ss in
+ if print then
+ Printf.printf "Title: %s\nAuthors: %s\nLocations:\n\t%s\nPeers:\n\t%s\n"
+ pck.info.title (String.concat "," pck.info.people)
+ (s pck.info.locations) (s (to_str_list pck.peers))
+
+open Cmdliner
+let term =
+ let print = Arg.(value & flag & info ["print"] ~doc:"print info") in
+ let title= Arg.(value & opt string "" & info ["t"; "title"]
+ ~docv:"string" ~doc:"Title for index") in
+ let auth = Arg.(value & opt string "" & info ["a"; "authors"]
+ ~docv:"comma-separated names" ~doc:"Index authors") in
+ let locs = Arg.(value & opt string "" & info ["l"; "locations"]
+ ~docv:"comma-separated URLs" ~doc:"repository URLs") in
+ let peers= Arg.(value & opt string "" & info ["p"; "peers"]
+ ~docv:"comma-separated URLs" ~doc:"URLs to other known text repositories") in
+ let dir = Arg.(value & pos 0 string "." & info []
+ ~docv:"directory to index") in
+ let doc = "Generate an index.pck for texts in a directory" in
+ Term.(const index $ print $ title $ auth $ locs $ peers $ dir),
+ Term.info "index" ~doc
+ ~man:[ `S "DESCRIPTION"; `Pre "An index contains:\n
+* an info section with: title for the index, the authors, locations (URLs) the texts can be accessed at\n
+* listing of texts with: ID, date, title, authors, topics\n
+* list of other text repositories (peers)\n\n
+Stored in MessagePack format. " ]
+
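
A sketch of reading an index.pck back, using the same accessors as the --print branch above; the path is illustrative.

    open Logarion
    let () =
      match Header_pack.of_string (File_store.to_string "index.pck") with
      | Error s -> prerr_endline s
      | Ok pck ->
        let open Header_pack in
        Printf.printf "%s: %d texts\n"
          pck.info.title (List.length (Msgpck.to_list pck.texts))
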
diff --git a/cli/last.ml b/cli/last.ml
new file mode 100644
index 0000000..4695354
--- /dev/null
+++ b/cli/last.ml
@@ -0,0 +1,24 @@
+open Logarion
+let last search_mine =
+ let last a ((t,_) as pair) = match a with None -> Some pair
+ | Some (t', _) as pair' -> if Text.newest t t' > 0
+ then Some pair else pair' in
+ let last_mine a ((t,_) as pair) =
+ let name = Person.Set.of_string (Sys.getenv "USER") in
+ let open Text in
+ match a with
+ | None -> if Person.Set.subset name t.authors then Some pair else None
+ | Some (t', _) as pair' ->
+ if Text.newest t t' > 0 && Person.Set.subset name t'.authors
+ then Some pair else pair'
+ in
+ match File_store.fold (if search_mine then last_mine else last) None with
+ | Some (_,f) -> List.iter print_endline f | None -> ()
+
+open Cmdliner
+let term =
+ let mine = Arg.(value & flag & info ["mine"]
+ ~doc:"last text authored by me") in
+ Term.(const last $ mine),
+ Term.info "last" ~doc:"most recent text"
+ ~man:[ `S "DESCRIPTION"; `P "Print the filename of most recent text" ]
diff --git a/cli/listing.ml b/cli/listing.ml
new file mode 100644
index 0000000..5b2c634
--- /dev/null
+++ b/cli/listing.ml
@@ -0,0 +1,38 @@
+open Logarion
+module FS = File_store
+module A = Archive
+let listing r order_opt reverse_opt number_opt authors_opt topics_opt =
+ let predicates = A.predicate A.authored authors_opt
+ @ A.predicate A.topics topics_opt in
+ let predicate text = List.fold_left (fun a e -> a && e text) true predicates in
+ let list_text a (t, fnames) = a ^ Printf.sprintf "%s %s %s š %s [%s]\n"
+ (Text.short_id t) Date.(pretty_date @@ listing t.Text.date)
+ (Person.Set.to_string ~names_only:true t.Text.authors)
+ t.Text.title (List.hd fnames)
+ in
+ print_string @@ match order_opt with
+ | false -> FS.fold ~r ~predicate list_text ""
+ | true ->
+ let order = match reverse_opt with true -> FS.newest | false -> FS.oldest in
+ match number_opt with
+ | Some number -> FS.fold ~r ~predicate ~order ~number list_text ""
+ | None -> FS.fold ~r ~predicate ~order list_text ""
+
+open Cmdliner
+let term =
+ let recurse = Arg.(value & flag & info ["R"]
+ ~doc:"recursive, include texts in subdirectories too") in
+ let reverse = Arg.(value & flag & info ["r"]
+ ~doc:"reverse order") in
+ let time = Arg.(value & flag & info ["t"]
+ ~doc:"Sort by time, newest first") in
+ let number = Arg.(value & opt (some int) None & info ["n"]
+ ~docv:"number" ~doc:"number of entries to list") in
+ let authed = Arg.(value & opt (some string) None & info ["authored"]
+ ~docv:"comma-separated names" ~doc:"texts by authors") in
+ let topics = Arg.(value & opt (some string) None & info ["topics"]
+ ~docv:"comma-separated topics" ~doc:"texts with topics") in
+ Term.(const listing $ recurse $ time $ reverse $ number $ authed $ topics),
+ Term.info "list" ~doc:"list texts" ~man:[ `S "DESCRIPTION";
+ `P "List header information for current directory. If -R is used, list header
+ information for texts found in subdirectories too, along with their filepaths" ]
diff --git a/cli/new.ml b/cli/new.ml
new file mode 100644
index 0000000..0b429e3
--- /dev/null
+++ b/cli/new.ml
@@ -0,0 +1,27 @@
+open Logarion
+open Cmdliner
+
+let new_txt title topics_opt interactive =
+ let t = match title with "" -> "Draft" | _ -> title in
+ let authors = Person.Set.of_string (Sys.getenv "USER") in
+ let text = { (Text.blank ()) with title = t; authors } in
+ let text = try Text.with_str_set text "Topics" (Option.get topics_opt)
+ with _ -> text in
+ match File_store.with_text text with
+ | Error s -> prerr_endline s
+ | Ok (filepath, _note) ->
+ if not interactive then print_endline filepath
+ else
+ (print_endline @@ "Created: " ^ filepath;
+ Sys.command ("$EDITOR " ^ filepath) |> ignore)
+
+let term =
+ let title = Arg.(value & pos 0 string "" & info []
+ ~docv:"title" ~doc:"Title for new article") in
+ let topics= Arg.(value & opt (some string) None & info ["t"; "topics"]
+ ~docv:"comma-separated topics" ~doc:"Topics for new article") in
+ let inter = Arg.(value & flag & info ["i"; "interactive"]
+ ~doc:"Prompts through the steps of creation") in
+ Term.(const new_txt $ title $ topics $ inter), Term.info "new"
+ ~doc:"create a new article" ~man:[ `S "DESCRIPTION";
+ `P "Create a new article, with title 'Draft' when none provided"]
diff --git a/cli/pull.ml b/cli/pull.ml
new file mode 100644
index 0000000..6337e3f
--- /dev/null
+++ b/cli/pull.ml
@@ -0,0 +1,160 @@
+let writer accum data =
+ Buffer.add_string accum data;
+ String.length data
+
+let showContent content =
+ Printf.printf "%s" (Buffer.contents content);
+ flush stdout
+
+let showInfo connection =
+ Printf.printf "Time: %f for: %s\n"
+ (Curl.get_totaltime connection)
+ (Curl.get_effectiveurl connection)
+
+let getContent connection url =
+ Curl.set_url connection url;
+ Curl.perform connection
+
+let curl_pull url =
+ let result = Buffer.create 4069
+ and errorBuffer = ref "" in
+ let connection = Curl.init () in
+ try
+ Curl.set_errorbuffer connection errorBuffer;
+ Curl.set_writefunction connection (writer result);
+ Curl.set_followlocation connection true;
+ Curl.set_url connection url;
+ Curl.perform connection;
+(* showContent result;*)
+(* showInfo connection;*)
+ Curl.cleanup connection;
+ Ok result
+ with
+ | Curl.CurlException (_reason, _code, _str) ->
+ Curl.cleanup connection;
+ Error (Printf.sprintf "Error: %s %s" url !errorBuffer)
+ | Failure s ->
+ Curl.cleanup connection;
+ Error (Printf.sprintf "Caught exception: %s" s)
+
+let newer time id dir =
+ match Logarion.File_store.to_text @@ Filename.(concat dir (Logarion.Id.short id) ^ ".txt") with
+ | Error x -> prerr_endline x; true
+ | Ok txt -> time > (Logarion.(Header_pack.date (Date.listing txt.date)))
+ | exception (Sys_error _) -> true
+
+let print_peers p =
+ let open Logarion.Header_pack in
+ match Msgpck.to_list p.peers with [] -> ()
+ | ps -> print_endline @@
+ List.fold_left (fun a x -> Printf.sprintf "%s %s" a (Msgpck.to_string x)) "peers: " ps
+
+let parse_index _is_selected fn url dir p =
+ let open Logarion.Header_pack in
+ match Msgpck.to_list p.texts with
+ | [] -> Printf.printf "%s => %s, has empty index\n" p.info.title dir; false
+ | texts ->
+ let numof_texts = string_of_int @@ List.length texts in
+ let text_num_len = String.length numof_texts in
+ Printf.printf "%*d/%s %s => %s\r" text_num_len 0 numof_texts p.info.title dir;
+ let of_pck i x =
+ Printf.printf "\r%*d/%s %!" text_num_len (i+1) numof_texts;
+ match x with
+ | Msgpck.List (id::time::title::_authors::_topics) ->
+ (match Logarion.Header_pack.to_id id with
+ | "" -> Printf.eprintf "Invalid id for%s " (Msgpck.to_string title)
+ | id ->
+ let t = match time with Msgpck.Int i -> Int32.of_int i | Msgpck.Uint32 i -> i | x -> Msgpck.to_uint32 x in
+ if newer t id dir then fn url dir id)
+ | _ -> prerr_endline ("Invalid record structure") in
+ List.iteri of_pck texts;
+ print_newline ();
+ true
+
+let fname dir text = Filename.concat dir (Logarion.Text.short_id text ^ ".txt")
+let pull_text url dir id =
+ let u = Filename.concat url ((Logarion.Id.short id) ^ ".txt") in
+ match curl_pull u with
+ | Error msg -> Printf.eprintf "Failed getting %s: %s" u msg
+ | Ok txt ->
+ let txt = Buffer.contents txt in
+ match Logarion.Text.of_string txt with
+ | Error s -> prerr_endline s
+ | Ok text ->
+ let file = open_out_gen [Open_creat; Open_trunc; Open_wronly] 0o640 (fname dir text) in
+ output_string file txt; close_out file
+
+let pull_index url _authors _topics =
+ let index_url = url ^ "/index.pck" in
+ match curl_pull index_url with
+ | Error s -> prerr_endline s; false
+ | Ok body ->
+ match Logarion.Header_pack.of_string (Buffer.contents body) with
+ | Error s -> Printf.printf "Error with %s: %s\n" url s; false
+ | Ok pk ->
+ let dir = Filename.concat Logarion.Peers.text_dir pk.info.id in
+ Logarion.File_store.with_dir dir;
+ let file = open_out_gen [Open_creat; Open_trunc; Open_wronly] 0o640 (Filename.concat dir "index.pck") in
+ output_string file ( Logarion.Header_pack.string {
+ pk with info = { pk.info with locations = url::pk.info.locations }});
+ close_out file;
+(* let predicates = A.predicate A.authored authors_opt*)
+(* @ A.predicate A.topics topics_opt in*)
+ let is_selected text = List.fold_left (fun a e -> a && e text) true [](*predicates*) in
+ try parse_index is_selected pull_text url dir pk with
+ Invalid_argument msg -> Printf.eprintf "Failed to parse: %s\n%!" msg; false
+
+let pull_list auths topics =
+ Curl.global_init Curl.CURLINIT_GLOBALALL;
+ let pull got_one peer_url = if got_one then got_one else
+ (pull_index peer_url auths topics) in
+ Logarion.Peers.fold pull false;
+ Curl.global_cleanup ()
+
+let pull url auths topics = match url with
+ | "" -> pull_list auths topics | x -> ignore (pull_index x auths topics)
+
+open Cmdliner
+let term =
+ let authors = Arg.(value & opt (some string) None & info ["a"; "authors"]
+ ~docv:"comma-separated names" ~doc:"filter by authors") in
+ let topics = Arg.(value & opt (some string) None & info ["t"; "topics"]
+ ~docv:"comma-separated topics" ~doc:"filter by topics") in
+ let url = Arg.(value & pos 0 string "" & info [] ~docv:"URL"
+ ~doc:"Repository location") in
+ Term.(const pull $ url $ authors $ topics),
+ Term.info "pull" ~doc:"pull listed texts" ~man:[ `S "DESCRIPTION";
+ `P "Pull texts from known repositories. To add a new repository use:";
+ `P "txt pull [url]";
+ `P ("This creates a directory in " ^ Logarion.Peers.text_dir
+ ^ " and downloads the text index.pck file in it")]
+
+(*module Msg = struct*)
+(* type t = string * string*)
+(* let compare (x0,y0) (x1,y1) =*)
+(* match compare x1 x0 with 0 -> String.compare y0 y1 | c -> c*)
+(*end*)
+(*module MsgSet = Set.Make(Msg)*)
+(*let pull_msgs url _authors _topics =*)
+(* match http_apply response url with*)
+(* | Error msg ->*)
+(* Printf.eprintf "Failed index request for %s %s" url msg*)
+(* | Ok body ->*)
+(* let rec fold_msgs s a fn =*)
+(* let t, msg = Scanf.bscanf s "%s %s@\n" (fun t m -> t, m) in*)
+(* if t <> "" then fold_msgs s (fn a t msg) fn else a*)
+(* in*)
+(* let s = Scanf.Scanning.from_string body in*)
+(* let msgs = MsgSet.empty in*)
+(* let date_string t = Ptime.to_date t |>*)
+(* fun (y, m, d) -> Printf.sprintf "%04d-%02d-%02d" y m d in*)
+(* let msgs = fold_msgs s msgs*)
+(* (fun msgs t m -> match Ptime.of_rfc3339 t with*)
+(* | Ok (v,_,_) -> let open MsgSet in*)
+(* let msgs = if cardinal msgs > 1 then remove (max_elt msgs) msgs else msgs in*)
+(* add (v,m) msgs*)
+(* | _ -> msgs) in*)
+(* let msg_string = MsgSet.fold*)
+(* (fun (t,m) a -> a ^ Printf.sprintf " %s š %s\n" (date_string t) m)*)
+(* msgs "" in*)
+(* Printf.printf "āāāā{ %s }āāāā\n%s" url msg_string*)
diff --git a/cli/topics.ml b/cli/topics.ml
new file mode 100644
index 0000000..44af03b
--- /dev/null
+++ b/cli/topics.ml
@@ -0,0 +1,17 @@
+open Logarion
+let topics r authors_opt =
+ let predicates = Archive.(predicate authored authors_opt) in
+ let predicate text = List.fold_left (fun a e -> a && e text) true predicates in
+ let topic_union a (e, _) = String_set.union a (Text.set "topics" e) in
+ let s = File_store.fold ~r ~predicate topic_union String_set.empty in
+ print_endline @@ String_set.to_string s
+
+open Cmdliner
+let term =
+ let recurse = Arg.(value & flag & info ["R"]
+ ~doc:"include texts in subdirectories") in
+ let authed = Arg.(value & opt (some string) None & info ["authored"]
+ ~docv:"comma-separated authors" ~doc:"topics by authors") in
+ Term.(const topics $ recurse $ authed),
+ Term.info "topics" ~doc:"list topics" ~man:[ `S "DESCRIPTION";
+ `P "List of topics" ]
diff --git a/cli/txt.ml b/cli/txt.ml
new file mode 100644
index 0000000..b586b2d
--- /dev/null
+++ b/cli/txt.ml
@@ -0,0 +1,19 @@
+let version = "%%VERSION%%"
+
+open Cmdliner
+let default_cmd =
+ let doc = "Discover, collect & exchange texts" in
+ let man = [ `S "Contact"; `P "" ] in
+ Term.(ret (const (`Help (`Pager, None)))), Term.info "txt" ~version ~doc ~man
+
+let () = match Term.eval_choice default_cmd [
+ Authors.term;
+ Convert.term;
+ File.term; File.unfile_term;
+ Index.term;
+ Last.term;
+ Listing.term;
+ New.term;
+ Pull.term;
+ Topics.term;
+ ] with `Error _ -> exit 1 | _ -> exit 0
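
Each cli module exports a (term, info) pair that Term.eval_choice dispatches on. A sketch of a hypothetical extra subcommand, showing the shape a new entry in the list above would take:

    open Cmdliner
    let term =
      Term.(const (fun name -> print_endline ("hello " ^ name))
            $ Arg.(value & pos 0 string "world" & info [] ~docv:"NAME")),
      Term.info "hello" ~doc:"greet (hypothetical example)"
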
diff --git a/dune-project b/dune-project
index 0160b76..38293ff 100644
--- a/dune-project
+++ b/dune-project
@@ -1,16 +1,13 @@
(lang dune 2.0)
(name logarion)
-(homepage "https://logarion.orbitalfox.eu")
-
-(source (uri git://orbitalfox.eu/logarion))
(license EUPL-1.2)
-(authors "orbifx")
-(maintainers "fox@orbitalfox.eu")
-(bug_reports "mailto:logarion@lists.orbitalfox.eu?subject=Issue:")
+(maintainers "orbifx ")
+(homepage "http://logarion.orbitalfox.eu")
+(source (uri git+https://git.disroot.org/orbifx/logarion.git))
(generate_opam_files true)
(package
(name logarion)
(synopsis "Texts archival and exchange")
- (depends re cmdliner bos ptime uuidm uri text_parse msgpck cohttp-lwt-unix tls))
+ (depends text_parse (cmdliner (<= 1.0.4)) msgpck ocurl))
diff --git a/http/dune b/http/dune
deleted file mode 100644
index 2732878..0000000
--- a/http/dune
+++ /dev/null
@@ -1,4 +0,0 @@
-(library
- (name http)
- (public_name logarion.http)
- (libraries logarion uri cmdliner lwt cohttp cohttp-lwt cohttp-lwt-unix tls msgpck))
diff --git a/http/http.ml b/http/http.ml
deleted file mode 100644
index 77779f6..0000000
--- a/http/http.ml
+++ /dev/null
@@ -1,143 +0,0 @@
-let http_body fn uri =
- let open Lwt in
- let open Cohttp_lwt_unix in
- Client.get uri >>= fun (headers, body) ->
- body |> Cohttp_lwt.Body.to_string >|= fun body -> fn (headers, body)
-
-let response (headers, body) =
- let open Cohttp in
- match Header.get (headers |> Response.headers) "content-type" with
- | Some "application/msgpack" | Some "application/octet-stream"
- | Some "text/plain" | Some "text/plain; charset=utf-8" -> Ok body
- | Some x -> Error ("Invalid content-type: " ^ x)
- | None -> Ok body
-
-let http_apply fn uri = Lwt_main.run (http_body fn uri)
-
-module S = Set.Make(String)
-
-(*let is_selected sl =*)
-(* let check str a b c = Option.(fold ~none:(is_none b && is_none c) ~some:(fun x -> x = str) a) in*)
-(* function*)
-(* | `Author s -> check s sl.authors sl.topics*)
-(* | `Topic s -> check s sl.topics sl.authors*)
-
-(* TODO: parse using Header_pack *)
-
-let sub_id text = Logarion.(String.sub (text.Text.uuid |> Id.to_string) 0 8)
-let fname dir text = dir ^ sub_id text ^ ".txt"
-
-let newer time id dir =
- match Logarion.File_store.to_text @@ Filename.concat dir (String.sub (Logarion.Id.to_string id) 0 8 ^ ".txt") with
- | Error x -> prerr_endline x; true
- | Ok txt -> time > (Logarion.(Header_pack.date (Date.listing txt.date)))
- | exception (Sys_error _) -> true
-
-let pull_text url dir id =
- let path = Uri.path url in
- let u = Uri.with_path url (path ^ "/" ^ String.sub (Logarion.Id.to_string id) 0 8 ^ ".txt") in
- match http_apply response u with
- | Error msg -> prerr_endline @@ " Failed " ^ Uri.to_string u ^ " " ^ msg
- | Ok txt ->
- match Logarion.Text.of_string txt with
- | Error s -> prerr_endline s
- | Ok text ->
- let file = open_out_gen [Open_creat; Open_trunc; Open_wronly] 0o640 (fname dir text) in
- output_string file txt; close_out file
-
-(*TODO: adapt Archive predication function to work with free sets*)
-let parse_index _is_selected fn url p =
- let open Logarion.Header_pack in
- let dir = "peers/" ^ match Uri.host url with
- None -> "local/" ^ Uri.path url | Some s -> s ^ Uri.path url ^ "/" in
- Printf.printf "%s => %s\n" p.info.name dir;
- (match Msgpck.to_list p.peers with [] -> () | ps ->
- print_string " peers: ";
- List.iter (fun x -> print_string (" " ^ Msgpck.to_string x)) ps;
- print_newline ());
- match Msgpck.to_list p.texts with
- | [] -> print_endline ", has empty index"
- | texts ->
- match Bos.OS.Dir.create ~mode:0o740 (Fpath.v dir) with
- | Error (`msg s) -> prerr_endline ("Error making domain dir:" ^ s);
- | _ ->
- let numof_texts = string_of_int @@ List.length texts in
- let text_num_len = String.length numof_texts in
- let of_pck i x =
- Printf.printf "\r%*d/%s %!" text_num_len (i+1) numof_texts;
- match x with
- | Msgpck.List (id::time::title::_authors::_topics) ->
- (match Logarion.Id.of_bytes Msgpck.(to_bytes id) with
- | None -> prerr_endline ("Invalid id for " ^ Msgpck.to_string title)
- | Some id ->
- let t = match time with Msgpck.Int i -> Int32.of_int i | Msgpck.Uint32 i -> i | x -> Msgpck.to_uint32 x in
- if newer t id dir then fn url dir id)
- | _ -> prerr_endline ("Invalid record structure") in
- List.iteri of_pck texts;
- print_newline ()
-
-let pull_index url _authors _topics =
- let index_url = Uri.(with_path url (path url ^ "/index.pck")) in
- match http_apply response index_url with
- | Error msg -> prerr_endline @@ "Failed index request for " ^ Uri.to_string index_url ^ " " ^ msg
- | Ok body ->
- let _i, pack = Msgpck.StringBuf.read body in
-(* let predicates =*)
-(* A.predicate A.authored authors_opt*)
-(* @ A.predicate A.topics topics_opt*)
-(* in*)
- let is_selected text = List.fold_left (fun a e -> a && e text) true [](*predicates*) in
- match Logarion.Header_pack.unpack pack with None -> ()
- | Some headers -> parse_index is_selected pull_text url headers
-
-module Msg = struct
- type t = Ptime.t * string
- let compare (x0,y0) (x1,y1) = match Ptime.compare x1 x0 with 0 -> String.compare y0 y1 | c -> c
-end
-
-module MsgSet = Set.Make(Msg)
-
-let pull_msgs url _authors _topics = match http_apply response url with
- | Error msg -> prerr_endline @@ "Failed index request for " ^ Uri.(to_string url) ^ " " ^ msg
- | Ok body ->
- let rec fold_msgs s a fn =
- let t, msg = Scanf.bscanf s "%s %s@\n" (fun t m -> t, m) in
- if t <> "" then fold_msgs s (fn a t msg) fn else a
- in
- let s = Scanf.Scanning.from_string body in
- let msgs = MsgSet.empty in
- let date_string t = Ptime.to_date t |>
- fun (y, m, d) -> Printf.sprintf "%04d-%02d-%02d" y m d in
- let msgs = fold_msgs s msgs
- (fun msgs t m -> match Ptime.of_rfc3339 t with
- | Ok (v,_,_) -> let open MsgSet in
- let msgs = if cardinal msgs > 1 then remove (max_elt msgs) msgs else msgs in
- add (v,m) msgs
- | _ -> msgs) in
- print_endline ("\nāāāāā=[ " ^ Uri.to_string url);
- MsgSet.iter
- (fun (t,m) -> print_endline
- ("ā " ^ date_string t ^ "\nā " ^ m ^ "\nāāāāāāāāāā")) msgs
-
-let pull_url url = match Uri.of_string url with
- | x when x = Uri.empty -> (fun _ _ -> ())
- | x when Uri.scheme x = Some "msg+http" -> pull_msgs Uri.(with_scheme x (Some "http"))
- | x when Uri.scheme x = Some "msg+https"-> pull_msgs Uri.(with_scheme x (Some "https"))
- | x -> pull_index x
-
-let pull_list auths topics =
- let pull peer_url () = pull_url peer_url auths topics in
- let open Logarion.Peers in
- fold_file pull () public_fname;
- fold_file pull () private_fname
-
-let pull = function "" -> pull_list | x -> pull_url x
-
-open Cmdliner
-
-let pull_term =
- let authors = Arg.(value & opt (some string) None & info ["a"; "authors"] ~docv:"AUTHORS" ~doc:"select authors") in
- let topics = Arg.(value & opt (some string) None & info ["t"; "topics"] ~docv:"TOPICS" ~doc:"select topics") in
- let url = Arg.(value & pos 0 string "" & info [] ~docv:"URL" ~doc:"HTTP URL of Logarion") in
- Term.(const pull $ url $ authors $ topics),
- Term.info "pull" ~doc:"pull listed texts" ~man:[ `S "DESCRIPTION"; `P "Pull texts from archive at address"]
diff --git a/lib/archive.ml b/lib/archive.ml
index d095fcd..7a375a7 100644
--- a/lib/archive.ml
+++ b/lib/archive.ml
@@ -1,33 +1,13 @@
-(*let module S = Set.Make (Text) in*)
-(*let module M = Map.Make (String) in*)
-(*let module I = Map.Make (Id) in*)
-(*let aggr = I.empty, M.empty, M.empty, M.empty in*)
-(*let fn (id, a, t, k) (n,_) =*)
-(* let id = I.add n.Text.uuid n id in*)
-(* let a =*)
-(* let f e a = M.update (e.Person.name) (function Some v -> Some (S.add n v) | None -> Some (S.singleton n)) a in*)
-(* Person.Set.fold f n.Text.authors a in*)
-(* let t =*)
-(* let f e a = M.update e (function Some v -> Some (S.add n v) | None -> Some (S.singleton n)) a in*)
-(* String_set.fold f (Text.set "Topics" n) t in*)
-(* let k =*)
-(* let f e a = M.update e (function Some v -> Some (S.add n v) | None -> Some (S.singleton n)) a in*)
-(* String_set.fold f (Text.set "Keywords" n) k in*)
-(* (id, a, t, k)*)
+let predicate fn opt = Option.(to_list @@ map fn opt)
-module Make (Store : Store.T) = struct
- include Store
- let predicate fn opt = Option.(to_list @@ map fn opt)
+let authored query_string =
+ let q = Person.Set.of_query @@ String_set.query query_string in
+ fun n -> Person.Set.predicate q n.Text.authors
- let authored query_string =
- let q = Person.Set.of_query @@ String_set.query query_string in
- fun n -> Person.Set.predicate q n.Text.authors
+let keyworded query_string =
+ let q = String_set.query query_string in
+ fun n -> String_set.(predicate q (Text.set "Keywords" n))
- let keyworded query_string =
- let q = String_set.query query_string in
- fun n -> String_set.(predicate q (Text.set "Keywords" n))
-
- let topics query_string =
- let q = String_set.query query_string in
- fun n -> String_set.(predicate q (Text.set "Topics" n))
-end
+let topics query_string =
+ let q = String_set.query query_string in
+ fun n -> String_set.(predicate q (Text.set "Topics" n))
diff --git a/lib/date.ml b/lib/date.ml
index 3902f47..da07617 100644
--- a/lib/date.ml
+++ b/lib/date.ml
@@ -1,8 +1,14 @@
-type t = { created: Ptime.t option; edited: Ptime.t option }
+type t = { created: string; edited: string }
let compare = compare
-let rfc_string date = match date with Some t -> Ptime.to_rfc3339 t | None -> ""
-let of_string (rfc : string) = match Ptime.of_rfc3339 rfc with Ok (t,_,_) -> Some t | Error _ -> None
-let listing date = if Option.is_some date.edited then date.edited else date.created
-let pretty_date = function
- | Some t -> Ptime.to_date t |> fun (y, m, d) -> Printf.sprintf "%04d-%02d-%02d" y m d
- | None -> ""
+let rfc_string date = date
+let of_string (rfc : string) = rfc
+let listing date = if date.edited <> "" then date.edited else date.created
+let pretty_date date =
+ try Scanf.sscanf date "%4s-%2s-%2s" (fun y m d -> Printf.sprintf "%s %s %s" y m d)
+ with Scanf.Scan_failure s as e -> Printf.fprintf stderr "%s for %s\n" s date; raise e
+let now () = Unix.time () |> Unix.gmtime |>
+ (fun t -> Printf.sprintf "%4d-%02d-%02dT%02d:%02d:%02dZ"
+ (t.tm_year+1900) (t.tm_mon+1) t.tm_mday t.tm_hour t.tm_min t.tm_sec)
+let to_secs date =
+ Scanf.sscanf date "%4d-%02d-%02dT%02d:%02d:%02d"
+ (fun y mo d h mi s -> (y-1970)*31557600 + mo*2629800 + d*86400 + h*3600 + mi*60 + s)
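
to_secs uses average month and year lengths, which is enough for coarse ordering of texts; note that the last day of a 31-day month can score higher than the first day of the following month (e.g. 2021-05-31T23:59:59 vs 2021-06-01T00:00:00). A sketch with illustrative dates:

    open Logarion
    let newer a b = Date.to_secs a > Date.to_secs b
    let () = assert (newer "2022-03-14T09:00:00" "2022-03-13T09:00:00")
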
diff --git a/lib/dune b/lib/dune
index fc9ba49..4ec3169 100644
--- a/lib/dune
+++ b/lib/dune
@@ -1,4 +1,4 @@
(library
(name logarion)
(public_name logarion)
- (libraries ptime uuidm uri re.str bos text_parse text_parse.parsers msgpck))
+ (libraries text_parse text_parse.parsers unix str msgpck))
diff --git a/lib/file_store.ml b/lib/file_store.ml
index 3ab2e11..11e28f9 100644
--- a/lib/file_store.ml
+++ b/lib/file_store.ml
@@ -1,52 +1,65 @@
type t = string
-type item_t = string
-type archive_t = {
- name: string; archivists: Person.Set.t; id: Id.t;
- kv: string Store.KV.t; store: t }
+type item_t = t list
type record_t = Text.t * item_t
let extension = ".txt"
+let def_dir = try Sys.getenv "LOGARION_DIR" with Not_found -> "."
let to_string f =
let ic = open_in f in
- let n = in_channel_length ic in
- let s = Bytes.create n in
- really_input ic s 0 n;
+ let s = really_input_string ic (in_channel_length ic) in
close_in ic;
- Bytes.to_string s
+ s
-let file path content = let out = open_out path in
- output_string out content; close_out out
+let fold_file_line fn init file = match open_in file with
+ | exception (Sys_error msg) -> prerr_endline msg; init
+ | file ->
+ let rec read acc = match input_line file with
+ | "" as s | s when String.get s 0 = '#' -> read acc
+ | s -> read (fn s acc)
+ | exception End_of_file -> close_in file; acc
+ in read init
-let (//) a b = a ^ "/" ^ b
+let file path str = let o = open_out path in output_string o str; close_out o
let to_text path =
if Filename.extension path = extension then
(to_string path |> Text.of_string |> Result.map_error (fun m -> path ^": "^ m))
- else Error "Not txt"
+ else Error (Printf.sprintf "Not txt: %s" path)
let newest (a,_pa) (b,_pb) = Text.newest a b
let oldest (a,_pa) (b,_pb) = Text.oldest a b
-let list_iter fn {store;_} paths =
- let link f = match to_text (Filename.concat store f)
- with Ok t -> fn store t f | Error s -> prerr_endline s in
+let list_iter fn dir paths =
+ let link f = match to_text (Filename.concat dir f) with
+ | Ok t -> fn dir t f | Error s -> prerr_endline s in
List.iter link paths
-let iter_valid_text pred fn p =
- match to_text p with Error _ -> () | Ok t -> if pred t then fn (t, p)
+module TextMap = Map.Make(Text)
-let fold_valid_text pred fn acc p =
- match to_text p with Error _ -> acc | Ok t -> if pred t then fn acc (t, p) else acc
+type iteration_t = item_t TextMap.t
+let new_iteration = TextMap.empty
-let list_fs dir =
+(*let iter_valid_text pred fn path =*)
+(* match to_text path with Error _ -> () | Ok t -> if pred t then fn (t, p)*)
+
+let fold_valid_text pred it path =
+ match to_text path with Error _ -> it
+ | Ok t -> if pred t then (TextMap.update t
+ (function None -> Some [path] | Some ps -> Some (path::ps)) it
+ ) else it
+
+(* Compare file system nodes to skip reparsing? *)
+let list_fs ?(r=false) dir =
+ let valid_dir f = r && String.get f 0 <> '.' && Sys.is_directory f in
+ let expand_dir d = Array.(to_list @@ map (Filename.concat d) (Sys.readdir d)) in
let rec loop result = function
- | [] -> result
- | f::fs when Sys.is_directory f ->
- Array.map (Filename.concat f) (Sys.readdir f)
- |> Array.to_list |> List.append fs |> loop result
+ | f::fs when valid_dir f -> expand_dir f |> List.append fs |> loop result
| f::fs -> loop (f::result) fs
- in loop [] [dir]
+ | [] -> result in
+ let dirs = if dir = "." then Array.to_list (Sys.readdir dir) else
+ if not r then expand_dir dir else [dir] in
+ loop [] dirs
let list_take n =
let rec take acc n = function [] -> []
@@ -54,113 +67,80 @@ let list_take n =
| x::xs -> take (x::acc) (n-1) xs
in take [] n
-let iter ?(predicate=fun _ -> true) ?order ?number fn {store;_} =
- match order with
- | None -> List.iter (iter_valid_text predicate fn) @@ list_fs store
- | Some comp ->
- List.iter fn
- @@ (match number with None -> (fun x -> x) | Some n -> list_take n)
- @@ List.fast_sort comp
- @@ List.fold_left (fold_valid_text predicate (fun a e -> List.cons e a)) []
- @@ list_fs store
+let fold_sort_take ?(predicate=fun _ -> true) ?(number=None) comp flist =
+ (match number with None -> (fun x -> x) | Some n -> list_take n)
+ @@ List.fast_sort comp @@ TextMap.bindings
+ @@ List.fold_left (fold_valid_text predicate) new_iteration flist
-let fold ?(predicate=fun _ -> true) ?order ?number fn acc {store;_} =
- match order with
- | None -> List.fold_left (fold_valid_text predicate fn) acc @@ list_fs store
- | Some comp ->
- List.fold_left fn acc
- @@ (match number with None -> (fun x -> x) | Some n -> list_take n)
- @@ List.fast_sort comp
- @@ List.fold_left (fold_valid_text predicate (fun a e -> List.cons e a)) []
- @@ list_fs store
+let iter ?(r=false) ?(dir=def_dir) ?(predicate=fun _ -> true) ?order ?number fn =
+ let flist = list_fs ~r dir in match order with
+ | Some comp -> List.iter fn @@ fold_sort_take ~predicate ~number comp flist
+ | None -> List.iter fn @@ TextMap.bindings @@
+ List.fold_left (fold_valid_text predicate) new_iteration flist
-let with_id { store; _ } id =
+let fold ?(r=false) ?(dir=def_dir) ?(predicate=fun _ -> true) ?order ?number fn acc =
+ let flist = list_fs ~r dir in match order with
+ | Some comp -> List.fold_left fn acc @@ fold_sort_take ~predicate ~number comp flist
+ | None -> List.fold_left fn acc @@ TextMap.bindings @@
+ List.fold_left (fold_valid_text predicate) new_iteration flist
+
+let with_id ?(r=false) ?(dir=def_dir) id =
let matched acc path =
match to_text path with
| Error x -> prerr_endline x; acc
- | Ok text when text.Text.uuid <> id -> acc
+ | Ok text when text.Text.id <> id -> acc
| Ok text ->
match acc with
| Ok None -> Ok (Some text)
| Ok (Some prev) -> if prev = text then acc else Error [text; prev]
| Error x -> Error (text :: x)
- in List.fold_left matched (Ok None) (list_fs store)
+ in List.fold_left matched (Ok None) (list_fs ~r dir)
-module Directory = struct
- let print ?(descr="") dir result =
- let () = match result with
- | Ok true -> print_endline ("Created " ^ descr ^ " directory " ^ dir)
- | Ok false -> print_endline ("Using existing " ^ descr ^ " directory " ^ dir)
- | Error (`Msg msg) -> prerr_endline @@ "Failed making " ^ descr ^ ". " ^ msg
- in
- result
+let with_dir ?(descr="") ?(perm=0o740) dir =
+ let mkdir dir = match Unix.mkdir dir perm with
+ | exception Unix.Unix_error (EEXIST, _, _) -> ()
+ | exception Unix.Unix_error (code, _fn, arg) ->
+ failwith @@ Printf.sprintf "Error %s making %s dir: %s"
+ (Unix.error_message code) descr arg
+ | _ -> () in
+ let rec mkeach path = function [] | [""] -> () | ""::t -> mkeach path t
+ | hd::t -> let d = Filename.concat path hd in mkdir d; mkeach d t in
+ mkeach
+ (if Filename.is_relative dir then "" else "/")
+ (String.split_on_char '/' dir)
- let directory dir = Result.bind (Fpath.of_string dir) Bos.OS.Dir.create
-
- let rec directories = function
- | [] -> Ok ()
- | (d, descr)::tl ->
- match directory d |> print ~descr d with
- | Ok _ -> directories tl
- | Error _ -> Error (d, descr)
-end
-
-let copy ?(recursive = false) src dst =
- Bos.OS.Cmd.run (Bos.Cmd.(v "cp" %% (on recursive @@ v "-r") % src % dst))
+let rec with_dirs = function [] -> () | (d, descr)::tl -> with_dir ~descr d; with_dirs tl
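A sketch of the new Unix-based directory helpers, assuming they are exposed by File_store; the paths and descriptions below are hypothetical:

```
open Logarion

(* Create each missing path component; errors other than EEXIST raise Failure
   mentioning the description. *)
let () =
  File_store.with_dirs [
    ".logarion",   "configuration";
    "export/atom", "feed export";
  ]
```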
let versioned_basename_of_title ?(version=0) repo extension (title : string) =
let basename = Text.string_alias title in
let rec next version =
- let candidate = repo // basename ^ "." ^ string_of_int version ^ extension in
+ let candidate = Filename.concat repo
+ (basename ^ "." ^ string_of_int version ^ extension) in
if Sys.file_exists candidate then next (succ version) else candidate
in
next version
-let uuid_filename repo extension text =
+let id_filename repo extension text =
let basename = Text.alias text in
- let candidate = repo // String.sub (Id.to_string text.uuid) 0 6 ^ "." ^ basename ^ extension in
+ let candidate = Filename.concat repo (text.id ^ "." ^ basename ^ extension) in
if Sys.file_exists candidate then Error "Name clash, try again" else Ok candidate
-let with_text {store;_} new_text =
- Result.bind (uuid_filename store extension new_text) @@
- fun path ->
- try file path (Text.to_string new_text); Ok (path, new_text) with Sys_error s -> Error s
-
-let basic_config () =
- "Archive-Name: "
- ^ "\nArchive-ID: " ^ Id.(generate () |> to_string)
- ^ "\nArchivists: " ^ Bos.OS.Env.opt_var "USER" ~absent:""
- |> Bytes.of_string
-
-let init ?(dotdir=".logarion/") () =
- match Directory.directories [dotdir, "dotdir"] with
- | Error (_dir, _desc) -> ()
- | Ok () ->
- let config_file =
- open_out_gen [Open_creat; Open_excl; Open_wronly]
- 0o700 (dotdir // "config") in
- output_bytes config_file (basic_config ());
- close_out config_file
+let with_text ?(dir=def_dir) new_text =
+ match id_filename dir extension new_text with
+ | Error _ as e -> e
+ | Ok path ->
+ try file path (Text.to_string new_text); Ok (path, new_text)
+ with Sys_error s -> Error s
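For illustration, storing a fresh text under the new `id_filename` naming scheme, assuming the default directory; the title is hypothetical:

```
open Logarion

let () =
  let note = { (Text.blank ()) with Text.title = "An example note" } in
  match File_store.with_text note with
  | Ok (path, _) -> print_endline ("Wrote " ^ path)
  | Error e -> prerr_endline e
```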
module Config = struct
- type t = archive_t
- let key_value k v a = match k with
- | "Archive-Name" -> { a with name = String.trim v }
- | "Archive-ID" -> { a with id = Option.get (Id.of_string (String.trim v)) }
- | "Archivists" -> { a with archivists = Person.Set.of_string v }
- | _ -> { a with kv = Store.KV.add k (String.trim v) a.kv }
+ type t = string Store.KV.t
+ let key_value k v a = Store.KV.add k (String.trim v) a
end
-let of_path store =
+let of_kv_file path =
let open Text_parse in
- let subsyntaxes = [| (module Parsers.Key_value.Make (Config) : Parser.S with type t = Config.t); (module Parsers.Key_value.Make (Config)); |] in
- let of_string text acc = Parser.parse subsyntaxes { text; pos = 0; right_boundary = String.length text - 1 } acc in
- Ok (
- of_string (to_string @@ store ^ "/.logarion/config") {
- name = "";
- archivists = Person.Set.empty;
- id = Id.nil;
- kv = Store.KV.empty;
- store = try Sys.getenv "LOGARION_DIR" with Not_found -> "."
- }
- )
+ let subsyntaxes = Parsers.Key_value.[|
+ (module Make (Config) : Parser.S with type t = Config.t); (module Make (Config)); |] in
+ let of_string text acc =
+ Parser.parse subsyntaxes { text; pos = 0; right_boundary = String.length text - 1 } acc in
+ of_string (to_string @@ path) Store.KV.empty
diff --git a/lib/header_pack.ml b/lib/header_pack.ml
index 1ce0705..f776d8b 100644
--- a/lib/header_pack.ml
+++ b/lib/header_pack.ml
@@ -1,54 +1,57 @@
-type info_t = { version: int; name: string; archivists: string list }
-type text_t = { id: Msgpck.t; time: Msgpck.t; title: Msgpck.t; authors: Msgpck.t }
-type t = { info: info_t; fields: string list; texts: Msgpck.t; peers: Msgpck.t }
+let version = 0
+type info_t = { version: int; id: string; title: string; people: string list; locations: string list }
+type t = { info: info_t; fields: Msgpck.t; texts: Msgpck.t; peers: Msgpck.t }
-let of_id id = Msgpck.Bytes (Id.to_bytes id)
-let to_id pck_id = Id.of_bytes Msgpck.(to_bytes pck_id)
+let of_id id = Msgpck.of_string id
+let to_id = Msgpck.to_string
let person p = Msgpck.String (Person.to_string p)
-let persons ps = List.rev @@ Person.Set.fold (fun p a -> person p :: a) ps []
+let persons ps = Msgpck.of_list @@ List.rev @@ Person.Set.fold (fun p a -> person p :: a) ps []
+
+let str = Msgpck.of_string
+let str_list ls = Msgpck.of_list @@ List.map str ls
+let to_str_list x = List.map Msgpck.to_string (Msgpck.to_list x)
let of_set field t =
List.rev @@ String_set.fold (fun s a -> Msgpck.String s :: a) (Text.set field t) []
-let date = function
- | None -> Int32.zero
- | Some date ->
- let days, ps = Ptime.Span.to_d_ps (Ptime.to_span date) in
- Int32.add Int32.(mul (of_int days) 86400l) Int64.(to_int32 (div ps 1000000000000L))
+let date = function "" -> Int32.zero | date -> Int32.of_int (Date.to_secs date)
-let to_sec = function
- Msgpck.Int i -> Int32.of_int i | Msgpck.Uint32 i -> i | x -> Msgpck.to_uint32 x
-
-let public_peers () =
- Peers.fold_file (fun x a -> Msgpck.String x :: a) [] Peers.public_fname
+let to_sec = function Msgpck.Int i -> Int32.of_int i | Msgpck.Uint32 i -> i | x -> Msgpck.to_uint32 x
let fields = Msgpck.(List [String "id"; String "time"; String "title"; String "authors"; String "topics"])
let to_fields fieldpack = List.map Msgpck.to_string (Msgpck.to_list fieldpack)
-let to_pack a t =
- let open Text in
- Msgpck.(List [
- Bytes (Id.to_bytes t.uuid); of_uint32 (date (Date.listing t.date));
- String t.title; List (persons t.authors); List (of_set "topics" t)
- ]) :: a
-
-let pack_filename ?(filename="index.pck") archive =
- let dir = Store.KV.find "Export-Dir" archive.File_store.kv in (*raises Not_found*)
- dir ^ "/" ^ filename
-
let to_info = function
- | Msgpck.List (v::n::a::[]) ->
- let archivists = List.map Msgpck.to_string (Msgpck.to_list a) in
- Msgpck.({version = to_int v; name = to_string n; archivists})
+ | Msgpck.List (v::id::n::a::ls::[]) ->
+ let people = to_str_list a in
+ let locations = to_str_list ls in
+ Msgpck.({version = to_int v; id = to_string id; title = to_string n; people; locations})
| _ -> invalid_arg "Pack header"
+let of_info i = let open Msgpck in
+ List [Int i.version; String i.id; String i.title; str_list i.people; str_list i.locations]
+
+let of_text a t =
+ let open Text in
+ Msgpck.(List [
+ of_id t.id; of_uint32 (date (Date.listing t.date));
+ String t.title; persons t.authors; List (of_set "topics" t)
+ ]) :: a
+
+let of_text_list l = Msgpck.List l
+
+let pack p = Msgpck.List [of_info p.info; p.fields; p.texts; p.peers]
+let string p = Bytes.to_string @@ Msgpck.Bytes.to_string @@ pack p
+
let unpack = function
- | Msgpck.List (i::f::texts::[]) ->
- Some { info = to_info i; fields = to_fields f; texts; peers = Msgpck.List [] }
- | Msgpck.List (i::f::texts::peers::[]) ->
- Some { info = to_info i; fields = to_fields f; texts; peers }
- | _ -> None
+ | Msgpck.List (i::fields::texts::[]) ->
+ Ok { info = to_info i; fields; texts; peers = Msgpck.List [] }
+ | Msgpck.List (i::fields::texts::peers::[]) ->
+ Ok { info = to_info i; fields; texts; peers }
+ | _ -> Error "format mismatch"
+
+let of_string s = unpack @@ snd @@ Msgpck.StringBuf.read s
let list filename = try
let texts_list = function
@@ -60,25 +63,22 @@ let list filename = try
let contains text = function
| Msgpck.List (id::_time::title::_authors::_topics::[]) ->
- (match Id.of_bytes (Msgpck.to_bytes id) with
- | None -> prerr_endline ("Invalid id for " ^ Msgpck.to_string title); false
- | Some id -> text.Text.uuid = id)
+ (match to_id id with
+ | "" -> prerr_endline ("Invalid id for " ^ Msgpck.to_string title); false
+ | id -> text.Text.id = id)
| _ -> prerr_endline ("Invalid record pattern"); false
-let pack archive records =
- let header_pack = List.fold_left to_pack [] records in
- let info = Msgpck.(List [Int 0; String archive.File_store.name; List (persons archive.archivists)]) in
- Bytes.to_string @@ Msgpck.Bytes.to_string
- (List [info; fields; Msgpck.List header_pack; Msgpck.List (public_peers ())])
-let add archive records =
- let fname = pack_filename archive in
- let append published (t, _f) = if List.exists (contains t) published then published else to_pack published t in
- match list fname with Error e -> prerr_endline e | Ok published_list ->
- let header_pack = List.fold_left append published_list records in
- let archive = Msgpck.(List [Int 0; String archive.File_store.name;
- List (persons archive.archivists)]) in
- File_store.file fname @@ Bytes.to_string
- @@ Msgpck.Bytes.to_string (List [archive; fields; Msgpck.List header_pack])
+(*let pack_filename ?(filename="index.pck") archive =*)
+(* let dir = Store.KV.find "Export-Dir" archive.File_store.kv in (*raises Not_found*)*)
+(* dir ^ "/" ^ filename*)
-let unpublish _archive _records = ()
+(*let add archive records =*)
+(* let fname = pack_filename archive in*)
+(* let append published (t, _f) = if List.exists (contains t) published then published else to_pack published t in*)
+(* match list fname with Error e -> prerr_endline e | Ok published_list ->*)
+(* let header_pack = List.fold_left append published_list records in*)
+(* let archive = Msgpck.(List [*)
+(* Int 0; String archive.File_store.name; persons archive.people]) in*)
+(* File_store.file fname @@ Bytes.to_string*)
+(* @@ Msgpck.Bytes.to_string (List [archive; fields; Msgpck.List header_pack])*)
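A hypothetical round trip through the new Result-based pack format, assuming the Header_pack module compiles as patched above; the archive id and title are invented for the example:

```
open Logarion

let () =
  let open Header_pack in
  (* version and fields are the values defined in the module above *)
  let info = { version; id = "5dq3gr"; title = "Example archive";
               people = []; locations = [] } in
  let p = { info; fields; texts = Msgpck.List []; peers = Msgpck.List [] } in
  match of_string (string p) with
  | Ok unpacked -> assert (unpacked.info.title = "Example archive")
  | Error e -> prerr_endline e
```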
diff --git a/lib/id.ml b/lib/id.ml
index d79feb4..1dab0ce 100644
--- a/lib/id.ml
+++ b/lib/id.ml
@@ -1,9 +1,33 @@
-let random_state = Random.State.make_self_init ()
-type t = Uuidm.t
-let compare = Uuidm.compare
-let to_string = Uuidm.to_string
-let of_string = Uuidm.of_string
-let to_bytes = Uuidm.to_bytes
-let of_bytes = Uuidm.of_bytes
-let generate ?(random_state=random_state) = Uuidm.v4_gen random_state
-let nil = Uuidm.nil
+let random_state = Random.State.make_self_init
+
+(*module UUID = struct*)
+(*type t = Uuidm.t*)
+(*let compare = Uuidm.compare*)
+(*let to_string = Uuidm.to_string*)
+(*let of_string = Uuidm.of_string*)
+(*let to_bytes = Uuidm.to_bytes*)
+(*let of_bytes = Uuidm.of_bytes*)
+(*let generate ?(random_state=random_state ()) = Uuidm.v4_gen random_state*)
+(*let nil = Uuidm.nil*)
+(*end*)
+
+type t = string
+let compare = String.compare
+let nil = ""
+
+let short ?(len) id =
+ let id_len = String.length id in
+ let l = match len with Some l -> l | None -> if id_len = 36 then 8 else 6 in
+ String.sub id 0 (min l id_len)
+
+let generate ?(len=6) ?(seed=random_state ()) () =
+ let b32 i = char_of_int @@
+ if i < 10 then i+48 else
+ if i < 18 then i+87 else
+ if i < 20 then i+88 else
+ if i < 22 then i+89 else
+ if i < 27 then i+90 else
+ if i < 32 then i+91 else
+ (invalid_arg ("id.char" ^ string_of_int i)) in
+ let c _ = b32 (Random.State.int seed 31) in
+ String.init len c
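Illustrative use of the new string identifiers; the printed values are examples only:

```
open Logarion

let () =
  let id = Id.generate ~len:6 () in    (* e.g. "5dq3gr" *)
  print_endline id;
  print_endline (Id.short ~len:4 id)   (* first four characters of the id *)
```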
diff --git a/lib/peers.ml b/lib/peers.ml
index a5f5d1c..44f1389 100644
--- a/lib/peers.ml
+++ b/lib/peers.ml
@@ -1,9 +1,19 @@
-let public_fname = "peers.pub.conf"
-let private_fname = "peers.priv.conf"
+let text_dir = Filename.concat (Sys.getenv "HOME") ".local/share/texts"
-let fold_file fn init file = match open_in file with
- | exception (Sys_error msg) -> prerr_endline msg; init
- | file ->
- let rec read acc = try read (fn (input_line file) acc)
- with End_of_file -> close_in file; acc in
- read init
+let fold fn init = match Sys.readdir text_dir with
+ | exception (Sys_error msg) -> prerr_endline msg
+ | dirs ->
+ let read_pack path =
+ let pack_path = Filename.(concat text_dir @@ concat path "index.pck") in
+ match Sys.file_exists pack_path with false -> () | true ->
+ match Header_pack.of_string (File_store.to_string pack_path) with
+ | Error s -> Printf.eprintf "%s %s\n" s pack_path
+ | Ok p -> ignore @@ List.fold_left fn init Header_pack.(p.info.locations)
+ in
+ Array.iter read_pack dirs
+
+let scheme url =
+ let colon_idx = String.index_from url 0 ':' in
+ let scheme = String.sub url 0 colon_idx in
+(* let remain = String.(sub url (colon_idx+1) (length url - length scheme - 1)) in*)
+ scheme
diff --git a/lib/person.ml b/lib/person.ml
index 876c9e6..e2f3597 100644
--- a/lib/person.ml
+++ b/lib/person.ml
@@ -1,17 +1,18 @@
module Person = struct
type name_t = string
- type address_t = Uri.t
+ type address_t = string
type t = { name: name_t; addresses: address_t list }
let empty = { name = ""; addresses = [] }
let compare = Stdlib.compare
- let to_string p = List.fold_left (fun a e -> a^" <"^Uri.to_string e^">") p.name p.addresses
+ let name_to_string p = p.name
+ let to_string p = List.fold_left (fun a e -> Printf.sprintf "%s <%s>" a e) p.name p.addresses
let of_string s = match String.trim s with "" -> empty | s ->
- match Re.Str.(split (regexp " *< *") s) with
+ match Str.(split (regexp " *< *") s) with
| [] -> empty
| [n] -> let name = String.trim n in { empty with name }
| n::adds ->
let name = String.trim n in
- let addresses = List.map (fun f -> Uri.of_string @@ String.(sub f 0 (length f -1))) adds in
+ let addresses = List.map (fun f -> String.(sub f 0 (length f -1))) adds in
{ name; addresses }
end
@@ -19,8 +20,8 @@ include Person
module Set = struct
include Set.Make(Person)
- let to_string ?(pre="") ?(sep=", ") s =
- let str = Person.to_string in
+ let to_string ?(names_only=false) ?(pre="") ?(sep=", ") s =
+ let str = if names_only then Person.name_to_string else Person.to_string in
let j x a = match a, x with "",_ -> str x | _,x when x = Person.empty -> a | _ -> a^sep^str x in
fold j s pre
let of_string s = of_list (List.map Person.of_string (String_set.list_of_csv s))
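A hypothetical parse/print round trip for the plain-string addresses (the str library must be linked):

```
open Logarion

let () =
  let p = Person.of_string "Jane Doe <jane@example.net>" in
  print_endline (Person.to_string p)   (* Jane Doe <jane@example.net> *)
```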
diff --git a/lib/store.ml b/lib/store.ml
index 2064335..5b83510 100644
--- a/lib/store.ml
+++ b/lib/store.ml
@@ -3,7 +3,7 @@ module KV = Map.Make (String)
module type T = sig
type t
type item_t
- type archive_t = { name: string; archivists: Person.Set.t; id: Id.t; kv: string KV.t; store: t }
+ type archive_t = { id: Id.t; name: string; archivists: Person.Set.t; kv: string KV.t; store: t }
type record_t = Text.t * item_t
val of_path: string -> (archive_t, string) result
val newest: record_t -> record_t -> int
diff --git a/lib/string_set.ml b/lib/string_set.ml
index 56f537e..ae0ac59 100644
--- a/lib/string_set.ml
+++ b/lib/string_set.ml
@@ -1,6 +1,6 @@
include Set.Make(String)
-let list_of_csv x = Re.Str.(split (regexp " *, *")) (String.trim x)
+let list_of_csv x = Str.(split (regexp " *, *")) (String.trim x)
let of_string x = of_list (list_of_csv x)
let to_string ?(pre="") ?(sep=", ") s =
let j a x = match a, x with "", _ -> x | _, "" -> a | _ -> a ^ sep ^ x in
diff --git a/lib/text.ml b/lib/text.ml
index 0e1d9a8..2539159 100644
--- a/lib/text.ml
+++ b/lib/text.ml
@@ -1,23 +1,23 @@
module String_map = Map.Make (String)
type t = {
- title: string;
- uuid: Id.t;
- authors: Person.Set.t;
- date: Date.t;
- string_map: string String_map.t;
- stringset_map: String_set.t String_map.t;
- body: string;
- }
+ id: Id.t;
+ title: string;
+ authors: Person.Set.t;
+ date: Date.t;
+ string_map: string String_map.t;
+ stringset_map: String_set.t String_map.t;
+ body: string;
+ }
-let blank ?(uuid=(Id.generate ())) () = {
- title = "";
- uuid;
- authors = Person.Set.empty;
- date = Date.({ created = None; edited = None});
- string_map = String_map.empty;
- stringset_map = String_map.empty;
- body = "";
- }
+let blank ?(id=(Id.generate ())) () = {
+ id;
+ title = "";
+ authors = Person.Set.empty;
+ date = Date.({ created = now (); edited = ""});
+ string_map = String_map.empty;
+ stringset_map = String_map.empty;
+ body = "";
+ }
let compare = Stdlib.compare
let newest a b = Date.(compare a.date b.date)
@@ -28,75 +28,75 @@ let str_set key m = String_set.to_string @@ set key m
let with_str_set m key str = { m with stringset_map = String_map.add (String.lowercase_ascii key) (String_set.of_string str) m.stringset_map }
let with_kv x (k,v) =
- let trim = String.trim in
- match String.lowercase_ascii k with
- | "body" -> { x with body = String.trim v }
- | "title"-> { x with title = trim v }
- | "id" -> (match Id.of_string v with Some id -> { x with uuid = id } | None -> x)
- | "author"
- | "authors" -> { x with authors = Person.Set.of_string (trim v)}
- | "date" -> { x with date = Date.{ x.date with created = Date.of_string v }}
- | "date-edited"-> { x with date = Date.{ x.date with edited = Date.of_string v }}
- | "licences" | "topics" | "keywords" | "series" as k -> with_str_set x k v
- | k -> { x with string_map = String_map.add k (trim v) x.string_map }
+ let trim = String.trim in
+ match String.lowercase_ascii k with
+ | "body" -> { x with body = String.trim v }
+ | "title"-> { x with title = trim v }
+ | "id" -> (match v with "" -> x | s -> { x with id = s })
+ | "author"
+ | "authors" -> { x with authors = Person.Set.of_string (trim v)}
+ | "date" -> { x with date = Date.{ x.date with created = Date.of_string v }}
+ | "date-edited"-> { x with date = Date.{ x.date with edited = Date.of_string v }}
+ | "licences" | "topics" | "keywords" | "series" as k -> with_str_set x k v
+ | k -> { x with string_map = String_map.add k (trim v) x.string_map }
-let kv_of_string line = match Re.Str.(bounded_split (regexp ": *")) line 2 with
- | [ key; value ] -> Re.Str.(replace_first (regexp "^#\\+") "" key), value
- | [ key ] -> Re.Str.(replace_first (regexp "^#\\+") "" key), ""
- | _ -> "",""
+let kv_of_string line = match Str.(bounded_split (regexp ": *")) line 2 with
+ | [ key; value ] -> Str.(replace_first (regexp "^#\\+") "" key), value
+ | [ key ] -> Str.(replace_first (regexp "^#\\+") "" key), ""
+ | _ -> "",""
let of_header front_matter =
- let fields = List.map kv_of_string (Re.Str.(split (regexp "\n")) front_matter) in
- List.fold_left with_kv (blank ~uuid:Id.nil ()) fields
+ let fields = List.map kv_of_string (Str.(split (regexp "\n")) front_matter) in
+ List.fold_left with_kv (blank ~id:Id.nil ()) fields
let front_matter_body_split s =
- if Re.Str.(string_match (regexp ".*:.*")) s 0
- then match Re.Str.(bounded_split (regexp "^$")) s 2 with
- | front::body::[] -> (front, body)
- | _ -> ("", s)
- else ("", s)
+ if Str.(string_match (regexp ".*:.*")) s 0
+ then match Str.(bounded_split (regexp "^$")) s 2 with
+ | front::body::[] -> (front, body)
+ | _ -> ("", s)
+ else ("", s)
let of_string s =
- let front_matter, body = front_matter_body_split s in
- try
- let note = { (of_header front_matter) with body } in
- if note.uuid <> Id.nil then Ok note else Error "Missing ID header"
- with _ -> Error ("Failed parsing" ^ s)
+ let front_matter, body = front_matter_body_split s in
+ try
+ let note = { (of_header front_matter) with body } in
+ if note.id <> Id.nil then Ok note else Error "Missing ID header"
+ with _ -> Error ("Failed parsing" ^ s)
let to_string x =
- let has_len v = String.length v > 0 in
- let s field value = if has_len value then field ^ ": " ^ value ^ "\n" else "" in
- let a value = if Person.Set.is_empty value then "" else "Authors: " ^ Person.Set.to_string value ^ "\n" in
- let d field value = match value with Some _ -> field ^ ": " ^ Date.rfc_string value ^ "\n" | None -> "" in
- let rows =
- [ s "Title" x.title;
- a x.authors;
- d "Date" x.date.Date.created;
- d "Edited" x.date.Date.edited;
- s "Licences" (str_set "licences" x);
- s "Topics" (str_set "topics" x);
- s "Keywords" (str_set "keywords" x);
- s "Series" (str_set "series" x);
- s "Abstract" (str "abstract" x);
- s "ID" (Uuidm.to_string x.uuid);
- s "Alias" (str "Alias" x) ]
- in
- String.concat "" rows ^ "\n" ^ x.body
+ let has_len v = String.length v > 0 in
+ let s field value = if has_len value then field ^ ": " ^ value ^ "\n" else "" in
+ let a value = if Person.Set.is_empty value then "" else "Authors: " ^ Person.Set.to_string value ^ "\n" in
+ let d field value = match value with "" -> "" | s -> field ^ ": " ^ Date.rfc_string s ^ "\n" in
+ let rows = [
+ s "ID" x.id;
+ d "Date" x.date.Date.created;
+ d "Edited" x.date.Date.edited;
+ s "Title" x.title;
+ a x.authors;
+ s "Licences" (str_set "licences" x);
+ s "Topics" (str_set "topics" x);
+ s "Keywords" (str_set "keywords" x);
+ s "Series" (str_set "series" x);
+ s "Abstract" (str "abstract" x);
+ s "Alias" (str "Alias" x)
+ ] in
+ String.concat "" rows ^ "\n" ^ x.body
let string_alias t =
- let is_reserved = function
- | '!' | '*' | '\'' | '(' | ')' | ';' | ':' | '@' | '&' | '=' | '+' | '$'
- | ',' | '/' | '?' | '#' | '[' | ']' | ' ' | '\t' | '\x00' -> true
- | _ -> false
- in
- let b = Buffer.create (String.length t) in
- let filter char =
- let open Buffer in
- if is_reserved char then (try (if nth b (pred (length b)) <> '-' then add_char b '-') with Invalid_argument _ -> prerr_endline "reserved")
- else add_char b char
- in
- String.(iter filter (lowercase_ascii t));
- Buffer.contents b
+ let is_reserved = function
+ | '!' | '*' | '\'' | '(' | ')' | ';' | ':' | '@' | '&' | '=' | '+' | '$'
+ | ',' | '/' | '?' | '#' | '[' | ']' | ' ' | '\t' | '\x00' -> true
+ | _ -> false
+ in
+ let b = Buffer.create (String.length t) in
+ let filter char =
+ let open Buffer in
+ if is_reserved char then (try (if nth b (pred (length b)) <> '-' then add_char b '-') with Invalid_argument _ -> prerr_endline "reserved")
+ else add_char b char
+ in
+ String.(iter filter (lowercase_ascii t));
+ Buffer.contents b
let alias t = match str "alias" t with "" -> string_alias t.title | x -> x
-let short_id ?(len=8) t = String.sub (Id.to_string t.uuid) 0 len
+let short_id t = Id.short t.id
diff --git a/lib/topic_set.ml b/lib/topic_set.ml
index d15ad5e..0e723e6 100644
--- a/lib/topic_set.ml
+++ b/lib/topic_set.ml
@@ -1,4 +1,4 @@
-let of_string x = Re.Str.(split (regexp " *> *")) (String.trim x)
+let of_string x = Str.(split (regexp " *> *")) (String.trim x)
let topic x =
let path = of_string x in
diff --git a/logarion.opam b/logarion.opam
index cb73afb..0d85605 100644
--- a/logarion.opam
+++ b/logarion.opam
@@ -1,23 +1,15 @@
# This file is generated by dune, edit dune-project instead
opam-version: "2.0"
synopsis: "Texts archival and exchange"
-maintainer: ["fox@orbitalfox.eu"]
-authors: ["orbifx"]
+maintainer: ["orbifx <fox@orbitalfox.eu>"]
license: "EUPL-1.2"
-homepage: "https://logarion.orbitalfox.eu"
-bug-reports: "mailto:logarion@lists.orbitalfox.eu?subject=Issue:"
+homepage: "http://logarion.orbitalfox.eu"
depends: [
"dune" {>= "2.0"}
- "re"
- "cmdliner"
- "bos"
- "ptime"
- "uuidm"
- "uri"
"text_parse"
+ "cmdliner" {<= "1.0.4"}
"msgpck"
- "cohttp-lwt-unix"
- "tls"
+ "ocurl"
]
build: [
["dune" "subst"] {pinned}
@@ -33,4 +25,4 @@ build: [
"@doc" {with-doc}
]
]
-dev-repo: "git://orbitalfox.eu/logarion"
+dev-repo: "git+https://git.disroot.org/orbifx/logarion.git"
diff --git a/readme b/readme
index f6f19c0..4a90c29 100644
--- a/readme
+++ b/readme
@@ -1,25 +1,28 @@
-Logarion is a free and open-source text archive system. A blog-wiki hybrid.
+Logarion is a text header format and a suite of tools for discovering, collecting & exchanging texts.
-Download:
-EUPL licence:
+Guide:
+Source:
+IRC:
+EUPL licence:
-Start
+Header fields
-Create a folder and run `logarion init` from within it to produce `.logarion/config` configuration file.
-Run `logarion --help` for more options.
+ID: unique identifier
+Date: date of creation, ISO8601 formatted
+Topics: comma separated list of topic names & phrases
+Title:
+Authors: list of names, each with an optional set of addresses
+
+A blank line must separate the header from the body.
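For illustration, a text using this header format might begin as follows (all values hypothetical):

```
ID: 5dq3gr
Date: 2021-06-01T09:00:00Z
Title: An example note
Authors: Jane Doe <jane@example.net>
Topics: examples, notes

The body starts after the blank line.
```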
-Community & support
+Build development version
-* Website:
-* Report an issue:
-* Discussion:
- or join via
+Install `ocaml` and `opam`. Then build and install Logarion using opam's pin function:
-
-Install development version
-
- opam pin add text_parse git://orbitalfox.eu/text-parse-ml
- opam pin add logarion git://orbitalfox.eu/logarion
- opam install logarion
+```
+opam pin add text_parse https://git.disroot.org/orbifx/text-parse-ml.git
+opam pin add logarion https://git.disroot.org/orbifx/logarion.git
+opam install logarion
+```