swish / commit

Updated upstream files

author    Jan Wielemaker   Mon Oct 5 18:02:25 2015 +0200
committer Jan Wielemaker   Mon Oct 5 18:02:25 2015 +0200

commit 0b1a1c87b05abe6562466518481213ba9d003061
tree   9204bddecc1b2068e718d53c5f3b0d5ced6bc30b
parent 0a8f60791f058cd1bc26f5724150101d8cd60366

diff --git a/lib/swish/gitty.pl b/lib/swish/gitty.pl
index 5d04f1a..bcb118f 100644
--- a/lib/swish/gitty.pl
+++ b/lib/swish/gitty.pl
@@ -28,15 +28,17 @@
 */
 
 :- module(gitty,
-	  [ gitty_file/3,		% +Store, ?Name, ?Hash
+	  [ gitty_open/2,		% +Store, +Options
+	    gitty_close/1,		% +Store
+
+	    gitty_file/3,		% +Store, ?Name, ?Hash
 	    gitty_create/5,		% +Store, +Name, +Data, +Meta, -Commit
 	    gitty_update/5,		% +Store, +Name, +Data, +Meta, -Commit
 	    gitty_commit/3,		% +Store, +Name, -Meta
 	    gitty_data/4,		% +Store, +Name, -Data, -Meta
 	    gitty_history/4,		% +Store, +Name, -History, +Options
-	    gitty_scan/1,		% +Store
-	    gitty_rescan/1,		% ?Store
 	    gitty_hash/2,		% +Store, ?Hash
+
 	    gitty_reserved_meta/1,	% ?Key
 
 	    gitty_diff/4,		% +Store, ?Start, +End, -Diff
@@ -44,15 +46,20 @@
 	    data_diff/3,		% +String1, +String2, -Diff
 	    udiff_string/2		% +Diff, -String
 	  ]).
-:- use_module(library(zlib)).
-:- use_module(library(filesex)).
 :- use_module(library(sha)).
 :- use_module(library(lists)).
 :- use_module(library(apply)).
 :- use_module(library(option)).
 :- use_module(library(process)).
 :- use_module(library(debug)).
-:- use_module(library(dcg/basics)).
+:- use_module(library(error)).
+:- use_module(library(filesex)).
+
+:- if(exists_source(library(bdb))).
+:- use_module(gitty_driver_bdb, []).
+:- endif.
+:- use_module(gitty_driver_files, []).
+
 
 /** <module> Single-file GIT like version system
 
@@ -78,24 +85,62 @@ the newly created (gitty_create/5) or updated object (gitty_update/5).
 */
 
 :- dynamic
-	head/3,				% Store, Name, Hash
-	store/2,			% Store, Updated
-	heads_input_stream_cache/2.	% Store, Stream
-:- volatile
-	head/3,
-	store/2,
-	heads_input_stream_cache/2.	% Store, Stream
-
-% enable/disable syncing remote servers running on  the same file store.
-% This facility requires shared access to files and thus doesn't work on
-% Windows.
-
-:- if(current_prolog_flag(windows, true)).
-remote_sync(false).
-:- else.
-remote_sync(true).
-:- endif.
+	gitty_store_type/2.		% +Store, -Module
 
+%%	gitty_open(+Store, +Options) is det.
+%
+%	Open a gitty store according to Options.  Defined
+%	options are:
+%
+%	  - driver(+Driver)
+%	  Backend driver to use.  One of =files= or =bdb=.  When
+%	  omitted and the store exists, the current store is
+%	  examined.  If the store does not exist, the default
+%	  is =files=.
+
+gitty_open(Store, Options) :-
+	(   exists_directory(Store)
+	->  true
+	;   existence_error(directory, Store)
+	),
+	(   option(driver(Driver), Options)
+	->  true
+	;   default_driver(Store, Driver)
+	),
+	set_driver(Store, Driver).
+
+default_driver(Store, Driver) :-
+	directory_file_path(Store, ref, RefDir),
+	exists_directory(RefDir), !,
+	Driver = files.
+default_driver(Store, Driver) :-
+	directory_file_path(Store, heads, RefDir),
+	exists_file(RefDir), !,
+	Driver = bdb.
+default_driver(_, files).
+
+set_driver(Store, Driver) :-
+	must_be(atom, Store),
+	(   driver_module(Driver, Module)
+	->  retractall(gitty_store_type(Store, _)),
+	    asserta(gitty_store_type(Store, Module))
+	;   domain_error(gitty_driver, Driver)
+	).
+
+driver_module(files, gitty_driver_files).
+driver_module(bdb,   gitty_driver_bdb).
+
+store_driver_module(Store, Module) :-
+	atom(Store), !,
+	gitty_store_type(Store, Module).
+
+%%	gitty_close(+Store) is det.
+%
+%	Close access to the Store.
+
+gitty_close(Store) :-
+	store_driver_module(Store, M),
+	M:gitty_close(Store).
 
 %%	gitty_file(+Store, ?File, ?Head) is nondet.
 %
@@ -103,8 +148,8 @@ remote_sync(true).
 %	revision.
 
 gitty_file(Store, Head, Hash) :-
-	gitty_scan(Store),
-	head(Store, Head, Hash).
+	store_driver_module(Store, M),
+	M:gitty_file(Store, Head, Hash).
 
 %%	gitty_create(+Store, +Name, +Data, +Meta, -Commit) is det.
 %
@@ -113,17 +158,15 @@ gitty_file(Store, Head, Hash) :-
 %	@arg Commit is a dict describing the new Commit
 
 gitty_create(Store, Name, _Data, _Meta, _) :-
-	gitty_scan(Store),
-	head(Store, Name, _), !,
+	gitty_file(Store, Name, _Hash), !,
 	throw(error(gitty(file_exists(Name)),_)).
 gitty_create(Store, Name, Data, Meta, CommitRet) :-
 	save_object(Store, Data, blob, Hash),
 	get_time(Now),
-	Commit = gitty{}.put(Meta)
-		        .put(_{ name:Name,
-				time:Now,
-				data:Hash
-			      }),
+	Commit = gitty{time:Now}.put(Meta)
+		                .put(_{ name:Name,
+					data:Hash
+				      }),
 	format(string(CommitString), '~q.~n', [Commit]),
 	save_object(Store, CommitString, commit, CommitHash),
 	CommitRet = Commit.put(commit, CommitHash),
@@ -137,19 +180,18 @@ gitty_create(Store, Name, Data, Meta, CommitRet) :-
 %	Update document Name using Data and the given meta information
 
 gitty_update(Store, Name, Data, Meta, CommitRet) :-
-	gitty_scan(Store),
-	head(Store, Name, OldHead),
+	gitty_file(Store, Name, OldHead),
 	(   _{previous:OldHead} >:< Meta
 	->  true
-	;   throw(error(gitty(commit_version(OldHead, Meta.previous)), _))
+	;   throw(error(gitty(commit_version(Name, OldHead, Meta.previous)), _))
 	),
 	load_plain_commit(Store, OldHead, OldMeta),
 	get_time(Now),
 	save_object(Store, Data, blob, Hash),
 	Commit = gitty{}.put(OldMeta)
+			.put(_{time:Now})
 		        .put(Meta)
 		        .put(_{ name:Name,
-				time:Now,
 				data:Hash,
 				previous:OldHead
 			      }),
@@ -161,17 +203,29 @@ gitty_update(Store, Name, Data, Meta, CommitRet) :-
 	      ( delete_object(Store, CommitHash),
 		throw(E))).
 
+%%	gitty_update_head(+Store, +Name, +OldCommit, +NewCommit) is det.
+%
+%	Update the head of a gitty  store   for  Name.  OldCommit is the
+%	current head and NewCommit is the new  head. If Name is created,
+%	and thus there is no head, OldCommit must be `-`.
+%
+%	This operation can fail because another   writer has updated the
+%	head.  This can both be in-process or another process.
+%
+%	@error gitty(file_exists(Name)) if the file already exists
+%	@error gitty(not_at_head(Name, OldCommit)) if the head was moved
+%	       by someone else.
+
+gitty_update_head(Store, Name, OldCommit, NewCommit) :-
+	store_driver_module(Store, Module),
+	Module:gitty_update_head(Store, Name, OldCommit, NewCommit).
+
 %%	gitty_data(+Store, +NameOrHash, -Data, -Meta) is semidet.
 %
 %	Get the data in object Name and its meta-data
 
 gitty_data(Store, Name, Data, Meta) :-
-	gitty_scan(Store),
-	head(Store, Name, Head), !,
-	load_commit(Store, Head, Meta),
-	load_object(Store, Meta.data, Data).
-gitty_data(Store, Hash, Data, Meta) :-
-	load_commit(Store, Hash, Meta),
+	gitty_commit(Store, Name, Meta),
 	load_object(Store, Meta.data, Data).
 
 %%	gitty_commit(+Store, +NameOrHash, -Meta) is semidet.
@@ -180,8 +234,8 @@ gitty_data(Store, Hash, Data, Meta) :-
 %	is added to the meta-data to specify the commit hash.
 
 gitty_commit(Store, Name, Meta) :-
-	gitty_scan(Store),
-	head(Store, Name, Head), !,
+	must_be(atom, Name),
+	gitty_file(Store, Name, Head), !,
 	load_commit(Store, Head, Meta).
 gitty_commit(Store, Hash, Meta) :-
 	load_commit(Store, Hash, Meta).
@@ -189,14 +243,14 @@ gitty_commit(Store, Hash, Meta) :-
 load_commit(Store, Hash, Meta) :-
 	load_plain_commit(Store, Hash, Meta0),
 	Meta1 = Meta0.put(commit, Hash),
-	(   head(Store, Meta0.name, Hash)
+	(   gitty_file(Store, Meta0.name, Hash)
 	->  Meta = Meta1.put(symbolic, "HEAD")
 	;   Meta = Meta1
 	).
 
 load_plain_commit(Store, Hash, Meta) :-
-	load_object(Store, Hash, String),
-	term_string(Meta, String, []).
+	store_driver_module(Store, Module),
+	Module:load_plain_commit(Store, Hash, Meta).
 
 %%	gitty_history(+Store, +NameOrHash, -History, +Options) is det.
 %
@@ -225,8 +279,7 @@ gitty_history(Store, Name, History, Options) :-
 	).
 
 history_hash_start(Store, Name, Hash) :-
-	gitty_scan(Store),
-	head(Store, Name, Head), !,
+	gitty_file(Store, Name, Head), !,
 	Hash = Head.
 history_hash_start(_, Hash, Hash).
 
@@ -261,35 +314,28 @@ list_prefix(N, [H|T0], [H|T]) :-
 	list_prefix(N2, T0, T).
 
 
-%%	save_object(+Store, +Data, +Type, -Hash)
+%%	save_object(+Store, +Data:string, +Type, -Hash) is det.
 %
 %	Save an object in a git compatible   way. Data provides the data
 %	as a string.
 %
 %	@see http://www.gitguys.com/topics/what-is-the-format-of-a-git-blob/
+%	@bug We currently delete objects if the head cannot be moved.
+%	This can lead to a race condition.   We need to leave that
+%	to GC.
 
 save_object(Store, Data, Type, Hash) :-
-	sha_new_ctx(Ctx0, []),
 	size_in_bytes(Data, Size),
 	format(string(Hdr), '~w ~d\u0000', [Type, Size]),
+	sha_new_ctx(Ctx0, []),
 	sha_hash_ctx(Ctx0, Hdr, Ctx1, _),
 	sha_hash_ctx(Ctx1, Data, _, HashBin),
 	hash_atom(HashBin, Hash),
-	sub_atom(Hash, 0, 2, _, Dir0),
-	sub_atom(Hash, 2, 2, _, Dir1),
-	sub_atom(Hash, 4, _, 0, File),
-	directory_file_path(Store, Dir0, D0),
-	ensure_directory(D0),
-	directory_file_path(D0, Dir1, D1),
-	ensure_directory(D1),
-	directory_file_path(D1, File, Path),
-	(   exists_file(Path)
-	->  true
-	;   setup_call_cleanup(
-		gzopen(Path, write, Out, [encoding(utf8)]),
-		format(Out, '~s~s', [Hdr, Data]),
-		close(Out))
-	).
+	store_object(Store, Hash, Hdr, Data).
+
+store_object(Store, Hash, Hdr, Data) :-
+	store_driver_module(Store, Module),
+	Module:store_object(Store, Hash, Hdr, Data).
 
 size_in_bytes(Data, Size) :-
 	setup_call_cleanup(
@@ -299,10 +345,20 @@ size_in_bytes(Data, Size) :-
 	    ),
 	    close(Out)).
 
-ensure_directory(Dir) :-
-	exists_directory(Dir), !.
-ensure_directory(Dir) :-
-	make_directory(Dir).
+
+%%	fsck_object(+Store, +Hash) is semidet.
+%
+%	Test the integrity of object Hash in Store.
+
+:- public fsck_object/2.
+fsck_object(Store, Hash) :-
+	load_object(Store, Hash, Data, Type, Size),
+	format(string(Hdr), '~w ~d\u0000', [Type, Size]),
+	sha_new_ctx(Ctx0, []),
+	sha_hash_ctx(Ctx0, Hdr, Ctx1, _),
+	sha_hash_ctx(Ctx1, Data, _, HashBin),
+	hash_atom(HashBin, Hash).
+
 
 %%	load_object(+Store, +Hash, -Data) is det.
 %%	load_object(+Store, +Hash, -Data, -Type, -Size) is det.
@@ -312,124 +368,24 @@ ensure_directory(Dir) :-
 load_object(Store, Hash, Data) :-
 	load_object(Store, Hash, Data, _, _).
 load_object(Store, Hash, Data, Type, Size) :-
-	hash_file(Store, Hash, Path),
-	setup_call_cleanup(
-	    gzopen(Path, read, In, [encoding(utf8)]),
-	    read_object(In, Data, Type, Size),
-	    close(In)).
-
-read_object(In, Data, Type, Size) :-
-	get_code(In, C0),
-	read_hdr(C0, In, Hdr),
-	phrase((nonblanks(TypeChars), " ", integer(Size)), Hdr),
-	atom_codes(Type, TypeChars),
-	read_string(In, _, Data).
-
-read_hdr(C, In, [C|T]) :-
-	C > 0, !,
-	get_code(In, C1),
-	read_hdr(C1, In, T).
-read_hdr(_, _, []).
-
-%%	gitty_rescan(?Store) is det.
-%
-%	Update our view of the shared   storage  for all stores matching
-%	Store.
-
-gitty_rescan(Store) :-
-	retractall(store(Store, _)).
-
-%%	gitty_scan(+Store) is det.
-%
-%	Scan gitty store for files (entries),   filling  head/3. This is
-%	performed lazily at first access to the store.
-%
-%	@tdb	Possibly we need to maintain a cached version of this
-%		index to avoid having to open all objects of the gitty
-%		store.
-
-gitty_scan(Store) :-
-	store(Store, _), !,
-	(   remote_sync(true)
-	->  with_mutex(gitty, remote_updates(Store))
-	;   true
-	).
-gitty_scan(Store) :-
-	with_mutex(gitty, gitty_scan_sync(Store)).
-
-:- thread_local
-	latest/3.
-
-gitty_scan_sync(Store) :-
-	store(Store, _), !.
-gitty_scan_sync(Store) :-
-	gitty_scan_latest(Store),
-	forall(retract(latest(Name, Hash, _Time)),
-	       assert(head(Store, Name, Hash))),
-	get_time(Now),
-	assertz(store(Store, Now)).
-
-%%	gitty_scan_latest(+Store)
-%
-%	Scans the gitty store, extracting  the   latest  version of each
-%	named entry.
-
-gitty_scan_latest(Store) :-
-	retractall(head(Store, _, _)),
-	retractall(latest(_, _, _)),
-	(   gitty_hash(Store, Hash),
-	    load_object(Store, Hash, Data, commit, _Size),
-	    term_string(Meta, Data, []),
-	    _{name:Name, time:Time} :< Meta,
-	    (	latest(Name, _, OldTime),
-		OldTime > Time
-	    ->	true
-	    ;	retractall(latest(Name, _, _)),
-		assertz(latest(Name, Hash, Time))
-	    ),
-	    fail
-	;   true
-	).
-
+	store_driver_module(Store, Module),
+	Module:load_object(Store, Hash, Data, Type, Size).
 
 %%	gitty_hash(+Store, ?Hash) is nondet.
 %
 %	True when Hash is an object in the store.
 
 gitty_hash(Store, Hash) :-
-	var(Hash), !,
-	access_file(Store, exist),
-	directory_files(Store, Level0),
-	member(E0, Level0),
-	E0 \== '..',
-	atom_length(E0, 2),
-	directory_file_path(Store, E0, Dir0),
-	directory_files(Dir0, Level1),
-	member(E1, Level1),
-	E1 \== '..',
-	atom_length(E1, 2),
-	directory_file_path(Dir0, E1, Dir),
-	directory_files(Dir, Files),
-	member(File, Files),
-	atom_length(File, 36),
-	atomic_list_concat([E0,E1,File], Hash).
-gitty_hash(Store, Hash) :-
-	hash_file(Store, Hash, File),
-	exists_file(File).
+	store_driver_module(Store, Module),
+	Module:gitty_hash(Store, Hash).
 
 %%	delete_object(+Store, +Hash)
 %
 %	Delete an existing object
 
 delete_object(Store, Hash) :-
-	hash_file(Store, Hash, File),
-	delete_file(File).
-
-hash_file(Store, Hash, Path) :-
-	sub_atom(Hash, 0, 2, _, Dir0),
-	sub_atom(Hash, 2, 2, _, Dir1),
-	sub_atom(Hash, 4, _, 0, File),
-	atomic_list_concat([Store, Dir0, Dir1, File], /, Path).
+	store_driver_module(Store, Module),
+	Module:delete_object(Store, Hash).
 
 %%	gitty_reserved_meta(?Key) is nondet.
 %
@@ -440,126 +396,32 @@ gitty_reserved_meta(time).
 gitty_reserved_meta(data).
 gitty_reserved_meta(previous).
 
-
 		 /*******************************
-		 *	      SYNCING		*
+		 *	    FSCK SUPPORT	*
 		 *******************************/
 
-%%	gitty_update_head(+Store, +Name, +OldCommit, +NewCommit) is det.
-%
-%	Update the head of a gitty  store   for  Name.  OldCommit is the
-%	current head and NewCommit is the new  head. If Name is created,
-%	and thus there is no head, OldCommit must be `-`.
-%
-%	This operation can fail because another   writer has updated the
-%	head.  This can both be in-process or another process.
-
-gitty_update_head(Store, Name, OldCommit, NewCommit) :-
-	with_mutex(gitty,
-		   gitty_update_head_sync(Store, Name, OldCommit, NewCommit)).
-
-gitty_update_head_sync(Store, Name, OldCommit, NewCommit) :-
-	remote_sync(true), !,
-	setup_call_cleanup(
-	    heads_output_stream(Store, HeadsOut),
-	    gitty_update_head_sync(Store, Name, OldCommit, NewCommit, HeadsOut),
-	    close(HeadsOut)).
-gitty_update_head_sync(Store, Name, OldCommit, NewCommit) :-
-	gitty_update_head_sync2(Store, Name, OldCommit, NewCommit).
-
-gitty_update_head_sync(Store, Name, OldCommit, NewCommit, HeadsOut) :-
-	gitty_update_head_sync2(Store, Name, OldCommit, NewCommit),
-	format(HeadsOut, '~q.~n', [head(Name, OldCommit, NewCommit)]).
-
-gitty_update_head_sync2(Store, Name, OldCommit, NewCommit) :-
-	gitty_scan(Store),		% fetch remote changes
-	(   OldCommit == (-)
-	->  (   head(Store, Name, _)
-	    ->	throw(error(gitty(file_exists(Name),_)))
-	    ;	assertz(head(Store, Name, NewCommit))
-	    )
-	;   (   retract(head(Store, Name, OldCommit))
-	    ->	assertz(head(Store, Name, NewCommit))
-	    ;	throw(error(gitty(not_at_head(Name, OldCommit)), _))
-	    )
-	).
+:- public
+	delete_object/2,
+	delete_head/2,
+	set_head/3.
 
-remote_updates(Store) :-
-	remote_updates(Store, List),
-	maplist(update_head(Store), List).
+%%	delete_head(+Store, +Head) is det.
+%
+%	Delete Head from the administration.  Used if the head is
+%	inconsistent.
 
-update_head(Store, head(Name, OldCommit, NewCommit)) :-
-	(   OldCommit == (-)
-	->  \+ head(Store, Name, _)
-	;   retract(head(Store, Name, OldCommit))
-	), !,
-	assert(head(Store, Name, NewCommit)).
-update_head(_, _).
+delete_head(Store, Head) :-
+	store_driver_module(Store, Module),
+	Module:delete_head(Store, Head).
 
-%%	remote_updates(+Store, -List) is det.
+%%	set_head(+Store, +File, +Head) is det.
 %
-%	Find updates from other gitties  on   the  same filesystem. Note
-%	that we have to push/pop the input   context to avoid creating a
-%	notion of an  input  context   which  possibly  relate  messages
-%	incorrectly to the sync file.
-
-remote_updates(Store, List) :-
-	heads_input_stream(Store, Stream),
-	setup_call_cleanup(
-	    '$push_input_context'(gitty_sync),
-	    read_new_terms(Stream, List),
-	    '$pop_input_context').
-
-read_new_terms(Stream, Terms) :-
-	read(Stream, First),
-	read_new_terms(First, Stream, Terms).
-
-read_new_terms(end_of_file, _, List) :- !,
-	List = [].
-read_new_terms(Term, Stream, [Term|More]) :-
-	read(Stream, Term2),
-	read_new_terms(Term2, Stream, More).
-
-heads_output_stream(Store, Out) :-
-	heads_file(Store, HeadsFile),
-	open(HeadsFile, append, Out,
-	     [ encoding(utf8),
-	       lock(exclusive)
-	     ]).
-
-heads_input_stream(Store, Stream) :-
-	heads_input_stream_cache(Store, Stream0), !,
-	Stream = Stream0.
-heads_input_stream(Store, Stream) :-
-	heads_file(Store, File),
-	between(1, 2, _),
-	catch(open(File, read, In,
-		   [ encoding(utf8),
-		     eof_action(reset)
-		   ]),
-	      _,
-	      create_heads_file(Store)), !,
-	assert(heads_input_stream_cache(Store, In)),
-	Stream = In.
-
-create_heads_file(Store) :-
-	call_cleanup(
-	    heads_output_stream(Store, Out),
-	    close(Out)),
-	fail.					% always fail!
+%	Register Head as the Head hash for File, removing possible
+%	old head.
 
-heads_file(Store, HeadsFile) :-
-	ensure_directory(Store),
-	directory_file_path(Store, ref, RefDir),
-	ensure_directory(RefDir),
-	directory_file_path(RefDir, head, HeadsFile).
-
-:- multifile
-	prolog:error_message//1.
-
-prolog:error_message(gitty(not_at_head(Name, _OldCommit))) -->
-	[ 'Gitty: cannot update head for "~w" because it was \c
-	   updated by someone else'-[Name] ].
+set_head(Store, File, Head) :-
+	store_driver_module(Store, Module),
+	Module:set_head(Store, File, Head).
 
 
 		 /*******************************
@@ -877,3 +739,19 @@ longest(L1, L2, Longest) :-
 	->  Longest = L1
 	;   Longest = L2
 	).
+
+		 /*******************************
+		 *	      MESSAGES		*
+		 *******************************/
+:- multifile
+	prolog:error_message//1.
+
+prolog:error_message(gitty(not_at_head(Name, _OldCommit))) -->
+	[ 'Gitty: cannot update head for "~w" because it was \c
+	   updated by someone else'-[Name] ].
+prolog:error_message(gitty(file_exists(Name))) -->
+	[ 'Gitty: File exists: ~p'-[Name] ].
+prolog:error_message(gitty(commit_version(Name, _Head, _Previous))) -->
+	[ 'Gitty: ~p: cannot update (modified by someone else)'-[Name] ].
+
+
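
The patch above turns gitty.pl into a thin dispatch layer: gitty_open/2 selects a storage driver (files or bdb), records it in gitty_store_type/2, and the low-level operations are forwarded to the driver module. Below is a minimal usage sketch of the new public API. It assumes gitty.pl is loaded and that the store directory exists (gitty_open/2 raises an existence error otherwise); the file name, content and meta data are purely illustrative.

  :- use_module(library(filesex)).
  % Also load lib/swish/gitty.pl; the exact path depends on the installation.

  gitty_demo(Store) :-
      make_directory_path(Store),             % gitty_open/2 expects an existing directory
      gitty_open(Store, [driver(files)]),     % or driver(bdb) when library(bdb) is available
      % Create a document; the returned commit is a dict that includes the commit hash.
      gitty_create(Store, 'hello.pl', "hello :- writeln(hi).\n",
                   _{author:"demo"}, Commit0),
      Prev = Commit0.commit,
      % Update it; `previous` must name the commit we believe to be the current head.
      gitty_update(Store, 'hello.pl', "hello :- writeln(hello).\n",
                   _{author:"demo", previous:Prev}, _Commit1),
      % Read the head revision back, together with its meta data.
      gitty_data(Store, 'hello.pl', Data, Meta),
      format("~w is at ~w:~n~s~n", [Meta.name, Meta.commit, Data]),
      gitty_close(Store).
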
diff --git a/lib/swish/gitty_driver_bdb.pl b/lib/swish/gitty_driver_bdb.pl
new file mode 100644
index 0000000..965a998
--- /dev/null
+++ b/lib/swish/gitty_driver_bdb.pl
@@ -0,0 +1,278 @@
+/*  Part of SWI-Prolog
+
+    Author:        Jan Wielemaker
+    E-mail:        J.Wielemaker@cs.vu.nl
+    WWW:           http://www.swi-prolog.org
+    Copyright (C): 2015, VU University Amsterdam
+
+    This program is free software; you can redistribute it and/or
+    modify it under the terms of the GNU General Public License
+    as published by the Free Software Foundation; either version 2
+    of the License, or (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+    As a special exception, if you link this library with other files,
+    compiled with a Free Software compiler, to produce an executable, this
+    library does not by itself cause the resulting executable to be covered
+    by the GNU General Public License. This exception does not however
+    invalidate any other reasons why the executable file might be covered by
+    the GNU General Public License.
+*/
+
+:- module(gitty_driver_bdb,
+	  [ gitty_close/1,		% +Store
+	    gitty_file/3,		% +Store, ?Name, ?Hash
+
+	    gitty_update_head/4,	% +Store, +Name, +OldCommit, +NewCommit
+	    delete_head/2,		% +Store, +Name
+	    set_head/3,			% +Store, +Name, +Hash
+	    store_object/4,		% +Store, +Hash, +Header, +Data
+	    delete_object/2,		% +Store, +Hash
+
+	    gitty_hash/2,		% +Store, ?Hash
+	    load_plain_commit/3,	% +Store, +Hash, -Meta
+	    load_object/5		% +Store, +Hash, -Data, -Type, -Size
+	  ]).
+:- use_module(library(zlib)).
+:- use_module(library(dcg/basics)).
+:- use_module(library(memfile)).
+:- use_module(library(bdb)).
+
+/** <module> Gitty BDB driver
+
+This version of the driver uses library(bdb), the Berkeley DB database.
+This driver is particularly suited for large numbers of files: the store
+uses less disk space and starts much faster than the plain-files driver.
+
+The BDB database file contains two databases:
+
+  - =heads= maps a file name to the hash of the last object
+  - =objects= contains the object blobs.
+*/
+
+
+:- dynamic
+	bdb_env/2,			% Store, Env
+	bdb_db/3.			% Store, Database, Handle
+:- volatile
+	bdb_env/2,
+	bdb_db/3.
+
+
+bdb_handle(Store, Database, Handle) :-
+	bdb_db(Store, Database, Handle), !.
+bdb_handle(Store, Database, Handle) :-
+	with_mutex(gitty_bdb, bdb_handle_sync(Store, Database, Handle)).
+
+bdb_handle_sync(Store, Database, Handle) :-
+	bdb_db(Store, Database, Handle), !.
+bdb_handle_sync(Store, Database, Handle) :-
+	bdb_store(Store, Env),
+	db_types(Database, KeyType, ValueType),
+	bdb_open(Database, update, Handle,
+		 [ environment(Env),
+		   key(KeyType),
+		   value(ValueType)
+		 ]),
+	asserta(bdb_db(Store, Database, Handle)).
+
+db_types(heads,   atom, atom).		% Name --> Hash
+db_types(objects, atom, c_blob).	% Hash --> Blob
+
+%%	bdb_store(+Store, -Env) is det.
+%
+%	Get the BDB environment for Store.
+
+bdb_store(Store, Env) :-
+	bdb_env(Store, Env), !.
+bdb_store(Store, Env) :-
+	with_mutex(gitty_bdb, bdb_store_sync(Store, Env)).
+
+bdb_store_sync(Store, Env) :-
+	bdb_env(Store, Env), !.
+bdb_store_sync(Store, Env) :-
+	ensure_directory(Store),
+	bdb_init(Env,
+		 [ home(Store),
+		   create(true),
+		   thread(true),
+		   init_txn(true),
+		   recover(true),
+		   register(true)
+		 ]),
+	asserta(bdb_env(Store, Env)).
+
+ensure_directory(Dir) :-
+	exists_directory(Dir), !.
+ensure_directory(Dir) :-
+	make_directory(Dir).
+
+%%	gitty_close(+Store) is det.
+%
+%	Close the BDB environment associated with a gitty store
+
+gitty_close(Store) :-
+	with_mutex(gitty_bdb, gitty_close_sync(Store)).
+
+gitty_close_sync(Store) :-
+	(   retract(bdb_env(Store, Env))
+	->  bdb_close_environment(Env)
+	;   true
+	).
+
+
+%%	gitty_file(+Store, ?File, ?Head) is nondet.
+%
+%	True when File is an entry in the gitty store and Head is its
+%	HEAD revision.
+
+gitty_file(Store, Head, Hash) :-
+	bdb_handle(Store, heads, H),
+	(   nonvar(Head)
+	->  bdb_get(H, Head, Hash)
+	;   bdb_enum(H, Head, Hash)
+	).
+
+%%	gitty_update_head(+Store, +Name, +OldCommit, +NewCommit) is det.
+%
+%	Update the head of a gitty  store   for  Name.  OldCommit is the
+%	current head and NewCommit is the new  head. If Name is created,
+%	and thus there is no head, OldCommit must be `-`.
+%
+%	This operation can fail because another   writer has updated the
+%	head.  This can both be in-process or another process.
+%
+%	@error gitty(file_exists(Name)) if the file already exists
+%	@error gitty(not_at_head(Name, OldCommit)) if the head was moved
+%	       by someone else.
+
+gitty_update_head(Store, Name, OldCommit, NewCommit) :-
+	bdb_store(Store, Env),
+	bdb_transaction(
+	    Env,
+	    gitty_update_head_sync(Store, Name, OldCommit, NewCommit)).
+
+gitty_update_head_sync(Store, Name, OldCommit, NewCommit) :-
+	bdb_handle(Store, heads, BDB),
+	(   OldCommit == (-)
+	->  (   bdb_get(BDB, Name, _)
+	    ->	throw(error(gitty(file_exists(Name),_)))
+	    ;	bdb_put(BDB, Name, NewCommit)
+	    )
+	;   (   bdb_get(BDB, Name, OldCommit)
+	    ->	bdb_put(BDB, Name, NewCommit)
+	    ;	throw(error(gitty(not_at_head(Name, OldCommit)), _))
+	    )
+	).
+
+%%	delete_head(+Store, +Name) is det.
+%
+%	Delete the named head.
+
+delete_head(Store, Name) :-
+	bdb_handle(Store, heads, BDB),
+	bdb_del(BDB, Name, _Old).
+
+%%	set_head(+Store, +File, +Hash) is det.
+%
+%	Set the head of the given File to Hash
+
+set_head(Store, File, Hash) :-
+	bdb_handle(Store, heads, BDB),
+	bdb_put(BDB, File, Hash).
+
+%%	load_plain_commit(+Store, +Hash, -Meta:dict) is semidet.
+%
+%	Load the commit data as a dict. Fails  if Hash does not exist or
+%	is not a commit.
+
+load_plain_commit(Store, Hash, Meta) :-
+	load_object(Store, Hash, String, commit, _Size),
+	term_string(Meta, String, []).
+
+%%	store_object(+Store, +Hash, +Header:string, +Data:string) is det.
+%
+%	Store the actual object. The store  must associate Hash with the
+%	concatenation of Hdr and Data.
+
+store_object(Store, Hash, Hdr, Data) :-
+	compress_string(Hdr, Data, Object),
+	bdb_handle(Store, objects, BDB),
+	bdb_put(BDB, Hash, Object).
+
+compress_string(Header, Data, String) :-
+	setup_call_cleanup(
+	    new_memory_file(MF),
+	    ( setup_call_cleanup(
+		  open_memory_file(MF, write, Out, [encoding(utf8)]),
+		  setup_call_cleanup(
+		      zopen(Out, OutZ, [ format(gzip),
+					 close_parent(false)
+				       ]),
+		      format(OutZ, '~s~s', [Header, Data]),
+		    close(OutZ)),
+		  close(Out)),
+	      memory_file_to_string(MF, String, octet)
+	    ),
+	    free_memory_file(MF)).
+
+%%	load_object(+Store, +Hash, -Data, -Type, -Size) is det.
+%
+%	Load an object given its  Hash.  Data   holds  the  content as a
+%	string, Type is the object type (an   atom) and Size is the size
+%	of the object in bytes.
+
+load_object(Store, Hash, Data, Type, Size) :-
+	bdb_handle(Store, objects, BDB),
+	bdb_get(BDB, Hash, Blob),
+	setup_call_cleanup(
+	    open_string(Blob, In),
+	    setup_call_cleanup(
+		zopen(In, InZ, [ format(gzip),
+				 close_parent(false)
+			       ]),
+		( set_stream(InZ, encoding(utf8)),
+		  read_object(InZ, Data, Type, Size)
+		),
+		close(InZ)),
+	    close(In)).
+
+read_object(In, Data, Type, Size) :-
+	get_code(In, C0),
+	read_hdr(C0, In, Hdr),
+	phrase((nonblanks(TypeChars), " ", integer(Size)), Hdr),
+	atom_codes(Type, TypeChars),
+	read_string(In, _, Data).
+
+read_hdr(C, In, [C|T]) :-
+	C > 0, !,
+	get_code(In, C1),
+	read_hdr(C1, In, T).
+read_hdr(_, _, []).
+
+%%	gitty_hash(+Store, ?Hash) is nondet.
+%
+%	True when Hash is an object in the store.
+
+gitty_hash(Store, Hash) :-
+	bdb_handle(Store, objects, BDB),
+	(   nonvar(Hash)
+	->  bdb_get(BDB, Hash, _)
+	;   bdb_enum(BDB, Hash, _)
+	).
+
+%%	delete_object(+Store, +Hash)
+%
+%	Delete an existing object
+
+delete_object(Store, Hash) :-
+	bdb_handle(Store, objects, BDB),
+	bdb_del(BDB, Hash, _).
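
The BDB driver keeps the heads and objects in two Berkeley DB databases inside the store directory and is normally reached through the generic gitty predicates rather than called directly. A small sketch, assuming library(bdb) is installed and Store names an existing directory holding a BDB-backed gitty store (the predicate name is made up):

  list_bdb_store(Store) :-
      gitty_open(Store, [driver(bdb)]),
      forall(gitty_file(Store, File, Head),
             format("~w~t~48|~w~n", [File, Head])),
      aggregate_all(count, gitty_hash(Store, _), NObjects),
      format("~D objects in total~n", [NObjects]),
      gitty_close(Store).
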
diff --git a/lib/swish/gitty_driver_files.pl b/lib/swish/gitty_driver_files.pl
new file mode 100644
index 0000000..19ebb19
--- /dev/null
+++ b/lib/swish/gitty_driver_files.pl
@@ -0,0 +1,451 @@
+/*  Part of SWI-Prolog
+
+    Author:        Jan Wielemaker
+    E-mail:        J.Wielemaker@cs.vu.nl
+    WWW:           http://www.swi-prolog.org
+    Copyright (C): 2015, VU University Amsterdam
+
+    This program is free software; you can redistribute it and/or
+    modify it under the terms of the GNU General Public License
+    as published by the Free Software Foundation; either version 2
+    of the License, or (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+    As a special exception, if you link this library with other files,
+    compiled with a Free Software compiler, to produce an executable, this
+    library does not by itself cause the resulting executable to be covered
+    by the GNU General Public License. This exception does not however
+    invalidate any other reasons why the executable file might be covered by
+    the GNU General Public License.
+*/
+
+:- module(gitty_driver_files,
+	  [ gitty_close/1,		% +Store
+	    gitty_file/3,		% +Store, ?Name, ?Hash
+
+	    gitty_update_head/4,	% +Store, +Name, +OldCommit, +NewCommit
+	    delete_head/2,		% +Store, +Name
+	    set_head/3,			% +Store, +Name, +Hash
+	    store_object/4,		% +Store, +Hash, +Header, +Data
+	    delete_object/2,		% +Store, +Hash
+
+	    gitty_hash/2,		% +Store, ?Hash
+	    load_plain_commit/3,	% +Store, +Hash, -Meta
+	    load_object/5,		% +Store, +Hash, -Data, -Type, -Size
+
+	    gitty_rescan/1		% Store
+	  ]).
+:- use_module(library(zlib)).
+:- use_module(library(filesex)).
+:- use_module(library(lists)).
+:- use_module(library(apply)).
+:- use_module(library(dcg/basics)).
+
+/** <module> Gitty plain files driver
+
+This version of the driver uses plain files  to store the gitty data. It
+consists of a nested directory  structure   with  files  named after the
+hash. Objects and hash computation are the same as for `git`. The _heads_
+(files) are computed on startup by scanning all objects. There is a file
+=ref/head= that is updated if a head is updated. Other clients can watch
+this file and update their notion  of   the  head. This implies that the
+store can handle multiple clients that can  access a shared file system,
+optionally shared using NFS from different machines.
+
+The store is simple and robust. The  main disadvantages are long startup
+times as the store holds more objects and relatively high disk usage due
+to rounding the small objects to disk allocation units.
+
+@bug	Shared access does not work on Windows.
+*/
+
+:- dynamic
+	head/3,				% Store, Name, Hash
+	store/2,			% Store, Updated
+	heads_input_stream_cache/2.	% Store, Stream
+:- volatile
+	head/3,
+	store/2,
+	heads_input_stream_cache/2.	% Store, Stream
+
+% enable/disable syncing remote servers running on  the same file store.
+% This facility requires shared access to files and thus doesn't work on
+% Windows.
+
+:- if(current_prolog_flag(windows, true)).
+remote_sync(false).
+:- else.
+remote_sync(true).
+:- endif.
+
+%%	gitty_close(+Store) is det.
+%
+%	Close resources associated with a store.
+
+gitty_close(Store) :-
+	(   retract(heads_input_stream_cache(Store, In))
+	->  close(In)
+	;   true
+	),
+	retractall(head(Store,_,_)),
+	retractall(store(Store,_)).
+
+
+%%	gitty_file(+Store, ?File, ?Head) is nondet.
+%
+%	True when File is an entry in the gitty store and Head is its
+%	HEAD revision.
+
+gitty_file(Store, Head, Hash) :-
+	gitty_scan(Store),
+	head(Store, Head, Hash).
+
+%%	load_plain_commit(+Store, +Hash, -Meta:dict) is semidet.
+%
+%	Load the commit data as a dict.
+
+load_plain_commit(Store, Hash, Meta) :-
+	load_object(Store, Hash, String, _, _),
+	term_string(Meta, String, []).
+
+%%	store_object(+Store, +Hash, +Header:string, +Data:string) is det.
+%
+%	Store the actual object. The store  must associate Hash with the
+%	concatenation of Hdr and Data.
+
+store_object(Store, Hash, Hdr, Data) :-
+	sub_atom(Hash, 0, 2, _, Dir0),
+	sub_atom(Hash, 2, 2, _, Dir1),
+	sub_atom(Hash, 4, _, 0, File),
+	directory_file_path(Store, Dir0, D0),
+	ensure_directory(D0),
+	directory_file_path(D0, Dir1, D1),
+	ensure_directory(D1),
+	directory_file_path(D1, File, Path),
+	(   exists_file(Path)
+	->  true
+	;   setup_call_cleanup(
+		gzopen(Path, write, Out, [encoding(utf8)]),
+		format(Out, '~s~s', [Hdr, Data]),
+		close(Out))
+	).
+
+ensure_directory(Dir) :-
+	exists_directory(Dir), !.
+ensure_directory(Dir) :-
+	make_directory(Dir).
+
+%%	load_object(+Store, +Hash, -Data, -Type, -Size) is det.
+%
+%	Load the given object.
+
+load_object(Store, Hash, Data, Type, Size) :-
+	hash_file(Store, Hash, Path),
+	setup_call_cleanup(
+	    gzopen(Path, read, In, [encoding(utf8)]),
+	    read_object(In, Data, Type, Size),
+	    close(In)).
+
+read_object(In, Data, Type, Size) :-
+	get_code(In, C0),
+	read_hdr(C0, In, Hdr),
+	phrase((nonblanks(TypeChars), " ", integer(Size)), Hdr),
+	atom_codes(Type, TypeChars),
+	read_string(In, _, Data).
+
+read_hdr(C, In, [C|T]) :-
+	C > 0, !,
+	get_code(In, C1),
+	read_hdr(C1, In, T).
+read_hdr(_, _, []).
+
+%%	gitty_rescan(?Store) is det.
+%
+%	Update our view of the shared   storage  for all stores matching
+%	Store.
+
+gitty_rescan(Store) :-
+	retractall(store(Store, _)).
+
+%%	gitty_scan(+Store) is det.
+%
+%	Scan gitty store for files (entries),   filling  head/3. This is
+%	performed lazily at first access to the store.
+%
+%	@tdb	Possibly we need to maintain a cached version of this
+%		index to avoid having to open all objects of the gitty
+%		store.
+
+gitty_scan(Store) :-
+	store(Store, _), !,
+	(   remote_sync(true)
+	->  with_mutex(gitty, remote_updates(Store))
+	;   true
+	).
+gitty_scan(Store) :-
+	with_mutex(gitty, gitty_scan_sync(Store)).
+
+:- thread_local
+	latest/3.
+
+gitty_scan_sync(Store) :-
+	store(Store, _), !.
+gitty_scan_sync(Store) :-
+	remote_sync(true), !,
+	restore_heads_from_remote(Store).
+gitty_scan_sync(Store) :-
+	read_heads_from_objects(Store).
+
+%%	read_heads_from_objects(+Store) is det.
+%
+%	Establish the head(Store,File,Hash)  relation   by  reading  all
+%	objects and adding a fact for the most recent commit.
+
+read_heads_from_objects(Store) :-
+	gitty_scan_latest(Store),
+	forall(retract(latest(Name, Hash, _Time)),
+	       assert(head(Store, Name, Hash))),
+	get_time(Now),
+	assertz(store(Store, Now)).
+
+%%	gitty_scan_latest(+Store)
+%
+%	Scans the gitty store, extracting  the   latest  version of each
+%	named entry.
+
+gitty_scan_latest(Store) :-
+	retractall(head(Store, _, _)),
+	retractall(latest(_, _, _)),
+	(   gitty_hash(Store, Hash),
+	    load_object(Store, Hash, Data, commit, _Size),
+	    term_string(Meta, Data, []),
+	    _{name:Name, time:Time} :< Meta,
+	    (	latest(Name, _, OldTime),
+		OldTime > Time
+	    ->	true
+	    ;	retractall(latest(Name, _, _)),
+		assertz(latest(Name, Hash, Time))
+	    ),
+	    fail
+	;   true
+	).
+
+
+%%	gitty_hash(+Store, ?Hash) is nondet.
+%
+%	True when Hash is an object in the store.
+
+gitty_hash(Store, Hash) :-
+	var(Hash), !,
+	access_file(Store, exist),
+	directory_files(Store, Level0),
+	member(E0, Level0),
+	E0 \== '..',
+	atom_length(E0, 2),
+	directory_file_path(Store, E0, Dir0),
+	directory_files(Dir0, Level1),
+	member(E1, Level1),
+	E1 \== '..',
+	atom_length(E1, 2),
+	directory_file_path(Dir0, E1, Dir),
+	directory_files(Dir, Files),
+	member(File, Files),
+	atom_length(File, 36),
+	atomic_list_concat([E0,E1,File], Hash).
+gitty_hash(Store, Hash) :-
+	hash_file(Store, Hash, File),
+	exists_file(File).
+
+%%	delete_object(+Store, +Hash)
+%
+%	Delete an existing object
+
+delete_object(Store, Hash) :-
+	hash_file(Store, Hash, File),
+	delete_file(File).
+
+hash_file(Store, Hash, Path) :-
+	sub_atom(Hash, 0, 2, _, Dir0),
+	sub_atom(Hash, 2, 2, _, Dir1),
+	sub_atom(Hash, 4, _, 0, File),
+	atomic_list_concat([Store, Dir0, Dir1, File], /, Path).
+
+
+		 /*******************************
+		 *	      SYNCING		*
+		 *******************************/
+
+%%	gitty_update_head(+Store, +Name, +OldCommit, +NewCommit) is det.
+%
+%	Update the head of a gitty  store   for  Name.  OldCommit is the
+%	current head and NewCommit is the new  head. If Name is created,
+%	and thus there is no head, OldCommit must be `-`.
+%
+%	This operation can fail because another   writer has updated the
+%	head.  This can both be in-process or another process.
+
+gitty_update_head(Store, Name, OldCommit, NewCommit) :-
+	with_mutex(gitty,
+		   gitty_update_head_sync(Store, Name, OldCommit, NewCommit)).
+
+gitty_update_head_sync(Store, Name, OldCommit, NewCommit) :-
+	remote_sync(true), !,
+	setup_call_cleanup(
+	    heads_output_stream(Store, HeadsOut),
+	    gitty_update_head_sync(Store, Name, OldCommit, NewCommit, HeadsOut),
+	    close(HeadsOut)).
+gitty_update_head_sync(Store, Name, OldCommit, NewCommit) :-
+	gitty_update_head_sync2(Store, Name, OldCommit, NewCommit).
+
+gitty_update_head_sync(Store, Name, OldCommit, NewCommit, HeadsOut) :-
+	gitty_update_head_sync2(Store, Name, OldCommit, NewCommit),
+	format(HeadsOut, '~q.~n', [head(Name, OldCommit, NewCommit)]).
+
+gitty_update_head_sync2(Store, Name, OldCommit, NewCommit) :-
+	gitty_scan(Store),		% fetch remote changes
+	(   OldCommit == (-)
+	->  (   head(Store, Name, _)
+	    ->	throw(error(gitty(file_exists(Name),_)))
+	    ;	assertz(head(Store, Name, NewCommit))
+	    )
+	;   (   retract(head(Store, Name, OldCommit))
+	    ->	assertz(head(Store, Name, NewCommit))
+	    ;	throw(error(gitty(not_at_head(Name, OldCommit)), _))
+	    )
+	).
+
+remote_updates(Store) :-
+	remote_updates(Store, List),
+	maplist(update_head(Store), List).
+
+update_head(Store, head(Name, OldCommit, NewCommit)) :-
+	(   OldCommit == (-)
+	->  \+ head(Store, Name, _)
+	;   retract(head(Store, Name, OldCommit))
+	), !,
+	assert(head(Store, Name, NewCommit)).
+update_head(_, _).
+
+%%	remote_updates(+Store, -List) is det.
+%
+%	Find updates from other gitties  on   the  same filesystem. Note
+%	that we have to push/pop the input   context to avoid creating a
+%	notion of an  input  context   which  possibly  relate  messages
+%	incorrectly to the sync file.
+
+remote_updates(Store, List) :-
+	heads_input_stream(Store, Stream),
+	setup_call_cleanup(
+	    '$push_input_context'(gitty_sync),
+	    read_new_terms(Stream, List),
+	    '$pop_input_context').
+
+read_new_terms(Stream, Terms) :-
+	read(Stream, First),
+	read_new_terms(First, Stream, Terms).
+
+read_new_terms(end_of_file, _, List) :- !,
+	List = [].
+read_new_terms(Term, Stream, [Term|More]) :-
+	read(Stream, Term2),
+	read_new_terms(Term2, Stream, More).
+
+heads_output_stream(Store, Out) :-
+	heads_file(Store, HeadsFile),
+	open(HeadsFile, append, Out,
+	     [ encoding(utf8),
+	       lock(exclusive)
+	     ]).
+
+heads_input_stream(Store, Stream) :-
+	heads_input_stream_cache(Store, Stream0), !,
+	Stream = Stream0.
+heads_input_stream(Store, Stream) :-
+	heads_file(Store, File),
+	between(1, 2, _),
+	catch(open(File, read, In,
+		   [ encoding(utf8),
+		     eof_action(reset)
+		   ]),
+	      _,
+	      create_heads_file(Store)), !,
+	assert(heads_input_stream_cache(Store, In)),
+	Stream = In.
+
+create_heads_file(Store) :-
+	call_cleanup(
+	    heads_output_stream(Store, Out),
+	    close(Out)),
+	fail.					% always fail!
+
+heads_file(Store, HeadsFile) :-
+	ensure_directory(Store),
+	directory_file_path(Store, ref, RefDir),
+	ensure_directory(RefDir),
+	directory_file_path(RefDir, head, HeadsFile).
+
+%%	restore_heads_from_remote(Store)
+%
+%	Restore the known heads by reading the remote sync file.
+
+restore_heads_from_remote(Store) :-
+	heads_file(Store, File),
+	exists_file(File),
+	setup_call_cleanup(
+	    open(File, read, In, [encoding(utf8)]),
+	    restore_heads(Store, In),
+	    close(In)), !,
+	get_time(Now),
+	assertz(store(Store, Now)).
+restore_heads_from_remote(Store) :-
+	read_heads_from_objects(Store),
+	heads_file(Store, File),
+	setup_call_cleanup(
+	    open(File, write, Out, [encoding(utf8)]),
+	    save_heads(Store, Out),
+	    close(Out)), !.
+
+restore_heads(Store, In) :-
+	read(In, Term0),
+	Term0 = epoch(_),
+	read(In, Term1),
+	restore_heads(Term1, In, Store).
+
+restore_heads(end_of_file, _, _) :- !.
+restore_heads(head(File, _, Hash), In, Store) :-
+	retractall(head(Store, File, _)),
+	assertz(head(Store, File, Hash)),
+	read(In, Term),
+	restore_heads(Term, In, Store).
+
+save_heads(Store, Out) :-
+	get_time(Now),
+	format(Out, 'epoch(~0f).~n~n', [Now]),
+	forall(head(Store, File, Hash),
+	       format(Out, '~q.~n', [head(File, -, Hash)])).
+
+
+%%	delete_head(+Store, +Head) is det.
+%
+%	Delete Head from Store. Used  by   gitty_fsck/1  to remove heads
+%	that have no commits. Should  we   forward  this  to remotes, or
+%	should they do their own thing?
+
+delete_head(Store, Head) :-
+	retractall(head(Store, Head, _)).
+
+%%	set_head(+Store, +File, +Hash) is det.
+%
+%	Set the head of the given File to Hash
+
+set_head(Store, File, Hash) :-
+	retractall(head(Store, File, _)),
+	asserta(head(Store, File, Hash)).
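
For the files driver, cooperation between server instances sharing a store goes through <Store>/ref/head: save_heads/2 writes an epoch(Time) term followed by one head(File, -, Hash) term per file, and every later head update appends a head(Name, OldCommit, NewCommit) term that other instances replay via remote_updates/2 (gitty_rescan/1 forces a full rescan instead). A small inspection sketch that prints the raw terms of this sync file; the predicate names are made up:

  show_sync_file(Store) :-
      directory_file_path(Store, 'ref/head', HeadsFile),
      setup_call_cleanup(
          open(HeadsFile, read, In, [encoding(utf8)]),
          print_sync_terms(In),
          close(In)).

  print_sync_terms(In) :-
      read(In, Term),
      (   Term == end_of_file
      ->  true
      ;   format("~q.~n", [Term]),
          print_sync_terms(In)
      ).
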
diff --git a/lib/swish/gitty_tools.pl b/lib/swish/gitty_tools.pl
new file mode 100644
index 0000000..5f7092d
--- /dev/null
+++ b/lib/swish/gitty_tools.pl
@@ -0,0 +1,280 @@
+/*  Part of SWI-Prolog
+
+    Author:        Jan Wielemaker
+    E-mail:        J.Wielemaker@cs.vu.nl
+    WWW:           http://www.swi-prolog.org
+    Copyright (C): 2015, VU University Amsterdam
+
+    This program is free software; you can redistribute it and/or
+    modify it under the terms of the GNU General Public License
+    as published by the Free Software Foundation; either version 2
+    of the License, or (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+    As a special exception, if you link this library with other files,
+    compiled with a Free Software compiler, to produce an executable, this
+    library does not by itself cause the resulting executable to be covered
+    by the GNU General Public License. This exception does not however
+    invalidate any other reasons why the executable file might be covered by
+    the GNU General Public License.
+*/
+
+:- module(gitty_tools,
+	  [ gitty_copy_store/3,		% +StoreIn, +StoreOut, +Driver
+	    gitty_compare_stores/2,	% +Store1, +Store2
+	    gitty_fsck/2		% +Store, +Options
+	  ]).
+:- use_module(gitty).
+:- use_module(library(apply)).
+:- use_module(library(option)).
+:- use_module(library(aggregate)).
+
+/** <module> Gitty maintenance tools
+
+This file contains some maintenance  predicates   for  gitty  stores. It
+notably allows for copying, synchronizing and comparing stores.
+*/
+
+%%	gitty_copy_store(+StoreIn, +StoreOut, +Driver) is det.
+%
+%	Copy a gitty store, using Driver   for the target StoreOut. This
+%	copies the entire history  and  data   of  StoreIn  to StoreOut,
+%	possibly transcribing to another driver.   Note  that the hashes
+%	are independent from the driver.
+%
+%	Note that gitty_copy_store/3 can also be used to migrate updates
+%	from StoreIn to an older copy StoreOut.
+
+gitty_copy_store(StoreIn, StoreOut, Driver) :-
+	gitty_open(StoreIn, []),
+	gitty_open(StoreOut, [driver(Driver)]),
+	State = state(0),
+	(   gitty_file(StoreIn, File, _),
+	    State = state(N0),
+	    N is N0+1,
+	    nb_setarg(1, State, N),
+	    format(user_error, '~N~D ~`.t ~q ~50|', [N, File]),
+	    (	copy_file(File, StoreIn, StoreOut)
+	    ->	fail
+	    ;	format('Failed to copy ~q~n', [File])
+	    )
+	;   true
+	).
+
+copy_file(File, StoreIn, StoreOut) :-
+	gitty_history(StoreIn, File, History, [depth(1000000)]),
+	reverse(History, LastToFirst),
+	maplist(copy_commit(StoreIn, StoreOut), LastToFirst).
+
+copy_commit(_StoreIn, StoreOut, Commit) :-
+	gitty_hash(StoreOut, Commit.commit), !,
+	put_char(user_error, '+').
+copy_commit(StoreIn, StoreOut, Commit) :-
+	gitty_data(StoreIn, Commit.commit, Data, Meta0),
+	del_keys([commit, symbolic], Meta0, Meta),
+	(   Prev = Meta.get(previous),
+	    gitty_commit(StoreIn, Prev, PrevCommit),
+	    PrevCommit.name == Meta.name
+	->  gitty_update(StoreOut, Meta.name, Data, Meta, _)
+	;   gitty_create(StoreOut, Meta.name, Data, Meta, _)
+	),
+	put_char(user_error, '.').
+
+del_keys([], Dict, Dict).
+del_keys([H|T], Dict0, Dict) :-
+	del_dict(H, Dict0, _, Dict1), !,
+	del_keys(T, Dict1, Dict).
+del_keys([_|T], Dict0, Dict) :-
+	del_keys(T, Dict0, Dict).
+
+%%	gitty_compare_stores(+Store1, +Store2) is semidet.
+%
+%	True if both stores are exactly the same.
+%
+%	@bug	Should (optionally) describe the differences
+
+gitty_compare_stores(Store1, Store2) :-
+	gitty_open(Store1, []),
+	gitty_open(Store2, []),
+	gitty_full_history(Store1, History1),
+	gitty_full_history(Store2, History2),
+	History1 == History2.
+
+gitty_full_history(Store, History) :-
+	setof(File, Hash^gitty_file(Store, File, Hash), Files),
+	maplist(gitty_full_history(Store), Files, History).
+
+gitty_full_history(Store, File, History) :-
+	gitty_history(Store, File, History, [depth(1000000)]).
+
+%%	gitty_fsck(+Store, +Options)
+%
+%	Check integrity of the store.  Requires the following steps:
+%
+%	  - Validate objects by recomputing and comparing their hash
+%	    fix: remove bad objects
+%	  - Validate each commit
+%	    - Does the data exists?
+%	    - Does previous exist?
+%	  - Reconstruct heads
+
+gitty_fsck(Store, Options) :-
+	gitty_open(Store, []),
+	check_objects(Store, Options),
+	load_commits(Store),
+	check_heads(Store, Options),
+	check_commits(Store, Options).
+
+check_objects(Store, Options) :-
+	aggregate_all(count,
+		      ( gitty_hash(Store, Hash),
+			check_object(Store, Hash, Options)
+		      ), Objects),
+	progress(checked_objects(Objects)).
+
+%%	check_object(+Store, +Hash) is det.
+%
+%	Check  the  validity  of  the  object    indicated  by  Hash  by
+%	recomputing the hash from the object   content.  If fix(true) is
+%	specified, bad objects are deleted from the store.
+
+check_object(Store, Hash, _) :-
+	gitty:fsck_object(Store, Hash), !.
+check_object(Store, Hash, Options) :-
+	gripe(bad_object(Store, Hash)),
+	fix(gitty:delete_object(Store, Hash), Options).
+
+%%	load_commits(+Store) is det.
+%
+%	Load all commits into a dynamic predicate
+%
+%	  commit(Store, CommitHash, PrevCommitHash, DataHash)
+
+:- dynamic
+	commit/5.			% Store, Commit, Prev, Name, Data
+
+load_commits(Store) :-
+	clean_commits(Store),
+	(   gitty_hash(Store, Hash),
+	    gitty_commit(Store, Hash, Commit),
+	    (	Prev = Commit.get(previous)
+	    ->	true
+	    ;	Prev = (-)
+	    ),
+	    assertz(commit(Store, Commit.commit, Prev, Commit.name, Commit.data)),
+	    fail
+	;   true
+	).
+
+clean_commits(Store) :-
+	retractall(commit(Store, _, _, _, _)).
+
+%%	check_heads(+Store, +Options)
+%
+%	Verify the head admin.
+
+check_heads(Store, Options) :-
+	forall(head(Store, File, Head),
+	       check_head(Store, File, Head, Options)),
+	forall(gitty_file(Store, File, Head),
+	       check_head_exists(Store, File, Head, Options)).
+
+check_head(Store, File, Head, Options) :-
+	(   gitty_file(Store, File, Head)
+	->  true
+	;   gitty_file(Store, File, WrongHash)
+	->  gripe(head_mismatch(Store, File, Head, WrongHash)),
+	    fix(gitty:set_head(Store, File, Head), Options)
+	;   gripe(lost_head(Store, File, Head)),
+	    fix(gitty:set_head(Store, File, Head), Options)
+	).
+
+check_head_exists(Store, File, Head, Options) :-
+	(   head(Store, File, Head)
+	->  true
+	;   (   option(fix(true), Options)
+	    ->	assertion(\+head(Store, File, _))
+	    ;	true
+	    ),
+	    gripe(lost_file(Store, File)),
+	    fix(gitty:delete_head(Store, File), Options)
+	).
+
+head(Store, File, Head) :-
+	commit(Store, Head, _, File, _),
+	\+ commit(Store, _, Head, _, _).
+
+%%	check_commits(Store, Options)
+%
+%	Check connectivity of all commits.
+
+check_commits(Store, Options) :-
+	forall(gitty_file(Store, _File, Head),
+	       check_commit(Store, Head, Options)).
+
+%%	check_commit(+Store, +Head, +Options) is det.
+%
+%	Validate a commit. First checks the connectivity. If this fails
+%	we have some options:
+%
+%	  - Remove the most recent part of the history until it becomes
+%	    consistent.
+%	  - If data is missing from an older commit, rewrite the
+%	    history.
+
+check_commit(Store, Head, Options) :-
+	(   gitty_commit(Store, Head, Commit)
+	->  (   gitty_hash(Store, Commit.data)
+	    ->	true
+	    ;	gripe(no_data(Commit.data)),
+		fail
+	    ),
+	    (   Prev = Commit.get(previous)
+	    ->  check_commit(Store, Prev, Options)
+	    ;   true
+	    )
+	;   gripe(no_commit(Store, Head)),
+	    fail
+	), !.
+check_commit(_, _, _).
+
+
+:- meta_predicate
+	fix(0, +).
+
+fix(Goal, Options) :-
+	option(fix(true), Options), !,
+	call(Goal).
+fix(_, _).
+
+
+gripe(Term) :-
+	print_message(error, gitty(Term)).
+progress(Term) :-
+	print_message(informational, gitty(Term)).
+
+:- multifile prolog:message//1.
+
+prolog:message(gitty(Term)) -->
+	gitty_message(Term).
+
+gitty_message(no_commit(Store, File, Head)) -->
+	[ '~p: file ~p: missing commit object ~p'-[Store, File, Head] ].
+gitty_message(bad_object(Store, Hash)) -->
+	[ '~p: ~p: corrupt object'-[Store, Hash] ].
+gitty_message(lost_file(Store, File)) -->
+	[ '~p: ~p: lost file'-[Store, File] ].
+gitty_message(lost_head(Store, File, Head)) -->
+	[ '~p: ~p: lost head: ~p'-[Store, File, Head] ].
+gitty_message(head_mismatch(Store, File, Head, WrongHash)) -->
+	[ '~p: ~p: wrong head (~p --> ~p)'-[Store, File, WrongHash, Head] ].
+gitty_message(checked_objects(Count)) -->
+	[ 'Checked ~D objects'-[Count] ].
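
Together these tools give a simple migration recipe: copy a plain-files store into a fresh BDB store, verify the copy, and run the consistency check (gitty_open/2 auto-detects the driver of an existing store, so the later calls need no driver option). A sketch with illustrative store paths and predicate name:

  migrate_to_bdb(FilesStore, BdbStore) :-
      make_directory_path(BdbStore),          % the target directory must exist
      gitty_copy_store(FilesStore, BdbStore, bdb),
      (   gitty_compare_stores(FilesStore, BdbStore)
      ->  format("copy verified: stores are identical~n")
      ;   format("copy differs from the original!~n")
      ),
      gitty_fsck(BdbStore, [fix(false)]).     % report only; use fix(true) to repair
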
diff --git a/lib/swish/highlight.pl b/lib/swish/highlight.pl
index b2e3bd1..690e028 100644
--- a/lib/swish/highlight.pl
+++ b/lib/swish/highlight.pl
@@ -328,12 +328,7 @@ enriched_tokens(TB, _Data, Tokens) :-		% source window
 	xref(UUID),
 	server_tokens(TB, Tokens).
 enriched_tokens(TB, Data, Tokens) :-		% query window
-	(   [SourceIdS|_] = Data.get(sourceID)
-	->  true
-	;   SourceIdS = Data.get(sourceID),
-	    atomic(SourceIdS)
-	), !,
-	atom_string(SourceID, SourceIdS),
+	json_source_id(Data.get(sourceID), SourceID), !,
 	memory_file_to_string(TB, Query),
 	with_mutex(swish_highlight_query,
 		   prolog_colourise_query(Query, SourceID, colour_item(TB))),
@@ -343,6 +338,26 @@ enriched_tokens(TB, _Data, Tokens) :-
 	prolog_colourise_query(Query, module(swish), colour_item(TB)),
 	collect_tokens(TB, Tokens).
 
+%%	json_source_id(+Input, -SourceID)
+%
+%	Translate the Input, which is  either  a   string  or  a list of
+%	strings into an  atom  or  list   of  atoms.  Older  versions of
+%	SWI-Prolog only accept a single atom source id.
+
+:- if(current_predicate(prolog_colour:to_list/2)).
+json_source_id(StringList, SourceIDList) :-
+	is_list(StringList),
+	StringList \== [], !,
+	maplist(atom_string, SourceIDList, StringList).
+:- else.				% old version (=< 7.3.7)
+json_source_id([String|_], SourceID) :-
+	maplist(atom_string, SourceID, String).
+:- endif.
+json_source_id(String, SourceID) :-
+	string(String),
+	atom_string(SourceID, String).
+
+
 %%	shadow_editor(+Data, -MemoryFile) is det.
 %
 %	Get our shadow editor:
diff --git a/lib/swish/logging.pl b/lib/swish/logging.pl
index f3b2706..51e2183 100644
--- a/lib/swish/logging.pl
+++ b/lib/swish/logging.pl
@@ -57,6 +57,11 @@ swish_log(create(Pengine, Application, Options0)) :-
 	format_time(string(HDate), '%+', Now),
 	http_log('/*~s*/ pengine(~3f, ~q).~n',
 		 [HDate, Now, create(Pengine, Application, Options)]).
+swish_log(send(Pengine, Event)) :-
+	get_time(Now),
+	format_time(string(HDate), '%+', Now),
+	http_log('/*~s*/ pengine(~3f, ~q).~n',
+		 [HDate, Now, send(Pengine, Event)]).
 
 :- dynamic
 	text_hash/2.
diff --git a/lib/swish/markdown.pl b/lib/swish/markdown.pl
index 7b745f0..eb39d8e 100644
--- a/lib/swish/markdown.pl
+++ b/lib/swish/markdown.pl
@@ -30,6 +30,7 @@
 :- module(swish_markdown, []).
 :- use_module(library(http/http_dispatch)).
 :- use_module(library(http/http_parameters)).
+:- use_module(library(http/http_client)).
 :- use_module(library(http/html_write)).
 :- use_module(library(http/html_head)).
 :- use_module(library(pldoc/doc_html),
@@ -55,6 +56,7 @@ This module translates markdown cells for teh SWISH Notebook into HTML
 %	document.
 
 markdown(Request) :-
+	option(method(get), Request), !,
         http_parameters(Request,
                         [ text(Data, [optional(true), default('')])
                         ]),
@@ -63,6 +65,13 @@ markdown(Request) :-
         phrase(html(DOM), Tokens),
         format('Content-type: text/html; charset=UTF-8\n\n'),
         print_html(Tokens).
+markdown(Request) :-
+	option(method(post), Request), !,
+	http_read_data(Request, Codes, [to(codes)]),
+	wiki_file_codes_to_dom(Codes, '/', DOM),
+	phrase(html(DOM), Tokens),
+        format('Content-type: text/html; charset=UTF-8\n\n'),
+        print_html(Tokens).
 
 %%      wiki_codes_to_dom(+Codes, +File, -DOM)
 %
diff --git a/lib/swish/page.pl b/lib/swish/page.pl
index 75a3a19..e800d93 100644
--- a/lib/swish/page.pl
+++ b/lib/swish/page.pl
@@ -163,14 +163,14 @@ source_option(_Request, Options0, Options) :-
 	option(code(Code), Options0),
 	option(format(swish), Options0), !,
 	(   uri_is_global(Code)
-	->  Options = [url(Code)|Options0]
+	->  Options = [url(Code),st_type(external)|Options0]
 	;   Options = Options0
 	).
 source_option(Request, Options0, Options) :-
 	source_file(Request, File, Options0), !,
 	option(path(Path), Request),
 	(   source_data(File, String, Options1)
-	->  append([ [code(String), url(Path)],
+	->  append([ [code(String), url(Path), st_type(filesys)],
 		     Options1,
 		     Options0
 		   ], Options)
@@ -421,7 +421,8 @@ source_data_attrs(Options) -->
 	(source_file_data(Options) -> [] ; []),
 	(source_url_data(Options) -> [] ; []),
 	(source_title_data(Options) -> [] ; []),
-	(source_meta_data(Options) -> [] ; []).
+	(source_meta_data(Options) -> [] ; []),
+	(source_st_type_data(Options) -> [] ; []).
 
 source_file_data(Options) -->
 	{ option(file(File), Options) },
@@ -432,6 +433,9 @@ source_url_data(Options) -->
 source_title_data(Options) -->
 	{ option(title(File), Options) },
 	['data-title'(File)].
+source_st_type_data(Options) -->
+	{ option(st_type(Type), Options) },
+	['data-st_type'(Type)].
 source_meta_data(Options) -->
 	{ option(meta(Meta), Options), !,
 	  atom_json_dict(Text, Meta, [])
diff --git a/lib/swish/render/graphviz.pl b/lib/swish/render/graphviz.pl
index 4b77e4d..36f1264 100644
--- a/lib/swish/render/graphviz.pl
+++ b/lib/swish/render/graphviz.pl
@@ -158,7 +158,6 @@ render_dot(DOTString, Program, _Options) -->	% <svg> rendering
      var pan;
 
      function updateSize() {
-       console.log("updateSize");
        var w = svg.closest("div.answer").innerWidth();
 
        function reactive() {
diff --git a/lib/swish/storage.pl b/lib/swish/storage.pl
index 136dd56..da7dcbc 100644
--- a/lib/swish/storage.pl
+++ b/lib/swish/storage.pl
@@ -61,6 +61,17 @@ their own version.
 
 :- http_handler(swish('p/'), web_storage, [ id(web_storage), prefix ]).
 
+:- initialization open_gittystore.
+
+open_gittystore :-
+	setting(directory, Dir),
+	(   exists_directory(Dir)
+	->  true
+	;   make_directory(Dir)
+	),
+	gitty_open(Dir, []).
+
+
 %%	web_storage(+Request) is det.
 %
 %	Restfull HTTP handler to store data on behalf of the client in a
@@ -236,7 +247,8 @@ storage_get(Request, Format) :-
 
 storage_get(swish, Dir, _, FileOrHash, Request) :-
 	gitty_data(Dir, FileOrHash, Code, Meta),
-	swish_reply([code(Code),file(FileOrHash),meta(Meta)], Request).
+	swish_reply([code(Code),file(FileOrHash),st_type(gitty),meta(Meta)],
+		    Request).
 storage_get(raw, Dir, _, FileOrHash, _Request) :-
 	gitty_data(Dir, FileOrHash, Code, Meta),
 	file_mime_type(Meta.name, MIME),