@@ -151,36 +151,30 @@ impl FsStore {
         Ok(())
     }
 
-    pub fn compact(&self, id: &DocumentId, full_doc: &[u8]) -> Result<(), Error> {
+    pub fn compact(
+        &self,
+        id: &DocumentId,
+        full_doc: &[u8],
+        new_heads: Vec<ChangeHash>,
+    ) -> Result<(), Error> {
         let paths = DocIdPaths::from(id);
 
         // Load all the data we have into a doc
         match Chunks::load(&self.root, id) {
             Ok(Some(chunks)) => {
-                let doc = chunks
-                    .to_doc()
-                    .map_err(|e| Error(ErrorKind::LoadDocToCompact(e)))?;
-
                 // Write the snapshot
-                let output_chunk_name = SavedChunkName::new_snapshot(doc.get_heads());
-                let chunk = doc.save();
-                write_chunk(&self.root, &paths, &chunk, output_chunk_name.clone())?;
+                let output_chunk_name = SavedChunkName::new_snapshot(new_heads);
+                write_chunk(&self.root, &paths, full_doc, output_chunk_name.clone())?;
 
                 // Remove all the old data
                 for incremental in chunks.incrementals.keys() {
                     let path = paths.chunk_path(&self.root, incremental);
                     std::fs::remove_file(&path)
                         .map_err(|e| Error(ErrorKind::DeleteChunk(path, e)))?;
                 }
-                let just_wrote = paths.chunk_path(&self.root, &output_chunk_name);
                 for snapshot in chunks.snapshots.keys() {
                     let path = paths.chunk_path(&self.root, snapshot);
 
-                    if path == just_wrote {
-                        tracing::trace!("Somehow trying to delete the same path we just wrote to. Not today Satan");
-                        continue;
-                    }
-
                     std::fs::remove_file(&path)
                         .map_err(|e| Error(ErrorKind::DeleteChunk(path, e)))?;
                 }
@@ -441,21 +435,6 @@ impl Chunks {
             incrementals,
         }))
     }
-
-    fn to_doc(&self) -> Result<automerge::Automerge, automerge::AutomergeError> {
-        let mut bytes = Vec::new();
-        for chunk in self.snapshots.values() {
-            bytes.extend(chunk);
-        }
-        for chunk in self.incrementals.values() {
-            bytes.extend(chunk);
-        }
-
-        automerge::Automerge::load_with_options(
-            &bytes,
-            automerge::LoadOptions::new().on_partial_load(automerge::OnPartialLoad::Ignore),
-        )
-    }
 }
 
 mod error {
@@ -499,8 +478,6 @@ mod error {
         ErrReadingChunkFile(PathBuf, std::io::Error),
         #[error("error creating level 2 path {0}: {1}")]
         CreateLevel2Path(PathBuf, std::io::Error),
-        #[error("error loading doc to compact: {0}")]
-        LoadDocToCompact(automerge::AutomergeError),
         #[error("error creating temp file: {0}")]
         CreateTempFile(std::io::Error),
         #[error("error writing temp file {0}: {1}")]
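
Taken together, these hunks move document loading out of `FsStore::compact`: rather than reassembling every on-disk chunk into an `automerge::Automerge` (the deleted `Chunks::to_doc`) only to serialize it again, the caller now supplies the already-serialized document bytes plus the heads the snapshot should be named after. Below is a minimal caller-side sketch of that flow; the import paths, function name, and error handling are illustrative assumptions, not code from this commit.

```rust
// Sketch only: exact module paths and error plumbing are assumed, not taken
// from this diff. It shows the data flow implied by the new signature:
// the caller serializes the document and passes its heads to `compact`.
use automerge::Automerge;
use automerge_repo::{fs_store::FsStore, DocumentId};

fn compact_on_disk(
    store: &FsStore,
    id: &DocumentId,
    doc: &Automerge,
) -> Result<(), Box<dyn std::error::Error>> {
    let full_doc = doc.save();       // full, compacted byte representation
    let new_heads = doc.get_heads(); // heads the snapshot chunk is named after
    store.compact(id, &full_doc, new_heads)?;
    Ok(())
}
```

With `doc.save()` supplying `full_doc` and `doc.get_heads()` supplying `new_heads`, the store no longer needs to reload chunks or surface `LoadDocToCompact`, which is why that error variant is removed in the last hunk.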