[Bps-public-commit] r11769 - in Prophet/trunk: . lib/Prophet/Replica lib/Prophet/Resolver t

jesse at bestpractical.com
Thu Apr 17 18:25:41 EDT 2008


Author: jesse
Date: Thu Apr 17 18:25:40 2008
New Revision: 11769

Modified:
   Prophet/trunk/   (props changed)
   Prophet/trunk/MANIFEST
   Prophet/trunk/lib/Prophet/App.pm
   Prophet/trunk/lib/Prophet/CLI.pm
   Prophet/trunk/lib/Prophet/Replica.pm
   Prophet/trunk/lib/Prophet/Replica/Native.pm
   Prophet/trunk/lib/Prophet/Replica/SVN.pm
   Prophet/trunk/lib/Prophet/ReplicaExporter.pm
   Prophet/trunk/lib/Prophet/Resolver/AlwaysTarget.pm
   Prophet/trunk/lib/Prophet/Test.pm
   Prophet/trunk/t/export.t
   Prophet/trunk/t/simple-conflicting-merge.t

Log:
 r29855 at 31b:  jesse | 2008-04-17 19:41:54 +0100
 * Native replicas now record resolutions, we think
 * made it possible to run all tests with the prophet replica type
 
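A note on the second log item: the replica backend is now chosen from the
PROPHET_REPLICA_TYPE environment variable (see the App.pm and CLI.pm hunks
below), so pointing it at 'prophet' before the suite runs is enough to
exercise the tests against native replicas. A minimal sketch, assuming a
throwaway PROPHET_REPO path:

    use Prophet::CLI;

    $ENV{'PROPHET_REPO'}         = '/tmp/demo-prophet-repo';    # hypothetical location
    $ENV{'PROPHET_REPLICA_TYPE'} = 'prophet';                    # 'svn' remains the default

    my $cli = Prophet::CLI->new();
    # the handle is built lazily from "$type:file://$root", so this should now
    # be a native (prophet-format) replica rather than an SVN one
    print ref( $cli->app_handle->handle ), "\n";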


Modified: Prophet/trunk/MANIFEST
==============================================================================
--- Prophet/trunk/MANIFEST	(original)
+++ Prophet/trunk/MANIFEST	Thu Apr 17 18:25:40 2008
@@ -1,14 +1,8 @@
+.prove
+.proverc
 bin/generalized_sync_n_merge.t
 bin/prophet
-bin/prophet-merge
-bin/prophet-record-create
-bin/prophet-record-delete
-bin/prophet-record-history
-bin/prophet-record-search
-bin/prophet-record-show
-bin/prophet-record-update
 bin/run_test_yml.pl
-bin/sd
 bin/taste_recipe
 doc/glossary
 doc/jesse_test_env_setup
@@ -24,6 +18,7 @@
 inc/Module/Install/Win32.pm
 inc/Module/Install/WriteAll.pm
 lib/Prophet.pm
+lib/Prophet/App.pm
 lib/Prophet/Change.pm
 lib/Prophet/ChangeSet.pm
 lib/Prophet/CLI.pm
@@ -32,20 +27,14 @@
 lib/Prophet/ConflictingChange.pm
 lib/Prophet/ConflictingPropChange.pm
 lib/Prophet/ForeignReplica.pm
-lib/Prophet/Handle.pm
-lib/Prophet/Handle/SVN.pm
-lib/Prophet/HistoryEntry.pm
 lib/Prophet/PropChange.pm
 lib/Prophet/Record.pm
 lib/Prophet/Replica.pm
-lib/Prophet/Replica/Hiveminder.pm
-lib/Prophet/Replica/Hiveminder/PullEncoder.pm
-lib/Prophet/Replica/HTTP.pm
-lib/Prophet/Replica/RT.pm
-lib/Prophet/Replica/RT/PullEncoder.pm
+lib/Prophet/Replica/Native.pm
 lib/Prophet/Replica/SVN.pm
 lib/Prophet/Replica/SVN/ReplayEditor.pm
 lib/Prophet/Replica/SVN/Util.pm
+lib/Prophet/ReplicaExporter.pm
 lib/Prophet/Resolver.pm
 lib/Prophet/Resolver/AlwaysSource.pm
 lib/Prophet/Resolver/AlwaysTarget.pm
@@ -53,6 +42,7 @@
 lib/Prophet/Resolver/FromResolutionDB.pm
 lib/Prophet/Resolver/IdenticalChanges.pm
 lib/Prophet/Resolver/Prompt.pm
+lib/Prophet/Server/REST.pm
 lib/Prophet/Test.pm
 lib/Prophet/Test/Arena.pm
 lib/Prophet/Test/Participant.pm
@@ -74,9 +64,7 @@
 t/non-conflicting-merge.t
 t/real-conflicting-merge.t
 t/res-conflict-3.t
-t/sd-hm.t
-t/sd-rt-hm.t
-t/sd-rt.t
+t/resty-server.t
 t/simple-conflicting-merge.t
 t/simple-push.t
 t/sync-delete-conflict.t
@@ -85,3 +73,78 @@
 t/use.t
 t/validate.t
 t/validation.t
+talks/2008/forkey
+talks/2008/osdc.tw.key/bg1-1.jpg
+talks/2008/osdc.tw.key/bps_logo_white_on_black.png
+talks/2008/osdc.tw.key/color-profile
+talks/2008/osdc.tw.key/Contents/PkgInfo
+talks/2008/osdc.tw.key/index.apxl.gz
+talks/2008/osdc.tw.key/QuickLook/Thumbnail.jpg
+talks/2008/osdc.tw.key/thumbs/mt0-0-1.tiff
+talks/2008/osdc.tw.key/thumbs/mt0-19.tiff
+talks/2008/osdc.tw.key/thumbs/mt0-20.tiff
+talks/2008/osdc.tw.key/thumbs/mt0-21.tiff
+talks/2008/osdc.tw.key/thumbs/mt0-22.tiff
+talks/2008/osdc.tw.key/thumbs/mt0-23.tiff
+talks/2008/osdc.tw.key/thumbs/mt0-24.tiff
+talks/2008/osdc.tw.key/thumbs/mt0-25.tiff
+talks/2008/osdc.tw.key/thumbs/mt0-26.tiff
+talks/2008/osdc.tw.key/thumbs/mt0-27.tiff
+talks/2008/osdc.tw.key/thumbs/st0-3.tiff
+talks/2008/osdc.tw.key/thumbs/st1-1.tiff
+talks/2008/osdc.tw.key/thumbs/st10-1.tiff
+talks/2008/osdc.tw.key/thumbs/st11.tiff
+talks/2008/osdc.tw.key/thumbs/st12.tiff
+talks/2008/osdc.tw.key/thumbs/st13.tiff
+talks/2008/osdc.tw.key/thumbs/st14.tiff
+talks/2008/osdc.tw.key/thumbs/st15.tiff
+talks/2008/osdc.tw.key/thumbs/st16.tiff
+talks/2008/osdc.tw.key/thumbs/st17.tiff
+talks/2008/osdc.tw.key/thumbs/st18.tiff
+talks/2008/osdc.tw.key/thumbs/st19.tiff
+talks/2008/osdc.tw.key/thumbs/st2-1.tiff
+talks/2008/osdc.tw.key/thumbs/st20.tiff
+talks/2008/osdc.tw.key/thumbs/st21.tiff
+talks/2008/osdc.tw.key/thumbs/st22.tiff
+talks/2008/osdc.tw.key/thumbs/st23.tiff
+talks/2008/osdc.tw.key/thumbs/st24.tiff
+talks/2008/osdc.tw.key/thumbs/st25.tiff
+talks/2008/osdc.tw.key/thumbs/st26.tiff
+talks/2008/osdc.tw.key/thumbs/st27.tiff
+talks/2008/osdc.tw.key/thumbs/st28.tiff
+talks/2008/osdc.tw.key/thumbs/st29.tiff
+talks/2008/osdc.tw.key/thumbs/st3-1.tiff
+talks/2008/osdc.tw.key/thumbs/st30.tiff
+talks/2008/osdc.tw.key/thumbs/st31.tiff
+talks/2008/osdc.tw.key/thumbs/st32.tiff
+talks/2008/osdc.tw.key/thumbs/st33.tiff
+talks/2008/osdc.tw.key/thumbs/st34.tiff
+talks/2008/osdc.tw.key/thumbs/st35-1.tiff
+talks/2008/osdc.tw.key/thumbs/st35.tiff
+talks/2008/osdc.tw.key/thumbs/st36-1.tiff
+talks/2008/osdc.tw.key/thumbs/st36.tiff
+talks/2008/osdc.tw.key/thumbs/st37-1.tiff
+talks/2008/osdc.tw.key/thumbs/st37.tiff
+talks/2008/osdc.tw.key/thumbs/st38-1.tiff
+talks/2008/osdc.tw.key/thumbs/st38.tiff
+talks/2008/osdc.tw.key/thumbs/st39-1.tiff
+talks/2008/osdc.tw.key/thumbs/st39.tiff
+talks/2008/osdc.tw.key/thumbs/st4-1.tiff
+talks/2008/osdc.tw.key/thumbs/st40-1.tiff
+talks/2008/osdc.tw.key/thumbs/st40.tiff
+talks/2008/osdc.tw.key/thumbs/st41-1.tiff
+talks/2008/osdc.tw.key/thumbs/st41.tiff
+talks/2008/osdc.tw.key/thumbs/st42-1.tiff
+talks/2008/osdc.tw.key/thumbs/st42.tiff
+talks/2008/osdc.tw.key/thumbs/st43.tiff
+talks/2008/osdc.tw.key/thumbs/st44.tiff
+talks/2008/osdc.tw.key/thumbs/st47.tiff
+talks/2008/osdc.tw.key/thumbs/st5-1.tiff
+talks/2008/osdc.tw.key/thumbs/st51.tiff
+talks/2008/osdc.tw.key/thumbs/st55.tiff
+talks/2008/osdc.tw.key/thumbs/st6-1.tiff
+talks/2008/osdc.tw.key/thumbs/st7-1.tiff
+talks/2008/osdc.tw.key/thumbs/st8-1.tiff
+talks/2008/osdc.tw.key/thumbs/st9-1.tiff
+talks/2008/osdc.tw.txt
+todo

Modified: Prophet/trunk/lib/Prophet/App.pm
==============================================================================
--- Prophet/trunk/lib/Prophet/App.pm	(original)
+++ Prophet/trunk/lib/Prophet/App.pm	Thu Apr 17 18:25:40 2008
@@ -31,7 +31,8 @@
     my $self = shift;
     unless ( $self->_handle() ) {
         my $root = $ENV{'PROPHET_REPO'} || dir( $ENV{'HOME'}, '.prophet' );
-        $self->_handle( Prophet::Replica->new( { url => 'svn:file://' . $root } ) );
+        my $type = $ENV{'PROPHET_REPLICA_TYPE'} || 'svn';
+        $self->_handle( Prophet::Replica->new( { url => $type.':file://' . $root } ) );
     }
     return $self->_handle();
 }
@@ -44,7 +45,8 @@
     my $self = shift;
     unless ( $self->_resdb_handle ) {
         my $root = ( $ENV{'PROPHET_REPO'} || dir( $ENV{'HOME'}, '.prophet' ) ) . "_res";
-        $self->_resdb_handle( Prophet::Replica->new( { url => 'svn:file://' . $root } ) );
+        my $type = $ENV{'PROPHET_REPLICA_TYPE'} || 'svn';
+        $self->_resdb_handle( Prophet::Replica->new( { url => $type.':file://' . $root } ) );
     }
     return $self->_resdb_handle();
 }

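Pulled together from the two hunks above, the handles App.pm now builds look
roughly like this (a sketch; $type falls back to 'svn', and the companion
resolution database lives next to the main replica with a "_res" suffix):

    use Path::Class;
    use Prophet::Replica;

    my $type = $ENV{'PROPHET_REPLICA_TYPE'} || 'svn';
    my $root = $ENV{'PROPHET_REPO'} || dir( $ENV{'HOME'}, '.prophet' );

    # main replica handle, e.g. "prophet:file:///home/jesse/.prophet"
    my $handle = Prophet::Replica->new( { url => $type . ':file://' . $root } );

    # resolution-database handle, stored alongside as ".prophet_res"
    my $resdb = Prophet::Replica->new( { url => $type . ':file://' . $root . '_res' } );
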
Modified: Prophet/trunk/lib/Prophet/CLI.pm
==============================================================================
--- Prophet/trunk/lib/Prophet/CLI.pm	(original)
+++ Prophet/trunk/lib/Prophet/CLI.pm	Thu Apr 17 18:25:40 2008
@@ -29,8 +29,6 @@
     return $self;
 }
 
-
-
 =head2 _record_cmd
 
 handles the subcommand for a particular type
@@ -236,7 +234,8 @@
 
 sub do_push {
     my $self         = shift;
-    my $source_me    = Prophet::Replica->new( { url => "svn:file://" . $self->app_handle->handle->repo_path } );
+    my $replica_type = $ENV{'PROPHET_REPLICA_TYPE'} || 'svn';
+    my $source_me    = Prophet::Replica->new( { url => $replica_type. ":file://" . $self->app_handle->handle->fs_root } );
     my $other        = shift @ARGV;
     my $source_other = Prophet::Replica->new( { url => $other } );
     my $resdb        = $source_me->import_resolutions_from_remote_replica( from => $source_other );
@@ -246,14 +245,16 @@
 
 sub do_export {
     my $self      = shift;
-    my $source_me = Prophet::Replica->new( { url => "svn:file://" . $self->app_handle->handle->repo_path } );
+    my $replica_type = $ENV{'PROPHET_REPLICA_TYPE'} || 'svn';
+    my $source_me = Prophet::Replica->new( { url => $replica_type.":file://" . $self->app_handle->handle->fs_root } );
     my $path      = $self->args->{'path'};
     $source_me->export_to( path => $path );
 }
 
 sub do_pull {
     my $self         = shift;
-    my $source_me    = Prophet::Replica->new( { url => "svn:file://" . $self->app_handle->handle->repo_path } );
+    my $replica_type = $ENV{'PROPHET_REPLICA_TYPE'} || 'svn';
+    my $source_me    = Prophet::Replica->new( { url => $replica_type. ":file://" . $self->app_handle->handle->fs_root } );
     my $other        = shift @ARGV;
     my $source_other = Prophet::Replica->new( { url => $other } );
     my $resdb        = $source_me->import_resolutions_from_remote_replica( from => $source_other );

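The shared pattern in do_push, do_pull and do_export is now: build the local
replica from the handle's fs_root with the scheme chosen by
PROPHET_REPLICA_TYPE, then hand it off. A sketch of the export half, assuming
an existing Prophet::CLI object $cli and a hypothetical output path:

    my $type   = $ENV{'PROPHET_REPLICA_TYPE'} || 'svn';
    my $source = Prophet::Replica->new(
        { url => $type . ':file://' . $cli->app_handle->handle->fs_root } );
    $source->export_to( path => '/tmp/replica-export' );
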
Modified: Prophet/trunk/lib/Prophet/Replica.pm
==============================================================================
--- Prophet/trunk/lib/Prophet/Replica.pm	(original)
+++ Prophet/trunk/lib/Prophet/Replica.pm	Thu Apr 17 18:25:40 2008
@@ -226,7 +226,6 @@
     my $self      = shift;
     my $changeset = shift;
 
-    Carp::cluck;
     $self->begin_edit;
     $self->record_changes($changeset);
 

Modified: Prophet/trunk/lib/Prophet/Replica/Native.pm
==============================================================================
--- Prophet/trunk/lib/Prophet/Replica/Native.pm	(original)
+++ Prophet/trunk/lib/Prophet/Replica/Native.pm	Thu Apr 17 18:25:40 2008
@@ -14,13 +14,16 @@
 use File::Find::Rule;
 
 __PACKAGE__->mk_accessors(qw/url db_uuid _uuid/);
-__PACKAGE__->mk_accessors(qw(fs_root_parent fs_root target_replica cas_root record_cas_dir changeset_cas_dir record_dir));
-
-use constant scheme => 'prophet';
-use constant cas_root => dir('cas');
-use constant record_cas_dir => dir(__PACKAGE__->cas_root => 'records');
-use constant changeset_cas_dir => dir(__PACKAGE__->cas_root => 'changesets');
-use constant record_dir => dir('records');
+__PACKAGE__->mk_accessors(
+    qw(fs_root_parent fs_root target_replica cas_root record_cas_dir changeset_cas_dir record_dir current_edit)
+);
+
+use constant scheme            => 'prophet';
+use constant cas_root          => dir('cas');
+use constant record_cas_dir    => dir( __PACKAGE__->cas_root => 'records' );
+use constant changeset_cas_dir => dir( __PACKAGE__->cas_root => 'changesets' );
+use constant record_dir        => dir('records');
+use constant changeset_index   => 'changesets.idx';
 
 =head2 setup
 
@@ -31,69 +34,98 @@
 sub setup {
     my $self = shift;
 
-    $self->{url} =~ s/^prophet://;    # url-based constructor in ::replica should do better
+    $self->{url}
+        =~ s/^prophet://;  # url-based constructor in ::replica should do better
     $self->{url} =~ s{/$}{};
-    my ($db_uuid) =
-    $self->db_uuid( $self->url =~ m{^.*/(.*?)$});
-    $self->fs_root( $self->url =~ m{^file://(.*)$});
-    $self->fs_root_parent( $self->url =~ m{^file://(.*)/.*?$});
+    my ($db_uuid) = $self->db_uuid( $self->url =~ m{^.*/(.*?)$} );
+    $self->fs_root( $self->url        =~ m{^file://(.*)$} );
+    $self->fs_root_parent( $self->url =~ m{^file://(.*)/.*?$} );
     $self->_probe_or_create_db();
     unless ( $self->is_resdb ) {
-      #        $self->resolution_db_handle( __PACKAGE__->new( { url => $self->{url}.'/resolutions', is_resdb => 1 } ) );
+
+        $self->resolution_db_handle( Prophet::Replica->new( { url => "prophet:".$self->{url}.'/resolutions', is_resdb => 1 } ) );
     }
 }
 
 sub _probe_or_create_db {
     my $self = shift;
-    
-    return if $self->_read_file( 'replica-version' );
 
-    if ($self->fs_root_parent) {
-            # We have a filesystem based replica. we can perform a create
-            $self->initialize();
+    return if $self->_read_file('replica-version');
+
+    if ( $self->fs_root_parent ) {
+
+        # We have a filesystem based replica. we can perform a create
+        $self->initialize();
 
     } else {
-        die "We can only create file: based prophet replicas. It looks like you're trying to create ". $self->url;
+        die
+            "We can only create file: based prophet replicas. It looks like you're trying to create "
+            . $self->url;
 
     }
 
-
 }
 
-
-use constant can_read_records => 1;
+use constant can_read_records    => 1;
 use constant can_read_changesets => 1;
-sub can_write_changesets { return (shift->fs_root ? 1 : 0) }
-sub can_write_records { return (shift->fs_root ? 1 : 0) }
-
+sub can_write_changesets { return ( shift->fs_root ? 1 : 0 ) }
+sub can_write_records    { return ( shift->fs_root ? 1 : 0 ) }
 
 sub initialize {
     my $self = shift;
     my %args = validate( @_, { db_uuid => 0 } );
 
-    _mkdir($self->fs_root_parent);
-    _mkdir(dir($self->fs_root, $_)) for ('', $self->record_dir, $self->cas_root );
+    _mkdir( $self->fs_root_parent );
+    _mkdir( dir( $self->fs_root, $_ ) )
+        for ( '', $self->record_dir, $self->cas_root );
     $self->make_tiered_dirs( $self->record_cas_dir );
     $self->make_tiered_dirs( $self->changeset_cas_dir );
 
-    $self->set_latest_sequence_no("1");
+    $self->set_latest_sequence_no("0");
     $self->set_replica_uuid( Data::UUID->new->create_str );
-    $self->_write_file( path => file( $self->fs_root, 'replica-version' ), content => '1' );
+    $self->_write_file(
+        path    => file( $self->fs_root, 'replica-version' ),
+        content => '1'
+    );
+    for(1..2) { # XXXX HORRIBLE HACK TO WORK AROUND THE FACT THAT SVN RECORDS EMPTY CHANGESETS
+    $self->begin_edit;
+    $self->commit_edit;
+    }
 }
 
 sub set_replica_uuid {
     my $self = shift;
     my $uuid = shift;
-    $self->_write_file( path => file( $self->fs_root, 'replica-uuid' ), content => $uuid );
+    $self->_write_file(
+        path    => file( $self->fs_root, 'replica-uuid' ),
+        content => $uuid
+    );
 
 }
 
+sub latest_sequence_no {
+    my $self = shift;
+    $self->_read_file('/latest-sequence-no');
+}
+
 sub set_latest_sequence_no {
     my $self = shift;
     my $id   = shift;
-    $self->_write_file( path => file( $self->fs_root, 'latest-sequence-no' ), content => scalar($id) );
+    $self->_write_file(
+        path    => file( $self->fs_root, 'latest-sequence-no' ),
+        content => scalar($id)
+    );
+}
+
+sub _increment_sequence_no {
+    my $self = shift;
+    my $seq = $self->latest_sequence_no +1;
+    $self->set_latest_sequence_no($seq);
+    return $seq;
 }
 
+
+
 =head2 uuid
 
 Return the replica SVN repository's UUID
@@ -107,88 +139,122 @@
     return $self->_uuid;
 }
 
+=head1 Internals of record handling
+
+=cut
+
 sub _write_record {
     my $self = shift;
     my %args = validate( @_, { record => { isa => 'Prophet::Record' }, } );
-    $self->_write_serialized_record( type => $args{'record'}->type, uuid => $args{'record'}->uuid, props => $args{'record'}->get_props);
+    $self->_write_serialized_record(
+        type  => $args{'record'}->type,
+        uuid  => $args{'record'}->uuid,
+        props => $args{'record'}->get_props
+    );
 }
 
 sub _write_serialized_record {
     my $self = shift;
-    my %args = validate( @_, { type => 1, uuid => 1, props =>1});
+    my %args = validate( @_, { type => 1, uuid => 1, props => 1 } );
 
-    my $record_root = dir( $self->_record_type_root($args{'type'}));
-    $self->make_tiered_dirs($record_root) unless -d dir($self->fs_root, $record_root);
+    my $record_root = dir( $self->_record_type_root( $args{'type'} ) );
+    $self->make_tiered_dirs($record_root)
+        unless -d dir( $self->fs_root, $record_root );
 
-    my $content = YAML::Syck::Dump( $args{'props'});
+    my $content = YAML::Syck::Dump( $args{'props'} );
     my ($cas_key) = $self->_write_to_cas(
         content_ref => \$content,
-        cas_dir     => $self->record_cas_dir);
-    $self->_write_record_index_entry( uuid => $args{uuid}, type => $args{type}, cas_key => $cas_key);
+        cas_dir     => $self->record_cas_dir
+    );
+    $self->_write_record_index_entry(
+        uuid    => $args{uuid},
+        type    => $args{type},
+        cas_key => $cas_key
+    );
 }
 
-
 sub _write_record_index_entry {
-    my $self = shift;
-    my %args = validate( @_, { type => 1, uuid => 1, cas_key =>1});
-    my $idx_filename = $self->_record_index_filename(uuid =>$args{uuid}, type => $args{type});
+    my $self         = shift;
+    my %args         = validate( @_, { type => 1, uuid => 1, cas_key => 1 } );
+    my $idx_filename = $self->_record_index_filename(
+        uuid => $args{uuid},
+        type => $args{type}
+    );
 
-    open( my $record_index, ">>", file($self->fs_root, $idx_filename) ) || die $!;
+    open( my $record_index, ">>", file( $self->fs_root, $idx_filename ) )
+        || die $!;
 
     # XXX TODO: skip if the index already has this version of the record;
     # XXX TODO FETCH THAT
     my $record_last_changed_changeset = 1;
 
-    my $index_row = pack( 'NH40', $record_last_changed_changeset, $args{cas_key} );
+    my $index_row
+        = pack( 'NH40', $record_last_changed_changeset, $args{cas_key} );
     print $record_index $index_row || die $!;
     close $record_index;
 }
 
-
 sub _delete_record_index {
-    my $self = shift;
-    my %args = validate( @_, { type => 1, uuid => 1});
-    my $idx_filename = $self->_record_index_filename(uuid =>$args{uuid}, type => $args{type});
-    unlink(dir($self->fs_root => $idx_filename)) || die "Could not delete record $idx_filename: ".$!;
+    my $self         = shift;
+    my %args         = validate( @_, { type => 1, uuid => 1 } );
+    my $idx_filename = $self->_record_index_filename(
+        uuid => $args{uuid},
+        type => $args{type}
+    );
+    unlink( dir( $self->fs_root => $idx_filename ) )
+        || die "Could not delete record $idx_filename: " . $!;
 }
+use constant RECORD_INDEX_SIZE => ( 4 + 20 );
 
-use constant RECORD_INDEX_SIZE => (4+ 20);
 sub _read_serialized_record {
-    my $self = shift;
-    my %args = validate( @_, { type => 1, uuid => 1} ) ;
-    my $idx_filename = $self->_record_index_filename(uuid =>$args{uuid}, type => $args{type});
-    return undef unless -f  file($self->fs_root, $idx_filename);
+    my $self         = shift;
+    my %args         = validate( @_, { type => 1, uuid => 1 } );
+    my $idx_filename = $self->_record_index_filename(
+        uuid => $args{uuid},
+        type => $args{type}
+    );
+    return undef unless -f file( $self->fs_root, $idx_filename );
     my $index = $self->_read_file($idx_filename);
-    
-    # XXX TODO THIS CODE IS FUCKING HACKY AND SHOULD BE SHOT; 
-    my $count = length($index) / RECORD_INDEX_SIZE;
 
-        my ( $seq,$key ) = unpack( 'NH40', substr( $index, ( $count - 1 ) * RECORD_INDEX_SIZE, RECORD_INDEX_SIZE ) );
-        # XXX: deserialize the changeset content from the cas with $key
-        my $casfile = file ($self->record_cas_dir, substr( $key, 0, 1 ), substr( $key, 1, 1 ) , $key);
-        # That's the props
-        return YAML::Syck::Load($self->_read_file($casfile));
-}
+    # XXX TODO THIS CODE IS FUCKING HACKY AND SHOULD BE SHOT;
+    my $count = length($index) / RECORD_INDEX_SIZE;
 
+    my ( $seq, $key ) = unpack( 'NH40',
+        substr( $index, ( $count - 1 ) * RECORD_INDEX_SIZE, RECORD_INDEX_SIZE )
+    );
 
+    # XXX: deserialize the changeset content from the cas with $key
+    my $casfile = file(
+        $self->record_cas_dir,
+        substr( $key, 0, 1 ),
+        substr( $key, 1, 1 ), $key
+    );
 
+    # That's the props
+    return YAML::Syck::Load( $self->_read_file($casfile) );
+}
 
 sub _record_index_filename {
     my $self = shift;
-    my %args = validate(@_,{ uuid =>1 ,type => 1});
-    return file( $self->_record_type_root($args{'type'}) , substr( $args{uuid}, 0, 1 ), substr( $args{uuid}, 1, 1 ), $args{uuid});
+    my %args = validate( @_, { uuid => 1, type => 1 } );
+    return file(
+        $self->_record_type_root( $args{'type'} ),
+        substr( $args{uuid}, 0, 1 ),
+        substr( $args{uuid}, 1, 1 ),
+        $args{uuid}
+    );
 }
 
 sub _record_type_root {
     my $self = shift;
-    my $type = shift; 
-    return dir($self->record_dir, $type);
+    my $type = shift;
+    return dir( $self->record_dir, $type );
 }
 
-
 sub _write_changeset {
     my $self = shift;
-    my %args = validate( @_, { index_handle => 1, changeset => { isa => 'Prophet::ChangeSet' } } );
+    my %args = validate( @_,
+        { index_handle => 1, changeset => { isa => 'Prophet::ChangeSet' } } );
 
     my $changeset = $args{'changeset'};
     my $fh        = $args{'index_handle'};
@@ -196,7 +262,10 @@
     my $hash_changeset = $changeset->as_hash;
 
     my $content = YAML::Syck::Dump($hash_changeset);
-    my $cas_key = $self->_write_to_cas( content_ref => \$content, cas_dir => $self->changeset_cas_dir );
+    my $cas_key = $self->_write_to_cas(
+        content_ref => \$content,
+        cas_dir     => $self->changeset_cas_dir
+    );
 
     # XXX TODO we should only actually be encoding the sha1 of content once
     # and then converting. this is wasteful
@@ -220,7 +289,7 @@
 =cut
 
 # each record is : local-replica-seq-no : original-uuid : original-seq-no : cas key
-#                       4                    16              4                 20
+#                  4                    16              4                 20
 
 use constant CHG_RECORD_SIZE => ( 4 + 16 + 4 + 20 );
 
@@ -233,16 +302,19 @@
         }
     );
 
-    my $first_rev = ( $args{'after'}+1) || 1;
+    my $first_rev = ( $args{'after'} + 1 ) || 1;
     my $latest    = $self->latest_sequence_no();
-    my $chgidx    = $self->_read_file('/changesets.idx');
-
+    my $chgidx    = $self->_read_file( $self->changeset_index );
+   
     for my $rev ( $first_rev .. $latest ) {
-        my ( $seq, $orig_uuid, $orig_seq, $key )
-            = unpack( 'Na16NH40', substr( $chgidx, ( $rev - 1 ) * CHG_RECORD_SIZE, CHG_RECORD_SIZE ) );
+        my $index_record =  substr( $chgidx, ( $rev - 1 ) * CHG_RECORD_SIZE, CHG_RECORD_SIZE );
+        my ( $seq, $orig_uuid, $orig_seq, $key ) = unpack( 'Na16NH40', $index_record);
+
         $orig_uuid = Data::UUID->new->to_string($orig_uuid);
+    
         # XXX: deserialize the changeset content from the cas with $key
-        my $casfile = file ($self->changeset_cas_dir, substr( $key, 0, 1 ), substr( $key, 1, 1 ) , $key);
+        my $casfile = file( $self->changeset_cas_dir, substr( $key, 0, 1 ), substr( $key, 1, 1 ), $key);
+
         my $changeset = $self->_deserialize_changeset(
             content              => $self->_read_file($casfile),
             original_source_uuid => $orig_uuid,
@@ -254,18 +326,19 @@
 }
 
 
-
-
-sub latest_sequence_no {
-    my $self = shift;
-    $self->_read_file('/latest-sequence-no');
-}
-
 sub _deserialize_changeset {
     my $self = shift;
-    my %args = validate( @_, { content => 1, original_sequence_no => 1, original_source_uuid => 1, sequence_no => 1 } );
+    my %args = validate(
+        @_,
+        {   content              => 1,
+            original_sequence_no => 1,
+            original_source_uuid => 1,
+            sequence_no          => 1
+        }
+    );
     my $content_struct = YAML::Syck::Load( $args{content} );
     my $changeset      = Prophet::ChangeSet->new_from_hashref($content_struct);
+
     # Don't need to do this, since we clobber them below
     #delete $hash_changeset->{'sequence_no'};
     #delete $hash_changeset->{'source_uuid'};
@@ -276,10 +349,18 @@
     return $changeset;
 }
 
+sub _get_changeset_index_handle {
+    my $self = shift;
+
+    open( my $cs_file, ">>" . file( $self->fs_root, $self->changeset_index ) )
+        || die $!;
+    return $cs_file;
+}
+
 sub _mkdir {
     my $path = shift;
     unless ( -d $path ) {
-        mkdir($path) || die "Failed to create directory $path: " .$!;
+        mkdir($path) || die "Failed to create directory $path: " . $!;
     }
     unless ( -w $path ) {
         die "$path not writable";
@@ -290,23 +371,26 @@
 sub make_tiered_dirs {
     my $self = shift;
     my $base = shift;
-    _mkdir( dir($self->fs_root, $base) );
+    _mkdir( dir( $self->fs_root, $base ) );
     for my $a ( 0 .. 9, 'a' .. 'f' ) {
         _mkdir( dir( $self->fs_root, $base => $a ) );
         for my $b ( 0 .. 9, 'a' .. 'f' ) {
-            _mkdir( dir($self->fs_root,  $base => $a => $b ) );
+            _mkdir( dir( $self->fs_root, $base => $a => $b ) );
         }
     }
 
 }
 
 sub _write_to_cas {
-    my $self        = shift;
-    my %args        = validate( @_, { content_ref => 1, cas_dir => 1 } );
-    my $content     = ${ $args{'content_ref'} };
-    my $fingerprint = sha1_hex($content);
-    my $content_filename
-        = file( $self->fs_root, $args{'cas_dir'}, substr( $fingerprint, 0, 1 ), substr( $fingerprint, 1, 1 ), $fingerprint );
+    my $self             = shift;
+    my %args             = validate( @_, { content_ref => 1, cas_dir => 1 } );
+    my $content          = ${ $args{'content_ref'} };
+    my $fingerprint      = sha1_hex($content);
+    my $content_filename = file(
+        $self->fs_root, $args{'cas_dir'},
+        substr( $fingerprint, 0, 1 ),
+        substr( $fingerprint, 1, 1 ), $fingerprint
+    );
 
     $self->_write_file( path => $content_filename, content => $content );
     return $fingerprint;
@@ -316,85 +400,193 @@
     my $self = shift;
     my %args = validate( @_, { path => 1, content => 1 } );
     open( my $file, ">", $args{'path'} ) || die $!;
-    print $file $args{'content'} || die "Could not write to " . $args{'path'} . " " . $!;
+    print $file scalar($args{'content'}); # can't do "||" as we die if we print 0" || die "Could not write to " . $args{'path'} . " " . $!;
     close $file || die $!;
 }
 
 sub _file_exists {
     my $self = shift;
     my ($file) = validate_pos( @_, 1 );
+
     # XXX TODO OPTIMIZE
     return $self->_read_file($file) ? 1 : 0;
 }
+
 sub _read_file {
     my $self = shift;
     my ($file) = validate_pos( @_, 1 );
-    LWP::Simple::get( $self->url ."/". $file );
+    LWP::Simple::get( $self->url . "/" . $file );
 }
 
-sub state_handle { return shift }  #XXX TODO better way to handle this?
-sub record_changeset_integration {
-    my ($self, $changeset) = validate_pos( @_, 1, { isa => 'Prophet::ChangeSet' } );
+sub state_handle { return shift }    #XXX TODO better way to handle this?
 
-    $self->_set_original_source_metadata($changeset);
-    return $self->SUPER::record_changeset_integration($changeset);
-}
 sub begin_edit {
+    my $self = shift;
+    $self->current_edit( Prophet::ChangeSet->new({ original_source_uuid => $self->uuid, source_uuid => $self->uuid }) );
 }
+
 sub commit_edit {
+    my $self = shift;
+    my $sequence = $self->_increment_sequence_no;
+    $self->current_edit->original_sequence_no($sequence);
+    $self->current_edit->sequence_no($sequence);
+
+    my $handle = $self->_get_changeset_index_handle;
+    $self->_write_changeset( index_handle => $handle, changeset => $self->current_edit );
+    close($handle) || die "Failed to close changeset handle: ".$handle;
+    $self->current_edit(undef);
+}
+
+sub _after_record_changes {
+    my $self = shift;
+    my ($changeset) = validate_pos( @_, { isa => 'Prophet::ChangeSet' } );
+
+    $self->current_edit->is_nullification( $changeset->is_nullification );
+    $self->current_edit->is_resolution( $changeset->is_resolution );
 }
+
+
+
+
 sub create_record {
     my $self = shift;
     my %args = validate( @_, { uuid => 1, props => 1, type => 1 } );
-    $self->_write_serialized_record( type => $args{'type'}, uuid => $args{'uuid'}, props => $args{'props'});
 
+    my $inside_edit = $self->current_edit ? 1 : 0;
+    $self->begin_edit() unless ($inside_edit);
+
+
+    $self->_write_serialized_record(
+        type  => $args{'type'},
+        uuid  => $args{'uuid'},
+        props => $args{'props'}
+    );
+
+   my $change =      Prophet::Change->new({
+        record_type => $args{'type'},
+        record_uuid => $args{'uuid'},
+        change_type => 'add_file' });
+
+
+    foreach my $name ( keys %{ $args{props} } ) {
+        $change->add_prop_change(
+            name => $name,
+            old  => undef,
+            new  => $args{props}->{$name});
+    }
+
+    $self->current_edit->add_change(change => $change );
+
+    $self->commit_edit unless ($inside_edit);
 }
+
 sub delete_record {
     my $self = shift;
     my %args = validate( @_, { uuid => 1, type => 1 } );
-    # XXX TODO Write out an entry to the record's index file marking it as a special deleted uuid? - this has lots of ramifications for list, load, exists, create
-    $self->_delete_record_index( uuid => $args{uuid}, type => $args{type});
+
+    my $inside_edit = $self->current_edit ? 1 : 0;
+    $self->begin_edit() unless ($inside_edit);
+
+# XXX TODO Write out an entry to the record's index file marking it as a special deleted uuid? - this has lots of ramifications for list, load, exists, create
+    $self->_delete_record_index( uuid => $args{uuid}, type => $args{type} );
+   
+    my $change =      Prophet::Change->new({
+        record_type => $args{'type'},
+        record_uuid => $args{'uuid'},
+        change_type => 'delete'} );
+    $self->current_edit->add_change(change => $change );
+
+        $self->commit_edit() unless ($inside_edit);
+    return 1;
 }
 
 sub set_record_props {
     my $self      = shift;
     my %args      = validate( @_, { uuid => 1, props => 1, type => 1 } );
-    my $old_props = $self->get_record_props( uuid => $args{'uuid'}, type => $args{'type'} );
+
+    my $inside_edit = $self->current_edit ? 1 : 0;
+    $self->begin_edit() unless ($inside_edit);
+
+    my $old_props = $self->get_record_props( uuid => $args{'uuid'},
+        type => $args{'type'} );
+    my %new_props = %$old_props;
     foreach my $prop ( %{ $args{props} } ) {
         if ( !defined $args{props}->{$prop} ) {
-            delete $old_props->{$prop};
+            delete $new_props{$prop};
         } else {
-            $old_props->{$prop} = $args{props}->{$prop};
+            $new_props{$prop} = $args{props}->{$prop};
         }
     }
-    $self->_write_serialized_record( type => $args{'type'}, uuid => $args{'uuid'}, props => $old_props );
+    $self->_write_serialized_record(
+        type  => $args{'type'},
+        uuid  => $args{'uuid'},
+        props => \%new_props
+    );
+
+    my $change =      Prophet::Change->new({
+        record_type => $args{'type'},
+        record_uuid => $args{'uuid'},
+        change_type => 'update_file'} );
+    
+    foreach my $name ( keys %{ $args{props} } ) {
+        $change->add_prop_change(
+            name => $name,
+            old  => $old_props->{$name},
+            new  => $args{props}->{$name});
+    }
+    $self->current_edit->add_change(change => $change );
+
+
+        $self->commit_edit() unless ($inside_edit);
+
 }
+
 sub get_record_props {
     my $self = shift;
     my %args = validate( @_, { uuid => 1, type => 1 } );
-    return  $self->_read_serialized_record(uuid => $args{'uuid'}, type => $args{'type'});
+    return $self->_read_serialized_record(
+        uuid => $args{'uuid'},
+        type => $args{'type'}
+    );
 }
+
 sub record_exists {
     my $self = shift;
-    my %args = validate( @_, { uuid => 1, type => 1} );
-    return $self->_file_exists($self->_record_index_filename( type => $args{'type'}, uuid => $args{'uuid'}));
+    my %args = validate( @_, { uuid => 1, type => 1 } );
+    return $self->_file_exists(
+        $self->_record_index_filename(
+            type => $args{'type'},
+            uuid => $args{'uuid'}
+        )
+    );
+
     # TODO, check that the index file doesn't have a 'deleted!' note
 }
+
 sub list_records {
     my $self = shift;
     my %args = validate( @_ => { type => 1 } );
+
     #return just the filenames, which, File::Find::Rule doesn't seem capable of
-    return [ map { my @path = split(qr'/',$_); pop @path  } 
-    File::Find::Rule->file->maxdepth(3)->in(dir($self->fs_root,$self->_record_type_root($args{'type'})))];
+    return [
+        map { my @path = split( qr'/', $_ ); pop @path }
+            File::Find::Rule->file->maxdepth(3)->in(
+            dir( $self->fs_root, $self->_record_type_root( $args{'type'} ) )
+            )
+    ];
 }
+
 sub list_types {
     my $self = shift;
-    return[ map { my @path = split(qr'/',$_); pop @path  } File::Find::Rule->file-> maxdepth(1)->in(dir($self->fs_root, $self->record_dir)) ];
+    return [ map { my @path = split( qr'/', $_ ); pop @path }
+            File::Find::Rule->mindepth(1)->maxdepth(1)->in( dir( $self->fs_root, $self->record_dir ) ) 
+            ];
 
 }
+
 sub type_exists {
     my $self = shift;
     my %args = validate( @_, { type => 1 } );
-    return $self->_file_exists($self->_record_type_root( $args{'type'}));
+    return $self->_file_exists( $self->_record_type_root( $args{'type'} ) );
 }
 1;
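
The heart of the Native.pm change is the edit lifecycle: begin_edit opens a
Prophet::ChangeSet stamped with this replica's uuid, create_record,
set_record_props and delete_record each add a Prophet::Change to it (opening
and committing an edit of their own when none is in flight), and commit_edit
bumps latest-sequence-no and appends the serialized changeset to
changesets.idx. A sketch of what that looks like from the outside; the path
and record props are made up:

    use Prophet::Replica;
    use Data::UUID;

    my $replica = Prophet::Replica->new( { url => 'prophet:file:///tmp/demo-replica' } );

    # no edit is open, so create_record wraps itself in one:
    # begin_edit -> _write_serialized_record -> add a Prophet::Change -> commit_edit
    $replica->create_record(
        type  => 'Bug',
        uuid  => Data::UUID->new->create_str,
        props => { status => 'new', summary => 'demo record' },
    );

    # commit_edit incremented the sequence and appended one row to changesets.idx
    print $replica->latest_sequence_no, "\n";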

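The on-disk layout this module writes is a set of fixed-width binary index
rows pointing into a content-addressed store: a record index row is
pack('NH40', ...), 24 bytes (RECORD_INDEX_SIZE); a changeset index row is
pack('Na16NH40', ...), 44 bytes (CHG_RECORD_SIZE); and the H40 field is the
SHA-1 of a YAML blob filed under <cas_dir>/<x>/<y>/<sha1>, where <x> and <y>
are its first two hex digits. A sketch of decoding one changeset row with the
same unpack the module uses (the helper name and its arguments are
illustrative):

    use Path::Class;
    use Data::UUID;
    use YAML::Syck;

    use constant CHG_RECORD_SIZE => ( 4 + 16 + 4 + 20 );    # 44 bytes per row

    sub read_changeset_row {
        my ( $chgidx, $rev, $cas_dir ) = @_;    # raw changesets.idx data, 1-based seq, CAS dir
        my $row = substr( $chgidx, ( $rev - 1 ) * CHG_RECORD_SIZE, CHG_RECORD_SIZE );
        my ( $seq, $orig_uuid_bin, $orig_seq, $sha1 ) = unpack( 'Na16NH40', $row );

        my $orig_uuid = Data::UUID->new->to_string($orig_uuid_bin);
        my $casfile   = file( $cas_dir, substr( $sha1, 0, 1 ), substr( $sha1, 1, 1 ), $sha1 );
        return ( $seq, $orig_uuid, $orig_seq, YAML::Syck::Load( scalar $casfile->slurp ) );
    }
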
Modified: Prophet/trunk/lib/Prophet/Replica/SVN.pm
==============================================================================
--- Prophet/trunk/lib/Prophet/Replica/SVN.pm	(original)
+++ Prophet/trunk/lib/Prophet/Replica/SVN.pm	Thu Apr 17 18:25:40 2008
@@ -17,7 +17,7 @@
 use Prophet::ChangeSet;
 use Prophet::Conflict;
 
-__PACKAGE__->mk_accessors(qw/url ra repo_path repo_handle current_edit _pool/);
+__PACKAGE__->mk_accessors(qw/url ra fs_root repo_handle current_edit _pool/);
 
 
 use constant scheme => 'svn';
@@ -65,13 +65,13 @@
 sub _setup_repo_connection {
     my $self = shift;
     my %args = validate( @_, { repository => 1, db_uuid => 0 } );
-    $self->repo_path( $args{'repository'} );
+    $self->fs_root( $args{'repository'} );
     $self->db_uuid( $args{'db_uuid'} ) if ( $args{'db_uuid'} );
     
-    my $repos = eval { SVN::Repos::open( $self->repo_path ); };
+    my $repos = eval { SVN::Repos::open( $self->fs_root ); };
     # If we couldn't open the repository handle, we should create it
-    if ( $@ && !-d $self->repo_path ) {
-        $repos = SVN::Repos::create( $self->repo_path, undef, undef, undef, undef, $self->_pool );
+    if ( $@ && !-d $self->fs_root ) {
+        $repos = SVN::Repos::create( $self->fs_root, undef, undef, undef, undef, $self->_pool );
     }
     $self->repo_handle($repos);
     $self->_determine_db_uuid;
@@ -113,7 +113,9 @@
         unless defined $last_rev;
 
     for my $rev ( $first_rev .. $self->latest_sequence_no ) {
-        $args{callback}->( $self->_fetch_changeset($rev) );
+            my $changeset = $self->_fetch_changeset($rev);
+            next if $changeset->is_empty;;
+        $args{callback}->( $changeset);
     }
 }
 

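Skipping empty changesets here pairs with the "two empty edits" workaround in
Native's initialize above: SVN repositories carry empty setup revisions, and
consumers should not have to see them. Assuming fetch_changesets is routed
through the traversal shown in this hunk, iterating a replica now looks like
this (the print is illustrative):

    for my $changeset ( @{ $replica->fetch_changesets( after => 0 ) } ) {
        # empty SVN revisions are filtered out before we ever get here
        printf "changeset %d from %s\n", $changeset->sequence_no,
            $changeset->original_source_uuid;
    }
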
Modified: Prophet/trunk/lib/Prophet/ReplicaExporter.pm
==============================================================================
--- Prophet/trunk/lib/Prophet/ReplicaExporter.pm	(original)
+++ Prophet/trunk/lib/Prophet/ReplicaExporter.pm	Thu Apr 17 18:25:40 2008
@@ -191,8 +191,7 @@
 
     my $collection = Prophet::Collection->new(
         handle => $self->source_replica,
-        type   => $args{type}
-    );
+        type   => $args{type});
     $collection->matching( sub {1} );
     $self->target_replica->_write_record( record => $_ ) for @$collection;
 
@@ -201,8 +200,7 @@
 sub export_changesets {
     my $self = shift;
 
-    open( my $cs_file, ">" . file( $self->target_replica->fs_root, 'changesets.idx' ) ) || die $!;
-
+    my $cs_file = $self->target_replica->_get_changeset_index_handle();
     foreach my $changeset ( @{ $self->source_replica->fetch_changesets( after => 0 ) } ) {
         $self->target_replica->_write_changeset( index_handle => $cs_file, changeset => $changeset );
 

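With the append-mode handle now living on the replica itself, export and
ordinary commit_edit calls write changesets.idx through the same helper. The
export loop above condenses to roughly this (assuming $source and $target are
already-constructed replicas):

    my $cs_file = $target->_get_changeset_index_handle();    # ">>" append handle
    for my $changeset ( @{ $source->fetch_changesets( after => 0 ) } ) {
        $target->_write_changeset( index_handle => $cs_file, changeset => $changeset );
    }
    close $cs_file or die $!;
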
Modified: Prophet/trunk/lib/Prophet/Resolver/AlwaysTarget.pm
==============================================================================
--- Prophet/trunk/lib/Prophet/Resolver/AlwaysTarget.pm	(original)
+++ Prophet/trunk/lib/Prophet/Resolver/AlwaysTarget.pm	Thu Apr 17 18:25:40 2008
@@ -15,7 +15,7 @@
     } elsif ( $conflicting_change->file_op_conflict eq 'delete_missing_file' ) {
         return $resolution;
     } elsif ( $conflicting_change->file_op_conflict ) {
-        die YAML::Dump( $conflict, $conflicting_change );
+        die YAML::Syck::Dump( $conflict, $conflicting_change );
     }
 
     for my $prop_change ( @{ $conflicting_change->prop_conflicts } ) {

Modified: Prophet/trunk/lib/Prophet/Test.pm
==============================================================================
--- Prophet/trunk/lib/Prophet/Test.pm	(original)
+++ Prophet/trunk/lib/Prophet/Test.pm	Thu Apr 17 18:25:40 2008
@@ -237,7 +237,7 @@
 
 sub replica_last_rev {
     my $cli = Prophet::CLI->new();
-    return $cli->app_handle->handle->repo_handle->fs->youngest_rev;
+    return $cli->app_handle->handle->latest_sequence_no;
 }
 
 =head2 as_user USERNAME CODEREF

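replica_last_rev now asks the handle for latest_sequence_no instead of
reaching into the SVN filesystem, so it works whichever backend
PROPHET_REPLICA_TYPE selects. A hypothetical use in a test, with the
surrounding replica setup and test plan assumed:

    my $before = replica_last_rev();
    run_ok( 'prophet', [qw(create --type Dummy --ignore yes)], "Created a dummy record" );
    is( replica_last_rev(), $before + 1, "the create landed as exactly one new changeset" );
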
Modified: Prophet/trunk/t/export.t
==============================================================================
--- Prophet/trunk/t/export.t	(original)
+++ Prophet/trunk/t/export.t	Thu Apr 17 18:25:40 2008
@@ -22,8 +22,10 @@
 
     run_ok( 'prophet', [qw(create --type Dummy --ignore yes)], "Created a dummy record" );
 
-    run_ok( 'prophet', [ 'merge', '--to', repo_uri_for('bob'), '--from', repo_uri_for('alice') ], "Sync ran ok!" );
+    diag repo_uri_for('bob');
+    diag repo_uri_for('alice');
 
+    run_ok( 'prophet', [ 'merge', '--to', repo_uri_for('bob'), '--from', repo_uri_for('alice') ], "Sync ran ok!" );
     # check our local replicas
     my ( $ret, $out, $err ) = run_script( 'prophet', [qw(search --type Bug --regex .)] );
     like( $out, qr/new/, "We have the one record from alice" );
@@ -61,5 +63,7 @@
     # XXX: compare the changeset structure
     is( lc( $changesets->[-1]->{source_uuid} ), lc( $changesets->[-1]->{original_source_uuid} ) );
 
+
+
 };
 

Modified: Prophet/trunk/t/simple-conflicting-merge.t
==============================================================================
--- Prophet/trunk/t/simple-conflicting-merge.t	(original)
+++ Prophet/trunk/t/simple-conflicting-merge.t	Thu Apr 17 18:25:40 2008
@@ -134,6 +134,7 @@
     $applied_as_hash->{$_} = undef for qw(sequence_no source_uuid original_source_uuid original_sequence_no);
     is_deeply( $applied_as_hash, $null_as_hash );
 
+
     # is the most recent change:
     my $from_alice = shift @changesets;
 


