[Bps-public-commit] r11730 - in Prophet/trunk: . lib/Prophet/Replica t
jesse at bestpractical.com
Mon Apr 14 19:55:04 EDT 2008
Author: jesse
Date: Mon Apr 14 19:55:03 2008
New Revision: 11730
Modified:
Prophet/trunk/ (props changed)
Prophet/trunk/lib/Prophet/Replica.pm
Prophet/trunk/lib/Prophet/Replica/Native.pm
Prophet/trunk/lib/Prophet/ReplicaExporter.pm
Prophet/trunk/t/export.t
Log:
r29704 at 101: jesse | 2008-04-14 19:54:14 -0400
* Significant work toward lifting functionality from ReplicaExporter to Replica/Native
Modified: Prophet/trunk/lib/Prophet/Replica.pm
==============================================================================
--- Prophet/trunk/lib/Prophet/Replica.pm (original)
+++ Prophet/trunk/lib/Prophet/Replica.pm Mon Apr 14 19:55:03 2008
@@ -470,7 +470,7 @@
my %args = validate( @_, { path => 1, } );
Prophet::ReplicaExporter->require();
- my $exporter = Prophet::ReplicaExporter->new({target_path => $args{'path'}, replica => $self});
+ my $exporter = Prophet::ReplicaExporter->new({target_path => $args{'path'}, source_replica => $self});
$exporter->export();
}
@@ -551,14 +551,10 @@
sub record_resolutions {
my $self = shift;
my ($changeset) = validate_pos(@_, { isa => 'Prophet::ChangeSet'});
-
-
$self->_unimplemented("record_resolutions (since there is no writable handle)") unless ($self->can_write_changesets);
-
# If we have a resolution db handle, record the resolutions there.
# Otherwise, record them locally
- my $res_handle = $self->resolution_db_handle || $self;
-
+ my $res_handle = $self->resolution_db_handle || $self;
return unless $changeset->changes;
Modified: Prophet/trunk/lib/Prophet/Replica/Native.pm
==============================================================================
--- Prophet/trunk/lib/Prophet/Replica/Native.pm (original)
+++ Prophet/trunk/lib/Prophet/Replica/Native.pm Mon Apr 14 19:55:03 2008
@@ -11,7 +11,6 @@
__PACKAGE__->mk_accessors(qw/url db_uuid _uuid/);
-
use constant scheme => 'prophet';
=head2 setup
@@ -30,7 +29,7 @@
unless ( $self->is_resdb ) {
- # $self->resolution_db_handle( __PACKAGE__->new( { url => $self->{url}.'/resolutions', is_resdb => 1 } ) );
+ # $self->resolution_db_handle( __PACKAGE__->new( { url => $self->{url}.'/resolutions', is_resdb => 1 } ) );
}
}
@@ -71,10 +70,8 @@
);
my $first_rev = ( $args{'after'} + 1 ) || 1;
-
- my $latest = LWP::Simple::get( $self->url . '/latest-sequence-no' );
-
- my $chgidx = LWP::Simple::get( $self->url . '/changesets.idx' );
+ my $latest = $self->most_recent_changeset();
+ my $chgidx = LWP::Simple::get( $self->url . '/changesets.idx' );
for my $rev ( $first_rev .. $latest ) {
my ( $seq, $orig_uuid, $orig_seq, $key )
@@ -83,28 +80,31 @@
# XXX: deserialize the changeset content from the cas with $key
my $casfile = $self->url . '/cas/changesets/' . substr( $key, 0, 1 ) . '/' . substr( $key, 1, 1 ) . '/' . $key;
-
- my $content = YAML::Syck::Load( LWP::Simple::get($casfile) );
-
- my $changeset = Prophet::ChangeSet->new_from_hashref($content);
- $changeset->source_uuid( $self->uuid );
- $changeset->sequence_no($seq);
- $changeset->original_source_uuid($orig_uuid);
- $changeset->original_sequence_no($orig_seq);
+ my $changeset = $self->_deserialize_changeset(
+ content => LWP::Simple::get($casfile),
+ original_source_uuid => $orig_uuid,
+ original_sequence_no => $orig_seq,
+ sequence_no => $seq
+ );
$args{callback}->($changeset);
}
}
-sub record_integration_changeset {
- die 'readonly';
+sub most_recent_changeset {
+ my $self = shift;
+ return LWP::Simple::get( $self->url . '/latest-sequence-no' );
}
-sub record_changes {
- die 'readonly';
-}
+sub _deserialize_changeset {
+ my $self = shift;
-sub record_resolutions {
- die 'readonly';
+ my %args = validate( @_, { content => 1, original_sequence_no => 1, original_source_uuid => 1, sequence_no => 1 } );
+ my $content_struct = YAML::Syck::Load( $args{content} );
+ my $changeset = Prophet::ChangeSet->new_from_hashref($content_struct);
+ $changeset->source_uuid( $self->uuid );
+ $changeset->sequence_no( $args{'sequence_no'} );
+ $changeset->original_source_uuid( $args{'original_source_uuid'} );
+ $changeset->original_sequence_no( $args{'original_sequence_no'} );
+ return $changeset;
}
-
1;
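
A minimal sketch of the content-addressed path that fetch_changesets above walks: the CAS key is fanned out into two single-character directories before the full key (cas/changesets/<x>/<y>/<key>), with the write side fingerprinting content via sha1_hex in _write_to_cas. The helper name cas_path_for below and its use of Digest::SHA1 and Path::Class are illustrative assumptions, not code from this commit.

use strict;
use warnings;
use Digest::SHA1 qw(sha1_hex);
use Path::Class qw(file);

# Hypothetical helper: derive the content-addressed path for a blob, fanned
# out by the first two characters of its sha1 fingerprint, matching the
# cas/changesets/<x>/<y>/<key> path read back in fetch_changesets above.
sub cas_path_for {
    my ( $cas_dir, $content ) = @_;
    my $fingerprint = sha1_hex($content);    # 40 hex characters
    return file(
        $cas_dir,
        substr( $fingerprint, 0, 1 ),        # first fan-out level
        substr( $fingerprint, 1, 1 ),        # second fan-out level
        $fingerprint,
    );
}
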
Modified: Prophet/trunk/lib/Prophet/ReplicaExporter.pm
==============================================================================
--- Prophet/trunk/lib/Prophet/ReplicaExporter.pm (original)
+++ Prophet/trunk/lib/Prophet/ReplicaExporter.pm Mon Apr 14 19:55:03 2008
@@ -9,8 +9,9 @@
use YAML::Syck;
use UNIVERSAL::require;
-__PACKAGE__->mk_accessors(qw( replica target_path));
-
+__PACKAGE__->mk_accessors(
+ qw( source_replica target_path fs_root cas_root record_cas_dir changeset_cas_dir record_dir));
+
=head1 NAME
Prophet::ReplicaExporter
@@ -30,7 +31,6 @@
=cut
-
=head2 export
This routine will export a copy of this prophet database replica to a flat file on disk suitable for
@@ -158,33 +158,13 @@
sub export {
my $self = shift;
+
+ $self->_initialize_replica( db_uuid => $self->source_replica->db_uuid );
+ $self->_init_export_metadata();
+ $self->export_records( type => $_ ) for ( @{ $self->source_replica->list_types } );
+ $self->export_changesets();
- my $replica_root = dir( $self->target_path, $self->replica->db_uuid );
- my $cas_dir = dir( $replica_root => 'cas' );
- my $record_cas_dir = dir( $cas_dir => 'records' );
- my $changeset_cas_dir = dir( $cas_dir => 'changesets' );
- my $record_dir = dir( $replica_root => 'records' );
-
- _mkdir( $self->target_path);
- _mkdir($replica_root);
- _mkdir($record_dir);
- _mkdir($cas_dir);
- make_tiered_dirs($record_cas_dir);
- make_tiered_dirs($changeset_cas_dir);
-
- $self->_init_export_metadata( root => $replica_root );
-
- foreach my $type ( @{ $self->replica->list_types } ) {
- $self->export_records(
- type => $type,
- root => $replica_root,
- cas_dir => $record_cas_dir
- );
- }
-
- $self->export_changesets( root => $replica_root, cas_dir => $changeset_cas_dir );
-
- #$self->export_resolutions( path => dir( $replica_root, 'resolutions'), resdb_handle => $args{'resdb_handle'} );
+ #$self->export_resolutions( path => dir( $fs_root, 'resolutions'), resdb_handle => $args{'resdb_handle'} );
}
@@ -195,56 +175,78 @@
# ...
}
+sub _initialize_replica {
+ my $self = shift;
+ my %args = validate(@_, { db_uuid => 0});
+
+ $self->fs_root( dir( $self->target_path, $args{'db_uuid'} || Data::UUID->new->create_str() ));
+ $self->cas_root( dir( $self->fs_root => 'cas' ) );
+ $self->record_cas_dir( dir( $self->cas_root => 'records' ) );
+ $self->changeset_cas_dir( dir( $self->cas_root => 'changesets' ) );
+ $self->record_dir( dir( $self->fs_root => 'records' ) );
+
+ _mkdir($_) for ( $self->target_path, $self->fs_root, $self->record_dir, $self->cas_root );
+ make_tiered_dirs( $self->record_cas_dir );
+ make_tiered_dirs( $self->changeset_cas_dir );
+
+ $self->_set_most_recent_changeset_no("1");
+ $self->_set_replica_uuid(Data::UUID->new->create_str);
+ $self->_output_oneliner_file( path => file( $self->fs_root, 'replica-version' ), content => '1' );
+}
+
sub _init_export_metadata {
my $self = shift;
- my %args = validate( @_, { root => 1 } );
+ $self->_set_most_recent_changeset_no($self->source_replica->most_recent_changeset);
+ $self->_set_replica_uuid( $self->source_replica->uuid);
- $self->_output_oneliner_file( path => file( $args{'root'}, 'replica-uuid' ), content => $self->replica->uuid );
- $self->_output_oneliner_file( path => file( $args{'root'}, 'replica-version' ), content => '1' );
- $self->_output_oneliner_file(
- path => file( $args{'root'}, 'latest-sequence-no' ),
- content => $self->replica->most_recent_changeset
- );
+}
+
+sub _set_replica_uuid {
+ my $self = shift;
+ my $uuid = shift;
+ $self->_output_oneliner_file( path => file( $self->fs_root, 'replica-uuid' ), content => $uuid);
+
+}
+sub _set_most_recent_changeset_no {
+ my $self = shift;
+ my $id = shift;
+ $self->_output_oneliner_file( path => file( $self->fs_root, 'latest-sequence-no' ), content => scalar($id));
}
sub export_records {
my $self = shift;
- my %args = validate( @_, { root => 1, type => 1, cas_dir => 1 } );
+ my %args = validate( @_, { type => 1 } );
- make_tiered_dirs( dir( $args{'root'} => 'records' => $args{'type'} ) );
+ make_tiered_dirs( dir( $self->fs_root => 'records' => $args{'type'} ) );
my $collection = Prophet::Collection->new(
- handle => $self->replica,
+ handle => $self->source_replica,
type => $args{type}
);
$collection->matching( sub {1} );
- $self->export_record(
- record_dir => dir( $args{'root'}, 'records', $_->type ),
- cas_dir => $args{'cas_dir'},
+ $self->_write_record(
record => $_
) for @$collection;
}
-sub export_record {
+sub _write_record {
my $self = shift;
my %args = validate(
@_,
{ record => { isa => 'Prophet::Record' },
- record_dir => 1,
- cas_dir => 1,
}
);
+ my $record_dir = dir( $self->fs_root, 'records', $args{'record'}->type );
my $content = YAML::Syck::Dump( $args{'record'}->get_props );
my ($cas_key) = $self->_write_to_cas(
content_ref => \$content,
- cas_dir => $args{'cas_dir'}
+ cas_dir => $self->record_cas_dir
);
- my $idx_filename = file(
- $args{'record_dir'},
+ my $idx_filename = file( $record_dir,
substr( $args{record}->uuid, 0, 1 ),
substr( $args{record}->uuid, 1, 1 ),
$args{record}->uuid
@@ -263,38 +265,44 @@
sub export_changesets {
my $self = shift;
- my %args = validate( @_, { root => 1, cas_dir => 1 } );
- open( my $cs_file, ">" . file( $args{'root'}, 'changesets.idx' ) ) || die $!;
+ open( my $cs_file, ">" . file( $self->fs_root, 'changesets.idx' ) ) || die $!;
- foreach my $changeset ( @{ $self->replica->fetch_changesets( after => 0 ) } ) {
- my $hash_changeset = $changeset->as_hash;
- delete $hash_changeset->{'sequence_no'};
- delete $hash_changeset->{'source_uuid'};
-
- my $content = YAML::Syck::Dump($hash_changeset);
- my $cas_key = $self->_write_to_cas(
- content_ref => \$content,
- cas_dir => $args{'cas_dir'}
- );
-
- # XXX TODO we should only actually be encoding the sha1 of content once
- # and then converting. this is wasteful
-
- my $packed_cas_key = sha1($content);
-
- print $cs_file pack( 'Na16Na20',
- $changeset->sequence_no,
- Data::UUID->new->from_string( $changeset->original_source_uuid ),
- $changeset->original_sequence_no,
- $packed_cas_key )
- || die $!;
+ foreach my $changeset ( @{ $self->source_replica->fetch_changesets( after => 0 ) } ) {
+ $self->_write_changeset( index_handle => $cs_file, changeset => $changeset );
}
-
close($cs_file);
}
+sub _write_changeset {
+ my $self = shift;
+ my %args = validate( @_, { index_handle => 1, changeset => { isa => 'Prophet::ChangeSet' } } );
+
+ my $changeset = $args{'changeset'};
+ my $fh = $args{'index_handle'};
+
+ my $hash_changeset = $changeset->as_hash;
+ delete $hash_changeset->{'sequence_no'};
+ delete $hash_changeset->{'source_uuid'};
+
+ my $content = YAML::Syck::Dump($hash_changeset);
+ my $cas_key = $self->_write_to_cas( content_ref => \$content, cas_dir => $self->changeset_cas_dir );
+
+ # XXX TODO we should only actually be encoding the sha1 of content once
+ # and then converting. this is wasteful
+
+ my $packed_cas_key = sha1($content);
+
+ my $changeset_index_line = pack( 'Na16Na20',
+ $changeset->sequence_no,
+ Data::UUID->new->from_string( $changeset->original_source_uuid ),
+ $changeset->original_sequence_no,
+ $packed_cas_key );
+ print $fh $changeset_index_line || die $!;
+
+}
+
sub _mkdir {
my $path = shift;
unless ( -d $path ) {
@@ -319,9 +327,8 @@
}
sub _write_to_cas {
- my $self = shift;
- my %args = validate( @_, { content_ref => 1, cas_dir => 1 } );
-
+ my $self = shift;
+ my %args = validate( @_, { content_ref => 1, cas_dir => 1 } );
my $content = ${ $args{'content_ref'} };
my $fingerprint = sha1_hex($content);
my $content_filename
@@ -337,7 +344,7 @@
my %args = validate( @_, { path => 1, content => 1 } );
open( my $file, ">", $args{'path'} ) || die $!;
- print $file $args{'content'} || die $!;
+ print $file $args{'content'} || die "Could not write to ".$args{'path'} . " " . $!;
close $file || die $!;
}
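
For reference, a minimal sketch of the fixed-width changesets.idx record that _write_changeset packs above with 'Na16Na20': a 4-byte sequence number, the 16-byte original source uuid, a 4-byte original sequence number, and the 20-byte raw sha1 CAS key, 44 bytes per record. The reader helper below, its name, and the hex conversion of the CAS key are illustrative assumptions, not part of this commit.

use strict;
use warnings;
use Data::UUID;

use constant CHG_RECORD_SIZE => 4 + 16 + 4 + 20;    # 44 bytes per index record

# Hypothetical helper: pull the $rev'th entry out of a raw changesets.idx string.
sub read_changeset_index_entry {
    my ( $chgidx, $rev ) = @_;

    my $record = substr( $chgidx, ( $rev - 1 ) * CHG_RECORD_SIZE, CHG_RECORD_SIZE );
    my ( $seq, $orig_uuid_bytes, $orig_seq, $key_bytes ) = unpack( 'Na16Na20', $record );

    return {
        sequence_no          => $seq,
        original_source_uuid => Data::UUID->new->to_string($orig_uuid_bytes),
        original_sequence_no => $orig_seq,
        cas_key              => unpack( 'H40', $key_bytes ),    # hex form, as used in CAS paths
    };
}
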
Modified: Prophet/trunk/t/export.t
==============================================================================
--- Prophet/trunk/t/export.t (original)
+++ Prophet/trunk/t/export.t Mon Apr 14 19:55:03 2008
@@ -54,7 +54,7 @@
ok( -e $path->file('changesets.idx'), 'found changesets index' );
my $latest = $path->file('latest-sequence-no')->slurp;
is( $latest, 5 );
- use_ok('Prophet::Replica::HTTP');
+ use_ok('Prophet::Replica::Native');
my $changesets = Prophet::Replica->new( { url => 'prophet:file://' . $path } )->fetch_changesets( after => 0 );
is( $#{$changesets}, 4, "We found a total of 5 changesets" );