[Bps-public-commit] www-mechanize branch, master, updated. 1.68-2-g5b8d747
Jason May
jasonmay at bestpractical.com
Mon Aug 1 17:49:47 EDT 2011
The branch, master has been updated
via 5b8d7477e7e744df9a685ef56bed08cf7f2567ff (commit)
via 603cc85e10f46be6237d486ca8a71272ae314104 (commit)
from 564087617ea8ee69a2fcdbf00a708865cdc69766 (commit)
Summary of changes:
.shipit | 3 +-
Changes | 7 ++
lib/WWW/Mechanize.pm | 2 +-
t/local/LocalServer.pm | 207 +++++++++++++++++++++++++++++++++---------------
t/local/failure.t | 2 +-
t/local/get.t | 14 ++--
t/local/log-server | 123 ++++++++++++++++++++++------
t/local/reload.t | 6 +-
8 files changed, 259 insertions(+), 105 deletions(-)
- Log -----------------------------------------------------------------
commit 603cc85e10f46be6237d486ca8a71272ae314104
Author: Max Maischein <corion at corion.net>
Date: Sun Jul 31 12:16:13 2011 +0200
Update the Test::HTTP::LocalServer files (and tests) to a recent version
- remove ->creds_required(), as that path does not exist in the server
- add some more error conditions to the server (unused by WWW::Mechanize)
- delete environment variables for proxies etc.
- remove logfile on disk
- update copyright
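
For orientation, here is a minimal sketch of how the updated helper is driven from a test file (the test-file boilerplate, test count, and use of the default served page are assumptions based on the POD and tests in the diff below; they are not part of this commit):

    use strict;
    use warnings;
    use Test::More tests => 3;
    use lib 't/local';
    use LocalServer;
    use WWW::Mechanize;

    my $server = LocalServer->spawn;   # starts t/local/log-server in a child process
    my $mech   = WWW::Mechanize->new( autocheck => 0 );

    $mech->get( $server->url );
    ok( $mech->success, 'fetched the default page from the local server' );
    ok( $mech->is_html, 'served as HTML' );

    # get_log() fetches the request log over HTTP and then stops the server;
    # it replaces the old get_output(), which read a logfile from disk.
    my $log = $server->get_log;
    like( $log, qr/^Request:/m, 'the GET above was logged' );
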
diff --git a/Changes b/Changes
index 4127412..dc5df0d 100644
--- a/Changes
+++ b/Changes
@@ -3,6 +3,10 @@ Revision history for WWW::Mechanize
Mech now has its own mailing list at Google Groups:
http://groups.google.com/group/www-mechanize-users
+[CHANGES]
+
+The test suite for the local tests was updated
+
1.68 Fri Apr 22 01:10:40 EST 2011
========================================
No changes from 1.67_01
diff --git a/t/local/LocalServer.pm b/t/local/LocalServer.pm
index b15544e..393af37 100644
--- a/t/local/LocalServer.pm
+++ b/t/local/LocalServer.pm
@@ -3,7 +3,6 @@ package LocalServer;
# start a fake webserver, fork, and connect to ourselves
use warnings;
use strict;
-use Test::More;
use LWP::Simple;
use FindBin;
use File::Spec;
@@ -11,7 +10,21 @@ use File::Temp;
use URI::URL qw();
use Carp qw(carp croak);
-=head2 C<< Test::HTTP::LocalServer->spawn %ARGS >>
+use vars qw($VERSION);
+$VERSION = '0.55';
+
+=head1 SYNOPSIS
+
+ use LWP::Simple qw(get);
+ my $server = Test::HTTP::LocalServer->spawn;
+
+ ok get $server->url, "Retrieve " . $server->url;
+
+ $server->stop;
+
+=head1 METHODS
+
+=head2 C<Test::HTTP::LocalServer-E<gt>spawn %ARGS>
This spawns a new HTTP server. The server will stay running until
C<< $server->stop >> is called.
@@ -20,22 +33,37 @@ Valid arguments are:
=over 4
-=item * html
+=item *
+
+C<< html => >> scalar containing the page to be served
+
+=item *
-scalar containing the page to be served
+C<< file => >> filename containing the page to be served
-=item * file
+=item *
-filename containing the page to be served
+C<< debug => 1 >> to make the spawned server output debug information
-=item * debug
+=item *
-Set to true to make the spawned server output debug information
+C<< eval => >> string that will get evaluated per request in the server
+
+Try to avoid characters that are special to the shell, especially quotes.
+A good idea for a slow server would be
+
+ eval => sleep+10
=back
All served HTML will have the first %s replaced by the current location.
+The following entries will be removed from C<%ENV>:
+
+ HTTP_PROXY
+ http_proxy
+ CGI_HTTP_PROXY
+
=cut
sub spawn {
@@ -44,7 +72,10 @@ sub spawn {
bless $self,$class;
local $ENV{TEST_HTTP_VERBOSE};
- $ENV{TEST_HTTP_VERBOSE} = 1 if delete $args{debug};
+ $ENV{TEST_HTTP_VERBOSE} = 1
+ if (delete $args{debug});
+
+ delete @ENV{qw(HTTP_PROXY http_proxy CGI_HTTP_PROXY)};
$self->{delete} = [];
if (my $html = delete $args{html}) {
@@ -61,28 +92,27 @@ sub spawn {
close $fh;
push @{$self->{delete}},$logfile;
$self->{logfile} = $logfile;
- my $web_page = delete $args{file};
- if (defined $web_page) {
- $web_page = qq{"$web_page"}
- } else {
- $web_page = "";
- };
+ my $web_page = delete $args{file} || "";
my $server_file = File::Spec->catfile( $FindBin::Bin,'log-server' );
+ my @opts;
+ push @opts, "-e" => qq{"} . delete($args{ eval }) . qq{"}
+ if $args{ eval };
- open my $server, qq'$^X "$server_file" "$web_page" "$logfile" |'
- or die "Couldn't spawn fake server $server_file : $!";
+ my $pid = open my $server, qq'$^X "$server_file" "$web_page" "$logfile" @opts|'
+ or croak "Couldn't spawn local server $server_file : $!";
my $url = <$server>;
chomp $url;
- die "Couldn't find fake server url" unless $url;
-
- $self->{_fh} = $server;
+ die "Couldn't read back local server url"
+ unless $url;
+ # What is this code supposed to fix?
my $lhurl = URI::URL->new( $url );
$lhurl->host( 'localhost' );
$self->{_server_url} = $lhurl;
-
- diag "Started $lhurl";
+
+ $self->{_fh} = $server;
+ $self->{_pid} = $pid;
$self;
};
@@ -103,46 +133,46 @@ sub port {
=head2 C<< $server->url >>
This returns the url where you can contact the server. This url
-is valid until you call
-C<< $server->stop >>
-or
-C<< $server->get_output >>
+is valid until the C<$server> goes out of scope or you call
+C<< $server->stop >> or C<< $server->get_log >>.
=cut
sub url {
- my $url = $_[0]->{_server_url}->abs;
-
- return $url->as_string;
+ $_[0]->{_server_url}->abs->as_string
};
-=head2 C<< $server->creds_required >>
+=head2 C<< $server->stop >>
-This returns a URL for a page that requires HTTP Basic-Auth. The
-content returned is invariant and irrelevant; this method is for
-testing credential-passing code. The username is 'luser' and the
-password is 'fnord'. When these credentials are passed, the returned
-status will be 200, otherwise it will be 401.
+This stops the server process by requesting a special
+url.
=cut
-sub creds_required {
- return $_[0]->{_server_url} . 'creds_required';
-}
+sub stop {
+ my ($self) = @_;
+ get( $self->quit_server );
+ undef $self->{_server_url};
+ if ( $self->{_fh} ) {
+ close $self->{_fh};
+ delete $self->{_fh};
+ }
+};
-=head2 C<< $server->stop >>
+=head2 C<< $server->kill >>
-This stops the server process by requesting a special
-url.
+This kills the server process via C<kill>. The log
+cannot be retrieved then.
=cut
-sub stop {
- get( $_[0]->{_server_url} . 'quit_server' );
- undef $_[0]->{_server_url}
+sub kill {
+ CORE::kill( 9 => $_[0]->{ _pid } );
+ undef $_[0]->{_server_url};
+ undef $_[0]->{_pid};
};
-=head2 C<< $server->get_output >>
+=head2 C<< $server->get_log >>
This stops the server by calling C<stop> and then returns the
output of the server process. This output will be a list of
@@ -151,27 +181,76 @@ as a string.
=cut
-sub get_output {
+sub get_log {
my ($self) = @_;
+
+ my $log = get( $self->get_server_log );
$self->stop;
- local $/;
- local *LOG;
- open LOG, '<', $self->{logfile}
- or die "Couldn't retrieve logfile";
- return join "", <LOG>;
-}
+ return $log;
+};
sub DESTROY {
- my $self = shift;
- $self->stop if $self->{_server_url};
- if ( $self->{_fh} ) {
- close $self->{_fh};
- delete $self->{_fh};
- }
- for my $file ( @{$self->{delete}} ) {
- unlink $file or warn "Couldn't remove tempfile $file : $!\n";
- }
-}
+ $_[0]->stop if $_[0]->{_server_url};
+ for my $file (@{$_[0]->{delete}}) {
+ unlink $file or warn "Couldn't remove tempfile $file : $!\n";
+ };
+};
+
+=head1 URLs implemented by the server
+
+=head2 302 redirect C<< $server->redirect($target) >>
+
+This URL will issue a redirect to C<$target>. No special care is taken
+towards URL-decoding C<$target>, so as not to complicate the server code.
+You need to be wary about issuing requests with escaped URL parameters.
+
+=head2 404 error C<< $server->error_notfound($target) >>
+
+This URL will respond with status code 404.
+
+=head2 Timeout C<< $server->error_timeout($seconds) >>
+
+This URL will send a 599 error after C<$seconds> seconds.
+
+=head2 Timeout+close C<< $server->error_close($seconds) >>
+
+This URL will send nothing and close the connection after C<$seconds> seconds.
+
+=head2 Error in response content C<< $server->error_after_headers >>
+
+This URL will send headers for a successful response but will close the
+socket with an error after 2 blocks of 16 bytes have been sent.
+
+=head2 Chunked response C<< $server->chunked >>
+
+This URL will return 5 blocks of 16 bytes at a rate of one block per second
+in a chunked response.
+
+=head2 Other URLs
+
+All other URLs will echo back the cookies and query parameters.
+
+=cut
+
+use vars qw(%urls);
+%urls = (
+ 'quit_server' => 'quit_server',
+ 'get_server_log' => 'get_server_log',
+ 'redirect' => 'redirect/%s',
+ 'error_notfound' => 'error/notfound/%s',
+ 'error_timeout' => 'error/timeout/%s',
+ 'error_close' => 'error/close/%s',
+ 'error_after_headers' => 'error/after_headers',
+ 'chunked' => 'chunks',
+);
+for (keys %urls) {
+ no strict 'refs';
+ my $name = $_;
+ *{ $name } = sub {
+ my $self = shift;
+ $self->url . sprintf $urls{ $name }, @_;
+ };
+};
=head1 EXPORT
@@ -181,7 +260,7 @@ None by default.
This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself.
-Copyright (C) 2003 Max Maischein
+Copyright (C) 2003-2011 Max Maischein
=head1 AUTHOR
@@ -191,7 +270,7 @@ Please contact me if you find bugs or otherwise improve the module. More tests a
=head1 SEE ALSO
-L<WWW::Mechanize>,L<WWW::Mechanize::Shell>
+L<WWW::Mechanize>,L<WWW::Mechanize::Shell>,L<WWW::Mechanize::Firefox>
=cut
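
For illustration, a sketch of how the generated per-URL accessors can be exercised (the accessor names come from the C<%urls> table above; the surrounding test boilerplate and the path arguments are assumptions):

    use strict;
    use warnings;
    use Test::More tests => 1;
    use lib 't/local';
    use LocalServer;
    use WWW::Mechanize;

    my $server = LocalServer->spawn;
    my $mech   = WWW::Mechanize->new( autocheck => 0 );

    # redirect('landing') expands to .../redirect/landing, which 302s to .../landing
    $mech->get( $server->redirect('landing') );

    # error_notfound('missing') expands to .../error/notfound/missing
    $mech->get( $server->error_notfound('missing') );
    is( $mech->status, 404, 'not-found endpoint answers with 404' );

    $server->stop;
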
diff --git a/t/local/failure.t b/t/local/failure.t
index 72768b0..0470c02 100644
--- a/t/local/failure.t
+++ b/t/local/failure.t
@@ -36,7 +36,7 @@ GOOD_PAGE: {
ok( $response->is_success, 'Success' );
ok( $mech->success, 'Get webpage' );
ok( $mech->is_html, 'It\'s HTML' );
- is( $mech->title, 'WWW::Mechanize::Shell test page', 'Correct title' );
+ is( $mech->title, 'WWW::Mechanize test page', 'Correct title' );
my @links = $mech->links;
is( scalar @links, 10, '10 links, please' );
diff --git a/t/local/get.t b/t/local/get.t
index 31c2138..c4106d5 100644
--- a/t/local/get.t
+++ b/t/local/get.t
@@ -28,33 +28,33 @@ ok( $response->is_success, 'Page read OK' );
ok( $agent->success, "Get webpage" );
is( $agent->ct, "text/html", "Got the content-type..." );
ok( $agent->is_html, "... and the is_html wrapper" );
-is( $agent->title, 'WWW::Mechanize::Shell test page', 'Titles match' );
+is( $agent->title, 'WWW::Mechanize test page', 'Titles match' );
$agent->get( '/foo/' );
ok( $agent->success, 'Got the /foo' );
is( $agent->uri, sprintf('%sfoo/',$server->url), 'Got relative OK' );
ok( $agent->is_html,'Got HTML back' );
-is( $agent->title, 'WWW::Mechanize::Shell test page', 'Got the right page' );
+is( $agent->title, 'WWW::Mechanize test page', 'Got the right page' );
$agent->get( '../bar/' );
ok( $agent->success, 'Got the /bar page' );
is( $agent->uri, sprintf('%sbar/',$server->url), 'Got relative OK' );
ok( $agent->is_html, 'is HTML' );
-is( $agent->title, 'WWW::Mechanize::Shell test page', 'Got the right page' );
+is( $agent->title, 'WWW::Mechanize test page', 'Got the right page' );
$agent->get( 'basics.html' );
ok( $agent->success, 'Got the basics page' );
is( $agent->uri, sprintf('%sbar/basics.html',$server->url), 'Got relative OK' );
ok( $agent->is_html, 'is HTML' );
-is( $agent->title, 'WWW::Mechanize::Shell test page', 'Title matches' );
-like( $agent->content, qr/WWW::Mechanize::Shell test page/, 'Got the right page' );
+is( $agent->title, 'WWW::Mechanize test page', 'Title matches' );
+like( $agent->content, qr/WWW::Mechanize test page/, 'Got the right page' );
$agent->get( './refinesearch.html' );
ok( $agent->success, 'Got the "refine search" page' );
is( $agent->uri, sprintf('%sbar/refinesearch.html',$server->url), 'Got relative OK' );
ok( $agent->is_html, 'is HTML' );
-is( $agent->title, 'WWW::Mechanize::Shell test page', 'Title matches' );
-like( $agent->content, qr/WWW::Mechanize::Shell test page/, 'Got the right page' );
+is( $agent->title, 'WWW::Mechanize test page', 'Title matches' );
+like( $agent->content, qr/WWW::Mechanize test page/, 'Got the right page' );
my $rslength = do {use bytes; length $agent->content};
my $tempfile = './temp';
diff --git a/t/local/log-server b/t/local/log-server
index 6490f2f..dfde1c5 100755
--- a/t/local/log-server
+++ b/t/local/log-server
@@ -4,19 +4,35 @@ use strict;
use HTTP::Daemon;
use CGI;
use encoding 'iso-8859-1';
+use Getopt::Long;
+use vars qw($VERSION);
+$VERSION = '0.55';
$|++;
-my $d = HTTP::Daemon->new or die;
-print $d->url, "\n";
+GetOptions(
+ 'e=s' => \my $expression,
+);
+
+my $host = 'localhost';
+my $d = HTTP::Daemon->new(
+ LocalAddr => $host,
+) or die;
+
+# HTTP::Daemon doesn't return http://localhost:.../
+# for LocalAddr => 'localhost'. This causes the
+# tests to fail on many machines.
+( my $url = URI->new($d->url) )->host($host);
+print "$url\n";
my ($filename,$logfile) = @ARGV[0,1];
if ($filename) {
open DATA, "< $filename"
or die "Couldn't read page '$filename' : $!\n";
};
-open LOG, ">", $logfile
- or die "Couldn't create logfile '$logfile' : $!\n";
+#open LOG, ">", $logfile
+# or die "Couldn't create logfile '$logfile' : $!\n";
+my $log;
binmode DATA,':encoding(iso-8859-1)';
my $body = join "", <DATA>;
@@ -32,7 +48,6 @@ SERVERLOOP: {
while (my $c = $d->accept) {
debug "New connection";
while (my $r = $c->get_request) {
- print LOG "Request:\n" . $r->as_string . "\n";
debug "Request:\n" . $r->as_string;
my $location = ($r->uri->path || "/");
my ($link1,$link2) = ('','');
@@ -40,33 +55,87 @@ SERVERLOOP: {
($link1,$link2) = ($1,$2);
};
my $res;
- if ($location =~ m!^/redirect/(.*)$!) {
- $res = HTTP::Response->new(302);
- $res->header('location', $d->url . $1);
+ if ($location eq '/get_server_log') {
+ $res = HTTP::Response->new(200, "OK", undef, $log);
+ $log = '';
+ } elsif ( $location eq '/quit_server') {
+ debug "Quitting";
+ $res = HTTP::Response->new(200, "OK", [Connection => 'close'], "quit");
+ $quitserver = 1;
} else {
- my $q = CGI->new($r->uri->query);
+ eval $expression
+ if $expression;
+ warn "eval: $@" if $@;
+ $log .= "Request:\n" . $r->as_string . "\n";
+ if ($location =~ m!^/redirect/(.*)$!) {
+ $res = HTTP::Response->new(302);
+ $res->header('location', $d->url . $1);
+ } elsif ($location =~ m!^/error/notfound/(.*)$!) {
+ $res = HTTP::Response->new(404, "Not found", [Connection => 'close']);
+ } elsif ($location =~ m!^/error/timeout/(\d+)$!) {
+ sleep $1;
+ $res = HTTP::Response->new(599, "Timeout reached", [Connection => 'close']);
+ } elsif ($location =~ m!^/error/close/(\d+)$!) {
+ sleep $1;
+ $res = undef;
+ } elsif ( $location =~ m!^/chunks!) {
+ my $count = 5;
+ $res = HTTP::Response->new(200, "OK", undef, sub {
+ sleep 1;
+ my $buf = 'x' x 16;
+ return $buf if $count-- > 0;
+ return undef; # done
+ });
+ } elsif ($location =~ m!^/error/after_headers$!) {
+ my $count = 2;
+ $res = HTTP::Response->new(200, "OK", undef, sub {
+ sleep 1;
+ my $buf = 'x' x 16;
+ return $buf if $count-- > 0;
+ die "Planned error after headers";
+ });
+ } else {
+ my $q = CGI->new($r->uri->query);
- # Make sticky form fields
- my ($query,$session,%cat);
- $query = defined $q->param('query') ? $q->param('query') : "(empty)";
- $session = defined $q->param('session') ? $q->param('session') : 1;
- %cat = map { $_ => 1 } (defined $q->param('cat') ? $q->param('cat') : qw( cat_foo cat_bar ));
- my @categories = map { $cat{$_} ? "checked" : "" } qw( cat_foo cat_bar cat_baz );
- $res = HTTP::Response->new(200, "OK", undef, sprintf($body,$location,$session,$query,@categories));
- $res->content_type('text/html; charset=utf8');
- debug "Request " . ($r->uri->path || "/");
- if ( $location eq '/quit_server') {
- debug "Quitting";
- $c->force_last_request;
- $quitserver = 1;
- close LOG;
+ # Make sticky form fields
+ my ($query,$session,%cat);
+ $query = defined $q->param('query') ? $q->param('query') : "(empty)";
+ $session = defined $q->param('session') ? $q->param('session') : 1;
+ %cat = map { $_ => 1 } (defined $q->param('cat') ? $q->param('cat') : qw( cat_foo cat_bar ));
+ my @categories = map { $cat{$_} ? "checked" : "" } qw( cat_foo cat_bar cat_baz );
+ (my $h = $r->headers->{host}) =~ s/:\d+//;
+ my $rbody = sprintf $body,$location,$session,$query,@categories;
+ $res = HTTP::Response->new(200, "OK", [
+ "Set-Cookie" => $q->cookie(-name => 'log-server',-value=>'shazam2', -discard=>1,),
+ 'Cache-Control' => 'no-cache',
+ 'Pragma' => 'no-cache',
+ 'Max-Age' => 0,
+ 'Connection' => 'close',
+ 'Content-Length' => length($rbody),
+ ], $rbody);
+ $res->content_type('text/html');
+ debug "Request " . ($r->uri->path || "/");
};
};
- debug "Response:\n" . $res->as_string;
- $c->send_response($res);
+ debug "Response:\n" . $res->as_string
+ if $res;
+ eval {
+ $c->send_response($res)
+ if $res;
+ };
+ if (my $err = $@) {
+ debug "Server raised error: $err";
+ if ($err !~ /^Planned error\b/) {
+ warn $err;
+ };
+ $c->close;
+ };
+ if (! $res) {
+ $c->close;
+ };
last if $quitserver;
}
- $c->close;
+ sleep 1;
undef($c);
last SERVERLOOP
if $quitserver;
@@ -77,7 +146,7 @@ END { debug "Server stopped" };
__DATA__
<html>
<head>
-<title>WWW::Mechanize::Shell test page</title>
+<title>WWW::Mechanize test page</title>
</head>
<body>
<h1>Location: %s</h1>
diff --git a/t/local/reload.t b/t/local/reload.t
index 1d04713..60e7373 100644
--- a/t/local/reload.t
+++ b/t/local/reload.t
@@ -31,13 +31,13 @@ FIRST_GET: {
isa_ok( $r, 'HTTP::Response' );
ok( $r->is_success, 'Get google webpage');
ok( $agent->is_html, 'Valid HTML' );
- is( $agent->title, 'WWW::Mechanize::Shell test page' );
+ is( $agent->title, 'WWW::Mechanize test page' );
}
INVALIDATE: {
undef $agent->{content};
undef $agent->{ct};
- isnt( $agent->title, 'WWW::Mechanize::Shell test page' );
+ isnt( $agent->title, 'WWW::Mechanize test page' );
ok( !$agent->is_html, 'Not HTML' );
}
@@ -45,7 +45,7 @@ RELOAD: {
my $r = $agent->reload;
isa_ok( $r, 'HTTP::Response' );
ok( $agent->is_html, 'Valid HTML' );
- ok( $agent->title, 'WWW::Mechanize::Shell test page' );
+ ok( $agent->title, 'WWW::Mechanize test page' );
}
SKIP: {
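
The C<< eval => >> option added to C<spawn> (passed to C<log-server> as C<-e> and evaluated once per request) makes it straightforward to simulate a slow server. A sketch, with the timeout value and test boilerplate as assumptions:

    use strict;
    use warnings;
    use Test::More tests => 1;
    use lib 't/local';
    use LocalServer;
    use WWW::Mechanize;

    # 'sleep+10' avoids shell-special characters, as the POD above recommends.
    my $slow = LocalServer->spawn( eval => 'sleep+10' );

    my $mech     = WWW::Mechanize->new( timeout => 2, autocheck => 0 );
    my $response = $mech->get( $slow->url );
    ok( !$response->is_success, 'request against the slow server timed out' );

    # stop() blocks until the in-flight slow request has finished.
    $slow->stop;

If the server were genuinely wedged, the newly added C<< $server->kill >> would be the way out, at the cost of losing the in-memory request log.
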
commit 5b8d7477e7e744df9a685ef56bed08cf7f2567ff
Author: Jason May <jasonmay at bestpractical.com>
Date: Mon Aug 1 17:48:14 2011 -0400
Checking in changes prior to tagging of version 1.69_01.
Changelog diff is:
diff --git a/Changes b/Changes
index dc5df0d..018c62e 100644
--- a/Changes
+++ b/Changes
@@ -5,6 +5,9 @@ http://groups.google.com/group/www-mechanize-users
[CHANGES]
+1.69_01
+========================================
+[INTERNALS]
The test suite for the local tests was updated
1.68 Fri Apr 22 01:10:40 EST 2011
diff --git a/.shipit b/.shipit
index 44c02c1..0d9261c 100644
--- a/.shipit
+++ b/.shipit
@@ -1,8 +1,7 @@
# auto-generated shipit config file.
-steps = FindVersion, ChangeVersion, CheckChangeLog, DistTest, Commit, Tag, MakeDist, UploadCPAN, Twitter
+steps = FindVersion, ChangeVersion, CheckChangeLog, DistTest, Commit, Tag, MakeDist, UploadCPAN
git.tagpattern = %v
-twitter.config = ~/.twitterrc
# svn.tagpattern = MyProj-%v
# svn.tagpattern = http://code.example.com/svn/tags/MyProj-%v
diff --git a/Changes b/Changes
index dc5df0d..018c62e 100644
--- a/Changes
+++ b/Changes
@@ -5,6 +5,9 @@ http://groups.google.com/group/www-mechanize-users
[CHANGES]
+1.69_01
+========================================
+[INTERNALS]
The test suite for the local tests was updated
1.68 Fri Apr 22 01:10:40 EST 2011
diff --git a/lib/WWW/Mechanize.pm b/lib/WWW/Mechanize.pm
index 4ea1765..7880cc6 100644
--- a/lib/WWW/Mechanize.pm
+++ b/lib/WWW/Mechanize.pm
@@ -10,7 +10,7 @@ Version 1.66
=cut
-our $VERSION = '1.68';
+our $VERSION = '1.69_01';
=head1 SYNOPSIS
-----------------------------------------------------------------------
More information about the Bps-public-commit
mailing list