# LWP::RobotUA — bundled copy from catagits/Gitalist's built local::lib
# (local-lib5/lib/perl5/LWP/RobotUA.pm).
package LWP::RobotUA;

use strict;
use warnings;

require LWP::UserAgent;
our @ISA = qw(LWP::UserAgent);
our $VERSION = "5.827";

require WWW::RobotRules;
require HTTP::Request;
require HTTP::Response;

use Carp ();
use HTTP::Status ();
use HTTP::Date qw(time2str);
16
17#
18# Additional attributes in addition to those found in LWP::UserAgent:
19#
20# $self->{'delay'} Required delay between request to the same
21# server in minutes.
22#
23# $self->{'rules'} A WWW::RobotRules object
24#
25
# Construct a robot user agent.  Accepts either a flat list of
# key/value options (agent, from, delay, use_sleep, rules, plus any
# LWP::UserAgent option) or the legacy positional form
# ($agent, $from, $rules).  The agent name and a from address
# containing '@' are mandatory.
sub new
{
    my $class = shift;

    my %cnf;
    if (@_ < 4) {
        # Legacy positional arguments.
        @cnf{qw(agent from rules)} = @_;
    }
    else {
        %cnf = @_;
    }

    Carp::croak('LWP::RobotUA agent required') unless $cnf{agent};
    Carp::croak('LWP::RobotUA from address required')
        unless $cnf{from} && $cnf{from} =~ m/\@/;

    # Strip out the RobotUA-specific options before handing the rest
    # to the LWP::UserAgent constructor.
    my $delay = delete $cnf{delay} || 1;
    my $use_sleep = delete $cnf{use_sleep};
    $use_sleep = 1 unless defined $use_sleep;
    my $rules = delete $cnf{rules};

    my $self = bless LWP::UserAgent->new(%cnf), $class;

    $self->{'delay'} = $delay;          # minimum delay, in minutes
    $self->{'use_sleep'} = $use_sleep;

    if ($rules) {
        # Caller-supplied rules object; make sure it knows our name.
        $rules->agent($cnf{agent});
        $self->{'rules'} = $rules;
    }
    else {
        $self->{'rules'} = WWW::RobotRules->new($cnf{agent});
    }

    return $self;
}
63
64
# Get/set the minimum delay (in minutes) between requests to one server.
sub delay
{
    my $self = shift;
    return $self->_elem('delay', @_);
}

# Get/set whether to sleep() when a request would arrive too early.
sub use_sleep
{
    my $self = shift;
    return $self->_elem('use_sleep', @_);
}
67
68
# Get/set the agent name, returning the previous value.  Changing the
# name invalidates our cached robots.txt knowledge, so the rules
# object is told to start fresh.
sub agent
{
    my $self = shift;
    my $previous = $self->SUPER::agent(@_);
    $self->{'rules'}->agent($self->{'agent'}) if @_;
    return $previous;
}
79
80
# Get/set the WWW::RobotRules object in use, returning the previous
# one.  A newly installed rules object is synced to our agent name.
sub rules
{
    my $self = shift;
    my $previous = $self->_elem('rules', @_);
    $self->{'rules'}->agent($self->{'agent'}) if @_;
    return $previous;
}
87
88
# Number of documents fetched from the given server host; 0 if the
# host has never been visited.
sub no_visits
{
    my ($self, $netloc) = @_;
    return $self->{'rules'}->no_visits($netloc) || 0;
}

*host_count = \&no_visits;  # backwards compatibility with LWP-5.02
96
97
# Number of seconds (from now) to wait before the given host may be
# contacted again; 0 if a request may be sent immediately, undef when
# no netloc is supplied.
sub host_wait
{
    my ($self, $netloc) = @_;
    return undef unless defined $netloc;

    my $last_visit = $self->{'rules'}->last_visit($netloc);
    return 0 unless $last_visit;

    # Seconds still outstanding of the configured per-host delay.
    my $remaining = int($self->{'delay'} * 60 - (time - $last_visit));
    return $remaining > 0 ? $remaining : 0;
}
110
111
# Issue a single request, but first enforce robot etiquette: fetch and
# obey the host's robots.txt, and respect the per-host minimum delay
# (either by sleeping or by returning a synthetic 503).  Returns an
# HTTP::Response, which for disallowed or too-early requests is
# generated locally without touching the network.
sub simple_request
{
    my ($self, $request, $arg, $size) = @_;

    # Do we try to access a new server?
    my $allowed = $self->{'rules'}->allowed($request->uri);

    if ($allowed < 0) {
        # Host not visited before, or robots.txt expired; fetch "robots.txt"
        my $robot_url = $request->uri->clone;
        $robot_url->path("robots.txt");
        $robot_url->query(undef);

        # Make access to robots.txt legal since this will be a recursive call
        $self->{'rules'}->parse($robot_url, "");

        my $robot_req = HTTP::Request->new(GET => $robot_url);
        my $robot_res = $self->request($robot_req);
        my $fresh_until = $robot_res->fresh_until;

        # Only trust the response body if the fetch succeeded and it
        # looks like a genuine robots.txt file; otherwise record an
        # empty (allow-everything) rule set until $fresh_until.
        my $rules_text = "";
        if ($robot_res->is_success) {
            my $c = $robot_res->content;
            if ($robot_res->content_type =~ m,^text/, && $c =~ /^\s*Disallow\s*:/mi) {
                $rules_text = $c;
            }
        }
        $self->{'rules'}->parse($robot_url, $rules_text, $fresh_until);

        # Recalculate allowed with the freshly parsed rules.
        $allowed = $self->{'rules'}->allowed($request->uri);
    }

    # Forbidden by the rules: synthesize a 403 locally.
    unless ($allowed) {
        my $res = HTTP::Response->new(
            HTTP::Status::RC_FORBIDDEN(), 'Forbidden by robots.txt');
        $res->request($request);  # bind it to that request
        return $res;
    }

    # host_port can die for schemes without a host part; treat that as
    # "no netloc" rather than propagating the exception.
    my $netloc = eval { local $SIG{__DIE__}; $request->uri->host_port; };
    my $wait = $self->host_wait($netloc);

    if ($wait) {
        if ($self->{'use_sleep'}) {
            sleep($wait);
        }
        else {
            # Tell the caller when it is OK to try again.
            my $res = HTTP::Response->new(
                HTTP::Status::RC_SERVICE_UNAVAILABLE(), 'Please, slow down');
            $res->header('Retry-After', time2str(time + $wait));
            $res->request($request);  # bind it to that request
            return $res;
        }
    }

    # Perform the request and record the visit for future delay checks.
    my $res = $self->SUPER::simple_request($request, $arg, $size);
    $self->{'rules'}->visit($netloc);

    return $res;
}
180
181
# Build a human-readable summary of this robot's state, one item per
# line with a trailing newline.  Mainly useful for debugging.
sub as_string
{
    my $self = shift;
    my @lines = (
        "Robot: $self->{'agent'} operated by $self->{'from'} [$self]",
        " Minimum delay: " . int($self->{'delay'} * 60) . "s",
    );
    push @lines, " Will sleep if too early" if $self->{'use_sleep'};
    push @lines, " Rules = $self->{'rules'}";
    return join("\n", @lines, '');
}
192
1931;
194
195
196__END__
197
198=head1 NAME
199
200LWP::RobotUA - a class for well-behaved Web robots
201
202=head1 SYNOPSIS
203
204 use LWP::RobotUA;
205 my $ua = LWP::RobotUA->new('my-robot/0.1', 'me@foo.com');
206 $ua->delay(10); # be very nice -- max one hit every ten minutes!
207 ...
208
 209 # Then use it just like a normal LWP::UserAgent:
210 my $response = $ua->get('http://whatever.int/...');
211 ...
212
213=head1 DESCRIPTION
214
215This class implements a user agent that is suitable for robot
216applications. Robots should be nice to the servers they visit. They
217should consult the F</robots.txt> file to ensure that they are welcomed
218and they should not make requests too frequently.
219
220But before you consider writing a robot, take a look at
221<URL:http://www.robotstxt.org/>.
222
223When you use a I<LWP::RobotUA> object as your user agent, then you do not
224really have to think about these things yourself; C<robots.txt> files
225are automatically consulted and obeyed, the server isn't queried
226too rapidly, and so on. Just send requests
227as you do when you are using a normal I<LWP::UserAgent>
228object (using C<< $ua->get(...) >>, C<< $ua->head(...) >>,
229C<< $ua->request(...) >>, etc.), and this
230special agent will make sure you are nice.
231
232=head1 METHODS
233
234The LWP::RobotUA is a sub-class of LWP::UserAgent and implements the
235same methods. In addition the following methods are provided:
236
237=over 4
238
239=item $ua = LWP::RobotUA->new( %options )
240
241=item $ua = LWP::RobotUA->new( $agent, $from )
242
243=item $ua = LWP::RobotUA->new( $agent, $from, $rules )
244
245The LWP::UserAgent options C<agent> and C<from> are mandatory. The
246options C<delay>, C<use_sleep> and C<rules> initialize attributes
247private to the RobotUA. If C<rules> are not provided, then
248C<WWW::RobotRules> is instantiated providing an internal database of
249F<robots.txt>.
250
251It is also possible to just pass the value of C<agent>, C<from> and
252optionally C<rules> as plain positional arguments.
253
254=item $ua->delay
255
256=item $ua->delay( $minutes )
257
258Get/set the minimum delay between requests to the same server, in
259I<minutes>. The default is 1 minute. Note that this number doesn't
260have to be an integer; for example, this sets the delay to 10 seconds:
261
262 $ua->delay(10/60);
263
264=item $ua->use_sleep
265
266=item $ua->use_sleep( $boolean )
267
268Get/set a value indicating whether the UA should sleep() if requests
269arrive too fast, defined as $ua->delay minutes not passed since
270last request to the given server. The default is TRUE. If this value is
271FALSE then an internal SERVICE_UNAVAILABLE response will be generated.
272It will have a Retry-After header that indicates when it is OK to
273send another request to this server.
274
275=item $ua->rules
276
277=item $ua->rules( $rules )
278
279Set/get which I<WWW::RobotRules> object to use.
280
281=item $ua->no_visits( $netloc )
282
283Returns the number of documents fetched from this server host. Yeah I
284know, this method should probably have been named num_visits() or
285something like that. :-(
286
287=item $ua->host_wait( $netloc )
288
289Returns the number of I<seconds> (from now) you must wait before you can
290make a new request to this host.
291
292=item $ua->as_string
293
294Returns a string that describes the state of the UA.
295Mainly useful for debugging.
296
297=back
298
299=head1 SEE ALSO
300
301L<LWP::UserAgent>, L<WWW::RobotRules>
302
303=head1 COPYRIGHT
304
305Copyright 1996-2004 Gisle Aas.
306
307This library is free software; you can redistribute it and/or
308modify it under the same terms as Perl itself.