New package: perl-WWW-RobotRules-6.01.
parent 8a09c82bb2
commit 12a0dafbaa
1 changed file with 26 additions and 0 deletions
srcpkgs/perl-WWW-RobotRules/template (new file, 26 lines)

@@ -0,0 +1,26 @@
# Template build file for 'perl-WWW-RobotRules'.
pkgname=perl-WWW-RobotRules
version=6.01
wrksrc="WWW-RobotRules-$version"
distfiles="${CPAN_SITE}/WWW/WWW-RobotRules-$version.tar.gz"
build_style=perl_module
short_desc="WWW::RobotRules - database of robots.txt-derived permissions"
maintainer="davehome <davehome@redthumb.info.tm>"
homepage="http://search.cpan.org/~gaas/WWW-RobotRules-6.01/lib/WWW/RobotRules.pm"
license="GPL-2"
checksum=f817e3e982c9d869c7796bcb5737c3422c2272355424acd162d0f3b132bec9d3
long_desc="
 This module parses /robots.txt files as specified in
 A Standard for Robot Exclusion, at http://www.robotstxt.org/wc/norobots.html.
 Webmasters can use the /robots.txt file to forbid conforming robots from
 accessing parts of their web site.

 The parsed files are kept in a WWW::RobotRules object, and this object
 provides methods to check if access to a given URL is prohibited. The same
 WWW::RobotRules object can be used for one or more parsed /robots.txt files
 on any number of hosts."

noarch=yes

Add_dependency full perl-URI
Add_dependency full perl
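
For context, typical use of the packaged module follows the WWW::RobotRules synopsis; the sketch below is illustrative only (the agent name and example.org URLs are placeholders, not part of this package):

    use WWW::RobotRules;
    use LWP::Simple qw(get);

    # One WWW::RobotRules object can hold parsed rules for any number of hosts.
    my $rules = WWW::RobotRules->new('MyBot/1.0');

    # Fetch and parse a site's /robots.txt.
    my $robots_url = "http://example.org/robots.txt";
    my $robots_txt = get($robots_url);
    $rules->parse($robots_url, $robots_txt) if defined $robots_txt;

    # Check whether the named agent may fetch a given URL.
    if ($rules->allowed("http://example.org/private/page.html")) {
        # safe to fetch
    }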