# Template build file for 'perl-WWW-RobotRules'.
pkgname=perl-WWW-RobotRules
version=6.02
revision=1
wrksrc="WWW-RobotRules-$version"
build_style=perl-module
hostmakedepends="perl>=5.18"
makedepends="${hostmakedepends} perl-URI"
depends="${makedepends}"
noarch="yes"
short_desc="WWW::RobotRules - database of robots.txt-derived permissions"
maintainer="davehome <davehome@redthumb.info.tm>"
homepage="http://search.cpan.org/dist/WWW-RobotRules"
license="GPL-2"
distfiles="${CPAN_SITE}/WWW/WWW-RobotRules-$version.tar.gz"
checksum=46b502e7a288d559429891eeb5d979461dd3ecc6a5c491ead85d165b6e03a51e
long_desc="
This module parses /robots.txt files as specified in
"A Standard for Robot Exclusion", at http://www.robotstxt.org/wc/norobots.html.
Webmasters can use the /robots.txt file to forbid conforming robots from
accessing parts of their web site.

The parsed files are kept in a WWW::RobotRules object, and this object
provides methods to check if access to a given URL is prohibited. The same
WWW::RobotRules object can be used for one or more parsed /robots.txt files
on any number of hosts."
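
# A minimal usage sketch of the packaged module (illustrative only, not part
# of the build). new(), parse() and allowed() are the documented
# WWW::RobotRules methods; the agent name, URL and rules are made up:
#
#   use WWW::RobotRules;
#
#   my $rules = WWW::RobotRules->new('ExampleBot/1.0');  # robot's User-Agent
#   $rules->parse('http://example.com/robots.txt',
#                 "User-agent: *\nDisallow: /private/\n");
#   print $rules->allowed('http://example.com/private/x') ? "yes" : "no";  # no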