# Template build file for 'perl-WWW-RobotRules'.
pkgname=perl-WWW-RobotRules
version=6.02
revision=1
wrksrc="WWW-RobotRules-$version"
build_style=perl-module
hostmakedepends="perl>=5.18"
makedepends="${hostmakedepends} perl-URI"
depends="${makedepends}"
noarch="yes"
short_desc="WWW::RobotRules - database of robots.txt-derived permissions"
maintainer="davehome "
homepage="http://search.cpan.org/dist/WWW-RobotRules"
license="GPL-2"
distfiles="${CPAN_SITE}/WWW/WWW-RobotRules-$version.tar.gz"
checksum=46b502e7a288d559429891eeb5d979461dd3ecc6a5c491ead85d165b6e03a51e
long_desc="
 This module parses /robots.txt files as specified in A Standard for
 Robot Exclusion, at http://www.robotstxt.org/wc/norobots.html

 Webmasters can use the /robots.txt file to forbid conforming robots
 from accessing parts of their web site.

 The parsed files are kept in a WWW::RobotRules object, and this object
 provides methods to check if access to a given URL is prohibited. The
 same WWW::RobotRules object can be used for one or more parsed
 /robots.txt files on any number of hosts."
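
# A minimal usage sketch of the packaged module, assuming the standard
# WWW::RobotRules API (new/parse/allowed); the agent name and URLs below
# are hypothetical:
#
#   use WWW::RobotRules;
#   use LWP::Simple qw(get);
#
#   # Create a rules object identified by this robot's user-agent string.
#   my $rules = WWW::RobotRules->new('MyBot/1.0');
#
#   # Fetch a host's /robots.txt and feed it to the rules object.
#   my $url = 'http://example.com/robots.txt';
#   my $robots_txt = get($url);
#   $rules->parse($url, $robots_txt) if defined $robots_txt;
#
#   # Check whether a given URL on that host may be visited.
#   print "fetch allowed\n" if $rules->allowed('http://example.com/page.html');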