# Template build file for 'perl-WWW-RobotRules'.
pkgname=perl-WWW-RobotRules
version=6.01
wrksrc="WWW-RobotRules-$version"
distfiles="${CPAN_SITE}/WWW/WWW-RobotRules-$version.tar.gz"
build_style=perl-module
fulldepends="perl-URI perl"
revision=1
short_desc="WWW::RobotRules - database of robots.txt-derived permissions"
maintainer="davehome "
homepage="http://search.cpan.org/~gaas/WWW-RobotRules-6.01/lib/WWW/RobotRules.pm"
license="GPL-2"
checksum=f817e3e982c9d869c7796bcb5737c3422c2272355424acd162d0f3b132bec9d3
long_desc="
 This module parses /robots.txt files as specified in A Standard for Robot
 Exclusion, at http://www.robotstxt.org/wc/norobots.html

 Webmasters can use the /robots.txt file to forbid conforming robots from
 accessing parts of their web site.

 The parsed files are kept in a WWW::RobotRules object, and this object
 provides methods to check if access to a given URL is prohibited. The same
 WWW::RobotRules object can be used for one or more parsed /robots.txt files
 on any number of hosts."

noarch=yes