void-packages/srcpkgs/perl-WWW-RobotRules/template
# Template build file for 'perl-WWW-RobotRules'.
pkgname=perl-WWW-RobotRules
version=6.02
revision=1
wrksrc="WWW-RobotRules-$version"
build_style=perl-module
hostmakedepends="perl>=5.18"
makedepends="${hostmakedepends} perl-URI"
short_desc="WWW::RobotRules - database of robots.txt-derived permissions"
maintainer="davehome <davehome@redthumb.info.tm>"
homepage="http://search.cpan.org/dist/WWW-RobotRules"
license="Artistic-1.0-Perl, GPL-1.0-or-later"
distfiles="${CPAN_SITE}/WWW/WWW-RobotRules-$version.tar.gz"
checksum=46b502e7a288d559429891eeb5d979461dd3ecc6a5c491ead85d165b6e03a51e
long_desc="
 This module parses /robots.txt files as specified in 'A Standard for
 Robot Exclusion' (http://www.robotstxt.org/wc/norobots.html).
 Webmasters can use the /robots.txt file to forbid conforming robots
 from accessing parts of their web site.

 The parsed files are kept in a WWW::RobotRules object, and this object
 provides methods to check if access to a given URL is prohibited. The
 same WWW::RobotRules object can be used for one or more parsed
 /robots.txt files on any number of hosts."
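
# Illustrative usage sketch for the packaged module, kept as a comment since
# this file is a build template. A minimal example assuming a hypothetical
# robot name and host; none of these identifiers come from the package itself:
#
#   use WWW::RobotRules;
#   use LWP::Simple qw(get);
#
#   my $rules = WWW::RobotRules->new('MyBot/1.0');   # hypothetical robot name
#   my $url   = 'http://example.com/robots.txt';     # hypothetical host
#   if (defined(my $txt = get($url))) {
#       $rules->parse($url, $txt);                   # store the parsed rules
#   }
#   print "fetch allowed\n"
#       if $rules->allowed('http://example.com/some/page.html');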
perl-WWW-RobotRules_package() {
	depends="${makedepends}"
	noarch="yes"
	pkg_install() {
		vmove all
	}
}