perl-WWW-RobotRules: remove long_desc.
parent 697a262ff1
commit 22bf775f9c
1 changed file with 2 additions and 12 deletions
@@ -1,8 +1,8 @@
 # Template build file for 'perl-WWW-RobotRules'.
 pkgname=perl-WWW-RobotRules
 version=6.02
-revision=1
-wrksrc="WWW-RobotRules-$version"
+revision=2
+wrksrc="${pkgname/perl-/}-${version}"
 build_style=perl-module
 hostmakedepends="perl>=5.18"
 makedepends="${hostmakedepends} perl-URI"
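The new wrksrc relies on shell parameter substitution: with pkgname=perl-WWW-RobotRules and version=6.02 as set above, ${pkgname/perl-/}-${version} expands to WWW-RobotRules-6.02, the same directory the old hard-coded value named.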
@@ -14,13 +14,3 @@ homepage="http://search.cpan.org/dist/WWW-RobotRules"
 license="GPL-2"
 distfiles="${CPAN_SITE}/WWW/WWW-RobotRules-$version.tar.gz"
 checksum=46b502e7a288d559429891eeb5d979461dd3ecc6a5c491ead85d165b6e03a51e
-long_desc="
- This module parses /robots.txt files as specified in
- A Standard for Robot Exclusion, at http://www.robotstxt.org/wc/norobots.html
- Webmasters can use the /robots.txt file to forbid conforming robots from
- accessing parts of their web site.
-
- The parsed files are kept in a WWW::RobotRules object, and this object
- provides methods to check if access to a given URL is prohibited. The same
- WWW::RobotRules object can be used for one or more parsed /robots.txt files
- on any number of hosts."
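The removed long_desc describes the module's behaviour; for reference, a minimal usage sketch in the spirit of the module's CPAN synopsis looks like this (the user-agent name and URLs below are placeholders):

use strict;
use warnings;
use WWW::RobotRules;
use LWP::Simple qw(get);

# Identify the crawler by its User-Agent name (placeholder name).
my $rules = WWW::RobotRules->new('ExampleBot/1.0');

# Fetch and parse a site's /robots.txt (placeholder host).
my $robots_url = 'http://www.example.com/robots.txt';
my $robots_txt = get($robots_url);
$rules->parse($robots_url, $robots_txt) if defined $robots_txt;

# Ask the parsed rules whether a given URL may be fetched.
my $url = 'http://www.example.com/some/page.html';
print "allowed\n" if $rules->allowed($url);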