From: Efraim Flashner
Date: Tue, 21 Jul 2020 09:04:15 +0000 (+0300)
Subject: gnu: Add python-robot-detection.
X-Git-Url: https://git.hcoop.net/jackhill/guix/guix.git/commitdiff_plain/97a01170398055f3fcb61e844c3314d8d5aec5dd

gnu: Add python-robot-detection.

* gnu/packages/python-web.scm (python-robot-detection): New variable.
---

diff --git a/gnu/packages/python-web.scm b/gnu/packages/python-web.scm
index 338d73d1a9..5a2fa1ee7c 100644
--- a/gnu/packages/python-web.scm
+++ b/gnu/packages/python-web.scm
@@ -4346,3 +4346,25 @@ conflicts detected by that mechanism.")
     (description "This package contains a generic transaction implementation
 for Python.  It is mainly used by the ZODB.")
     (license license:zpl2.1)))
+
+(define-public python-robot-detection
+  (package
+    (name "python-robot-detection")
+    (version "0.4")
+    (source
+     (origin
+       (method url-fetch)
+       (uri (pypi-uri "robot-detection" version))
+       (sha256
+        (base32
+         "1xd2jm3yn31bnk1kqzggils2rxj26ylxsfz3ap7bhr3ilhnbg3rx"))))
+    (build-system python-build-system)
+    (arguments '(#:tests? #f)) ; Tests not shipped in PyPI release.
+    (propagated-inputs `(("python-six" ,python-six)))
+    (home-page "https://github.com/rory/robot-detection")
+    (synopsis "Detect web crawlers")
+    (description
+     "@code{robot_detection} is a Python module to detect if a given HTTP User
+Agent is a web crawler.  It uses the list of registered robots from
+@url{http://www.robotstxt.org}.")
+    (license license:gpl3+)))
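
For reference, the packaged module exposes a single predicate.  Below is a
minimal usage sketch in Python, assuming the "is_robot" entry point described
in the upstream README; the function name and the specific User-Agent string
are taken from upstream documentation, not from this patch:

    import robot_detection

    # A User-Agent string registered as a crawler at robotstxt.org
    # (assumed to be present in the downloaded robot database).
    ua = "Googlebot/2.1 (+http://www.google.com/bot.html)"

    # is_robot() returns True when the string matches a known robot.
    if robot_detection.is_robot(ua):
        print("known web crawler")
    else:
        print("not a registered robot")

Once the patch is applied, the new package can be built locally with
"guix build python-robot-detection".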