Merge branch 'master' into core-updates
diff --git a/gnu/packages/machine-learning.scm b/gnu/packages/machine-learning.scm
index d9b54d6..fb511b4 100644
--- a/gnu/packages/machine-learning.scm
+++ b/gnu/packages/machine-learning.scm
@@ -6,6 +6,7 @@
 ;;; Copyright © 2018 Tobias Geerinckx-Rice <me@tobias.gr>
 ;;; Copyright © 2018 Mark Meyer <mark@ofosos.org>
 ;;; Copyright © 2018 Ben Woodcroft <donttrustben@gmail.com>
+;;; Copyright © 2018 Fis Trivial <ybbs.daans@hotmail.com>
 ;;;
 ;;; This file is part of GNU Guix.
 ;;;
@@ -689,3 +690,48 @@ mining and data analysis.")
 
 (define-public python2-scikit-learn
   (package-with-python2 python-scikit-learn))
+
+(define-public python-autograd
+  (let* ((commit "442205dfefe407beffb33550846434baa90c4de7")
+         (revision "0")
+         (version (git-version "0.0.0" revision commit)))
+    (package
+      (name "python-autograd")
+      (version version)
+      (home-page "https://github.com/HIPS/autograd")
+      (source (origin
+                (method git-fetch)
+                (uri (git-reference
+                      (url home-page)
+                      (commit commit)))
+                (sha256
+                 (base32
+                  "189sv2xb0mwnjawa9z7mrgdglc1miaq93pnck26r28fi1jdwg0z4"))
+                (file-name (git-file-name name version))))
+      (build-system python-build-system)
+      (native-inputs
+       `(("python-nose" ,python-nose)
+         ("python-pytest" ,python-pytest)))
+      (propagated-inputs
+       `(("python-future" ,python-future)
+         ("python-numpy" ,python-numpy)))
+      (arguments
+       `(#:phases (modify-phases %standard-phases
+                    (replace 'check
+                      (lambda _
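+                        ;; Run the test suite with py.test rather than the
+                        ;; default "setup.py test".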
+                        (invoke "py.test" "-v"))))))
+      (synopsis "Efficiently computes derivatives of NumPy code")
+      (description "Autograd can automatically differentiate native Python and
+NumPy code.  It can handle a large subset of Python's features, including
+loops, conditionals, recursion, and closures, and it can even take derivatives
+of derivatives of derivatives.  It supports reverse-mode differentiation
+(also known as backpropagation), which means it can efficiently take gradients
+of scalar-valued functions with respect to array-valued arguments, as well as
+forward-mode differentiation, and the two can be composed arbitrarily.  The
+main intended application of Autograd is gradient-based optimization.")
+      (license license:expat))))
+
+(define-public python2-autograd
+  (package-with-python2 python-autograd))
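
For reference, a minimal usage sketch of the packaged library, based on
Autograd's documented API (the printed values are approximate):

  import autograd.numpy as np   # thinly wrapped NumPy
  from autograd import grad

  def tanh(x):
      y = np.exp(-2.0 * x)
      return (1.0 - y) / (1.0 + y)

  grad_tanh = grad(tanh)          # reverse-mode gradient of a scalar function
  print(grad_tanh(1.0))           # ~0.419974, i.e. 1 - tanh(1)**2

  # "Derivatives of derivatives": grad composes with itself.
  print(grad(grad(tanh))(1.0))    # second derivative of tanh at 1.0

Once this change is applied, the snapshot can be built and tried with
"guix build python-autograd" (or "python2-autograd" for the Python 2 variant).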