1 ;;; GNU Guix --- Functional package management for GNU
2 ;;; Copyright © 2015, 2016, 2017, 2018, 2019, 2020 Ricardo Wurmus <rekado@elephly.net>
3 ;;; Copyright © 2016, 2020 Efraim Flashner <efraim@flashner.co.il>
4 ;;; Copyright © 2016, 2017, 2020 Marius Bakke <mbakke@fastmail.com>
5 ;;; Copyright © 2016 Hartmut Goebel <h.goebel@crazy-compilers.com>
6 ;;; Copyright © 2018, 2019, 2020 Tobias Geerinckx-Rice <me@tobias.gr>
7 ;;; Copyright © 2018 Kei Kebreau <kkebreau@posteo.net>
8 ;;; Copyright © 2018 Mark Meyer <mark@ofosos.org>
9 ;;; Copyright © 2018 Ben Woodcroft <donttrustben@gmail.com>
10 ;;; Copyright © 2018 Fis Trivial <ybbs.daans@hotmail.com>
11 ;;; Copyright © 2018 Julien Lepiller <julien@lepiller.eu>
12 ;;; Copyright © 2018 Björn Höfling <bjoern.hoefling@bjoernhoefling.de>
13 ;;; Copyright © 2019 Nicolas Goaziou <mail@nicolasgoaziou.fr>
14 ;;; Copyright © 2019, 2020 Guillaume Le Vaillant <glv@posteo.net>
15 ;;; Copyright © 2019 Brett Gilio <brettg@gnu.org>
16 ;;; Copyright © 2020 Konrad Hinsen <konrad.hinsen@fastmail.net>
17 ;;; Copyright © 2020 Edouard Klein <edk@beaver-labs.com>
18 ;;; Copyright © 2020 Vinicius Monego <monego@posteo.net>
20 ;;; This file is part of GNU Guix.
22 ;;; GNU Guix is free software; you can redistribute it and/or modify it
23 ;;; under the terms of the GNU General Public License as published by
24 ;;; the Free Software Foundation; either version 3 of the License, or (at
25 ;;; your option) any later version.
27 ;;; GNU Guix is distributed in the hope that it will be useful, but
28 ;;; WITHOUT ANY WARRANTY; without even the implied warranty of
29 ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
30 ;;; GNU General Public License for more details.
32 ;;; You should have received a copy of the GNU General Public License
33 ;;; along with GNU Guix. If not, see <http://www.gnu.org/licenses/>.
;; Module declaration for (gnu packages machine-learning).  Pulls in the Guix
;; core (packages, downloads, build systems: cmake/gnu/ocaml/python/r) plus the
;; (gnu packages ...) modules whose packages are used as inputs below, and
;; (ice-9 match) for per-architecture dispatch in package arguments.
;; NOTE(review): each line carries a fused line-number prefix (extraction
;; artifact) — the file as shown is not loadable Scheme; restore from upstream.
35 (define-module (gnu packages machine-learning)
36 #:use-module ((guix licenses) #:prefix license:)
37 #:use-module (guix packages)
38 #:use-module (guix utils)
39 #:use-module (guix download)
40 #:use-module (guix svn-download)
41 #:use-module (guix build-system cmake)
42 #:use-module (guix build-system gnu)
43 #:use-module (guix build-system ocaml)
44 #:use-module (guix build-system python)
45 #:use-module (guix build-system r)
46 #:use-module (guix git-download)
47 #:use-module (gnu packages)
48 #:use-module (gnu packages adns)
49 #:use-module (gnu packages algebra)
50 #:use-module (gnu packages audio)
51 #:use-module (gnu packages autotools)
52 #:use-module (gnu packages base)
53 #:use-module (gnu packages bash)
54 #:use-module (gnu packages boost)
55 #:use-module (gnu packages check)
56 #:use-module (gnu packages compression)
57 #:use-module (gnu packages cmake)
58 #:use-module (gnu packages cran)
59 #:use-module (gnu packages databases)
60 #:use-module (gnu packages dejagnu)
61 #:use-module (gnu packages gcc)
62 #:use-module (gnu packages glib)
63 #:use-module (gnu packages graphviz)
64 #:use-module (gnu packages gstreamer)
65 #:use-module (gnu packages image)
66 #:use-module (gnu packages linux)
67 #:use-module (gnu packages maths)
68 #:use-module (gnu packages mpi)
69 #:use-module (gnu packages ocaml)
70 #:use-module (gnu packages onc-rpc)
71 #:use-module (gnu packages perl)
72 #:use-module (gnu packages pkg-config)
73 #:use-module (gnu packages protobuf)
74 #:use-module (gnu packages python)
75 #:use-module (gnu packages python-check)
76 #:use-module (gnu packages python-science)
77 #:use-module (gnu packages python-web)
78 #:use-module (gnu packages python-xyz)
79 #:use-module (gnu packages rpc)
80 #:use-module (gnu packages serialization)
81 #:use-module (gnu packages sphinx)
82 #:use-module (gnu packages statistics)
83 #:use-module (gnu packages sqlite)
84 #:use-module (gnu packages swig)
85 #:use-module (gnu packages web)
86 #:use-module (gnu packages xml)
87 #:use-module (gnu packages xorg)
88 #:use-module (ice-9 match))
;; fann: Fast Artificial Neural Network library, built from a pinned git
;; commit (version "2.2.0-1.<short-commit>") with cmake-build-system; a custom
;; phase runs ./fann_tests from the tests/ subdirectory.
;; NOTE(review): fused line numbers jump (92→95→99…) — the (define-public fann
;; (package (name ...) (source (origin ...)))) scaffolding is missing from this
;; view; do not edit logic here without the complete upstream file.
91 ;; The last release is >100 commits behind, so we package from git.
92 (let ((commit "d71d54788bee56ba4cf7522801270152da5209d7"))
95 (version (string-append "2.2.0-1." (string-take commit 8)))
99 (url "https://github.com/libfann/fann")
101 (file-name (string-append name "-" version "-checkout"))
104 "0ibwpfrjs6q2lijs8slxjgzb2llcl6rk3v2ski4r6215g5jjhg3x"))))
105 (build-system cmake-build-system)
108 (modify-phases %standard-phases
110 (lambda* (#:key outputs #:allow-other-keys)
111 (let* ((out (assoc-ref outputs "out")))
112 (with-directory-excursion (string-append (getcwd) "/tests")
113 (invoke "./fann_tests"))))))))
114 (home-page "http://leenissen.dk/fann/wp/")
115 (synopsis "Fast Artificial Neural Network")
117 "FANN is a neural network library, which implements multilayer
118 artificial neural networks in C with support for both fully connected and
119 sparsely connected networks.")
120 (license license:lgpl2.1))))
;; libsvm: C library for Support Vector Machines, fetched as a release
;; tarball.  There is no upstream "check" or "install" target, so tests are
;; disabled and a custom 'install phase copies the built binaries into
;; <out>/bin/.
;; NOTE(review): interior lines are missing from this view (fused numbers
;; jump); the version field and parts of the phases are not visible.
122 (define-public libsvm
129 (uri (string-append "https://www.csie.ntu.edu.tw/~cjlin/libsvm/"
130 name "-" version ".tar.gz"))
132 (base32 "0jpaq0rr92x38p4nk3gjan79ip67m6p80anb28z1d8601miysyi5"))))
133 (build-system gnu-build-system)
135 `(#:tests? #f ; no "check" target
136 #:phases (modify-phases %standard-phases
139 'install ; no ‘install’ target
140 (lambda* (#:key outputs #:allow-other-keys)
141 (let* ((out (assoc-ref outputs "out"))
142 (bin (string-append out "/bin/")))
144 (for-each (lambda (file)
145 (copy-file file (string-append bin file)))
150 (home-page "https://www.csie.ntu.edu.tw/~cjlin/libsvm/")
151 (synopsis "Library for Support Vector Machines")
153 "LIBSVM is a machine learning library for support vector
154 classification, (C-SVC, nu-SVC), regression (epsilon-SVR, nu-SVR) and
155 distribution estimation (one-class SVM). It supports multi-class
157 (license license:bsd-3)))
;; python-libsvm: Python bindings for libSVM, inheriting from the libsvm
;; package above.  Builds only the "python" subdirectory (#:make-flags), then a
;; custom 'install phase rewrites the shared-library lookup path in
;; python/svm.py, copies the .py files into the site directory, and installs
;; libsvm.so.2 next to them.
;; NOTE(review): fused line numbers jump — the site-directory computation
;; around "(assoc-ref inputs \"python\") 5) 3)" is visibly truncated here.
159 (define-public python-libsvm
160 (package (inherit libsvm)
161 (name "python-libsvm")
162 (build-system gnu-build-system)
164 `(#:tests? #f ; no "check" target
165 #:make-flags '("-C" "python")
167 (modify-phases %standard-phases
170 'install ; no ‘install’ target
171 (lambda* (#:key inputs outputs #:allow-other-keys)
172 (let ((site (string-append (assoc-ref outputs "out")
176 (assoc-ref inputs "python") 5) 3)
178 (substitute* "python/svm.py"
179 (("../libsvm.so.2") "libsvm.so.2"))
181 (for-each (lambda (file)
182 (copy-file file (string-append site (basename file))))
183 (find-files "python" "\\.py"))
184 (copy-file "libsvm.so.2"
185 (string-append site "libsvm.so.2")))
188 `(("python" ,python)))
189 (synopsis "Python bindings of libSVM")))
;; ghmm: the General Hidden Markov Model library, built from a pinned SVN
;; revision (version "0.9-rc3-0.<rev>").  Python 2 only.  Notable phases:
;; 'enter-dir chdirs into ghmm/, 'check is moved after 'install,
;; 'fix-PYTHONPATH points tests at the installed site dir, 'fix-runpath
;; injects an -rpath into ghmmwrapper/setup.py, and 'disable-broken-tests
;; patches out tests broken upstream (see the sourceforge ticket cited below).
;; NOTE(review): many interior lines are missing from this view (e.g. the
;; package/name/source scaffolding and parts of each substitute* pattern).
192 ;; The latest release candidate is several years and a couple of fixes have
193 ;; been published since. This is why we download the sources from the SVN
195 (let ((svn-revision 2341))
198 (version (string-append "0.9-rc3-0." (number->string svn-revision)))
202 (url "http://svn.code.sf.net/p/ghmm/code/trunk")
203 (revision svn-revision)))
204 (file-name (string-append name "-" version "-checkout"))
207 "0qbq1rqp94l530f043qzp8aw5lj7dng9wq0miffd7spd1ff638wq"))))
208 (build-system gnu-build-system)
210 `(#:imported-modules (,@%gnu-build-system-modules
211 (guix build python-build-system))
212 #:modules ((guix build python-build-system)
213 ,@%gnu-build-system-modules)
215 (modify-phases %standard-phases
216 (add-after 'unpack 'enter-dir
217 (lambda _ (chdir "ghmm") #t))
219 (add-after 'install 'check
220 (assoc-ref %standard-phases 'check))
221 (add-before 'check 'fix-PYTHONPATH
222 (lambda* (#:key inputs outputs #:allow-other-keys)
223 (let ((python-version (python-version
224 (assoc-ref inputs "python"))))
226 (string-append (getenv "PYTHONPATH")
227 ":" (assoc-ref outputs "out")
228 "/lib/python" python-version
231 (add-after 'enter-dir 'fix-runpath
232 (lambda* (#:key outputs #:allow-other-keys)
233 (substitute* "ghmmwrapper/setup.py"
234 (("^(.*)extra_compile_args = \\[" line indent)
235 (string-append indent
236 "extra_link_args = [\"-Wl,-rpath="
237 (assoc-ref outputs "out") "/lib\"],\n"
240 (assoc-ref outputs "out")
243 (add-after 'enter-dir 'disable-broken-tests
245 (substitute* "tests/Makefile.am"
246 ;; GHMM_SILENT_TESTS is assumed to be a command.
247 (("TESTS_ENVIRONMENT.*") "")
248 ;; Do not build broken tests.
252 (("label_higher_order_test.*$")
253 "label_higher_order_test\n"))
255 ;; These Python unittests are broken as there is no gato.
256 ;; See https://sourceforge.net/p/ghmm/support-requests/3/
257 (substitute* "ghmmwrapper/ghmmunittests.py"
258 (("^(.*)def (testNewXML|testMultipleTransitionClasses|testNewXML)"
260 (string-append indent
261 "@unittest.skip(\"Disabled by Guix\")\n"
265 `(("python" ,python-2) ; only Python 2 is supported
266 ("libxml2" ,libxml2)))
268 `(("pkg-config" ,pkg-config)
271 ("autoconf" ,autoconf)
272 ("automake" ,automake)
273 ("libtool" ,libtool)))
274 (home-page "http://ghmm.org")
275 (synopsis "Hidden Markov Model library")
277 "The General Hidden Markov Model library (GHMM) is a C library with
278 additional Python bindings implementing a wide range of types of @dfn{Hidden
279 Markov Models} (HMM) and algorithms: discrete, continuous emissions, basic
280 training, HMM clustering, HMM mixtures.")
281 (license license:lgpl2.0+))))
;; mcl: Markov Cluster Algorithm, a graph-clustering tool, built with
;; gnu-build-system and configured with --enable-blast.
;; NOTE(review): the head of this definition (define-public mcl, name,
;; version, the start of the uri string-append) is missing from this view —
;; the first visible line is the middle of the source URI.
290 "http://micans.org/mcl/src/mcl-"
291 (string-replace-substring version "." "-")
295 "15xlax3z31lsn62vlg94hkm75nm40q4679amnfg13jm8m2bnhy5m"))))
296 (build-system gnu-build-system)
298 `(#:configure-flags (list "--enable-blast")))
301 (home-page "http://micans.org/mcl/")
302 (synopsis "Clustering algorithm for graphs")
304 "The MCL algorithm is short for the @dfn{Markov Cluster Algorithm}, a
305 fast and scalable unsupervised cluster algorithm for graphs (also known as
306 networks) based on simulation of (stochastic) flow in graphs.")
307 ;; In the LICENCE file and web page it says "The software is licensed
308 ;; under the GNU General Public License, version 3.", but in several of
309 ;; the source code files it suggests GPL3 or later.
310 ;; http://listserver.ebi.ac.uk/pipermail/mcl-users/2016/000376.html
311 (license license:gpl3)))
;; ocaml-mcl: OCaml wrappers around MCL, built from the fhcrc/mcl git repo
;; with ocaml-build-system.  A 'patch-paths phase fixes /bin/sh references,
;; ports the old OASIS build code to a modern OCaml (mutable strings → Bytes)
;; and forces -std=gnu99 instead of c89; "restrict" is renamed in the old C
;; code to avoid clashing with the C99 keyword.
;; NOTE(review): interior lines are missing (fused numbers jump); several
;; substitute* patterns are visibly truncated.
313 (define-public ocaml-mcl
316 (version "12-068oasis4")
321 (url "https://github.com/fhcrc/mcl")
323 (file-name (git-file-name name version))
326 "0009dc3h2jp3qg5val452wngpqnbfyhbcxylghq0mrjqxx0jdq5p"))))
327 (build-system ocaml-build-system)
330 (modify-phases %standard-phases
331 (add-before 'configure 'patch-paths
333 (substitute* "configure"
334 (("/bin/sh") (which "sh")))
335 (substitute* "setup.ml"
337 (string-append "LDFLAGS=-fPIC\"; \"SHELL=" (which "sh")))
338 (("-std=c89") "-std=gnu99")
340 ;; This is a mutable string, which is no longer supported. Use
341 ;; a byte buffer instead.
342 (("String.make \\(String.length s\\)")
343 "Bytes.make (String.length s)")
345 ;; These two belong together.
346 (("OASISString.replace_chars")
347 "Bytes.to_string (OASISString.replace_chars")
350 (substitute* "myocamlbuild.ml"
351 (("std=c89") "std=gnu99"))
352 ;; Since we build with a more recent OCaml, we have to use C99 or
353 ;; later. This causes problems with the old C code.
354 (substitute* "src/impala/matrix.c"
355 (("restrict") "restrict_"))
358 `(("ocamlbuild" ,ocamlbuild)))
359 (home-page "https://github.com/fhcrc/mcl")
360 (synopsis "OCaml wrappers around MCL")
362 "This package provides OCaml bindings for the MCL graph clustering
364 (license license:gpl3)))
;; randomjungle: Random Forests implementation for high-dimensional (e.g.
;; GWA) data.  Static build is disabled via a Guix patch and --disable-static;
;; Boost location is passed explicitly; CXXFLAGS gets -fpermissive.  Only
;; x86_64/i686 are supported because of non-portable assembly in the source.
;; NOTE(review): interior lines missing (version, parts of source and
;; arguments are not visible here).
366 (define-public randomjungle
368 (name "randomjungle")
374 "https://www.imbs.uni-luebeck.de/fileadmin/files/Software"
375 "/randomjungle/randomjungle-" version ".tar_.gz"))
376 (patches (search-patches "randomjungle-disable-static-build.patch"))
379 "12c8rf30cla71swx2mf4ww9mfd8jbdw5lnxd7dxhyw1ygrvg6y4w"))))
380 (build-system gnu-build-system)
383 (list "--disable-static"
384 (string-append "--with-boost="
385 (assoc-ref %build-inputs "boost")))
387 (modify-phases %standard-phases
389 'configure 'set-CXXFLAGS
391 (setenv "CXXFLAGS" "-fpermissive ")
399 `(("gfortran" ,gfortran)
400 ("gfortran:lib" ,gfortran "lib")))
401 ;; Non-portable assembly instructions are used so building fails on
402 ;; platforms other than x86_64 or i686.
403 (supported-systems '("x86_64-linux" "i686-linux"))
404 (home-page "https://www.imbs.uni-luebeck.de/forschung/software/details.html#c224")
405 (synopsis "Implementation of the Random Forests machine learning method")
407 "Random Jungle is an implementation of Random Forests. It is supposed to
408 analyse high dimensional data. In genetics, it can be used for analysing big
409 Genome Wide Association (GWA) data. Random Forests is a powerful machine
410 learning method. Most interesting features are variable selection, missing
411 value imputation, classifier creation, generalization error estimation and
412 sample proximities between pairs of cases.")
413 (license license:gpl3+)))
;; openfst: library for weighted finite-state transducers; a plain
;; gnu-build-system build of the upstream release tarball with no custom
;; arguments visible.
;; NOTE(review): the version field and origin scaffolding are missing from
;; this view (fused numbers jump 415→421).
415 (define-public openfst
421 (uri (string-append "http://www.openfst.org/twiki/pub/FST/"
422 "FstDownload/openfst-" version ".tar.gz"))
425 "1pmx1yhn2gknj0an0zwqmzgwjaycapi896244np50a8y3nrsw6ck"))))
426 (build-system gnu-build-system)
427 (home-page "http://www.openfst.org")
428 (synopsis "Library for weighted finite-state transducers")
429 (description "OpenFst is a library for constructing, combining,
430 optimizing, and searching weighted finite-state transducers (FSTs).")
431 (license license:asl2.0)))
;; shogun: the Shogun machine-learning toolbox.  The origin snippet strips
;; the non-free SVMLight sources and the code regions guarded by
;; #ifdef USE_SVMLIGHT (a hand-rolled line-by-line filter over each affected
;; file).  The build disables SVMLIGHT via -DUSE_SVMLIGHT=OFF and enables the
;; Octave, Python and R interfaces; several other interfaces stay commented
;; out because their dependencies are unpackaged.  x86_64-only due to
;; non-portable SSE code.
;; NOTE(review): many interior lines are missing (fused numbers jump
;; throughout); in particular the snippet's read-line loop around lines
;; 464-477 is visibly incomplete — do not restructure it from this view.
433 (define-public shogun
441 "ftp://shogun-toolbox.org/shogun/releases/"
442 (version-major+minor version)
443 "/sources/shogun-" version ".tar.bz2"))
446 "1rn9skm3nw6hr7mr3lgp2gfqhi7ii0lyxck7qmqnf8avq349s5jp"))
447 (modules '((guix build utils)
451 ;; Remove non-free sources and files referencing them
452 (for-each delete-file
453 (find-files "src/shogun/classifier/svm/"
454 "SVMLight\\.(cpp|h)"))
455 (for-each delete-file
456 (find-files "examples/undocumented/libshogun/"
458 "(classifier_.*svmlight.*|"
459 "evaluation_cross_validation_locked_comparison).cpp")))
460 ;; Remove non-free functions.
461 (define (delete-ifdefs file)
462 (with-atomic-file-replacement file
464 (let loop ((line (read-line in 'concat))
466 (if (eof-object? line)
471 "#endif //USE_SVMLIGHT" line)))
473 "#ifdef USE_SVMLIGHT" line))))
474 (when (or (not skipping?)
475 (and skipping? (not skip-next?)))
477 (loop (read-line in 'concat) skip-next?)))))))
478 (for-each delete-ifdefs
480 (find-files "src/shogun/classifier/mkl"
481 "^MKLClassification\\.cpp")
482 (find-files "src/shogun/classifier/svm"
483 "^SVMLightOneClass\\.(cpp|h)")
484 (find-files "src/shogun/multiclass"
485 "^ScatterSVM\\.(cpp|h)")
486 (find-files "src/shogun/kernel/"
487 "^(Kernel|CombinedKernel|ProductKernel)\\.(cpp|h)")
488 (find-files "src/shogun/regression/svr"
489 "^(MKLRegression|SVRLight)\\.(cpp|h)")
490 (find-files "src/shogun/transfer/domain_adaptation"
491 "^DomainAdaptationSVM\\.(cpp|h)")))
493 (build-system cmake-build-system)
495 '(#:tests? #f ;no check target
497 (modify-phases %standard-phases
498 (add-after 'unpack 'delete-broken-symlinks
500 (for-each delete-file '("applications/arts/data"
501 "applications/asp/data"
502 "applications/easysvm/data"
503 "applications/msplicer/data"
504 "applications/ocr/data"
506 "examples/undocumented/data"))
508 (add-after 'unpack 'change-R-target-path
509 (lambda* (#:key outputs #:allow-other-keys)
510 (substitute* '("src/interfaces/r/CMakeLists.txt"
511 "examples/meta/r/CMakeLists.txt")
512 (("\\$\\{R_COMPONENT_LIB_PATH\\}")
513 (string-append (assoc-ref outputs "out")
516 (add-after 'unpack 'fix-octave-modules
517 (lambda* (#:key outputs #:allow-other-keys)
518 (substitute* "src/interfaces/octave/CMakeLists.txt"
519 (("^include_directories\\(\\$\\{OCTAVE_INCLUDE_DIRS\\}")
520 "include_directories(${OCTAVE_INCLUDE_DIRS} ${OCTAVE_INCLUDE_DIRS}/octave")
521 ;; change target directory
522 (("\\$\\{OCTAVE_OCT_LOCAL_API_FILE_DIR\\}")
523 (string-append (assoc-ref outputs "out")
524 "/share/octave/packages")))
525 (substitute* '("src/interfaces/octave/swig_typemaps.i"
526 "src/interfaces/octave/sg_print_functions.cpp")
527 ;; "octave/config.h" and "octave/oct-obj.h" deprecated in Octave.
528 (("octave/config\\.h") "octave/octave-config.h")
529 (("octave/oct-obj.h") "octave/ovl.h"))
531 (add-after 'unpack 'move-rxcpp
532 (lambda* (#:key inputs #:allow-other-keys)
533 (let ((rxcpp-dir "shogun/third-party/rxcpp"))
535 (install-file (assoc-ref inputs "rxcpp") rxcpp-dir)
537 (add-before 'build 'set-HOME
538 ;; $HOME needs to be set at some point during the build phase
539 (lambda _ (setenv "HOME" "/tmp") #t)))
541 (list "-DCMAKE_BUILD_WITH_INSTALL_RPATH=TRUE"
542 "-DUSE_SVMLIGHT=OFF" ;disable proprietary SVMLIGHT
543 "-DBUILD_META_EXAMPLES=OFF" ;requires unpackaged ctags
544 ;;"-DINTERFACE_JAVA=ON" ;requires unpackaged jblas
545 ;;"-DINTERFACE_RUBY=ON" ;requires unpackaged ruby-narray
546 ;;"-DINTERFACE_PERL=ON" ;"FindPerlLibs" does not exist
547 ;;"-DINTERFACE_LUA=ON" ;fails because lua doesn't build pkgconfig file
548 "-DINTERFACE_OCTAVE=ON"
549 "-DINTERFACE_PYTHON=ON"
550 "-DINTERFACE_R=ON")))
553 ("numpy" ,python-numpy)
554 ("r-minimal" ,r-minimal)
555 ("octave" ,octave-cli)
560 ("arpack" ,arpack-ng)
567 `(("pkg-config" ,pkg-config)
569 ;; Non-portable SSE instructions are used so building fails on platforms
570 ;; other than x86_64.
571 (supported-systems '("x86_64-linux"))
572 (home-page "https://shogun-toolbox.org/")
573 (synopsis "Machine learning toolbox")
575 "The Shogun Machine learning toolbox provides a wide range of unified and
576 efficient Machine Learning (ML) methods. The toolbox seamlessly
577 combines multiple data representations, algorithm classes, and general purpose
578 tools. This enables both rapid prototyping of data pipelines and extensibility
579 in terms of new algorithms.")
580 (license license:gpl3+)))
;; python-onnx: Open Neural Network Exchange, from PyPI, patched to build
;; against Guix's googletest instead of a git checkout and to enable tests by
;; default.  protobuf is a regular input; numpy/protobuf/six/tabulate/
;; typing-extensions are propagated Python dependencies.
;; NOTE(review): version field and input-section headers (native-inputs /
;; inputs / propagated-inputs keywords) fall in the missing lines.
582 (define-public python-onnx
589 (uri (pypi-uri "onnx" version))
590 ;; ONNX will build googletest from a git checkout. Patch CMake
591 ;; to use googletest from Guix and enable tests by default.
592 (patches (search-patches "python-onnx-use-system-googletest.patch"))
594 (base32 "0j6rgfbhsw3a8id8pyg18y93k68lbjbj1kq6qia36h69f6pvlyjy"))))
595 (build-system python-build-system)
598 ("googletest" ,googletest)
599 ("pybind11" ,pybind11)
600 ("python-coverage" ,python-coverage)
601 ("python-nbval" ,python-nbval)
602 ("python-pytest" ,python-pytest)
603 ("python-pytest-runner" ,python-pytest-runner)))
605 `(("protobuf" ,protobuf)))
607 `(("python-numpy" ,python-numpy)
608 ("python-protobuf" ,python-protobuf)
609 ("python-six" ,python-six)
610 ("python-tabulate" ,python-tabulate)
611 ("python-typing-extensions"
612 ,python-typing-extensions)))
613 (home-page "https://onnx.ai/")
614 (synopsis "Open Neural Network Exchange")
616 "Open Neural Network Exchange (ONNX) provides an open source format for
617 AI models, both deep learning and traditional ML. It defines an extensible
618 computation graph model, as well as definitions of built-in operators and
619 standard data types.")
620 (license license:expat)))
;; rxcpp: Reactive Extensions for C++, built from the ReactiveX/RxCpp git
;; tag with cmake-build-system.  A phase strips -Werror from the build files,
;; and a custom check invokes ctest; catch-framework is a (native) input.
;; NOTE(review): the head of this definition (define-public rxcpp, name,
;; version, origin/git-reference openers) is missing from this view, as are
;; parts of the remove-werror and check phases.
630 (url "https://github.com/ReactiveX/RxCpp")
631 (commit (string-append "v" version))))
633 (base32 "1rdpa3jlc181jd08nk437aar085h28i45s6nzrv65apb3xyyz0ij"))
634 (file-name (git-file-name name version))))
635 (build-system cmake-build-system)
638 (modify-phases %standard-phases
639 (add-after 'unpack 'remove-werror
641 (substitute* (find-files ".")
646 (invoke "ctest"))))))
648 `(("catch" ,catch-framework)))
649 (home-page "http://reactivex.io/")
650 (synopsis "Reactive Extensions for C++")
652 "The Reactive Extensions for C++ (RxCpp) is a library of algorithms for
653 values-distributed-in-time. ReactiveX is a library for composing asynchronous
654 and event-based programs by using observable sequences.
656 It extends the observer pattern to support sequences of data and/or events and
657 adds operators that allow you to compose sequences together declaratively while
658 abstracting away concerns about things like low-level threading,
659 synchronization, thread-safety, concurrent data structures, and non-blocking
661 (license license:asl2.0)))
;; r-adaptivesparsity: CRAN package "AdaptiveSparsity" built with
;; r-build-system.  A custom phase appends -larmadillo to PKG_LIBS in
;; src/Makevars so the C++ code links against Guix's armadillo.
;; NOTE(review): interior lines missing (version, parts of arguments and the
;; propagated-inputs opener are not visible).
663 (define-public r-adaptivesparsity
665 (name "r-adaptivesparsity")
669 (uri (cran-uri "AdaptiveSparsity" version))
672 "0imr5m8mll9j6n4icsv6z9rl5kbnwsp9wvzrg7n90nnmcxq2cz91"))))
674 `((upstream-name . "AdaptiveSparsity")))
675 (build-system r-build-system)
678 (modify-phases %standard-phases
679 (add-after 'unpack 'link-against-armadillo
681 (substitute* "src/Makevars"
682 (("PKG_LIBS=" prefix)
683 (string-append prefix "-larmadillo"))))))))
686 ("r-matrix" ,r-matrix)
688 ("r-rcpparmadillo" ,r-rcpparmadillo)))
690 `(("armadillo" ,armadillo)))
691 (home-page "https://cran.r-project.org/web/packages/AdaptiveSparsity")
692 (synopsis "Adaptive sparsity models")
694 "This package implements the Figueiredo machine learning algorithm for
695 adaptive sparsity and the Wong algorithm for adaptively sparse gaussian
697 (license license:lgpl3+)))
;; gemmlowp-for-tensorflow: Google's low-precision GEMM library at the exact
;; commit TensorFlow's workspace.bzl pins, fetched from the Bazel mirror.
;; CMake lives in contrib/ (hence the 'chdir phase); -msse2 is added only on
;; x86 systems via the (match (%current-system) ...) dispatch; a hand-written
;; install phase copies libeight_bit_int_gemm.so and the public header
;; directories into the output.
;; NOTE(review): interior lines missing — the revision binding, parts of the
;; install phase and the end of the match clause are not visible here.
699 (define-public gemmlowp-for-tensorflow
700 ;; The commit hash is taken from "tensorflow/workspace.bzl".
701 (let ((commit "38ebac7b059e84692f53e5938f97a9943c120d98")
705 (version (git-version "0" revision commit))
708 (uri (string-append "https://mirror.bazel.build/"
709 "github.com/google/gemmlowp/archive/"
711 (file-name (string-append "gemmlowp-" version ".zip"))
714 "0n56s2g8hrssm4w8qj1v58gfm56a04n9v992ixkmvk6zjiralzxq"))))
715 (build-system cmake-build-system)
718 (list ,@(match (%current-system)
719 ((or "x86_64-linux" "i686-linux")
720 '("-DCMAKE_CXX_FLAGS=-msse2"))
723 (modify-phases %standard-phases
724 ;; This directory contains the CMakeLists.txt.
725 (add-after 'unpack 'chdir
726 (lambda _ (chdir "contrib") #t))
727 ;; There is no install target
729 (lambda* (#:key outputs #:allow-other-keys)
730 (let* ((out (assoc-ref outputs "out"))
731 (lib (string-append out "/lib/"))
732 (inc (string-append out "/include/")))
733 (install-file "../build/libeight_bit_int_gemm.so" lib)
734 (for-each (lambda (dir)
735 (let ((target (string-append inc "/" dir)))
737 (for-each (lambda (h)
738 (install-file h target))
739 (find-files (string-append "../" dir)
741 '("meta" "profiling" "public" "fixedpoint"
742 "eight_bit_int_gemm" "internal"))
746 (home-page "https://github.com/google/gemmlowp")
747 (synopsis "Small self-contained low-precision GEMM library")
749 "This is a small self-contained low-precision @dfn{general matrix
750 multiplication} (GEMM) library. It is not a full linear algebra library.
751 Low-precision means that the input and output matrix entries are integers on
752 at most 8 bits. To avoid overflow, results are internally accumulated on more
753 than 8 bits, and at the end only some significant 8 bits are kept.")
754 (license license:asl2.0))))
;; dlib: C++ machine-learning toolkit, built as a shared library.  The origin
;; snippet deletes the bundled third-party code (~13MB); a phase enables
;; DLIB_DISABLE_ASSERTS in dlib/config.h (recommended for shared builds), and
;; platform-specific flaky/slow tests are removed from dlib/test/makefile
;; before the unit tests are built and run manually (no upstream test target).
;; NOTE(review): the head of this definition (define-public dlib, name,
;; version, origin opener) and chunks of the test-disabling logic fall in the
;; missing lines — the cond over `system` is visibly truncated.
763 "http://dlib.net/files/dlib-" version ".tar.bz2"))
766 "139jyi19qz37wwmmy48gil9d1kkh2r3w3bwdzabha6ayxmba96nz"))
767 (modules '((guix build utils)))
770 ;; Delete ~13MB of bundled dependencies.
771 (delete-file-recursively "dlib/external")
772 (delete-file-recursively "docs/dlib/external")
774 (build-system cmake-build-system)
776 `(#:configure-flags '("-DBUILD_SHARED_LIBS=ON")
778 (modify-phases %standard-phases
779 (add-after 'unpack 'disable-asserts
781 ;; config.h recommends explicitly enabling or disabling asserts
782 ;; when building as a shared library. By default neither is set.
783 (substitute* "dlib/config.h"
784 (("^//#define DLIB_DISABLE_ASSERTS") "#define DLIB_DISABLE_ASSERTS"))
786 (add-after 'disable-asserts 'disable-failing-tests
788 ;; One test times out on MIPS, so we need to disable it.
789 ;; Others are flaky on some platforms.
790 (let* ((system ,(or (%current-target-system)
792 (disabled-tests (cond
793 ((string-prefix? "mips64" system)
794 '("object_detector" ; timeout
796 ((string-prefix? "armhf" system)
797 '("learning_to_track"))
798 ((string-prefix? "i686" system)
803 (substitute* "dlib/test/makefile"
804 (((string-append "SRC \\+= " test "\\.cpp")) "")))
809 ;; No test target, so we build and run the unit tests here.
810 (let ((test-dir (string-append "../dlib-" ,version "/dlib/test")))
811 (with-directory-excursion test-dir
812 (invoke "make" "-j" (number->string (parallel-job-count)))
813 (invoke "./dtest" "--runall"))
816 `(("pkg-config" ,pkg-config)
822 ("libjpeg" ,libjpeg-turbo)
825 ("openblas" ,openblas)
828 "Toolkit for making machine learning and data analysis applications in C++")
830 "Dlib is a modern C++ toolkit containing machine learning algorithms and
831 tools. It is used in both industry and academia in a wide range of domains
832 including robotics, embedded devices, mobile phones, and large high performance
833 computing environments.")
834 (home-page "http://dlib.net")
835 (license license:boost1.0)))
;; python-scikit-learn: scikit-learn built from its git repository.  Custom
;; phases: 'build-ext compiles the Cython extensions in place; the check phase
;; caps OpenBLAS at one thread (segfault workaround), sets HOME, and runs
;; pytest excluding network-marked tests; 'make-files-writable unprotects .gz
;; files so the standard 'reset-gzip-timestamps phase can run.  A
;; python2-variant property points at python2-scikit-learn below.
;; NOTE(review): version and parts of the check phase fall in the missing
;; lines of this view.
837 (define-public python-scikit-learn
839 (name "python-scikit-learn")
845 (url "https://github.com/scikit-learn/scikit-learn")
847 (file-name (git-file-name name version))
850 "1xqxv210gsmjw094vc5ghq2y9lmm74qkk22pq6flcjzj51b86jxf"))))
851 (build-system python-build-system)
854 (modify-phases %standard-phases
855 (add-after 'build 'build-ext
856 (lambda _ (invoke "python" "setup.py" "build_ext" "--inplace") #t))
859 ;; Restrict OpenBLAS threads to prevent segfaults while testing!
860 (setenv "OPENBLAS_NUM_THREADS" "1")
862 ;; Some tests require write access to $HOME.
863 (setenv "HOME" "/tmp")
865 (invoke "pytest" "sklearn" "-m" "not network")))
866 (add-before 'reset-gzip-timestamps 'make-files-writable
867 (lambda* (#:key outputs #:allow-other-keys)
868 ;; Make sure .gz files are writable so that the
869 ;; 'reset-gzip-timestamps' phase can do its work.
870 (let ((out (assoc-ref outputs "out")))
871 (for-each make-file-writable
872 (find-files out "\\.gz$"))
875 `(("openblas" ,openblas)))
877 `(("python-pytest" ,python-pytest)
878 ("python-pandas" ,python-pandas) ;for tests
879 ("python-cython" ,python-cython)))
881 `(("python-numpy" ,python-numpy)
882 ("python-scipy" ,python-scipy)
883 ("python-joblib" ,python-joblib)))
884 (home-page "https://scikit-learn.org/")
885 (synopsis "Machine Learning in Python")
887 "Scikit-learn provides simple and efficient tools for data mining and
889 (properties `((python2-variant . ,(delay python2-scikit-learn))))
890 (license license:bsd-3)))
;; python2-scikit-learn: Python-2 variant pinned to a pre-0.22 release
;; (0.22 dropped Python 2), derived from python-scikit-learn via
;; package-with-python2 with its own git checkout and hash.
;; NOTE(review): the version override and origin opener are in the missing
;; lines of this view.
892 ;; scikit-learn 0.22 and later only supports Python 3, so we stick with
893 ;; an older version here.
894 (define-public python2-scikit-learn
895 (let ((base (package-with-python2 (strip-python2-variant python-scikit-learn))))
902 (url "https://github.com/scikit-learn/scikit-learn")
904 (file-name (git-file-name "python-scikit-learn" version))
907 "08zbzi8yx5wdlxfx9jap61vg1malc9ajf576w7a0liv6jvvrxlpj")))))))
;; python-scikit-rebate: ReBATE (Relief-based feature selection) from PyPI
;; ("skrebate"), built with python-build-system.  pandas is a test-only
;; (native) input; numpy/scipy/scikit-learn/joblib are propagated runtime
;; dependencies.
;; NOTE(review): version and the native-inputs/propagated-inputs keywords are
;; in the missing lines of this view.
909 (define-public python-scikit-rebate
911 (name "python-scikit-rebate")
915 (uri (pypi-uri "skrebate" version))
918 "1h7qs9gjxpzqabzhb8rmpv3jpmi5iq41kqdibg48299h94iikiw7"))))
919 (build-system python-build-system)
920 ;; Pandas is only needed to run the tests.
922 `(("python-pandas" ,python-pandas)))
924 `(("python-numpy" ,python-numpy)
925 ("python-scipy" ,python-scipy)
926 ("python-scikit-learn" ,python-scikit-learn)
927 ("python-joblib" ,python-joblib)))
928 (home-page "https://epistasislab.github.io/scikit-rebate/")
929 (synopsis "Relief-based feature selection algorithms for Python")
930 (description "Scikit-rebate is a scikit-learn-compatible Python
931 implementation of ReBATE, a suite of Relief-based feature selection algorithms
932 for Machine Learning. These algorithms excel at identifying features that are
933 predictive of the outcome in supervised learning problems, and are especially
934 good at identifying feature interactions that are normally overlooked by
935 standard feature selection algorithms.")
936 (license license:expat)))
;; python-autograd: HIPS/autograd built from a pinned git commit (no PyPI
;; release used; version is git-version "0.0.0").  Tests run via py.test in a
;; custom check phase; nose and pytest are native test inputs, future and
;; numpy are propagated.
;; NOTE(review): the revision binding, origin opener and the check-phase
;; opener are in the missing lines of this view.
938 (define-public python-autograd
939 (let* ((commit "442205dfefe407beffb33550846434baa90c4de7")
941 (version (git-version "0.0.0" revision commit)))
943 (name "python-autograd")
944 (home-page "https://github.com/HIPS/autograd")
952 "189sv2xb0mwnjawa9z7mrgdglc1miaq93pnck26r28fi1jdwg0z4"))
953 (file-name (git-file-name name version))))
955 (build-system python-build-system)
957 `(("python-nose" ,python-nose)
958 ("python-pytest" ,python-pytest)))
960 `(("python-future" ,python-future)
961 ("python-numpy" ,python-numpy)))
963 `(#:phases (modify-phases %standard-phases
966 (invoke "py.test" "-v"))))))
967 (synopsis "Efficiently computes derivatives of NumPy code")
968 (description "Autograd can automatically differentiate native Python and
969 NumPy code. It can handle a large subset of Python's features, including loops,
970 ifs, recursion and closures, and it can even take derivatives of derivatives
971 of derivatives. It supports reverse-mode differentiation
972 (a.k.a. backpropagation), which means it can efficiently take gradients of
973 scalar-valued functions with respect to array-valued arguments, as well as
974 forward-mode differentiation, and the two can be composed arbitrarily. The
975 main intended application of Autograd is gradient-based optimization.")
976 (license license:expat))))
;; python2-autograd: automatic Python-2 variant of python-autograd.
978 (define-public python2-autograd
979 (package-with-python2 python-autograd))
;; lightgbm: Microsoft's gradient-boosting framework, built from the release
;; git tag with cmake-build-system.  The custom check phase runs the C-API
;; pytest suite from the unpacked source tree ("../source").  OpenMPI is an
;; input; pytest/nose are native test inputs; numpy/scipy are propagated.
;; NOTE(review): version, sha256 opener and parts of the arguments are in the
;; missing lines of this view.
981 (define-public lightgbm
988 (url "https://github.com/Microsoft/LightGBM")
989 (commit (string-append "v" version))))
992 "0jlvyn7k81dzrh9ij3zw576wbgiwmmr26rzpdxjn1dbpc3njpvzi"))
993 (file-name (git-file-name name version))))
995 `(("python-pytest" ,python-pytest)
996 ("python-nose" ,python-nose)))
998 `(("openmpi" ,openmpi)))
1000 `(("python-numpy" ,python-numpy)
1001 ("python-scipy" ,python-scipy)))
1006 (modify-phases %standard-phases
1009 (with-directory-excursion "../source"
1010 (invoke "pytest" "tests/c_api_test/test_.py")))))))
1011 (build-system cmake-build-system)
1012 (home-page "https://github.com/Microsoft/LightGBM")
1013 (synopsis "Gradient boosting framework based on decision tree algorithms")
1014 (description "LightGBM is a gradient boosting framework that uses tree
1015 based learning algorithms. It is designed to be distributed and efficient with
1016 the following advantages:
1019 @item Faster training speed and higher efficiency
1020 @item Lower memory usage
1021 @item Better accuracy
1022 @item Parallel and GPU learning supported (not enabled in this package)
1023 @item Capable of handling large-scale data
1025 (license license:expat)))
;; vowpal-wabbit: online machine-learning system, built from git with
;; gnu-build-system and an explicit --with-boost.  Phases: checkout files are
;; made writable after unpack, and additional headers from vowpalwabbit/ are
;; installed into <out>/include/vowpalwabbit after 'install.
;; NOTE(review): version, sha256/git-reference openers and parts of the
;; install-more-headers phase are in the missing lines of this view.
1027 (define-public vowpal-wabbit
1028 ;; Language bindings not included.
1030 (name "vowpal-wabbit")
1035 (url "https://github.com/JohnLangford/vowpal_wabbit")
1039 "04bwzk6ifgnz3fmzid8b7avxf9n5pnx9xcjm61nkjng1vv0bpj8x"))
1040 (file-name (git-file-name name version))))
1046 (list (string-append "--with-boost="
1047 (assoc-ref %build-inputs "boost")))
1049 (modify-phases %standard-phases
1050 (add-after 'unpack 'make-files-writable
1052 (for-each make-file-writable (find-files "." ".*")) #t))
1053 (add-after 'install 'install-more-headers
1054 (lambda* (#:key outputs #:allow-other-keys)
1057 (install-file file (string-append
1058 (assoc-ref outputs "out")
1059 "/include/vowpalwabbit")))
1060 (find-files "vowpalwabbit" "\\.h$"))
1062 (build-system gnu-build-system)
1063 (home-page "https://github.com/JohnLangford/vowpal_wabbit")
1064 (synopsis "Fast machine learning library for online learning")
1065 (description "Vowpal Wabbit is a machine learning system with techniques
1066 such as online, hashing, allreduce, reductions, learning2search, active, and
1067 interactive learning.")
1068 (license license:bsd-3)))
;; python2-fastlmm: FaST-LMM (Factored Spectrally Transformed Linear Mixed
;; Models) for GWAS, from PyPI as a .zip.  Python 2.7 only (#:python
;; ,python-2); tests disabled because test files are missing from the
;; release.  Scientific Python-2 stack is propagated.
;; NOTE(review): version and the propagated/native input keywords are in the
;; missing lines of this view.
1070 (define-public python2-fastlmm
1072 (name "python2-fastlmm")
1077 (uri (pypi-uri "fastlmm" version ".zip"))
1080 "1q8c34rpmwkfy3r4d5172pzdkpfryj561897z9r3x22gq7813x1m"))))
1081 (build-system python-build-system)
1083 `(#:tests? #f ; some test files are missing
1084 #:python ,python-2)) ; only Python 2.7 is supported
1086 `(("python2-numpy" ,python2-numpy)
1087 ("python2-scipy" ,python2-scipy)
1088 ("python2-matplotlib" ,python2-matplotlib)
1089 ("python2-pandas" ,python2-pandas)
1090 ("python2-scikit-learn" ,python2-scikit-learn)
1091 ("python2-pysnptools" ,python2-pysnptools)))
1094 ("python2-cython" ,python2-cython)
1095 ("python2-mock" ,python2-mock)
1096 ("python2-nose" ,python2-nose)))
1097 (home-page "http://research.microsoft.com/en-us/um/redmond/projects/mscompbio/fastlmm/")
1098 (synopsis "Perform genome-wide association studies on large data sets")
1100 "FaST-LMM, which stands for Factored Spectrally Transformed Linear Mixed
1101 Models, is a program for performing both single-SNP and SNP-set genome-wide
1102 association studies (GWAS) on extremely large data sets.")
1103 (license license:asl2.0)))
1105 ;; There have been no proper releases yet.
;; Kaldi speech-recognition toolkit, built from a pinned git commit.
;; The build happens inside the "src" subdirectory; the hand-written
;; `configure' script is patched below because it supports neither
;; VAR=value arguments nor --prefix.
;; NOTE(review): several lines are elided in this extraction (inner
;; numbering jumps); confirm omitted forms against the full file.
1106 (define-public kaldi
1107   (let ((commit "d4791c0f3fc1a09c042dac365e120899ee2ad21e")
1111     (version (git-version "0" revision commit))
1115             (url "https://github.com/kaldi-asr/kaldi")
1117             (file-name (git-file-name name version))
1120               "07k80my6f19mhrkwbzhjsnpf9871wmrwkl0ym468i830w67qyjrz"))))
1121     (build-system gnu-build-system)
1123      `(#:test-target "test"
1125        (modify-phases %standard-phases
1126          (add-after 'unpack 'chdir
1127            (lambda _ (chdir "src") #t))
1129            (lambda* (#:key build system inputs outputs #:allow-other-keys)
;; Drop x86-only SSE flags on non-x86 systems.
1130              (when (not (or (string-prefix? "x86_64" system)
1131                             (string-prefix? "i686" system)))
1132                (substitute* "makefiles/linux_openblas.mk"
1133                  (("-msse -msse2") "")))
1134              (substitute* "makefiles/default_rules.mk"
1135                (("/bin/bash") (which "bash")))
1136              (substitute* "Makefile"
1137                (("ext_depend: check_portaudio")
;; Use the portaudio package instead of the bundled copy.
1139              (substitute* '("online/Makefile"
1140                             "onlinebin/Makefile"
1141                             "gst-plugin/Makefile")
1142                (("../../tools/portaudio/install")
1143                 (assoc-ref inputs "portaudio")))
1145              ;; This `configure' script doesn't support variables passed as
1146              ;; arguments, nor does it support "prefix".
1147              (let ((out (assoc-ref outputs "out"))
1148                    (openblas (assoc-ref inputs "openblas"))
1149                    (openfst (assoc-ref inputs "openfst")))
1150                (substitute* "configure"
1151                  (("check_for_slow_expf;") "")
1152                  ;; This affects the RPATH and also serves as the installation
1154                  (("KALDILIBDIR=`pwd`/lib")
1155                   (string-append "KALDILIBDIR=" out "/lib")))
1156                (mkdir-p out) ; must exist
1157                (setenv "CONFIG_SHELL" (which "bash"))
1158                (setenv "OPENFST_VER" ,(package-version openfst))
1159                (invoke "./configure"
1162                        (string-append "--openblas-root=" openblas)
1163                        (string-append "--fst-root=" openfst)))))
;; The default `make' does not build the online/GStreamer extras.
1164          (add-after 'build 'build-ext-and-gstreamer-plugin
1166                (invoke "make" "-C" "online" "depend")
1167                (invoke "make" "-C" "online")
1168                (invoke "make" "-C" "onlinebin" "depend")
1169                (invoke "make" "-C" "onlinebin")
1170                (invoke "make" "-C" "gst-plugin" "depend")
1171                (invoke "make" "-C" "gst-plugin")
1173          ;; TODO: also install the executables.
1175            (lambda* (#:key outputs #:allow-other-keys)
1176              (let* ((out (assoc-ref outputs "out"))
1177                     (inc (string-append out "/include"))
1178                     (lib (string-append out "/lib")))
1180                ;; The build phase installed symlinks to the actual
1181                ;; libraries.  Install the actual targets.
1182                (for-each (lambda (file)
1183                            (let ((target (readlink file)))
1185                              (install-file target lib)))
1186                          (find-files lib "\\.so"))
;; Install headers, preserving their directory layout under include/.
1188                (for-each (lambda (file)
1189                            (let ((target-dir (string-append inc "/" (dirname file))))
1190                              (install-file file target-dir)))
1191                          (find-files "." "\\.h"))
1192                (install-file "gst-plugin/libgstonlinegmmdecodefaster.so"
1193                              (string-append lib "/gstreamer-1.0"))
1196      `(("alsa-lib" ,alsa-lib)
1197        ("gfortran" ,gfortran "lib")
1199        ("gstreamer" ,gstreamer)
1201        ("openblas" ,openblas)
1202        ("openfst" ,openfst)
1203        ("portaudio" ,portaudio)
1204        ("python" ,python)))
1206      `(("glib" ,glib "bin") ; glib-genmarshal
1209        ("pkg-config" ,pkg-config)
1211     (home-page "https://kaldi-asr.org/")
1212     (synopsis "Speech recognition toolkit")
1213     (description "Kaldi is an extensible toolkit for speech recognition
1215     (license license:asl2.0))))
;; GStreamer plugin wrapping Kaldi's nnet2 online decoder; built against
;; the kaldi package above and the unpacked kaldi source tree.
;; NOTE(review): lines are elided in this extraction; confirm omitted
;; forms (e.g. the `package'/origin skeleton) against the full file.
1217 (define-public gst-kaldi-nnet2-online
1218   (let ((commit "cb227ef43b66a9835c14eb0ad39e08ee03c210ad")
1221       (name "gst-kaldi-nnet2-online")
1222       (version (git-version "0" revision commit))
1226               (url "https://github.com/alumae/gst-kaldi-nnet2-online")
1228               (file-name (git-file-name name version))
1231                 "1i6ffwiavxx07ri0lxix6s8q0r31x7i4xxvhys5jxkixf5q34w8g"))))
1232       (build-system gnu-build-system)
1234        `(#:tests? #f ; there are none
;; The Makefile needs both the Kaldi *source* tree (headers) and the
;; built Kaldi libraries, passed via make flags.
1236          (list (string-append "SHELL="
1237                               (assoc-ref %build-inputs "bash") "/bin/bash")
1238                (string-append "KALDI_ROOT="
1239                               (assoc-ref %build-inputs "kaldi-src"))
1240                (string-append "KALDILIBDIR="
1241                               (assoc-ref %build-inputs "kaldi") "/lib")
1242                "KALDI_FLAVOR=dynamic")
1244          (modify-phases %standard-phases
1245            (add-after 'unpack 'chdir
1246              (lambda _ (chdir "src") #t))
1248              (lambda* (#:key inputs #:allow-other-keys)
;; GLib/GStreamer headers are not found automatically; add them to
;; the C++ include path by hand.
1249                (let ((glib (assoc-ref inputs "glib")))
1250                  (setenv "CXXFLAGS" "-fPIC")
1251                  (setenv "CPLUS_INCLUDE_PATH"
1252                          (string-append glib "/include/glib-2.0:"
1253                                         glib "/lib/glib-2.0/include:"
1254                                         (assoc-ref inputs "gstreamer")
1255                                         "/include/gstreamer-1.0")))
;; Neutralize the kaldi.mk include and the fatal "Cannot find" check.
1256                (substitute* "Makefile"
1257                  (("include \\$\\(KALDI_ROOT\\)/src/kaldi.mk") "")
1258                  (("\\$\\(error Cannot find") "#"))
1260            (add-before 'build 'build-depend
1261              (lambda* (#:key make-flags #:allow-other-keys)
1262                (apply invoke "make" "depend" make-flags)))
;; There is no `make install'; copy the plugin manually.
1264              (lambda* (#:key outputs #:allow-other-keys)
1265                (let* ((out (assoc-ref outputs "out"))
1266                       (lib (string-append out "/lib/gstreamer-1.0")))
1267                  (install-file "libgstkaldinnet2onlinedecoder.so" lib)
1271        ("gstreamer" ,gstreamer)
1272        ("jansson" ,jansson)
1273        ("openfst" ,openfst)
1277        ("glib:bin" ,glib "bin") ; glib-genmarshal
1278        ("kaldi-src" ,(package-source kaldi))
1279        ("pkg-config" ,pkg-config)))
1280       (home-page "https://kaldi-asr.org/")
1281       (synopsis "Gstreamer plugin for decoding speech")
1282       (description "This package provides a GStreamer plugin that wraps
1283 Kaldi's @code{SingleUtteranceNnet2Decoder}.  It requires iVector-adapted DNN
1284 acoustic models.  The iVectors are adapted to the current audio stream
1286       (license license:asl2.0))))
;; Python server exposing Kaldi + GStreamer speech recognition over the
;; network.  Uses gnu-build-system with custom phases: byte-compile the
;; Python sources reproducibly, then install them plus shell wrappers.
;; NOTE(review): lines are elided in this extraction; confirm omitted
;; forms against the full file before editing.
1288 (define-public kaldi-gstreamer-server
1289   ;; This is the tip of the py3 branch
1290   (let ((commit "f68cab490be7eb0da2af1475fbc16655f50a60cb")
1293       (name "kaldi-gstreamer-server")
1294       (version (git-version "0" revision commit))
1298               (url "https://github.com/alumae/kaldi-gstreamer-server")
1300               (file-name (git-file-name name version))
1303                 "17lh1368vkg8ngrcbn2phvigzlmalrqg6djx2gg61qq1a0nj87dm"))))
1304       (build-system gnu-build-system)
1306        `(#:tests? #f ; there are no tests that can be run automatically
1307          #:modules ((guix build utils)
1308                     (guix build gnu-build-system)
1311          (modify-phases %standard-phases
1314              (lambda* (#:key outputs #:allow-other-keys)
1315                ;; Disable hash randomization to ensure the generated .pycs
1316                ;; are reproducible.
1317                (setenv "PYTHONHASHSEED" "0")
1318                (with-directory-excursion "kaldigstserver"
1319                  ;; See https://github.com/alumae/kaldi-gstreamer-server/issues/232
1320                  (substitute* "master_server.py"
1321                    (("\\.replace\\('\\\\.*") ")"))
1323                  ;; This is a Python 2 file
1324                  (delete-file "decoder_test.py")
1325                  (delete-file "test-buffer.py")
;; Byte-compile every remaining Python source.
1327                  (for-each (lambda (file)
1331                              "-f" ; force rebuild
1333                            (find-files "." "\\.py$")))
1336              (lambda* (#:key inputs outputs #:allow-other-keys)
1337                (let* ((out (assoc-ref outputs "out"))
1338                       (bin (string-append out "/bin"))
1339                       (share (string-append out "/share/kaldi-gstreamer-server/")))
1340                  ;; Install Python files
1341                  (with-directory-excursion "kaldigstserver"
1342                    (for-each (cut install-file <> share)
1343                              (find-files "." ".*")))
1345                  ;; Install sample configuration files
1346                  (for-each (cut install-file <> share)
1347                            (find-files "." "\\.yaml"))
1349                  ;; Install executables
;; Each executable is a generated bash wrapper that sets PYTHONPATH
;; and GST_PLUGIN_PATH, then execs the corresponding script in share/.
1351                  (let* ((server (string-append bin "/kaldi-gst-server"))
1352                         (client (string-append bin "/kaldi-gst-client"))
1353                         (worker (string-append bin "/kaldi-gst-worker"))
1354                         (PYTHONPATH (getenv "PYTHONPATH"))
1355                         (GST_PLUGIN_PATH (string-append
1356                                           (assoc-ref inputs "gst-kaldi-nnet2-online")
1357                                           "/lib/gstreamer-1.0:${GST_PLUGIN_PATH}"))
1358                         (wrap (lambda (wrapper what)
1359                                 (with-output-to-file wrapper
1363 export PYTHONPATH=~a
1364 export GST_PLUGIN_PATH=~a
1365 exec ~a ~a/~a \"$@\"~%"
1366                                             (which "bash") PYTHONPATH GST_PLUGIN_PATH
1367                                             (which "python") share what)))
1368                                   (chmod wrapper #o555))))
1370                      (list server client worker)
1371                      (list "master_server.py"
1376        `(("gst-kaldi-nnet2-online" ,gst-kaldi-nnet2-online)
1377          ("python" ,python-wrapper)
1378          ("python-pygobject" ,python-pygobject)
1379          ("python-pyyaml" ,python-pyyaml)
1380          ("python-tornado" ,python-tornado-6)))
1381       (home-page "https://github.com/alumae/kaldi-gstreamer-server")
1382       (synopsis "Real-time full-duplex speech recognition server")
1383       (description "This is a real-time full-duplex speech recognition server,
1384 based on the Kaldi toolkit and the GStreamer framework and implemented in
1386       (license license:bsd-2))))
1388 ;; Note that Tensorflow includes a "third_party" directory, which seems to not
1389 ;; only contain modified subsets of upstream library source code, but also
1390 ;; adapter headers provided by Google (such as the fft.h header, which is not
1391 ;; part of the upstream project code). The Tensorflow code includes headers
1392 ;; from the "third_party" directory. It does not look like we can replace
1393 ;; these headers with unmodified upstream files, so we keep them.
;; TensorFlow 1.x built via the (since-removed upstream) CMake build in
;; tensorflow/contrib/cmake, with all bundled third-party downloads
;; replaced by Guix inputs or pinned source tarballs.
;; NOTE(review): many lines are elided in this extraction (inner numbering
;; jumps); every omitted form must be confirmed against the full file.
1394 (define-public tensorflow
1402 (url "https://github.com/tensorflow/tensorflow")
1403 (commit (string-append "v" version))))
1404 (file-name (string-append "tensorflow-" version "-checkout"))
1407 "0a9kwha395g3wgxfwln5j8vn9nkspmd75xldrlqdq540w996g8xa"))))
1408 (build-system cmake-build-system)
1410 `(#:tests? #f ; no "check" target
1411 #:build-type "Release"
;; Configure flags: point the CMake build at Guix-provided libraries
;; instead of the bundled/downloaded copies.
1413 (let ((protobuf (assoc-ref %build-inputs "protobuf"))
1414 (protobuf:native (assoc-ref %build-inputs "protobuf:native"))
1415 (jsoncpp (assoc-ref %build-inputs "jsoncpp"))
1416 (snappy (assoc-ref %build-inputs "snappy"))
1417 (sqlite (assoc-ref %build-inputs "sqlite")))
1419 ;; Use protobuf from Guix
1420 (string-append "-Dprotobuf_STATIC_LIBRARIES="
1421 protobuf "/lib/libprotobuf.so")
1422 (string-append "-DPROTOBUF_PROTOC_EXECUTABLE="
1423 protobuf:native "/bin/protoc")
1425 ;; Use snappy from Guix
1426 (string-append "-Dsnappy_STATIC_LIBRARIES="
1427 snappy "/lib/libsnappy.so")
1428 ;; Yes, this is not actually the include directory but a prefix...
1429 (string-append "-Dsnappy_INCLUDE_DIR=" snappy)
1431 ;; Use jsoncpp from Guix
1432 (string-append "-Djsoncpp_STATIC_LIBRARIES="
1433 jsoncpp "/lib/libjsoncpp.so")
1434 ;; Yes, this is not actually the include directory but a prefix...
1435 (string-append "-Djsoncpp_INCLUDE_DIR=" jsoncpp)
1437 ;; Use sqlite from Guix
1438 (string-append "-Dsqlite_STATIC_LIBRARIES="
1439 sqlite "/lib/libsqlite.a")
1441 ;; Use system libraries wherever possible.  Currently, this
1442 ;; only affects zlib.
1443 "-Dsystemlib_ALL=ON"
1444 "-Dtensorflow_ENABLE_POSITION_INDEPENDENT_CODE=ON"
1445 "-Dtensorflow_BUILD_SHARED_LIB=ON"
1446 "-Dtensorflow_OPTIMIZE_FOR_NATIVE_ARCH=OFF"
1447 "-Dtensorflow_ENABLE_SSL_SUPPORT=OFF"
1448 "-Dtensorflow_BUILD_CONTRIB_KERNELS=OFF"))
;; python-version (from the python-build-system module) is needed by
;; the install-python phase below.
1451 #:modules ((ice-9 ftw)
1453 (guix build cmake-build-system)
1454 ((guix build python-build-system)
1455 #:select (python-version)))
1456 #:imported-modules (,@%cmake-build-system-modules
1457 (guix build python-build-system))
1459 (modify-phases %standard-phases
1460 (add-after 'unpack 'set-source-file-times-to-1980
1461 ;; At the end of the tf_python_build_pip_package target, a ZIP
1462 ;; archive should be generated via bdist_wheel, but it fails with
1463 ;; "ZIP does not support timestamps before 1980".  Luckily,
1464 ;; SOURCE_DATE_EPOCH is respected, which we set to some time in
1466 (lambda _ (setenv "SOURCE_DATE_EPOCH" "315532800") #t))
1467 ;; See https://github.com/tensorflow/tensorflow/issues/20517#issuecomment-406373913
1468 (add-after 'unpack 'python3.7-compatibility
1470 (substitute* '("tensorflow/python/eager/pywrap_tfe_src.cc"
1471 "tensorflow/python/lib/core/ndarray_tensor.cc"
1472 "tensorflow/python/lib/core/py_func.cc")
1473 (("PyUnicode_AsUTF8") "(char *)PyUnicode_AsUTF8"))
;; "async" became a keyword in Python 3.7; rename the struct field.
1474 (substitute* "tensorflow/c/eager/c_api.h"
1475 (("unsigned char async")
1476 "unsigned char is_async"))
1478 ;; Remove dependency on tensorboard, a complicated but probably
1479 ;; optional package.
1480 (substitute* "tensorflow/tools/pip_package/setup.py"
1481 ((".*'tensorboard >.*") ""))
1483 ;; Fix the build with python-3.8, taken from rejected upstream patch:
1484 ;; https://github.com/tensorflow/tensorflow/issues/34197
1485 (substitute* (find-files "tensorflow/python" ".*\\.cc$")
1486 (("(nullptr,)(\\ +/. tp_print)" _ _ tp_print)
1487 (string-append "NULL, " tp_print)))
1489 (add-after 'python3.7-compatibility 'chdir
1490 (lambda _ (chdir "tensorflow/contrib/cmake") #t))
;; Replace every external download with either nothing (the source is
;; provided via *-src inputs, unpacked later) or a Guix package.
1491 (add-after 'chdir 'disable-downloads
1492 (lambda* (#:key inputs #:allow-other-keys)
1493 (substitute* (find-files "external" "\\.cmake$")
1494 (("GIT_REPOSITORY.*") "")
1497 "DOWNLOAD_COMMAND \"\"\nPREFIX "))
1499 ;; Use packages from Guix
1500 (let ((grpc (assoc-ref inputs "grpc")))
1501 (substitute* "CMakeLists.txt"
1503 (("include\\(sqlite\\)") "")
1504 (("\\$\\{sqlite_STATIC_LIBRARIES\\}")
1505 (string-append (assoc-ref inputs "sqlite")
1506 "/lib/libsqlite3.so"))
1507 (("sqlite_copy_headers_to_destination") "")
1510 (("include\\(png\\)") "")
1511 (("\\$\\{png_STATIC_LIBRARIES\\}")
1512 (string-append (assoc-ref inputs "libpng")
1513 "/lib/libpng16.so"))
1514 (("png_copy_headers_to_destination") "")
1517 (("include\\(jpeg\\)") "")
1518 (("\\$\\{jpeg_STATIC_LIBRARIES\\}")
1519 (string-append (assoc-ref inputs "libjpeg")
1521 (("jpeg_copy_headers_to_destination") "")
1524 (("include\\(gif\\)") "")
1525 (("\\$\\{gif_STATIC_LIBRARIES\\}")
1526 (string-append (assoc-ref inputs "giflib")
1528 (("gif_copy_headers_to_destination") "")
1531 (("include\\(lmdb\\)") "")
1532 (("\\$\\{lmdb_STATIC_LIBRARIES\\}")
1533 (string-append (assoc-ref inputs "lmdb")
1535 (("lmdb_copy_headers_to_destination") "")
1538 (("include\\(protobuf\\)") "")
1539 (("protobuf_copy_headers_to_destination") "")
1540 (("^ +protobuf") "")
1543 (("include\\(grpc\\)")
1544 "find_package(grpc REQUIRED NAMES gRPC)")
1545 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES grpc\\)") "")
1548 (("include\\(eigen\\)")
1549 (string-append "find_package(eigen REQUIRED NAMES Eigen3)
1550 set(eigen_INCLUDE_DIRS ${CMAKE_CURRENT_BINARY_DIR}/external/eigen_archive "
1551 (assoc-ref inputs "eigen") "/include/eigen3)"))
1555 (("include\\(snappy\\)")
1556 "add_definitions(-DTF_USE_SNAPPY)")
1557 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES snappy\\)") "")
1560 (("include\\(jsoncpp\\)") "")
1561 (("^ +jsoncpp") ""))
;; Use the system grpc_cpp_plugin and link against the static
;; unsecure gRPC libraries from the "grpc" (static) input.
1563 (substitute* "tf_core_framework.cmake"
1565 (("\\$\\{GRPC_BUILD\\}/grpc_cpp_plugin")
1566 (which "grpc_cpp_plugin"))
1567 ;; Link with gRPC libraries
1568 (("add_library\\(tf_protos_cc.*" m)
1570 (format #f "\ntarget_link_libraries(tf_protos_cc PRIVATE \
1571 ~a/lib/libgrpc++_unsecure.a \
1572 ~a/lib/libgrpc_unsecure.a \
1573 ~a/lib/libaddress_sorting.a \
1578 (assoc-ref inputs "c-ares"))))))
1579 (substitute* "tf_tools.cmake"
1580 (("add_dependencies\\(\\$\\{proto_text.*") ""))
1581 ;; Remove dependency on bundled grpc
1582 (substitute* "tf_core_distributed_runtime.cmake"
1583 (("tf_core_cpu grpc") "tf_core_cpu"))
1585 ;; This directory is a dependency of many targets.
1586 (mkdir-p "protobuf")
;; Unpack the pinned *-src inputs into the locations the CMake
;; external projects expect, dispatching on archive type.
1588 (add-after 'configure 'unpack-third-party-sources
1589 (lambda* (#:key inputs #:allow-other-keys)
1590 ;; This is needed to configure bundled packages properly.
1591 (setenv "CONFIG_SHELL" (which "bash"))
1594 (let* ((what (assoc-ref inputs (string-append name "-src")))
1595 (name* (string-map (lambda (c)
1598 (where (string-append "../build/" name* "/src/" name*)))
1600 ((string-suffix? ".zip" what)
1602 (with-directory-excursion where
1603 (invoke "unzip" what)))
1604 ((string-suffix? ".tar.gz" what)
1606 (invoke "tar" "xf" what
1607 "-C" where "--strip-components=1"))
;; Otherwise the input is a directory (git checkout): copy it.
1609 (let ((parent (dirname where)))
1611 (with-directory-excursion parent
1612 (when (file-exists? name*)
1613 (delete-file-recursively name*))
1614 (copy-recursively what name*)
1615 (map make-file-writable
1616 (find-files name* ".*"))))))))
;; The cub zip unpacks into a versioned directory; normalize it.
1626 (rename-file "../build/cub/src/cub/cub-1.8.0/"
1627 "../build/cub/src/cub/cub/")
1629 (add-after 'unpack 'fix-python-build
1630 (lambda* (#:key inputs outputs #:allow-other-keys)
1631 (mkdir-p "protobuf-src")
1632 (invoke "tar" "xf" (assoc-ref inputs "protobuf:src")
1633 "-C" "protobuf-src" "--strip-components=1")
1634 (mkdir-p "eigen-src")
1635 (invoke "tar" "xf" (assoc-ref inputs "eigen:src")
1636 "-C" "eigen-src" "--strip-components=1")
1638 (substitute* "tensorflow/contrib/cmake/tf_python.cmake"
1639 ;; Ensure that all Python dependencies can be found at build time.
1640 (("PYTHONPATH=\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/tf_python" m)
1641 (string-append m ":" (getenv "PYTHONPATH")))
1642 ;; Take protobuf source files from our source package.
1643 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/protobuf/src/protobuf/src/google")
1644 (string-append (getcwd) "/protobuf-src/src/google")))
1646 (substitute* '("tensorflow/contrib/cmake/tf_shared_lib.cmake"
1647 "tensorflow/contrib/cmake/tf_python.cmake")
1648 ;; Take Eigen source files from our source package.
1649 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/eigen/src/eigen/")
1650 (string-append (getcwd) "/eigen-src/"))
1651 ;; Take Eigen headers from our own package.
1652 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/external/eigen_archive")
1653 (string-append (assoc-ref inputs "eigen") "/include/eigen3")))
1655 ;; Correct the RUNPATH of ops libraries generated for Python.
1656 ;; TODO: this doesn't work :(
1657 ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
1658 ;; warning: RUNPATH contains bogus entries: ("/tmp/guix-build-tensorflow-1.9.0.drv-0/source/tensorflow/contrib/build")
1659 ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
1660 ;; error: depends on 'libpywrap_tensorflow_internal.so', which
1661 ;; cannot be found in RUNPATH ...
1662 (substitute* "tensorflow/contrib/cmake/tf_cc_ops.cmake"
1663 (("set_target_properties.*")
1664 (string-append "set_target_properties(${_AT_TARGET} PROPERTIES \
1665 COMPILE_FLAGS ${target_compile_flags} \
1666 INSTALL_RPATH_USE_LINK_PATH TRUE \
1667 INSTALL_RPATH " (assoc-ref outputs "out") "/lib)\n")))
1669 (add-after 'build 'build-pip-package
1670 (lambda* (#:key outputs #:allow-other-keys)
1672 (string-append "-Wl,-rpath="
1673 (assoc-ref outputs "out") "/lib"))
1674 (invoke "make" "tf_python_build_pip_package")
1676 (add-after 'build-pip-package 'install-python
1677 (lambda* (#:key inputs outputs #:allow-other-keys)
1678 (let ((out (assoc-ref outputs "out"))
1679 (wheel (car (find-files "../build/tf_python/dist/" "\\.whl$")))
1680 (python-version (python-version
1681 (assoc-ref inputs "python"))))
1682 (invoke "python" "-m" "pip" "install" wheel
1683 (string-append "--prefix=" out))
1685 ;; XXX: broken RUNPATH, see fix-python-build phase.
1688 out "/lib/python" python-version
1689 "/site-packages/tensorflow/contrib/"
1690 "seq2seq/python/ops/lib_beam_search_ops.so"))
1693 `(("pkg-config" ,pkg-config)
1694 ("protobuf:native" ,protobuf-3.6) ; protoc
1695 ("protobuf:src" ,(package-source protobuf-3.6))
1696 ("eigen:src" ,(package-source eigen-for-tensorflow))
1697 ;; install_pip_packages.sh wants setuptools 39.1.0 specifically.
1698 ("python-setuptools" ,python-setuptools-for-tensorflow)
1700 ;; The commit hashes and URLs for third-party source code are taken
1701 ;; from "tensorflow/workspace.bzl".
1703 ,(let ((commit "ee7aa02")
1708 (url "https://boringssl.googlesource.com/boringssl")
1710 (file-name (string-append "boringssl-0-" revision
1711 (string-take commit 7)
1715 "1jf693q0nw0adsic6cgmbdx6g7wr4rj4vxa8j1hpn792fqhd8wgw")))))
1717 ,(let ((version "1.8.0"))
1720 (uri (string-append "https://mirror.bazel.build/github.com/NVlabs/"
1721 "cub/archive/" version ".zip"))
1722 (file-name (string-append "cub-" version ".zip"))
1725 "1hsqikqridb90dkxkjr2918dcry6pfh46ccnwrzawl56aamhdykb")))))
1726 ("double-conversion-src"
1727 ,(let ((commit "5664746")
1732 (url "https://github.com/google/double-conversion")
1735 (git-file-name "double-conversion"
1736 (string-append "0-" revision "."
1737 (string-take commit 7))))
1740 "1h5lppqqxcvdg5jq42i5msgwx20ryij3apvmndflngrgdpc04gn1")))))
1742 ,(let ((commit "816a4ae622e964763ca0862d9dbd19324a1eaf45"))
1746 "https://mirror.bazel.build/github.com/google/farmhash/archive/"
1748 (file-name (string-append "farmhash-0-" (string-take commit 7)
1752 "185b2xdxl4d4cnsnv6abg8s22gxvx8673jq2yaq85bz4cdy58q35")))))
1753 ;; The license notice on the home page at
1754 ;; http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html says:
1755 ;; Copyright Takuya OOURA, 1996-2001
1757 ;; You may use, copy, modify and distribute this code for any purpose
1758 ;; (include commercial use) and without fee.  Please refer to this
1759 ;; package when you modify this code.
1761 ;; We take the identical tarball from the Bazel mirror, because the URL
1762 ;; at the home page is not versioned and might change.
1766 (uri "https://mirror.bazel.build/www.kurims.kyoto-u.ac.jp/~ooura/fft.tgz")
1767 (file-name "fft2d.tar.gz")
1770 "15jjkfvhqvl2c0753d2di8hz0pyzn598g74wqy79awdrf1y67fsj"))))
1772 ,(let ((commit "be5edafc2e1a455768e260ccd68ae7317b6690ee")
1777 (url "https://github.com/google/highwayhash")
1779 (file-name (string-append "highwayhash-0-" revision
1780 (string-take commit 7)
1784 "154jwf98cyy54hldr94pgjn85zynly3abpnc1avmb8a18lzwjyb6")))))
1786 ,(let ((version "0559ce013feac8db639ee1bf776aca0325d28777")
1790 (uri (string-append "https://mirror.bazel.build/"
1791 "github.com/google/nsync/archive/"
1793 (file-name (string-append "nsync-0." revision
1794 "-" (string-take version 7)
1798 "0qdkyqym34x739mmzv97ah5r7ph462v5xkxqxvidmcfqbi64b132")))))
1800 ,(let ((commit "e7efc48")
1805 (url "https://github.com/google/re2")
1807 (file-name (string-append "re2-0-" revision
1808 (string-take commit 7)
1812 "161g9841rjfsy5pn52fcis0s9hdr7rxvb06pad38j5rppfihvign")))))
1813 ("googletest" ,googletest)
;; NOTE(review): the label "python-termcolo" below looks like a typo for
;; "python-termcolor"; the referenced variable is python-termcolor.
;; Confirm against the full file and fix in a code change if so.
1817 `(("python-absl-py" ,python-absl-py)
1818 ("python-astor" ,python-astor)
1819 ("python-gast" ,python-gast)
1820 ("python-grpcio" ,python-grpcio)
1821 ("python-numpy" ,python-numpy)
1822 ("python-protobuf" ,python-protobuf-3.6)
1823 ("python-six" ,python-six)
1824 ("python-termcolo" ,python-termcolor)
1825 ("python-wheel" ,python-wheel)))
1827 `(("c-ares" ,c-ares)
1828 ("eigen" ,eigen-for-tensorflow)
1829 ("gemmlowp" ,gemmlowp-for-tensorflow)
1831 ("libjpeg" ,libjpeg-turbo)
1834 ("grpc" ,grpc-1.16.1 "static")
1835 ("grpc:bin" ,grpc-1.16.1)
1836 ("jsoncpp" ,jsoncpp-for-tensorflow)
1839 ("protobuf" ,protobuf-3.6)
1840 ("python" ,python-wrapper)
1842 (home-page "https://tensorflow.org")
1843 (synopsis "Machine learning framework")
1845 "TensorFlow is a flexible platform for building and training machine
1846 learning models.  It provides a library for high performance numerical
1847 computation and includes high level Python APIs, including both a sequential
1848 API for beginners that allows users to build models quickly by plugging
1849 together building blocks and a subclassing API with an imperative style for
1850 advanced research.")
1851 (license license:asl2.0)))
;; iml: interpretable-ML helper library from PyPI.
;; NOTE(review): version/package skeleton lines are elided in this
;; extraction; confirm against the full file.
1853 (define-public python-iml
1860        (uri (pypi-uri "iml" version))
1863          "1k8szlpm19rcwcxdny9qdm3gmaqq8akb4xlvrzyz8c2d679aak6l"))))
1864     (build-system python-build-system)
1866      `(("ipython" ,python-ipython)
1867        ("numpy" ,python-numpy)
1868        ("pandas" ,python-pandas)
1869        ("scipy" ,python-scipy)))
1871      `(("nose" ,python-nose)))
1872     (home-page "https://github.com/interpretable-ml/iml")
1873     (synopsis "Interpretable Machine Learning (iML) package")
1874     (description "Interpretable ML (iML) is a set of data type objects,
1875 visualizations, and interfaces that can be used by any method designed to
1876 explain the predictions of machine learning models (or really the output of
1877 any function).  It currently contains the interface and IO code from the Shap
1878 project, and it will potentially also do the same for the Lime project.")
1879     (license license:expat)))
;; Keras Applications from PyPI.  Tests are disabled to break the
;; Keras <-> keras-applications dependency cycle (see comment below).
1881 (define-public python-keras-applications
1883     (name "python-keras-applications")
1888        (uri (pypi-uri "Keras_Applications" version))
1891          "1rcz31ca4axa6kzhjx4lwqxbg4wvlljkj8qj9a7p9sfd5fhzjyam"))))
1892     (build-system python-build-system)
1893     ;; The tests require Keras, but this package is needed to build Keras.
1894     (arguments '(#:tests? #f))
1896      `(("python-h5py" ,python-h5py)
1897        ("python-numpy" ,python-numpy)))
;; Test-only inputs, kept for completeness even with #:tests? #f.
1899      `(("python-pytest" ,python-pytest)
1900        ("python-pytest-cov" ,python-pytest-cov)
1901        ("python-pytest-pep8" ,python-pytest-pep8)
1902        ("python-pytest-xdist" ,python-pytest-xdist)))
1903     (home-page "https://github.com/keras-team/keras-applications")
1904     (synopsis "Reference implementations of popular deep learning models")
1906      "This package provides reference implementations of popular deep learning
1907 models for use with the Keras deep learning framework.")
1908     (license license:expat)))
;; Keras Preprocessing from PyPI; test inputs include tensorflow.
;; NOTE(review): version/package skeleton lines are elided in this
;; extraction; confirm against the full file.
1910 (define-public python-keras-preprocessing
1912     (name "python-keras-preprocessing")
1917        (uri (pypi-uri "Keras_Preprocessing" version))
1920          "1r98nm4k1svsqjyaqkfk23i31bl1kcfcyp7094yyj3c43phfp3as"))))
1921     (build-system python-build-system)
1923      `(("python-numpy" ,python-numpy)
1924        ("python-six" ,python-six)))
;; Test-only dependencies.
1926      `(("python-pandas" ,python-pandas)
1927        ("python-pillow" ,python-pillow)
1928        ("python-pytest" ,python-pytest)
1929        ("python-pytest-cov" ,python-pytest-cov)
1930        ("python-pytest-xdist" ,python-pytest-xdist)
1931        ("tensorflow" ,tensorflow)))
1932     (home-page "https://github.com/keras-team/keras-preprocessing/")
1933     (synopsis "Data preprocessing and augmentation for deep learning models")
1935      "Keras Preprocessing is the data preprocessing and data augmentation
1936 module of the Keras deep learning library.  It provides utilities for working
1937 with image data, text data, and sequence data.")
1938     (license license:expat)))
;; Keras from PyPI, patched for the packaged TensorFlow backend; test
;; files for unavailable backends/features are removed before testing.
;; NOTE(review): lines are elided in this extraction; confirm omitted
;; forms against the full file.
1940 (define-public python-keras
1942     (name "python-keras")
1947        (uri (pypi-uri "Keras" version))
1948        (patches (search-patches "python-keras-integration-test.patch"))
1951          "1j8bsqzh49vjdxy6l1k4iwax5vpjzniynyd041xjavdzvfii1dlh"))))
1952     (build-system python-build-system)
1955        (modify-phases %standard-phases
;; Only the TensorFlow backend is packaged; drop Theano/CNTK backends
;; and the tests that would exercise them.
1956          (add-after 'unpack 'remove-tests-for-unavailable-features
1958              (delete-file "keras/backend/theano_backend.py")
1959              (delete-file "keras/backend/cntk_backend.py")
1960              (delete-file "tests/keras/backend/backend_test.py")
1962              ;; FIXME: This doesn't work because Tensorflow is missing the
1963              ;; coder ops library.
1964              (delete-file "tests/keras/test_callbacks.py")
1968              ;; These tests attempt to download data files from the internet.
1969              (delete-file "tests/integration_tests/test_datasets.py")
1970              (delete-file "tests/integration_tests/imagenet_utils_test.py")
;; Run the test suite against the built tree on PYTHONPATH.
1972                (setenv "PYTHONPATH"
1973                        (string-append (getcwd) "/build/lib:"
1974                                       (getenv "PYTHONPATH")))
1975                (invoke "py.test" "-v"
1976                        "-p" "no:cacheprovider"
1977                        "--ignore" "keras/utils"))))))
1979      `(("python-h5py" ,python-h5py)
1980        ("python-keras-applications" ,python-keras-applications)
1981        ("python-keras-preprocessing" ,python-keras-preprocessing)
1982        ("python-numpy" ,python-numpy)
1983        ("python-pydot" ,python-pydot)
1984        ("python-pyyaml" ,python-pyyaml)
1985        ("python-scipy" ,python-scipy)
1986        ("python-six" ,python-six)
1987        ("tensorflow" ,tensorflow)
1988        ("graphviz" ,graphviz)))
;; Test-only dependencies.
1990      `(("python-pandas" ,python-pandas)
1991        ("python-pytest" ,python-pytest)
1992        ("python-pytest-cov" ,python-pytest-cov)
1993        ("python-pytest-pep8" ,python-pytest-pep8)
1994        ("python-pytest-timeout" ,python-pytest-timeout)
1995        ("python-pytest-xdist" ,python-pytest-xdist)
1996        ("python-sphinx" ,python-sphinx)
1997        ("python-requests" ,python-requests)))
1998     (home-page "https://github.com/keras-team/keras")
1999     (synopsis "High-level deep learning framework")
2000     (description "Keras is a high-level neural networks API, written in Python
2001 and capable of running on top of TensorFlow.  It was developed with a focus on
2002 enabling fast experimentation.  Use Keras if you need a deep learning library
2006 @item Allows for easy and fast prototyping (through user friendliness,
2007 modularity, and extensibility).
2008 @item Supports both convolutional networks and recurrent networks, as well as
2009 combinations of the two.
2010 @item Runs seamlessly on CPU and GPU.
2012     (license license:expat)))
;; Gloo collective-communications library, built from a pinned git commit
;; with CMake; tests are built (-DBUILD_TEST=1) and run via `make gloo_test'.
;; NOTE(review): the `define-public' header line and other forms are elided
;; by this extraction; confirm the full definition against the source file.
2015   (let ((version "0.0.0") ; no proper version tag
2016         (commit "ca528e32fea9ca8f2b16053cff17160290fc84ce")
2020       (version (git-version version revision commit))
2025               (url "https://github.com/facebookincubator/gloo")
2027               (file-name (git-file-name name version))
2030                 "1q9f80zy75f6njrzrqkmhc0g3qxs4gskr7ns2jdqanxa2ww7a99w"))))
2031       (build-system cmake-build-system)
2033        `(("googletest" ,googletest)))
2035        `(#:configure-flags '("-DBUILD_TEST=1")
2037          (modify-phases %standard-phases
2040                (invoke "make" "gloo_test")
2042       (synopsis "Collective communications library")
2044        "Gloo is a collective communications library.  It comes with a
2045 number of collective algorithms useful for machine learning applications.
2046 These include a barrier, broadcast, and allreduce.")
2047       (home-page "https://github.com/facebookincubator/gloo")
2048       (license license:bsd-3))))
;; UMAP dimension-reduction library from PyPI.
;; NOTE(review): version/package skeleton lines are elided in this
;; extraction; confirm against the full file.
2050 (define-public python-umap-learn
2052     (name "python-umap-learn")
2057        (uri (pypi-uri "umap-learn" version))
2060          "02ada2yy6km6zgk2836kg1c97yrcpalvan34p8c57446finnpki1"))))
2061     (build-system python-build-system)
;; Test-only dependencies.
2063      `(("python-joblib" ,python-joblib)
2064        ("python-nose" ,python-nose)))
;; Runtime dependencies.
2066      `(("python-numba" ,python-numba)
2067        ("python-numpy" ,python-numpy)
2068        ("python-scikit-learn" ,python-scikit-learn)
2069        ("python-scipy" ,python-scipy)))
2070     (home-page "https://github.com/lmcinnes/umap")
2072      "Uniform Manifold Approximation and Projection")
2074      "Uniform Manifold Approximation and Projection is a dimension reduction
2075 technique that can be used for visualisation similarly to t-SNE, but also for
2076 general non-linear dimension reduction.")
2077     (license license:bsd-3)))