05646dd77024fffb0e90710460337fc1957c8ed2
[jackhill/guix/guix.git] / gnu / packages / machine-learning.scm
1 ;;; GNU Guix --- Functional package management for GNU
2 ;;; Copyright © 2015, 2016, 2017, 2018, 2019, 2020 Ricardo Wurmus <rekado@elephly.net>
3 ;;; Copyright © 2016, 2020 Efraim Flashner <efraim@flashner.co.il>
4 ;;; Copyright © 2016, 2017, 2020 Marius Bakke <mbakke@fastmail.com>
5 ;;; Copyright © 2016 Hartmut Goebel <h.goebel@crazy-compilers.com>
6 ;;; Copyright © 2018, 2019, 2020 Tobias Geerinckx-Rice <me@tobias.gr>
7 ;;; Copyright © 2018 Kei Kebreau <kkebreau@posteo.net>
8 ;;; Copyright © 2018 Mark Meyer <mark@ofosos.org>
9 ;;; Copyright © 2018 Ben Woodcroft <donttrustben@gmail.com>
10 ;;; Copyright © 2018 Fis Trivial <ybbs.daans@hotmail.com>
11 ;;; Copyright © 2018 Julien Lepiller <julien@lepiller.eu>
12 ;;; Copyright © 2018 Björn Höfling <bjoern.hoefling@bjoernhoefling.de>
13 ;;; Copyright © 2019 Nicolas Goaziou <mail@nicolasgoaziou.fr>
14 ;;; Copyright © 2019, 2020 Guillaume Le Vaillant <glv@posteo.net>
15 ;;; Copyright © 2019 Brett Gilio <brettg@gnu.org>
16 ;;; Copyright © 2020 Konrad Hinsen <konrad.hinsen@fastmail.net>
17 ;;; Copyright © 2020 Edouard Klein <edk@beaver-labs.com>
18 ;;; Copyright © 2020 Vinicius Monego <monego@posteo.net>
19 ;;;
20 ;;; This file is part of GNU Guix.
21 ;;;
22 ;;; GNU Guix is free software; you can redistribute it and/or modify it
23 ;;; under the terms of the GNU General Public License as published by
24 ;;; the Free Software Foundation; either version 3 of the License, or (at
25 ;;; your option) any later version.
26 ;;;
27 ;;; GNU Guix is distributed in the hope that it will be useful, but
28 ;;; WITHOUT ANY WARRANTY; without even the implied warranty of
29 ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
30 ;;; GNU General Public License for more details.
31 ;;;
32 ;;; You should have received a copy of the GNU General Public License
33 ;;; along with GNU Guix. If not, see <http://www.gnu.org/licenses/>.
34
35 (define-module (gnu packages machine-learning)
36 #:use-module ((guix licenses) #:prefix license:)
37 #:use-module (guix packages)
38 #:use-module (guix utils)
39 #:use-module (guix download)
40 #:use-module (guix svn-download)
41 #:use-module (guix build-system cmake)
42 #:use-module (guix build-system gnu)
43 #:use-module (guix build-system ocaml)
44 #:use-module (guix build-system python)
45 #:use-module (guix build-system r)
46 #:use-module (guix git-download)
47 #:use-module (gnu packages)
48 #:use-module (gnu packages adns)
49 #:use-module (gnu packages algebra)
50 #:use-module (gnu packages audio)
51 #:use-module (gnu packages autotools)
52 #:use-module (gnu packages base)
53 #:use-module (gnu packages bash)
54 #:use-module (gnu packages boost)
55 #:use-module (gnu packages check)
56 #:use-module (gnu packages compression)
57 #:use-module (gnu packages cmake)
58 #:use-module (gnu packages cran)
59 #:use-module (gnu packages databases)
60 #:use-module (gnu packages dejagnu)
61 #:use-module (gnu packages gcc)
62 #:use-module (gnu packages glib)
63 #:use-module (gnu packages graphviz)
64 #:use-module (gnu packages gstreamer)
65 #:use-module (gnu packages image)
66 #:use-module (gnu packages linux)
67 #:use-module (gnu packages maths)
68 #:use-module (gnu packages mpi)
69 #:use-module (gnu packages ocaml)
70 #:use-module (gnu packages onc-rpc)
71 #:use-module (gnu packages perl)
72 #:use-module (gnu packages pkg-config)
73 #:use-module (gnu packages protobuf)
74 #:use-module (gnu packages python)
75 #:use-module (gnu packages python-check)
76 #:use-module (gnu packages python-science)
77 #:use-module (gnu packages python-web)
78 #:use-module (gnu packages python-xyz)
79 #:use-module (gnu packages rpc)
80 #:use-module (gnu packages serialization)
81 #:use-module (gnu packages sphinx)
82 #:use-module (gnu packages statistics)
83 #:use-module (gnu packages sqlite)
84 #:use-module (gnu packages swig)
85 #:use-module (gnu packages web)
86 #:use-module (gnu packages xml)
87 #:use-module (gnu packages xorg)
88 #:use-module (ice-9 match))
89
(define-public fann
  ;; The last release is >100 commits behind, so we package from git.
  (let ((commit "d71d54788bee56ba4cf7522801270152da5209d7"))
    (package
      (name "fann")
      (version (string-append "2.2.0-1." (string-take commit 8)))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/libfann/fann")
                      (commit commit)))
                (file-name (string-append name "-" version "-checkout"))
                (sha256
                 (base32
                  "0ibwpfrjs6q2lijs8slxjgzb2llcl6rk3v2ski4r6215g5jjhg3x"))))
      (build-system cmake-build-system)
      (arguments
       `(#:phases
         (modify-phases %standard-phases
           (replace 'check
             ;; There is no "check" target; the test binary is built in the
             ;; "tests" sub-directory of the build tree and must be run by
             ;; hand.  The former code bound an unused "out" variable and
             ;; prepended (getcwd) to an already-relative path; both were
             ;; dead weight.
             (lambda _
               (with-directory-excursion "tests"
                 (invoke "./fann_tests")))))))
      (home-page "http://leenissen.dk/fann/wp/")
      (synopsis "Fast Artificial Neural Network")
      (description
       "FANN is a neural network library, which implements multilayer
artificial neural networks in C with support for both fully connected and
sparsely connected networks.")
      (license license:lgpl2.1))))
121
(define-public libsvm
  (package
    (name "libsvm")
    (version "3.23")
    (source
     (origin
       (method url-fetch)
       (uri (string-append "https://www.csie.ntu.edu.tw/~cjlin/libsvm/"
                           name "-" version ".tar.gz"))
       (sha256
        (base32 "0jpaq0rr92x38p4nk3gjan79ip67m6p80anb28z1d8601miysyi5"))))
    (build-system gnu-build-system)
    (arguments
     `(#:tests? #f                      ;no "check" target
       #:phases
       (modify-phases %standard-phases
         (delete 'configure)            ;no configure script
         ;; The makefile has no "install" target, so copy the three
         ;; command-line tools into place manually.
         (replace 'install
           (lambda* (#:key outputs #:allow-other-keys)
             (let* ((out (assoc-ref outputs "out"))
                    (bin (string-append out "/bin/")))
               (mkdir-p bin)
               (for-each (lambda (tool)
                           (copy-file tool (string-append bin tool)))
                         '("svm-train"
                           "svm-predict"
                           "svm-scale")))
             #t)))))
    (home-page "https://www.csie.ntu.edu.tw/~cjlin/libsvm/")
    (synopsis "Library for Support Vector Machines")
    (description
     "LIBSVM is a machine learning library for support vector
classification, (C-SVC, nu-SVC), regression (epsilon-SVR, nu-SVR) and
distribution estimation (one-class SVM). It supports multi-class
classification.")
    (license license:bsd-3)))
158
(define-public python-libsvm
  (package (inherit libsvm)
    (name "python-libsvm")
    (build-system gnu-build-system)
    (arguments
     `(#:tests? #f                      ;no "check" target
       #:make-flags '("-C" "python")    ;build only the Python bindings
       #:phases
       (modify-phases %standard-phases
         (delete 'configure)
         ;; There is no "install" target; place the modules and the shared
         ;; library into <out>/lib/pythonX.Y/site-packages/ ourselves.
         (replace 'install
           (lambda* (#:key inputs outputs #:allow-other-keys)
             ;; Derive "X.Y" from the last five characters ("X.Y.Z") of the
             ;; Python input's store file name.
             (let ((site (string-append (assoc-ref outputs "out")
                                        "/lib/python"
                                        (string-take
                                         (string-take-right
                                          (assoc-ref inputs "python") 5) 3)
                                        "/site-packages/")))
               ;; Load the library from site-packages rather than from a
               ;; relative path.
               (substitute* "python/svm.py"
                 (("../libsvm.so.2") "libsvm.so.2"))
               (mkdir-p site)
               (for-each (lambda (file)
                           (copy-file file (string-append site (basename file))))
                         (find-files "python" "\\.py"))
               (copy-file "libsvm.so.2"
                          (string-append site "libsvm.so.2")))
             #t)))))
    (inputs
     `(("python" ,python)))
    (synopsis "Python bindings of libSVM")))
190
(define-public ghmm
  ;; The latest release candidate is several years old and a couple of fixes
  ;; have been published since, so we build from the SVN repository instead.
  (let ((svn-revision 2341))
    (package
      (name "ghmm")
      (version (string-append "0.9-rc3-0." (number->string svn-revision)))
      (source (origin
                (method svn-fetch)
                (uri (svn-reference
                      (url "http://svn.code.sf.net/p/ghmm/code/trunk")
                      (revision svn-revision)))
                (file-name (string-append name "-" version "-checkout"))
                (sha256
                 (base32
                  "0qbq1rqp94l530f043qzp8aw5lj7dng9wq0miffd7spd1ff638wq"))))
      (build-system gnu-build-system)
      (arguments
       `(#:imported-modules (,@%gnu-build-system-modules
                             (guix build python-build-system))
         #:modules ((guix build python-build-system)
                    ,@%gnu-build-system-modules)
         #:phases
         (modify-phases %standard-phases
           ;; The actual build system lives in the "ghmm" sub-directory.
           (add-after 'unpack 'enter-dir
             (lambda _ (chdir "ghmm") #t))
           ;; The Python unit tests import the installed modules, so run the
           ;; standard check phase only after installation.
           (delete 'check)
           (add-after 'install 'check
             (assoc-ref %standard-phases 'check))
           (add-before 'check 'fix-PYTHONPATH
             (lambda* (#:key inputs outputs #:allow-other-keys)
               ;; Make the just-installed site-packages visible to the tests.
               (let ((python-version (python-version
                                      (assoc-ref inputs "python"))))
                 (setenv "PYTHONPATH"
                         (string-append (getenv "PYTHONPATH")
                                        ":" (assoc-ref outputs "out")
                                        "/lib/python" python-version
                                        "/site-packages")))
               #t))
           (add-after 'enter-dir 'fix-runpath
             (lambda* (#:key outputs #:allow-other-keys)
               ;; Add an rpath entry so the Python extension can find the
               ;; installed libghmm at run time.
               (substitute* "ghmmwrapper/setup.py"
                 (("^(.*)extra_compile_args = \\[" line indent)
                  (string-append indent
                                 "extra_link_args = [\"-Wl,-rpath="
                                 (assoc-ref outputs "out") "/lib\"],\n"
                                 line
                                 "\"-Wl,-rpath="
                                 (assoc-ref outputs "out")
                                 "/lib\", ")))
               #t))
           (add-after 'enter-dir 'disable-broken-tests
             (lambda _
               (substitute* "tests/Makefile.am"
                 ;; GHMM_SILENT_TESTS is assumed to be a command.
                 (("TESTS_ENVIRONMENT.*") "")
                 ;; Do not build broken tests.
                 (("chmm .*") "")
                 (("read_fa .*") "")
                 (("mcmc .*") "")
                 (("label_higher_order_test.*$")
                  "label_higher_order_test\n"))

               ;; These Python unittests are broken as there is no gato.
               ;; See https://sourceforge.net/p/ghmm/support-requests/3/
               (substitute* "ghmmwrapper/ghmmunittests.py"
                 (("^(.*)def (testNewXML|testMultipleTransitionClasses|testNewXML)"
                   line indent)
                  (string-append indent
                                 "@unittest.skip(\"Disabled by Guix\")\n"
                                 line)))
               #t)))))
      (inputs
       `(("python" ,python-2)           ;only Python 2 is supported
         ("libxml2" ,libxml2)))
      (native-inputs
       `(("pkg-config" ,pkg-config)
         ("dejagnu" ,dejagnu)
         ("swig" ,swig)
         ("autoconf" ,autoconf)
         ("automake" ,automake)
         ("libtool" ,libtool)))
      (home-page "http://ghmm.org")
      (synopsis "Hidden Markov Model library")
      (description
       "The General Hidden Markov Model library (GHMM) is a C library with
additional Python bindings implementing a wide range of types of @dfn{Hidden
Markov Models} (HMM) and algorithms: discrete, continuous emissions, basic
training, HMM clustering, HMM mixtures.")
      (license license:lgpl2.0+))))
282
(define-public mcl
  (package
    (name "mcl")
    (version "14.137")
    (source (origin
              (method url-fetch)
              ;; Upstream tarballs spell the version with dashes.
              (uri (string-append
                    "http://micans.org/mcl/src/mcl-"
                    (string-replace-substring version "." "-")
                    ".tar.gz"))
              (sha256
               (base32
                "15xlax3z31lsn62vlg94hkm75nm40q4679amnfg13jm8m2bnhy5m"))))
    (build-system gnu-build-system)
    (arguments
     `(#:configure-flags (list "--enable-blast")))
    (inputs
     `(("perl" ,perl)))
    (home-page "http://micans.org/mcl/")
    (synopsis "Clustering algorithm for graphs")
    (description
     "The MCL algorithm is short for the @dfn{Markov Cluster Algorithm}, a
fast and scalable unsupervised cluster algorithm for graphs (also known as
networks) based on simulation of (stochastic) flow in graphs.")
    ;; In the LICENCE file and web page it says "The software is licensed
    ;; under the GNU General Public License, version 3.", but in several of
    ;; the source code files it suggests GPL3 or later.
    ;; http://listserver.ebi.ac.uk/pipermail/mcl-users/2016/000376.html
    (license license:gpl3)))
312
(define-public ocaml-mcl
  (package
    (name "ocaml-mcl")
    (version "12-068oasis4")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/fhcrc/mcl")
             (commit version)))
       (file-name (git-file-name name version))
       (sha256
        (base32
         "0009dc3h2jp3qg5val452wngpqnbfyhbcxylghq0mrjqxx0jdq5p"))))
    (build-system ocaml-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         (add-before 'configure 'patch-paths
           (lambda _
             ;; Use the store's shell instead of /bin/sh.
             (substitute* "configure"
               (("/bin/sh") (which "sh")))
             (substitute* "setup.ml"
               (("LDFLAGS=-fPIC")
                (string-append "LDFLAGS=-fPIC\"; \"SHELL=" (which "sh")))
               (("-std=c89") "-std=gnu99")

               ;; This is a mutable string, which is no longer supported. Use
               ;; a byte buffer instead.
               (("String.make \\(String.length s\\)")
                "Bytes.make (String.length s)")

               ;; These two belong together.
               (("OASISString.replace_chars")
                "Bytes.to_string (OASISString.replace_chars")
               ((" s;")
                " s);"))
             (substitute* "myocamlbuild.ml"
               (("std=c89") "std=gnu99"))
             ;; Since we build with a more recent OCaml, we have to use C99 or
             ;; later.  This causes problems with the old C code.
             (substitute* "src/impala/matrix.c"
               (("restrict") "restrict_"))
             #t)))))
    (native-inputs
     `(("ocamlbuild" ,ocamlbuild)))
    (home-page "https://github.com/fhcrc/mcl")
    (synopsis "OCaml wrappers around MCL")
    (description
     "This package provides OCaml bindings for the MCL graph clustering
algorithm.")
    (license license:gpl3)))
365
(define-public randomjungle
  (package
    (name "randomjungle")
    (version "2.1.0")
    (source
     (origin
       (method url-fetch)
       (uri (string-append
             "https://www.imbs.uni-luebeck.de/fileadmin/files/Software"
             "/randomjungle/randomjungle-" version ".tar_.gz"))
       (patches (search-patches "randomjungle-disable-static-build.patch"))
       (sha256
        (base32
         "12c8rf30cla71swx2mf4ww9mfd8jbdw5lnxd7dxhyw1ygrvg6y4w"))))
    (build-system gnu-build-system)
    (arguments
     `(#:configure-flags
       (list "--disable-static"
             (string-append "--with-boost="
                            (assoc-ref %build-inputs "boost")))
       #:phases
       (modify-phases %standard-phases
         ;; The old C++ code needs -fpermissive to compile with a modern GCC.
         (add-before 'configure 'set-CXXFLAGS
           (lambda _
             (setenv "CXXFLAGS" "-fpermissive ")
             #t)))))
    (inputs
     `(("boost" ,boost)
       ("gsl" ,gsl)
       ("libxml2" ,libxml2)
       ("zlib" ,zlib)))
    (native-inputs
     `(("gfortran" ,gfortran)
       ("gfortran:lib" ,gfortran "lib")))
    ;; Non-portable assembly instructions are used so building fails on
    ;; platforms other than x86_64 or i686.
    (supported-systems '("x86_64-linux" "i686-linux"))
    (home-page "https://www.imbs.uni-luebeck.de/forschung/software/details.html#c224")
    (synopsis "Implementation of the Random Forests machine learning method")
    (description
     "Random Jungle is an implementation of Random Forests. It is supposed to
analyse high dimensional data. In genetics, it can be used for analysing big
Genome Wide Association (GWA) data. Random Forests is a powerful machine
learning method. Most interesting features are variable selection, missing
value imputation, classifier creation, generalization error estimation and
sample proximities between pairs of cases.")
    (license license:gpl3+)))
414
(define-public openfst
  (package
    (name "openfst")
    (version "1.7.9")
    (source (origin
              (method url-fetch)
              (uri (string-append "http://www.openfst.org/twiki/pub/FST/"
                                  "FstDownload/openfst-" version ".tar.gz"))
              (sha256
               (base32
                "1pmx1yhn2gknj0an0zwqmzgwjaycapi896244np50a8y3nrsw6ck"))))
    (build-system gnu-build-system)
    (home-page "http://www.openfst.org")
    (synopsis "Library for weighted finite-state transducers")
    (description "OpenFst is a library for constructing, combining,
optimizing, and searching weighted finite-state transducers (FSTs).")
    (license license:asl2.0)))
432
(define-public shogun
  (package
    (name "shogun")
    (version "6.1.3")
    (source
     (origin
       (method url-fetch)
       (uri (string-append
             "ftp://shogun-toolbox.org/shogun/releases/"
             (version-major+minor version)
             "/sources/shogun-" version ".tar.bz2"))
       (sha256
        (base32
         "1rn9skm3nw6hr7mr3lgp2gfqhi7ii0lyxck7qmqnf8avq349s5jp"))
       (modules '((guix build utils)
                  (ice-9 rdelim)))
       (snippet
        '(begin
           ;; Remove non-free sources and files referencing them
           (for-each delete-file
                     (find-files "src/shogun/classifier/svm/"
                                 "SVMLight\\.(cpp|h)"))
           (for-each delete-file
                     (find-files "examples/undocumented/libshogun/"
                                 (string-append
                                  "(classifier_.*svmlight.*|"
                                  "evaluation_cross_validation_locked_comparison).cpp")))
           ;; Remove non-free functions: strip every region guarded by
           ;; "#ifdef USE_SVMLIGHT" ... "#endif //USE_SVMLIGHT" from FILE.
           (define (delete-ifdefs file)
             (with-atomic-file-replacement file
               (lambda (in out)
                 (let loop ((line (read-line in 'concat))
                            (skipping? #f))
                   (if (eof-object? line)
                       #t
                       (let ((skip-next?
                              (or (and skipping?
                                       (not (string-prefix?
                                             "#endif //USE_SVMLIGHT" line)))
                                  (string-prefix?
                                   "#ifdef USE_SVMLIGHT" line))))
                         (when (or (not skipping?)
                                   (and skipping? (not skip-next?)))
                           (display line out))
                         (loop (read-line in 'concat) skip-next?)))))))
           (for-each delete-ifdefs
                     (append
                      (find-files "src/shogun/classifier/mkl"
                                  "^MKLClassification\\.cpp")
                      (find-files "src/shogun/classifier/svm"
                                  "^SVMLightOneClass\\.(cpp|h)")
                      (find-files "src/shogun/multiclass"
                                  "^ScatterSVM\\.(cpp|h)")
                      (find-files "src/shogun/kernel/"
                                  "^(Kernel|CombinedKernel|ProductKernel)\\.(cpp|h)")
                      (find-files "src/shogun/regression/svr"
                                  "^(MKLRegression|SVRLight)\\.(cpp|h)")
                      (find-files "src/shogun/transfer/domain_adaptation"
                                  "^DomainAdaptationSVM\\.(cpp|h)")))
           #t))))
    (build-system cmake-build-system)
    (arguments
     '(#:tests? #f                      ;no check target
       #:phases
       (modify-phases %standard-phases
         ;; These symlinks point to non-existent data directories.
         (add-after 'unpack 'delete-broken-symlinks
           (lambda _
             (for-each delete-file '("applications/arts/data"
                                     "applications/asp/data"
                                     "applications/easysvm/data"
                                     "applications/msplicer/data"
                                     "applications/ocr/data"
                                     "examples/meta/data"
                                     "examples/undocumented/data"))
             #t))
         (add-after 'unpack 'change-R-target-path
           (lambda* (#:key outputs #:allow-other-keys)
             (substitute* '("src/interfaces/r/CMakeLists.txt"
                            "examples/meta/r/CMakeLists.txt")
               (("\\$\\{R_COMPONENT_LIB_PATH\\}")
                (string-append (assoc-ref outputs "out")
                               "/lib/R/library/")))
             #t))
         (add-after 'unpack 'fix-octave-modules
           (lambda* (#:key outputs #:allow-other-keys)
             (substitute* "src/interfaces/octave/CMakeLists.txt"
               (("^include_directories\\(\\$\\{OCTAVE_INCLUDE_DIRS\\}")
                "include_directories(${OCTAVE_INCLUDE_DIRS} ${OCTAVE_INCLUDE_DIRS}/octave")
               ;; change target directory
               (("\\$\\{OCTAVE_OCT_LOCAL_API_FILE_DIR\\}")
                (string-append (assoc-ref outputs "out")
                               "/share/octave/packages")))
             (substitute* '("src/interfaces/octave/swig_typemaps.i"
                            "src/interfaces/octave/sg_print_functions.cpp")
               ;; "octave/config.h" and "octave/oct-obj.h" deprecated in Octave.
               (("octave/config\\.h") "octave/octave-config.h")
               (("octave/oct-obj.h") "octave/ovl.h"))
             #t))
         ;; The build expects a bundled rxcpp; provide ours in that location.
         (add-after 'unpack 'move-rxcpp
           (lambda* (#:key inputs #:allow-other-keys)
             (let ((rxcpp-dir "shogun/third-party/rxcpp"))
               (mkdir-p rxcpp-dir)
               (install-file (assoc-ref inputs "rxcpp") rxcpp-dir)
               #t)))
         (add-before 'build 'set-HOME
           ;; $HOME needs to be set at some point during the build phase
           (lambda _ (setenv "HOME" "/tmp") #t)))
       #:configure-flags
       (list "-DCMAKE_BUILD_WITH_INSTALL_RPATH=TRUE"
             "-DUSE_SVMLIGHT=OFF"       ;disable proprietary SVMLIGHT
             "-DBUILD_META_EXAMPLES=OFF" ;requires unpackaged ctags
             ;;"-DINTERFACE_JAVA=ON"    ;requires unpackaged jblas
             ;;"-DINTERFACE_RUBY=ON"    ;requires unpackaged ruby-narray
             ;;"-DINTERFACE_PERL=ON"    ;"FindPerlLibs" does not exist
             ;;"-DINTERFACE_LUA=ON"     ;fails because lua doesn't build pkgconfig file
             "-DINTERFACE_OCTAVE=ON"
             "-DINTERFACE_PYTHON=ON"
             "-DINTERFACE_R=ON")))
    (inputs
     `(("python" ,python)
       ("numpy" ,python-numpy)
       ("r-minimal" ,r-minimal)
       ("octave" ,octave-cli)
       ("swig" ,swig)
       ("eigen" ,eigen)
       ("hdf5" ,hdf5)
       ("atlas" ,atlas)
       ("arpack" ,arpack-ng)
       ("lapack" ,lapack)
       ("glpk" ,glpk)
       ("libxml2" ,libxml2)
       ("lzo" ,lzo)
       ("zlib" ,zlib)))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("rxcpp" ,rxcpp)))
    ;; Non-portable SSE instructions are used so building fails on platforms
    ;; other than x86_64.
    (supported-systems '("x86_64-linux"))
    (home-page "https://shogun-toolbox.org/")
    (synopsis "Machine learning toolbox")
    (description
     "The Shogun Machine learning toolbox provides a wide range of unified and
efficient Machine Learning (ML) methods. The toolbox seamlessly
combines multiple data representations, algorithm classes, and general purpose
tools. This enables both rapid prototyping of data pipelines and extensibility
in terms of new algorithms.")
    (license license:gpl3+)))
581
(define-public python-onnx
  (package
    (name "python-onnx")
    (version "1.7.0")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "onnx" version))
       ;; ONNX will build googletest from a git checkout.  Patch CMake
       ;; to use googletest from Guix and enable tests by default.
       (patches (search-patches "python-onnx-use-system-googletest.patch"))
       (sha256
        (base32 "0j6rgfbhsw3a8id8pyg18y93k68lbjbj1kq6qia36h69f6pvlyjy"))))
    (build-system python-build-system)
    (native-inputs
     `(("cmake" ,cmake)
       ("googletest" ,googletest)
       ("pybind11" ,pybind11)
       ("python-coverage" ,python-coverage)
       ("python-nbval" ,python-nbval)
       ("python-pytest" ,python-pytest)
       ("python-pytest-runner" ,python-pytest-runner)))
    (inputs
     `(("protobuf" ,protobuf)))
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-protobuf" ,python-protobuf)
       ("python-six" ,python-six)
       ("python-tabulate" ,python-tabulate)
       ("python-typing-extensions"
        ,python-typing-extensions)))
    (home-page "https://onnx.ai/")
    (synopsis "Open Neural Network Exchange")
    (description
     "Open Neural Network Exchange (ONNX) provides an open source format for
AI models, both deep learning and traditional ML. It defines an extensible
computation graph model, as well as definitions of built-in operators and
standard data types.")
    (license license:expat)))
621
(define-public rxcpp
  (package
    (name "rxcpp")
    (version "4.1.0")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/ReactiveX/RxCpp")
             (commit (string-append "v" version))))
       (file-name (git-file-name name version))
       (sha256
        (base32 "1rdpa3jlc181jd08nk437aar085h28i45s6nzrv65apb3xyyz0ij"))))
    (build-system cmake-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         ;; Newer compilers emit warnings the project treats as errors;
         ;; drop -Werror everywhere.
         (add-after 'unpack 'remove-werror
           (lambda _
             (substitute* (find-files ".")
               (("-Werror") ""))
             #t))
         ;; There is no "test" target; run the suite through ctest.
         (replace 'check
           (lambda _
             (invoke "ctest"))))))
    (native-inputs
     `(("catch" ,catch-framework)))
    (home-page "http://reactivex.io/")
    (synopsis "Reactive Extensions for C++")
    (description
     "The Reactive Extensions for C++ (RxCpp) is a library of algorithms for
values-distributed-in-time. ReactiveX is a library for composing asynchronous
and event-based programs by using observable sequences.

It extends the observer pattern to support sequences of data and/or events and
adds operators that allow you to compose sequences together declaratively while
abstracting away concerns about things like low-level threading,
synchronization, thread-safety, concurrent data structures, and non-blocking
I/O.")
    (license license:asl2.0)))
662
(define-public r-adaptivesparsity
  (package
    (name "r-adaptivesparsity")
    (version "1.6")
    (source (origin
              (method url-fetch)
              (uri (cran-uri "AdaptiveSparsity" version))
              (sha256
               (base32
                "0imr5m8mll9j6n4icsv6z9rl5kbnwsp9wvzrg7n90nnmcxq2cz91"))))
    (properties
     `((upstream-name . "AdaptiveSparsity")))
    (build-system r-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'link-against-armadillo
           (lambda _
             ;; The C++ sources need Armadillo, but the shipped Makevars does
             ;; not link against it; append it to PKG_LIBS.
             (substitute* "src/Makevars"
               (("PKG_LIBS=" prefix)
                (string-append prefix "-larmadillo")))
             ;; substitute* has an unspecified return value; return #t
             ;; explicitly like every other phase in this file.
             #t)))))
    (propagated-inputs
     `(("r-mass" ,r-mass)
       ("r-matrix" ,r-matrix)
       ("r-rcpp" ,r-rcpp)
       ("r-rcpparmadillo" ,r-rcpparmadillo)))
    (inputs
     `(("armadillo" ,armadillo)))
    (home-page "https://cran.r-project.org/web/packages/AdaptiveSparsity")
    (synopsis "Adaptive sparsity models")
    (description
     "This package implements the Figueiredo machine learning algorithm for
adaptive sparsity and the Wong algorithm for adaptively sparse gaussian
geometric models.")
    (license license:lgpl3+)))
698
(define-public gemmlowp-for-tensorflow
  ;; The commit hash is taken from "tensorflow/workspace.bzl".
  (let ((commit "38ebac7b059e84692f53e5938f97a9943c120d98")
        (revision "2"))
    (package
      (name "gemmlowp")
      (version (git-version "0" revision commit))
      (source (origin
                (method url-fetch)
                (uri (string-append "https://mirror.bazel.build/"
                                    "github.com/google/gemmlowp/archive/"
                                    commit ".zip"))
                (file-name (string-append "gemmlowp-" version ".zip"))
                (sha256
                 (base32
                  "0n56s2g8hrssm4w8qj1v58gfm56a04n9v992ixkmvk6zjiralzxq"))))
      (build-system cmake-build-system)
      (arguments
       `(#:configure-flags
         (list ,@(match (%current-system)
                   ((or "x86_64-linux" "i686-linux")
                    '("-DCMAKE_CXX_FLAGS=-msse2"))
                   (_ '())))
         #:phases
         (modify-phases %standard-phases
           ;; This directory contains the CMakeLists.txt.
           (add-after 'unpack 'chdir
             (lambda _ (chdir "contrib") #t))
           ;; There is no install target; copy the shared library and the
           ;; public headers into place by hand.
           (replace 'install
             (lambda* (#:key outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (lib (string-append out "/lib/"))
                      (inc (string-append out "/include/")))
                 (install-file "../build/libeight_bit_int_gemm.so" lib)
                 (for-each (lambda (dir)
                             (let ((target (string-append inc "/" dir)))
                               (mkdir-p target)
                               (for-each (lambda (h)
                                           (install-file h target))
                                         (find-files (string-append "../" dir)
                                                     "\\.h$"))))
                           '("meta" "profiling" "public" "fixedpoint"
                             "eight_bit_int_gemm" "internal"))
                 #t))))))
      (native-inputs
       `(("unzip" ,unzip)))
      (home-page "https://github.com/google/gemmlowp")
      (synopsis "Small self-contained low-precision GEMM library")
      (description
       "This is a small self-contained low-precision @dfn{general matrix
multiplication} (GEMM) library. It is not a full linear algebra library.
Low-precision means that the input and output matrix entries are integers on
at most 8 bits. To avoid overflow, results are internally accumulated on more
than 8 bits, and at the end only some significant 8 bits are kept.")
      (license license:asl2.0))))
755
(define-public dlib
  (package
    (name "dlib")
    (version "19.20")
    (source (origin
              (method url-fetch)
              (uri (string-append
                    "http://dlib.net/files/dlib-" version ".tar.bz2"))
              (sha256
               (base32
                "139jyi19qz37wwmmy48gil9d1kkh2r3w3bwdzabha6ayxmba96nz"))
              (modules '((guix build utils)))
              (snippet
               '(begin
                  ;; Delete ~13MB of bundled dependencies.
                  (delete-file-recursively "dlib/external")
                  (delete-file-recursively "docs/dlib/external")
                  #t))))
    (build-system cmake-build-system)
    (arguments
     `(#:configure-flags '("-DBUILD_SHARED_LIBS=ON")
       #:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'disable-asserts
           (lambda _
             ;; config.h recommends explicitly enabling or disabling asserts
             ;; when building as a shared library.  By default neither is set.
             (substitute* "dlib/config.h"
               (("^//#define DLIB_DISABLE_ASSERTS") "#define DLIB_DISABLE_ASSERTS"))
             #t))
         (add-after 'disable-asserts 'disable-failing-tests
           (lambda _
             ;; One test times out on MIPS, so we need to disable it.
             ;; Others are flaky on some platforms.
             (let* ((system ,(or (%current-target-system)
                                 (%current-system)))
                    (disabled-tests (cond
                                     ((string-prefix? "mips64" system)
                                      '("object_detector" ;timeout
                                        "data_io"))
                                     ((string-prefix? "armhf" system)
                                      '("learning_to_track"))
                                     ((string-prefix? "i686" system)
                                      '("optimization"))
                                     (else '()))))
               ;; Drop each disabled test from the test makefile.
               (for-each
                (lambda (test)
                  (substitute* "dlib/test/makefile"
                    (((string-append "SRC \\+= " test "\\.cpp")) "")))
                disabled-tests)
               #t)))
         (replace 'check
           (lambda _
             ;; No test target, so we build and run the unit tests here.
             (let ((test-dir (string-append "../dlib-" ,version "/dlib/test")))
               (with-directory-excursion test-dir
                 (invoke "make" "-j" (number->string (parallel-job-count)))
                 (invoke "./dtest" "--runall"))
               #t))))))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ;; For tests.
       ("libnsl" ,libnsl)))
    (inputs
     `(("giflib" ,giflib)
       ("lapack" ,lapack)
       ("libjpeg" ,libjpeg-turbo)
       ("libpng" ,libpng)
       ("libx11" ,libx11)
       ("openblas" ,openblas)
       ("zlib" ,zlib)))
    (synopsis
     "Toolkit for making machine learning and data analysis applications in C++")
    (description
     "Dlib is a modern C++ toolkit containing machine learning algorithms and
tools. It is used in both industry and academia in a wide range of domains
including robotics, embedded devices, mobile phones, and large high performance
computing environments.")
    (home-page "http://dlib.net")
    (license license:boost1.0)))
836
(define-public python-scikit-learn
  (package
    (name "python-scikit-learn")
    (version "0.22.1")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/scikit-learn/scikit-learn")
             (commit version)))
       (file-name (git-file-name name version))
       (sha256
        (base32
         "1xqxv210gsmjw094vc5ghq2y9lmm74qkk22pq6flcjzj51b86jxf"))))
    (build-system python-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         ;; Build the Cython extensions in place so the test suite can
         ;; import them.
         (add-after 'build 'build-ext
           (lambda _ (invoke "python" "setup.py" "build_ext" "--inplace") #t))
         (replace 'check
           (lambda _
             ;; Restrict OpenBLAS threads to prevent segfaults while testing!
             (setenv "OPENBLAS_NUM_THREADS" "1")

             ;; Some tests require write access to $HOME.
             (setenv "HOME" "/tmp")

             (invoke "pytest" "sklearn" "-m" "not network")))
         (add-before 'reset-gzip-timestamps 'make-files-writable
           (lambda* (#:key outputs #:allow-other-keys)
             ;; Make sure .gz files are writable so that the
             ;; 'reset-gzip-timestamps' phase can do its work.
             (let ((out (assoc-ref outputs "out")))
               (for-each make-file-writable
                         (find-files out "\\.gz$"))
               #t))))))
    (inputs
     `(("openblas" ,openblas)))
    (native-inputs
     `(("python-pytest" ,python-pytest)
       ("python-pandas" ,python-pandas) ;for tests
       ("python-cython" ,python-cython)))
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-scipy" ,python-scipy)
       ("python-joblib" ,python-joblib)))
    (home-page "https://scikit-learn.org/")
    (synopsis "Machine Learning in Python")
    (description
     "Scikit-learn provides simple and efficient tools for data mining and
data analysis.")
    (properties `((python2-variant . ,(delay python2-scikit-learn))))
    (license license:bsd-3)))
891
;; scikit-learn 0.22 and later only supports Python 3, so we stick with
;; an older version here.
(define-public python2-scikit-learn
  (let ((base (package-with-python2 (strip-python2-variant python-scikit-learn))))
    (package
      (inherit base)
      ;; Last release line with Python 2 support.
      (version "0.20.4")
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/scikit-learn/scikit-learn")
                      (commit version)))
                (file-name (git-file-name "python-scikit-learn" version))
                (sha256
                 (base32
                  "08zbzi8yx5wdlxfx9jap61vg1malc9ajf576w7a0liv6jvvrxlpj")))))))
908
;; Relief-based feature selection algorithms; released on PyPI as "skrebate".
(define-public python-scikit-rebate
  (package
    (name "python-scikit-rebate")
    (version "0.6")
    (source (origin
              (method url-fetch)
              (uri (pypi-uri "skrebate" version))
              (sha256
               (base32
                "1h7qs9gjxpzqabzhb8rmpv3jpmi5iq41kqdibg48299h94iikiw7"))))
    (build-system python-build-system)
    ;; Pandas is only needed to run the tests.
    (native-inputs
     `(("python-pandas" ,python-pandas)))
    ;; Runtime dependencies, propagated because Python packages are not
    ;; linked but imported by user code.
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-scipy" ,python-scipy)
       ("python-scikit-learn" ,python-scikit-learn)
       ("python-joblib" ,python-joblib)))
    (home-page "https://epistasislab.github.io/scikit-rebate/")
    (synopsis "Relief-based feature selection algorithms for Python")
    (description "Scikit-rebate is a scikit-learn-compatible Python
implementation of ReBATE, a suite of Relief-based feature selection algorithms
for Machine Learning. These algorithms excel at identifying features that are
predictive of the outcome in supervised learning problems, and are especially
good at identifying feature interactions that are normally overlooked by
standard feature selection algorithms.")
    (license license:expat)))
937
;; Autograd has no tagged releases upstream, so build from a pinned commit
;; and synthesize a version string with `git-version'.
(define-public python-autograd
  (let* ((commit "442205dfefe407beffb33550846434baa90c4de7")
         (revision "0")
         (version (git-version "0.0.0" revision commit)))
    (package
      (name "python-autograd")
      ;; `home-page' is defined before `source' so the git-reference below
      ;; can reuse it as the clone URL.
      (home-page "https://github.com/HIPS/autograd")
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url home-page)
                      (commit commit)))
                (sha256
                 (base32
                  "189sv2xb0mwnjawa9z7mrgdglc1miaq93pnck26r28fi1jdwg0z4"))
                (file-name (git-file-name name version))))
      (version version)
      (build-system python-build-system)
      ;; Test-only dependencies.
      (native-inputs
       `(("python-nose" ,python-nose)
         ("python-pytest" ,python-pytest)))
      (propagated-inputs
       `(("python-future" ,python-future)
         ("python-numpy" ,python-numpy)))
      (arguments
       ;; Run the test suite with pytest instead of "setup.py test".
       `(#:phases (modify-phases %standard-phases
                    (replace 'check
                      (lambda _
                        (invoke "py.test" "-v"))))))
      (synopsis "Efficiently computes derivatives of NumPy code")
      (description "Autograd can automatically differentiate native Python and
NumPy code. It can handle a large subset of Python's features, including loops,
ifs, recursion and closures, and it can even take derivatives of derivatives
of derivatives. It supports reverse-mode differentiation
(a.k.a. backpropagation), which means it can efficiently take gradients of
scalar-valued functions with respect to array-valued arguments, as well as
forward-mode differentiation, and the two can be composed arbitrarily. The
main intended application of Autograd is gradient-based optimization.")
      (license license:expat))))
977
;; Python 2 counterpart of python-autograd, generated automatically.
(define-public python2-autograd (package-with-python2 python-autograd))
980
(define-public lightgbm
  (package
    (name "lightgbm")
    (version "2.0.12")
    (source (origin
              (method git-fetch)
              (uri (git-reference
                    (url "https://github.com/Microsoft/LightGBM")
                    ;; Upstream tags releases as "vX.Y.Z".
                    (commit (string-append "v" version))))
              (sha256
               (base32
                "0jlvyn7k81dzrh9ij3zw576wbgiwmmr26rzpdxjn1dbpc3njpvzi"))
              (file-name (git-file-name name version))))
    ;; Test-only dependencies.
    (native-inputs
     `(("python-pytest" ,python-pytest)
       ("python-nose" ,python-nose)))
    (inputs
     `(("openmpi" ,openmpi)))
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-scipy" ,python-scipy)))
    (arguments
     `(#:configure-flags
       '("-DUSE_MPI=ON")
       #:phases
       (modify-phases %standard-phases
         (replace 'check
           (lambda _
             ;; CMake builds out of tree; the tests live in the original
             ;; source directory, a sibling of the build directory.
             (with-directory-excursion "../source"
               (invoke "pytest" "tests/c_api_test/test_.py")))))))
    (build-system cmake-build-system)
    (home-page "https://github.com/Microsoft/LightGBM")
    (synopsis "Gradient boosting framework based on decision tree algorithms")
    (description "LightGBM is a gradient boosting framework that uses tree
based learning algorithms. It is designed to be distributed and efficient with
the following advantages:

@itemize
@item Faster training speed and higher efficiency
@item Lower memory usage
@item Better accuracy
@item Parallel and GPU learning supported (not enabled in this package)
@item Capable of handling large-scale data
@end itemize\n")
    (license license:expat)))
1026
(define-public vowpal-wabbit
  ;; Language bindings not included.
  (package
    (name "vowpal-wabbit")
    (version "8.5.0")
    (source (origin
              (method git-fetch)
              (uri (git-reference
                    (url "https://github.com/JohnLangford/vowpal_wabbit")
                    (commit version)))
              (sha256
               (base32
                "04bwzk6ifgnz3fmzid8b7avxf9n5pnx9xcjm61nkjng1vv0bpj8x"))
              (file-name (git-file-name name version))))
    (inputs
     `(("boost" ,boost)
       ("zlib" ,zlib)))
    (arguments
     `(#:configure-flags
       ;; The configure script does not find Boost on its own.
       (list (string-append "--with-boost="
                            (assoc-ref %build-inputs "boost")))
       #:phases
       (modify-phases %standard-phases
         ;; NOTE(review): presumably the checkout contains read-only files
         ;; that the build wants to modify in place -- confirm.
         (add-after 'unpack 'make-files-writable
           (lambda _
             (for-each make-file-writable (find-files "." ".*")) #t))
         ;; "make install" only installs a subset of the headers; copy the
         ;; rest so dependent packages can compile against the library.
         (add-after 'install 'install-more-headers
           (lambda* (#:key outputs #:allow-other-keys)
             (for-each
              (lambda (file)
                (install-file file (string-append
                                    (assoc-ref outputs "out")
                                    "/include/vowpalwabbit")))
              (find-files "vowpalwabbit" "\\.h$"))
             #t)))))
    (build-system gnu-build-system)
    (home-page "https://github.com/JohnLangford/vowpal_wabbit")
    (synopsis "Fast machine learning library for online learning")
    (description "Vowpal Wabbit is a machine learning system with techniques
such as online, hashing, allreduce, reductions, learning2search, active, and
interactive learning.")
    (license license:bsd-3)))
1069
;; FaST-LMM for genome-wide association studies; upstream only supports
;; Python 2.7, hence there is no python-fastlmm variant.
(define-public python2-fastlmm
  (package
    (name "python2-fastlmm")
    (version "0.2.21")
    (source
     (origin
       (method url-fetch)
       ;; The PyPI release is distributed as a zip archive.
       (uri (pypi-uri "fastlmm" version ".zip"))
       (sha256
        (base32
         "1q8c34rpmwkfy3r4d5172pzdkpfryj561897z9r3x22gq7813x1m"))))
    (build-system python-build-system)
    (arguments
     `(#:tests? #f ; some test files are missing
       #:python ,python-2)) ; only Python 2.7 is supported
    (propagated-inputs
     `(("python2-numpy" ,python2-numpy)
       ("python2-scipy" ,python2-scipy)
       ("python2-matplotlib" ,python2-matplotlib)
       ("python2-pandas" ,python2-pandas)
       ("python2-scikit-learn" ,python2-scikit-learn)
       ("python2-pysnptools" ,python2-pysnptools)))
    ;; unzip is needed to extract the source archive; the rest are
    ;; build/test-time dependencies only.
    (native-inputs
     `(("unzip" ,unzip)
       ("python2-cython" ,python2-cython)
       ("python2-mock" ,python2-mock)
       ("python2-nose" ,python2-nose)))
    ;; NOTE(review): this plain-HTTP Microsoft Research URL looks stale --
    ;; verify and update in a follow-up change.
    (home-page "http://research.microsoft.com/en-us/um/redmond/projects/mscompbio/fastlmm/")
    (synopsis "Perform genome-wide association studies on large data sets")
    (description
     "FaST-LMM, which stands for Factored Spectrally Transformed Linear Mixed
Models, is a program for performing both single-SNP and SNP-set genome-wide
association studies (GWAS) on extremely large data sets.")
    (license license:asl2.0)))
1104
1105 ;; There have been no proper releases yet.
(define-public kaldi
  (let ((commit "d4791c0f3fc1a09c042dac365e120899ee2ad21e")
        (revision "2"))
    (package
      (name "kaldi")
      (version (git-version "0" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/kaldi-asr/kaldi")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "07k80my6f19mhrkwbzhjsnpf9871wmrwkl0ym468i830w67qyjrz"))))
      (build-system gnu-build-system)
      (arguments
       `(#:test-target "test"
         #:phases
         (modify-phases %standard-phases
           ;; All build machinery lives in the "src" subdirectory.
           (add-after 'unpack 'chdir
             (lambda _ (chdir "src") #t))
           (replace 'configure
             (lambda* (#:key build system inputs outputs #:allow-other-keys)
               ;; Drop x86-only SSE compiler flags on other architectures.
               (when (not (or (string-prefix? "x86_64" system)
                              (string-prefix? "i686" system)))
                 (substitute* "makefiles/linux_openblas.mk"
                   (("-msse -msse2") "")))
               (substitute* "makefiles/default_rules.mk"
                 (("/bin/bash") (which "bash")))
               ;; Skip the check for a bundled portaudio; the Guix portaudio
               ;; input is substituted into the Makefiles below instead.
               (substitute* "Makefile"
                 (("ext_depend: check_portaudio")
                  "ext_depend:"))
               (substitute* '("online/Makefile"
                              "onlinebin/Makefile"
                              "gst-plugin/Makefile")
                 (("../../tools/portaudio/install")
                  (assoc-ref inputs "portaudio")))

               ;; This `configure' script doesn't support variables passed as
               ;; arguments, nor does it support "prefix".
               (let ((out (assoc-ref outputs "out"))
                     (openblas (assoc-ref inputs "openblas"))
                     (openfst (assoc-ref inputs "openfst")))
                 (substitute* "configure"
                   (("check_for_slow_expf;") "")
                   ;; This affects the RPATH and also serves as the installation
                   ;; directory.
                   (("KALDILIBDIR=`pwd`/lib")
                    (string-append "KALDILIBDIR=" out "/lib")))
                 (mkdir-p out) ; must exist
                 (setenv "CONFIG_SHELL" (which "bash"))
                 ;; The unquote happens at package-definition time, so this
                 ;; bakes the openfst input's version string into the build.
                 (setenv "OPENFST_VER" ,(package-version openfst))
                 (invoke "./configure"
                         "--use-cuda=no"
                         "--shared"
                         (string-append "--openblas-root=" openblas)
                         (string-append "--fst-root=" openfst)))))
           ;; The default "make" target does not cover the online decoders or
           ;; the GStreamer plugin; build them explicitly.
           (add-after 'build 'build-ext-and-gstreamer-plugin
             (lambda _
               (invoke "make" "-C" "online" "depend")
               (invoke "make" "-C" "online")
               (invoke "make" "-C" "onlinebin" "depend")
               (invoke "make" "-C" "onlinebin")
               (invoke "make" "-C" "gst-plugin" "depend")
               (invoke "make" "-C" "gst-plugin")
               #t))
           ;; TODO: also install the executables.
           (replace 'install
             (lambda* (#:key outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (inc (string-append out "/include"))
                      (lib (string-append out "/lib")))
                 (mkdir-p lib)
                 ;; The build phase installed symlinks to the actual
                 ;; libraries.  Install the actual targets.
                 (for-each (lambda (file)
                             (let ((target (readlink file)))
                               (delete-file file)
                               (install-file target lib)))
                           (find-files lib "\\.so"))
                 ;; Install headers, preserving their directory layout under
                 ;; the "include" prefix.
                 (for-each (lambda (file)
                             (let ((target-dir (string-append inc "/" (dirname file))))
                               (install-file file target-dir)))
                           (find-files "." "\\.h"))
                 (install-file "gst-plugin/libgstonlinegmmdecodefaster.so"
                               (string-append lib "/gstreamer-1.0"))
                 #t))))))
      (inputs
       `(("alsa-lib" ,alsa-lib)
         ("gfortran" ,gfortran "lib")
         ("glib" ,glib)
         ("gstreamer" ,gstreamer)
         ("jack" ,jack-1)
         ("openblas" ,openblas)
         ("openfst" ,openfst)
         ("portaudio" ,portaudio)
         ("python" ,python)))
      (native-inputs
       `(("glib" ,glib "bin") ; glib-genmarshal
         ("grep" ,grep)
         ("sed" ,sed)
         ("pkg-config" ,pkg-config)
         ("which" ,which)))
      (home-page "https://kaldi-asr.org/")
      (synopsis "Speech recognition toolkit")
      (description "Kaldi is an extensible toolkit for speech recognition
written in C++.")
      (license license:asl2.0))))
1216
;; No releases upstream; build from a pinned commit.
(define-public gst-kaldi-nnet2-online
  (let ((commit "cb227ef43b66a9835c14eb0ad39e08ee03c210ad")
        (revision "2"))
    (package
      (name "gst-kaldi-nnet2-online")
      (version (git-version "0" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/alumae/gst-kaldi-nnet2-online")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "1i6ffwiavxx07ri0lxix6s8q0r31x7i4xxvhys5jxkixf5q34w8g"))))
      (build-system gnu-build-system)
      (arguments
       `(#:tests? #f ; there are none
         ;; The Makefile expects both a Kaldi source tree (headers) and the
         ;; installed Kaldi libraries; point it at the corresponding inputs.
         #:make-flags
         (list (string-append "SHELL="
                              (assoc-ref %build-inputs "bash") "/bin/bash")
               (string-append "KALDI_ROOT="
                              (assoc-ref %build-inputs "kaldi-src"))
               (string-append "KALDILIBDIR="
                              (assoc-ref %build-inputs "kaldi") "/lib")
               "KALDI_FLAVOR=dynamic")
         #:phases
         (modify-phases %standard-phases
           ;; The sources live in the "src" subdirectory.
           (add-after 'unpack 'chdir
             (lambda _ (chdir "src") #t))
           (replace 'configure
             (lambda* (#:key inputs #:allow-other-keys)
               ;; There is no configure script; set up include paths for
               ;; GLib and GStreamer by hand.
               (let ((glib (assoc-ref inputs "glib")))
                 (setenv "CXXFLAGS" "-fPIC")
                 (setenv "CPLUS_INCLUDE_PATH"
                         (string-append glib "/include/glib-2.0:"
                                        glib "/lib/glib-2.0/include:"
                                        (assoc-ref inputs "gstreamer")
                                        "/include/gstreamer-1.0")))
               ;; Neutralize the Makefile's attempt to include kaldi.mk from
               ;; the Kaldi tree and its hard failure when it is missing.
               (substitute* "Makefile"
                 (("include \\$\\(KALDI_ROOT\\)/src/kaldi.mk") "")
                 (("\\$\\(error Cannot find") "#"))
               #t))
           ;; Generate dependency files before the actual build.
           (add-before 'build 'build-depend
             (lambda* (#:key make-flags #:allow-other-keys)
               (apply invoke "make" "depend" make-flags)))
           ;; There is no "install" target; copy the plugin manually.
           (replace 'install
             (lambda* (#:key outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (lib (string-append out "/lib/gstreamer-1.0")))
                 (install-file "libgstkaldinnet2onlinedecoder.so" lib)
                 #t))))))
      (inputs
       `(("glib" ,glib)
         ("gstreamer" ,gstreamer)
         ("jansson" ,jansson)
         ("openfst" ,openfst)
         ("kaldi" ,kaldi)))
      (native-inputs
       `(("bash" ,bash)
         ("glib:bin" ,glib "bin") ; glib-genmarshal
         ;; The unpacked Kaldi source tree, needed for headers (KALDI_ROOT).
         ("kaldi-src" ,(package-source kaldi))
         ("pkg-config" ,pkg-config)))
      (home-page "https://kaldi-asr.org/")
      (synopsis "Gstreamer plugin for decoding speech")
      (description "This package provides a GStreamer plugin that wraps
Kaldi's @code{SingleUtteranceNnet2Decoder}.  It requires iVector-adapted DNN
acoustic models.  The iVectors are adapted to the current audio stream
automatically.")
      (license license:asl2.0))))
1287
(define-public kaldi-gstreamer-server
  ;; This is the tip of the py3 branch
  (let ((commit "f68cab490be7eb0da2af1475fbc16655f50a60cb")
        (revision "2"))
    (package
      (name "kaldi-gstreamer-server")
      (version (git-version "0" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/alumae/kaldi-gstreamer-server")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "17lh1368vkg8ngrcbn2phvigzlmalrqg6djx2gg61qq1a0nj87dm"))))
      (build-system gnu-build-system)
      (arguments
       `(#:tests? #f ; there are no tests that can be run automatically
         #:modules ((guix build utils)
                    (guix build gnu-build-system)
                    (srfi srfi-26))
         #:phases
         (modify-phases %standard-phases
           ;; Pure Python project: nothing to configure; "build" just
           ;; byte-compiles the sources.
           (delete 'configure)
           (replace 'build
             (lambda* (#:key outputs #:allow-other-keys)
               ;; Disable hash randomization to ensure the generated .pycs
               ;; are reproducible.
               (setenv "PYTHONHASHSEED" "0")
               (with-directory-excursion "kaldigstserver"
                 ;; See https://github.com/alumae/kaldi-gstreamer-server/issues/232
                 (substitute* "master_server.py"
                   (("\\.replace\\('\\\\.*") ")"))

                 ;; This is a Python 2 file
                 (delete-file "decoder_test.py")
                 (delete-file "test-buffer.py")

                 ;; Byte-compile each remaining Python source.
                 (for-each (lambda (file)
                             (apply invoke
                                    `("python"
                                      "-m" "compileall"
                                      "-f" ; force rebuild
                                      ,file)))
                           (find-files "." "\\.py$")))
               #t))
           (replace 'install
             (lambda* (#:key inputs outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (bin (string-append out "/bin"))
                      (share (string-append out "/share/kaldi-gstreamer-server/")))
                 ;; Install Python files
                 (with-directory-excursion "kaldigstserver"
                   (for-each (cut install-file <> share)
                             (find-files "." ".*")))

                 ;; Install sample configuration files
                 (for-each (cut install-file <> share)
                           (find-files "." "\\.yaml"))

                 ;; Install executables: generate shell wrappers that set up
                 ;; PYTHONPATH and GST_PLUGIN_PATH before launching the
                 ;; corresponding Python script from "share".
                 (mkdir-p bin)
                 (let* ((server (string-append bin "/kaldi-gst-server"))
                        (client (string-append bin "/kaldi-gst-client"))
                        (worker (string-append bin "/kaldi-gst-worker"))
                        (PYTHONPATH (getenv "PYTHONPATH"))
                        ;; "${GST_PLUGIN_PATH}" is expanded by the shell at
                        ;; wrapper run time, not here.
                        (GST_PLUGIN_PATH (string-append
                                          (assoc-ref inputs "gst-kaldi-nnet2-online")
                                          "/lib/gstreamer-1.0:${GST_PLUGIN_PATH}"))
                        (wrap (lambda (wrapper what)
                                (with-output-to-file wrapper
                                  (lambda _
                                    (format #t
                                            "#!~a
export PYTHONPATH=~a
export GST_PLUGIN_PATH=~a
exec ~a ~a/~a \"$@\"~%"
                                            (which "bash") PYTHONPATH GST_PLUGIN_PATH
                                            (which "python") share what)))
                                (chmod wrapper #o555))))
                   (for-each wrap
                             (list server client worker)
                             (list "master_server.py"
                                   "client.py"
                                   "worker.py")))
                 #t))))))
      (inputs
       `(("gst-kaldi-nnet2-online" ,gst-kaldi-nnet2-online)
         ("python" ,python-wrapper)
         ("python-pygobject" ,python-pygobject)
         ("python-pyyaml" ,python-pyyaml)
         ("python-tornado" ,python-tornado-6)))
      (home-page "https://github.com/alumae/kaldi-gstreamer-server")
      (synopsis "Real-time full-duplex speech recognition server")
      (description "This is a real-time full-duplex speech recognition server,
based on the Kaldi toolkit and the GStreamer framework and implemented in
Python.")
      (license license:bsd-2))))
1387
1388 ;; Note that Tensorflow includes a "third_party" directory, which seems to not
1389 ;; only contain modified subsets of upstream library source code, but also
1390 ;; adapter headers provided by Google (such as the fft.h header, which is not
1391 ;; part of the upstream project code). The Tensorflow code includes headers
1392 ;; from the "third_party" directory. It does not look like we can replace
1393 ;; these headers with unmodified upstream files, so we keep them.
1394 (define-public tensorflow
1395 (package
1396 (name "tensorflow")
1397 (version "1.9.0")
1398 (source
1399 (origin
1400 (method git-fetch)
1401 (uri (git-reference
1402 (url "https://github.com/tensorflow/tensorflow")
1403 (commit (string-append "v" version))))
1404 (file-name (string-append "tensorflow-" version "-checkout"))
1405 (sha256
1406 (base32
1407 "0a9kwha395g3wgxfwln5j8vn9nkspmd75xldrlqdq540w996g8xa"))))
1408 (build-system cmake-build-system)
1409 (arguments
1410 `(#:tests? #f ; no "check" target
1411 #:build-type "Release"
1412 #:configure-flags
1413 (let ((protobuf (assoc-ref %build-inputs "protobuf"))
1414 (protobuf:native (assoc-ref %build-inputs "protobuf:native"))
1415 (jsoncpp (assoc-ref %build-inputs "jsoncpp"))
1416 (snappy (assoc-ref %build-inputs "snappy"))
1417 (sqlite (assoc-ref %build-inputs "sqlite")))
1418 (list
1419 ;; Use protobuf from Guix
1420 (string-append "-Dprotobuf_STATIC_LIBRARIES="
1421 protobuf "/lib/libprotobuf.so")
1422 (string-append "-DPROTOBUF_PROTOC_EXECUTABLE="
1423 protobuf:native "/bin/protoc")
1424
1425 ;; Use snappy from Guix
1426 (string-append "-Dsnappy_STATIC_LIBRARIES="
1427 snappy "/lib/libsnappy.so")
1428 ;; Yes, this is not actually the include directory but a prefix...
1429 (string-append "-Dsnappy_INCLUDE_DIR=" snappy)
1430
1431 ;; Use jsoncpp from Guix
1432 (string-append "-Djsoncpp_STATIC_LIBRARIES="
1433 jsoncpp "/lib/libjsoncpp.so")
1434 ;; Yes, this is not actually the include directory but a prefix...
1435 (string-append "-Djsoncpp_INCLUDE_DIR=" jsoncpp)
1436
1437 ;; Use sqlite from Guix
1438 (string-append "-Dsqlite_STATIC_LIBRARIES="
1439 sqlite "/lib/libsqlite.a")
1440
1441 ;; Use system libraries wherever possible. Currently, this
1442 ;; only affects zlib.
1443 "-Dsystemlib_ALL=ON"
1444 "-Dtensorflow_ENABLE_POSITION_INDEPENDENT_CODE=ON"
1445 "-Dtensorflow_BUILD_SHARED_LIB=ON"
1446 "-Dtensorflow_OPTIMIZE_FOR_NATIVE_ARCH=OFF"
1447 "-Dtensorflow_ENABLE_SSL_SUPPORT=OFF"
1448 "-Dtensorflow_BUILD_CONTRIB_KERNELS=OFF"))
1449 #:make-flags
1450 (list "CC=gcc")
1451 #:modules ((ice-9 ftw)
1452 (guix build utils)
1453 (guix build cmake-build-system)
1454 ((guix build python-build-system)
1455 #:select (python-version)))
1456 #:imported-modules (,@%cmake-build-system-modules
1457 (guix build python-build-system))
1458 #:phases
1459 (modify-phases %standard-phases
1460 (add-after 'unpack 'set-source-file-times-to-1980
1461 ;; At the end of the tf_python_build_pip_package target, a ZIP
1462 ;; archive should be generated via bdist_wheel, but it fails with
1463 ;; "ZIP does not support timestamps before 1980". Luckily,
1464 ;; SOURCE_DATE_EPOCH is respected, which we set to some time in
1465 ;; 1980.
1466 (lambda _ (setenv "SOURCE_DATE_EPOCH" "315532800") #t))
1467 ;; See https://github.com/tensorflow/tensorflow/issues/20517#issuecomment-406373913
1468 (add-after 'unpack 'python3.7-compatibility
1469 (lambda _
1470 (substitute* '("tensorflow/python/eager/pywrap_tfe_src.cc"
1471 "tensorflow/python/lib/core/ndarray_tensor.cc"
1472 "tensorflow/python/lib/core/py_func.cc")
1473 (("PyUnicode_AsUTF8") "(char *)PyUnicode_AsUTF8"))
1474 (substitute* "tensorflow/c/eager/c_api.h"
1475 (("unsigned char async")
1476 "unsigned char is_async"))
1477
1478 ;; Remove dependency on tensorboard, a complicated but probably
1479 ;; optional package.
1480 (substitute* "tensorflow/tools/pip_package/setup.py"
1481 ((".*'tensorboard >.*") ""))
1482
1483 ;; Fix the build with python-3.8, taken from rejected upstream patch:
1484 ;; https://github.com/tensorflow/tensorflow/issues/34197
1485 (substitute* (find-files "tensorflow/python" ".*\\.cc$")
1486 (("(nullptr,)(\\ +/. tp_print)" _ _ tp_print)
1487 (string-append "NULL, " tp_print)))
1488 #t))
1489 (add-after 'python3.7-compatibility 'chdir
1490 (lambda _ (chdir "tensorflow/contrib/cmake") #t))
1491 (add-after 'chdir 'disable-downloads
1492 (lambda* (#:key inputs #:allow-other-keys)
1493 (substitute* (find-files "external" "\\.cmake$")
1494 (("GIT_REPOSITORY.*") "")
1495 (("GIT_TAG.*") "")
1496 (("PREFIX ")
1497 "DOWNLOAD_COMMAND \"\"\nPREFIX "))
1498
1499 ;; Use packages from Guix
1500 (let ((grpc (assoc-ref inputs "grpc")))
1501 (substitute* "CMakeLists.txt"
1502 ;; Sqlite
1503 (("include\\(sqlite\\)") "")
1504 (("\\$\\{sqlite_STATIC_LIBRARIES\\}")
1505 (string-append (assoc-ref inputs "sqlite")
1506 "/lib/libsqlite3.so"))
1507 (("sqlite_copy_headers_to_destination") "")
1508
1509 ;; PNG
1510 (("include\\(png\\)") "")
1511 (("\\$\\{png_STATIC_LIBRARIES\\}")
1512 (string-append (assoc-ref inputs "libpng")
1513 "/lib/libpng16.so"))
1514 (("png_copy_headers_to_destination") "")
1515
1516 ;; JPEG
1517 (("include\\(jpeg\\)") "")
1518 (("\\$\\{jpeg_STATIC_LIBRARIES\\}")
1519 (string-append (assoc-ref inputs "libjpeg")
1520 "/lib/libjpeg.so"))
1521 (("jpeg_copy_headers_to_destination") "")
1522
1523 ;; GIF
1524 (("include\\(gif\\)") "")
1525 (("\\$\\{gif_STATIC_LIBRARIES\\}")
1526 (string-append (assoc-ref inputs "giflib")
1527 "/lib/libgif.so"))
1528 (("gif_copy_headers_to_destination") "")
1529
1530 ;; lmdb
1531 (("include\\(lmdb\\)") "")
1532 (("\\$\\{lmdb_STATIC_LIBRARIES\\}")
1533 (string-append (assoc-ref inputs "lmdb")
1534 "/lib/liblmdb.so"))
1535 (("lmdb_copy_headers_to_destination") "")
1536
1537 ;; Protobuf
1538 (("include\\(protobuf\\)") "")
1539 (("protobuf_copy_headers_to_destination") "")
1540 (("^ +protobuf") "")
1541
1542 ;; gRPC
1543 (("include\\(grpc\\)")
1544 "find_package(grpc REQUIRED NAMES gRPC)")
1545 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES grpc\\)") "")
1546
1547 ;; Eigen
1548 (("include\\(eigen\\)")
1549 (string-append "find_package(eigen REQUIRED NAMES Eigen3)
1550 set(eigen_INCLUDE_DIRS ${CMAKE_CURRENT_BINARY_DIR}/external/eigen_archive "
1551 (assoc-ref inputs "eigen") "/include/eigen3)"))
1552 (("^ +eigen") "")
1553
1554 ;; snappy
1555 (("include\\(snappy\\)")
1556 "add_definitions(-DTF_USE_SNAPPY)")
1557 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES snappy\\)") "")
1558
1559 ;; jsoncpp
1560 (("include\\(jsoncpp\\)") "")
1561 (("^ +jsoncpp") ""))
1562
1563 (substitute* "tf_core_framework.cmake"
1564 ((" grpc") "")
1565 (("\\$\\{GRPC_BUILD\\}/grpc_cpp_plugin")
1566 (which "grpc_cpp_plugin"))
1567 ;; Link with gRPC libraries
1568 (("add_library\\(tf_protos_cc.*" m)
1569 (string-append m
1570 (format #f "\ntarget_link_libraries(tf_protos_cc PRIVATE \
1571 ~a/lib/libgrpc++_unsecure.a \
1572 ~a/lib/libgrpc_unsecure.a \
1573 ~a/lib/libaddress_sorting.a \
1574 ~a/lib/libgpr.a \
1575 ~a//lib/libcares.so
1576 )\n"
1577 grpc grpc grpc grpc
1578 (assoc-ref inputs "c-ares"))))))
1579 (substitute* "tf_tools.cmake"
1580 (("add_dependencies\\(\\$\\{proto_text.*") ""))
1581 ;; Remove dependency on bundled grpc
1582 (substitute* "tf_core_distributed_runtime.cmake"
1583 (("tf_core_cpu grpc") "tf_core_cpu"))
1584
1585 ;; This directory is a dependency of many targets.
1586 (mkdir-p "protobuf")
1587 #t))
1588 (add-after 'configure 'unpack-third-party-sources
1589 (lambda* (#:key inputs #:allow-other-keys)
1590 ;; This is needed to configure bundled packages properly.
1591 (setenv "CONFIG_SHELL" (which "bash"))
1592 (for-each
1593 (lambda (name)
1594 (let* ((what (assoc-ref inputs (string-append name "-src")))
1595 (name* (string-map (lambda (c)
1596 (if (char=? c #\-)
1597 #\_ c)) name))
1598 (where (string-append "../build/" name* "/src/" name*)))
1599 (cond
1600 ((string-suffix? ".zip" what)
1601 (mkdir-p where)
1602 (with-directory-excursion where
1603 (invoke "unzip" what)))
1604 ((string-suffix? ".tar.gz" what)
1605 (mkdir-p where)
1606 (invoke "tar" "xf" what
1607 "-C" where "--strip-components=1"))
1608 (else
1609 (let ((parent (dirname where)))
1610 (mkdir-p parent)
1611 (with-directory-excursion parent
1612 (when (file-exists? name*)
1613 (delete-file-recursively name*))
1614 (copy-recursively what name*)
1615 (map make-file-writable
1616 (find-files name* ".*"))))))))
1617 (list "boringssl"
1618 "cub"
1619 "double-conversion"
1620 "farmhash"
1621 "fft2d"
1622 "highwayhash"
1623 "nsync"
1624 "re2"))
1625
1626 (rename-file "../build/cub/src/cub/cub-1.8.0/"
1627 "../build/cub/src/cub/cub/")
1628 #t))
1629 (add-after 'unpack 'fix-python-build
1630 (lambda* (#:key inputs outputs #:allow-other-keys)
1631 (mkdir-p "protobuf-src")
1632 (invoke "tar" "xf" (assoc-ref inputs "protobuf:src")
1633 "-C" "protobuf-src" "--strip-components=1")
1634 (mkdir-p "eigen-src")
1635 (invoke "tar" "xf" (assoc-ref inputs "eigen:src")
1636 "-C" "eigen-src" "--strip-components=1")
1637
1638 (substitute* "tensorflow/contrib/cmake/tf_python.cmake"
1639 ;; Ensure that all Python dependencies can be found at build time.
1640 (("PYTHONPATH=\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/tf_python" m)
1641 (string-append m ":" (getenv "PYTHONPATH")))
1642 ;; Take protobuf source files from our source package.
1643 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/protobuf/src/protobuf/src/google")
1644 (string-append (getcwd) "/protobuf-src/src/google")))
1645
1646 (substitute* '("tensorflow/contrib/cmake/tf_shared_lib.cmake"
1647 "tensorflow/contrib/cmake/tf_python.cmake")
1648 ;; Take Eigen source files from our source package.
1649 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/eigen/src/eigen/")
1650 (string-append (getcwd) "/eigen-src/"))
1651 ;; Take Eigen headers from our own package.
1652 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/external/eigen_archive")
1653 (string-append (assoc-ref inputs "eigen") "/include/eigen3")))
1654
1655 ;; Correct the RUNPATH of ops libraries generated for Python.
1656 ;; TODO: this doesn't work :(
1657 ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
1658 ;; warning: RUNPATH contains bogus entries: ("/tmp/guix-build-tensorflow-1.9.0.drv-0/source/tensorflow/contrib/build")
1659 ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
1660 ;; error: depends on 'libpywrap_tensorflow_internal.so', which
1661 ;; cannot be found in RUNPATH ...
1662 (substitute* "tensorflow/contrib/cmake/tf_cc_ops.cmake"
1663 (("set_target_properties.*")
1664 (string-append "set_target_properties(${_AT_TARGET} PROPERTIES \
1665 COMPILE_FLAGS ${target_compile_flags} \
1666 INSTALL_RPATH_USE_LINK_PATH TRUE \
1667 INSTALL_RPATH " (assoc-ref outputs "out") "/lib)\n")))
1668 #t))
1669 (add-after 'build 'build-pip-package
1670 (lambda* (#:key outputs #:allow-other-keys)
1671 (setenv "LDFLAGS"
1672 (string-append "-Wl,-rpath="
1673 (assoc-ref outputs "out") "/lib"))
1674 (invoke "make" "tf_python_build_pip_package")
1675 #t))
1676 (add-after 'build-pip-package 'install-python
1677 (lambda* (#:key inputs outputs #:allow-other-keys)
1678 (let ((out (assoc-ref outputs "out"))
1679 (wheel (car (find-files "../build/tf_python/dist/" "\\.whl$")))
1680 (python-version (python-version
1681 (assoc-ref inputs "python"))))
1682 (invoke "python" "-m" "pip" "install" wheel
1683 (string-append "--prefix=" out))
1684
1685 ;; XXX: broken RUNPATH, see fix-python-build phase.
1686 (delete-file
1687 (string-append
1688 out "/lib/python" python-version
1689 "/site-packages/tensorflow/contrib/"
1690 "seq2seq/python/ops/lib_beam_search_ops.so"))
1691 #t))))))
1692 (native-inputs
1693 `(("pkg-config" ,pkg-config)
1694 ("protobuf:native" ,protobuf-3.6) ; protoc
1695 ("protobuf:src" ,(package-source protobuf-3.6))
1696 ("eigen:src" ,(package-source eigen-for-tensorflow))
1697 ;; install_pip_packages.sh wants setuptools 39.1.0 specifically.
1698 ("python-setuptools" ,python-setuptools-for-tensorflow)
1699
1700 ;; The commit hashes and URLs for third-party source code are taken
1701 ;; from "tensorflow/workspace.bzl".
1702 ("boringssl-src"
1703 ,(let ((commit "ee7aa02")
1704 (revision "1"))
1705 (origin
1706 (method git-fetch)
1707 (uri (git-reference
1708 (url "https://boringssl.googlesource.com/boringssl")
1709 (commit commit)))
1710 (file-name (string-append "boringssl-0-" revision
1711 (string-take commit 7)
1712 "-checkout"))
1713 (sha256
1714 (base32
1715 "1jf693q0nw0adsic6cgmbdx6g7wr4rj4vxa8j1hpn792fqhd8wgw")))))
1716 ("cub-src"
1717 ,(let ((version "1.8.0"))
1718 (origin
1719 (method url-fetch)
1720 (uri (string-append "https://mirror.bazel.build/github.com/NVlabs/"
1721 "cub/archive/" version ".zip"))
1722 (file-name (string-append "cub-" version ".zip"))
1723 (sha256
1724 (base32
1725 "1hsqikqridb90dkxkjr2918dcry6pfh46ccnwrzawl56aamhdykb")))))
1726 ("double-conversion-src"
1727 ,(let ((commit "5664746")
1728 (revision "1"))
1729 (origin
1730 (method git-fetch)
1731 (uri (git-reference
1732 (url "https://github.com/google/double-conversion")
1733 (commit commit)))
1734 (file-name
1735 (git-file-name "double-conversion"
1736 (string-append "0-" revision "."
1737 (string-take commit 7))))
1738 (sha256
1739 (base32
1740 "1h5lppqqxcvdg5jq42i5msgwx20ryij3apvmndflngrgdpc04gn1")))))
1741 ("farmhash-src"
1742 ,(let ((commit "816a4ae622e964763ca0862d9dbd19324a1eaf45"))
1743 (origin
1744 (method url-fetch)
1745 (uri (string-append
1746 "https://mirror.bazel.build/github.com/google/farmhash/archive/"
1747 commit ".tar.gz"))
1748 (file-name (string-append "farmhash-0-" (string-take commit 7)
1749 ".tar.gz"))
1750 (sha256
1751 (base32
1752 "185b2xdxl4d4cnsnv6abg8s22gxvx8673jq2yaq85bz4cdy58q35")))))
1753 ;; The license notice on the home page at
1754 ;; http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html says:
1755 ;; Copyright Takuya OOURA, 1996-2001
1756 ;;
1757 ;; You may use, copy, modify and distribute this code for any purpose
1758 ;; (include commercial use) and without fee. Please refer to this
1759 ;; package when you modify this code.
1760 ;;
1761 ;; We take the identical tarball from the Bazel mirror, because the URL
1762 ;; at the home page is not versioned and might change.
1763 ("fft2d-src"
1764 ,(origin
1765 (method url-fetch)
1766 (uri "https://mirror.bazel.build/www.kurims.kyoto-u.ac.jp/~ooura/fft.tgz")
1767 (file-name "fft2d.tar.gz")
1768 (sha256
1769 (base32
1770 "15jjkfvhqvl2c0753d2di8hz0pyzn598g74wqy79awdrf1y67fsj"))))
1771 ("highwayhash-src"
1772 ,(let ((commit "be5edafc2e1a455768e260ccd68ae7317b6690ee")
1773 (revision "1"))
1774 (origin
1775 (method git-fetch)
1776 (uri (git-reference
1777 (url "https://github.com/google/highwayhash")
1778 (commit commit)))
1779 (file-name (string-append "highwayhash-0-" revision
1780 (string-take commit 7)
1781 "-checkout"))
1782 (sha256
1783 (base32
1784 "154jwf98cyy54hldr94pgjn85zynly3abpnc1avmb8a18lzwjyb6")))))
1785 ("nsync-src"
1786 ,(let ((version "0559ce013feac8db639ee1bf776aca0325d28777")
1787 (revision "1"))
1788 (origin
1789 (method url-fetch)
1790 (uri (string-append "https://mirror.bazel.build/"
1791 "github.com/google/nsync/archive/"
1792 version ".tar.gz"))
1793 (file-name (string-append "nsync-0." revision
1794 "-" (string-take version 7)
1795 ".tar.gz"))
1796 (sha256
1797 (base32
1798 "0qdkyqym34x739mmzv97ah5r7ph462v5xkxqxvidmcfqbi64b132")))))
1799 ("re2-src"
1800 ,(let ((commit "e7efc48")
1801 (revision "1"))
1802 (origin
1803 (method git-fetch)
1804 (uri (git-reference
1805 (url "https://github.com/google/re2")
1806 (commit commit)))
1807 (file-name (string-append "re2-0-" revision
1808 (string-take commit 7)
1809 "-checkout"))
1810 (sha256
1811 (base32
1812 "161g9841rjfsy5pn52fcis0s9hdr7rxvb06pad38j5rppfihvign")))))
1813 ("googletest" ,googletest)
1814 ("swig" ,swig)
1815 ("unzip" ,unzip)))
1816 (propagated-inputs
1817 `(("python-absl-py" ,python-absl-py)
1818 ("python-astor" ,python-astor)
1819 ("python-gast" ,python-gast)
1820 ("python-grpcio" ,python-grpcio)
1821 ("python-numpy" ,python-numpy)
1822 ("python-protobuf" ,python-protobuf-3.6)
1823 ("python-six" ,python-six)
1824 ("python-termcolo" ,python-termcolor)
1825 ("python-wheel" ,python-wheel)))
1826 (inputs
1827 `(("c-ares" ,c-ares)
1828 ("eigen" ,eigen-for-tensorflow)
1829 ("gemmlowp" ,gemmlowp-for-tensorflow)
1830 ("lmdb" ,lmdb)
1831 ("libjpeg" ,libjpeg-turbo)
1832 ("libpng" ,libpng)
1833 ("giflib" ,giflib)
1834 ("grpc" ,grpc-1.16.1 "static")
1835 ("grpc:bin" ,grpc-1.16.1)
1836 ("jsoncpp" ,jsoncpp-for-tensorflow)
1837 ("snappy" ,snappy)
1838 ("sqlite" ,sqlite)
1839 ("protobuf" ,protobuf-3.6)
1840 ("python" ,python-wrapper)
1841 ("zlib" ,zlib)))
1842 (home-page "https://tensorflow.org")
1843 (synopsis "Machine learning framework")
1844 (description
1845 "TensorFlow is a flexible platform for building and training machine
1846 learning models. It provides a library for high performance numerical
1847 computation and includes high level Python APIs, including both a sequential
1848 API for beginners that allows users to build models quickly by plugging
1849 together building blocks and a subclassing API with an imperative style for
1850 advanced research.")
1851 (license license:asl2.0)))
1852
;; Shared data-type, visualization, and I/O layer for model-explanation
;; methods; originally extracted from the SHAP project.
(define-public python-iml
  (package
    (name "python-iml")
    (version "0.6.2")
    (source
     (origin
       (method url-fetch)
       ;; Release tarball from PyPI.
       (uri (pypi-uri "iml" version))
       (sha256
        (base32
         "1k8szlpm19rcwcxdny9qdm3gmaqq8akb4xlvrzyz8c2d679aak6l"))))
    (build-system python-build-system)
    (propagated-inputs
     ;; Run-time Python dependencies.
     `(("ipython" ,python-ipython)
       ("numpy" ,python-numpy)
       ("pandas" ,python-pandas)
       ("scipy" ,python-scipy)))
    (native-inputs
     ;; Nose is only needed to run the test suite at build time.
     `(("nose" ,python-nose)))
    (home-page "https://github.com/interpretable-ml/iml")
    (synopsis "Interpretable Machine Learning (iML) package")
    (description "Interpretable ML (iML) is a set of data type objects,
visualizations, and interfaces that can be used by any method designed to
explain the predictions of machine learning models (or really the output of
any function). It currently contains the interface and IO code from the Shap
project, and it will potentially also do the same for the Lime project.")
    (license license:expat)))
1880
;; Pre-trained reference model implementations for Keras.  This package is a
;; build dependency of python-keras itself, hence the disabled tests below.
(define-public python-keras-applications
  (package
    (name "python-keras-applications")
    (version "1.0.8")
    (source
     (origin
       (method url-fetch)
       ;; Note the capitalized/underscored PyPI project name.
       (uri (pypi-uri "Keras_Applications" version))
       (sha256
        (base32
         "1rcz31ca4axa6kzhjx4lwqxbg4wvlljkj8qj9a7p9sfd5fhzjyam"))))
    (build-system python-build-system)
    ;; The tests require Keras, but this package is needed to build Keras.
    (arguments '(#:tests? #f))
    (propagated-inputs
     `(("python-h5py" ,python-h5py)
       ("python-numpy" ,python-numpy)))
    ;; NOTE(review): these test tools are inert while #:tests? is #f; kept so
    ;; the check phase works should the circular dependency ever be resolved.
    (native-inputs
     `(("python-pytest" ,python-pytest)
       ("python-pytest-cov" ,python-pytest-cov)
       ("python-pytest-pep8" ,python-pytest-pep8)
       ("python-pytest-xdist" ,python-pytest-xdist)))
    (home-page "https://github.com/keras-team/keras-applications")
    (synopsis "Reference implementations of popular deep learning models")
    (description
     "This package provides reference implementations of popular deep learning
models for use with the Keras deep learning framework.")
    (license license:expat)))
1909
;; Data preprocessing/augmentation module of Keras, packaged separately
;; upstream so it can be depended on without the full Keras library.
(define-public python-keras-preprocessing
  (package
    (name "python-keras-preprocessing")
    (version "1.1.0")
    (source
     (origin
       (method url-fetch)
       ;; Note the capitalized/underscored PyPI project name.
       (uri (pypi-uri "Keras_Preprocessing" version))
       (sha256
        (base32
         "1r98nm4k1svsqjyaqkfk23i31bl1kcfcyp7094yyj3c43phfp3as"))))
    (build-system python-build-system)
    (propagated-inputs
     ;; Minimal run-time requirements.
     `(("python-numpy" ,python-numpy)
       ("python-six" ,python-six)))
    ;; Test-only dependencies; the test suite exercises the image/sequence
    ;; helpers against a real TensorFlow backend.
    (native-inputs
     `(("python-pandas" ,python-pandas)
       ("python-pillow" ,python-pillow)
       ("python-pytest" ,python-pytest)
       ("python-pytest-cov" ,python-pytest-cov)
       ("python-pytest-xdist" ,python-pytest-xdist)
       ("tensorflow" ,tensorflow)))
    (home-page "https://github.com/keras-team/keras-preprocessing/")
    (synopsis "Data preprocessing and augmentation for deep learning models")
    (description
     "Keras Preprocessing is the data preprocessing and data augmentation
module of the Keras deep learning library. It provides utilities for working
with image data, text data, and sequence data.")
    (license license:expat)))
1939
;; High-level neural-network API.  Only the TensorFlow backend is usable in
;; Guix: the Theano and CNTK backend files are deleted below, so tests and
;; users are limited to TensorFlow.
(define-public python-keras
  (package
    (name "python-keras")
    (version "2.2.4")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "Keras" version))
       (patches (search-patches "python-keras-integration-test.patch"))
       (sha256
        (base32
         "1j8bsqzh49vjdxy6l1k4iwax5vpjzniynyd041xjavdzvfii1dlh"))))
    (build-system python-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'remove-tests-for-unavailable-features
           (lambda _
             ;; Drop the Theano and CNTK backends (not packaged in Guix) and
             ;; the cross-backend test that would import them.
             (delete-file "keras/backend/theano_backend.py")
             (delete-file "keras/backend/cntk_backend.py")
             (delete-file "tests/keras/backend/backend_test.py")

             ;; FIXME: This doesn't work because Tensorflow is missing the
             ;; coder ops library.
             (delete-file "tests/keras/test_callbacks.py")
             #t))
         (replace 'check
           (lambda _
             ;; These tests attempt to download data files from the internet.
             (delete-file "tests/integration_tests/test_datasets.py")
             (delete-file "tests/integration_tests/imagenet_utils_test.py")

             ;; Make the freshly built modules importable by the test suite.
             (setenv "PYTHONPATH"
                     (string-append (getcwd) "/build/lib:"
                                    (getenv "PYTHONPATH")))
             ;; "-p no:cacheprovider" keeps pytest from writing a cache
             ;; directory into the build tree.
             (invoke "py.test" "-v"
                     "-p" "no:cacheprovider"
                     "--ignore" "keras/utils"))))))
    (propagated-inputs
     `(("python-h5py" ,python-h5py)
       ("python-keras-applications" ,python-keras-applications)
       ("python-keras-preprocessing" ,python-keras-preprocessing)
       ("python-numpy" ,python-numpy)
       ("python-pydot" ,python-pydot)
       ("python-pyyaml" ,python-pyyaml)
       ("python-scipy" ,python-scipy)
       ("python-six" ,python-six)
       ("tensorflow" ,tensorflow)
       ;; Graphviz backs python-pydot's model-plotting support.
       ("graphviz" ,graphviz)))
    (native-inputs
     `(("python-pandas" ,python-pandas)
       ("python-pytest" ,python-pytest)
       ("python-pytest-cov" ,python-pytest-cov)
       ("python-pytest-pep8" ,python-pytest-pep8)
       ("python-pytest-timeout" ,python-pytest-timeout)
       ("python-pytest-xdist" ,python-pytest-xdist)
       ("python-sphinx" ,python-sphinx)
       ("python-requests" ,python-requests)))
    (home-page "https://github.com/keras-team/keras")
    (synopsis "High-level deep learning framework")
    (description "Keras is a high-level neural networks API, written in Python
and capable of running on top of TensorFlow. It was developed with a focus on
enabling fast experimentation. Use Keras if you need a deep learning library
that:

@itemize
@item Allows for easy and fast prototyping (through user friendliness,
modularity, and extensibility).
@item Supports both convolutional networks and recurrent networks, as well as
combinations of the two.
@item Runs seamlessly on CPU and GPU.
@end itemize\n")
    (license license:expat)))
2013
;; Collective-communication primitives used by machine-learning frameworks.
;; Upstream has no release tags, so we package a pinned commit via
;; git-version.
(define-public gloo
  (let ((version "0.0.0") ; no proper version tag
        (commit "ca528e32fea9ca8f2b16053cff17160290fc84ce")
        (revision "0"))
    (package
      (name "gloo")
      (version (git-version version revision commit))
      (source
       (origin
         (method git-fetch)
         (uri (git-reference
               (url "https://github.com/facebookincubator/gloo")
               (commit commit)))
         (file-name (git-file-name name version))
         (sha256
          (base32
           "1q9f80zy75f6njrzrqkmhc0g3qxs4gskr7ns2jdqanxa2ww7a99w"))))
      (build-system cmake-build-system)
      (native-inputs
       ;; Required when the test target is enabled below.
       `(("googletest" ,googletest)))
      (arguments
       `(#:configure-flags '("-DBUILD_TEST=1")
         #:phases
         (modify-phases %standard-phases
           (replace 'check
             (lambda _
               ;; Build the "gloo_test" target in place of the default check.
               ;; NOTE(review): this compiles the test driver; whether it also
               ;; executes it depends on upstream's CMake rules — confirm.
               (invoke "make" "gloo_test")
               #t)))))
      (synopsis "Collective communications library")
      (description
       "Gloo is a collective communications library. It comes with a
number of collective algorithms useful for machine learning applications.
These include a barrier, broadcast, and allreduce.")
      (home-page "https://github.com/facebookincubator/gloo")
      (license license:bsd-3))))
2049
;; UMAP dimension-reduction algorithm.  Depends on Numba for its JIT-compiled
;; numerical kernels.
(define-public python-umap-learn
  (package
    (name "python-umap-learn")
    (version "0.3.10")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "umap-learn" version))
       (sha256
        (base32
         "02ada2yy6km6zgk2836kg1c97yrcpalvan34p8c57446finnpki1"))))
    (build-system python-build-system)
    (native-inputs
     ;; Test-only dependencies.
     `(("python-joblib" ,python-joblib)
       ("python-nose" ,python-nose)))
    (propagated-inputs
     ;; Run-time requirements.
     `(("python-numba" ,python-numba)
       ("python-numpy" ,python-numpy)
       ("python-scikit-learn" ,python-scikit-learn)
       ("python-scipy" ,python-scipy)))
    (home-page "https://github.com/lmcinnes/umap")
    (synopsis
     "Uniform Manifold Approximation and Projection")
    (description
     "Uniform Manifold Approximation and Projection is a dimension reduction
technique that can be used for visualisation similarly to t-SNE, but also for
general non-linear dimension reduction.")
    (license license:bsd-3)))