1 ;;; GNU Guix --- Functional package management for GNU
2 ;;; Copyright © 2015, 2016, 2017, 2018, 2019 Ricardo Wurmus <rekado@elephly.net>
3 ;;; Copyright © 2016 Efraim Flashner <efraim@flashner.co.il>
4 ;;; Copyright © 2016, 2017 Marius Bakke <mbakke@fastmail.com>
5 ;;; Copyright © 2016 Hartmut Goebel <h.goebel@crazy-compilers.com>
6 ;;; Copyright © 2018, 2019 Tobias Geerinckx-Rice <me@tobias.gr>
7 ;;; Copyright © 2018 Kei Kebreau <kkebreau@posteo.net>
8 ;;; Copyright © 2018 Mark Meyer <mark@ofosos.org>
9 ;;; Copyright © 2018 Ben Woodcroft <donttrustben@gmail.com>
10 ;;; Copyright © 2018 Fis Trivial <ybbs.daans@hotmail.com>
11 ;;; Copyright © 2018 Julien Lepiller <julien@lepiller.eu>
12 ;;; Copyright © 2018 Björn Höfling <bjoern.hoefling@bjoernhoefling.de>
13 ;;; Copyright © 2019 Nicolas Goaziou <mail@nicolasgoaziou.fr>
14 ;;; Copyright © 2019 Guillaume Le Vaillant <glv@posteo.net>
15 ;;;
16 ;;; This file is part of GNU Guix.
17 ;;;
18 ;;; GNU Guix is free software; you can redistribute it and/or modify it
19 ;;; under the terms of the GNU General Public License as published by
20 ;;; the Free Software Foundation; either version 3 of the License, or (at
21 ;;; your option) any later version.
22 ;;;
23 ;;; GNU Guix is distributed in the hope that it will be useful, but
24 ;;; WITHOUT ANY WARRANTY; without even the implied warranty of
25 ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
26 ;;; GNU General Public License for more details.
27 ;;;
28 ;;; You should have received a copy of the GNU General Public License
29 ;;; along with GNU Guix. If not, see <http://www.gnu.org/licenses/>.
30
31 (define-module (gnu packages machine-learning)
32 #:use-module ((guix licenses) #:prefix license:)
33 #:use-module (guix packages)
34 #:use-module (guix utils)
35 #:use-module (guix download)
36 #:use-module (guix svn-download)
37 #:use-module (guix build-system asdf)
38 #:use-module (guix build-system cmake)
39 #:use-module (guix build-system gnu)
40 #:use-module (guix build-system ocaml)
41 #:use-module (guix build-system python)
42 #:use-module (guix build-system r)
43 #:use-module (guix git-download)
44 #:use-module (gnu packages)
45 #:use-module (gnu packages adns)
46 #:use-module (gnu packages algebra)
47 #:use-module (gnu packages audio)
48 #:use-module (gnu packages autotools)
49 #:use-module (gnu packages base)
50 #:use-module (gnu packages bash)
51 #:use-module (gnu packages boost)
52 #:use-module (gnu packages check)
53 #:use-module (gnu packages compression)
54 #:use-module (gnu packages cran)
55 #:use-module (gnu packages databases)
56 #:use-module (gnu packages dejagnu)
57 #:use-module (gnu packages gcc)
58 #:use-module (gnu packages glib)
59 #:use-module (gnu packages graphviz)
60 #:use-module (gnu packages gstreamer)
61 #:use-module (gnu packages image)
62 #:use-module (gnu packages linux)
63 #:use-module (gnu packages lisp)
64 #:use-module (gnu packages maths)
65 #:use-module (gnu packages mpi)
66 #:use-module (gnu packages ocaml)
67 #:use-module (gnu packages onc-rpc)
68 #:use-module (gnu packages perl)
69 #:use-module (gnu packages pkg-config)
70 #:use-module (gnu packages protobuf)
71 #:use-module (gnu packages python)
72 #:use-module (gnu packages python-science)
73 #:use-module (gnu packages python-web)
74 #:use-module (gnu packages python-xyz)
75 #:use-module (gnu packages serialization)
76 #:use-module (gnu packages sphinx)
77 #:use-module (gnu packages statistics)
78 #:use-module (gnu packages sqlite)
79 #:use-module (gnu packages swig)
80 #:use-module (gnu packages tls)
81 #:use-module (gnu packages web)
82 #:use-module (gnu packages xml)
83 #:use-module (gnu packages xorg)
84 #:use-module (ice-9 match))
85
86 (define-public fann
87 ;; The last release is >100 commits behind, so we package from git.
88 (let ((commit "d71d54788bee56ba4cf7522801270152da5209d7"))
89 (package
90 (name "fann")
91 (version (string-append "2.2.0-1." (string-take commit 8)))
92 (source (origin
93 (method git-fetch)
94 (uri (git-reference
95 (url "https://github.com/libfann/fann.git")
96 (commit commit)))
97 (file-name (string-append name "-" version "-checkout"))
98 (sha256
99 (base32
100 "0ibwpfrjs6q2lijs8slxjgzb2llcl6rk3v2ski4r6215g5jjhg3x"))))
101 (build-system cmake-build-system)
102 (arguments
103 `(#:phases
104 (modify-phases %standard-phases
105 (replace 'check
106 (lambda* (#:key outputs #:allow-other-keys)
107 (let* ((out (assoc-ref outputs "out")))
108 (with-directory-excursion (string-append (getcwd) "/tests")
109 (invoke "./fann_tests"))))))))
110 (home-page "http://leenissen.dk/fann/wp/")
111 (synopsis "Fast Artificial Neural Network")
112 (description
113 "FANN is a neural network library, which implements multilayer
114 artificial neural networks in C with support for both fully connected and
115 sparsely connected networks.")
116 (license license:lgpl2.1))))
117
118 (define-public libsvm
119 (package
120 (name "libsvm")
121 (version "3.23")
122 (source
123 (origin
124 (method url-fetch)
125 (uri (string-append "https://www.csie.ntu.edu.tw/~cjlin/libsvm/"
126 name "-" version ".tar.gz"))
127 (sha256
128 (base32 "0jpaq0rr92x38p4nk3gjan79ip67m6p80anb28z1d8601miysyi5"))))
129 (build-system gnu-build-system)
130 (arguments
131 `(#:tests? #f ; no "check" target
132 #:phases (modify-phases %standard-phases
133 (delete 'configure)
134 (replace
135 'install ; no ‘install’ target
136 (lambda* (#:key outputs #:allow-other-keys)
137 (let* ((out (assoc-ref outputs "out"))
138 (bin (string-append out "/bin/")))
139 (mkdir-p bin)
140 (for-each (lambda (file)
141 (copy-file file (string-append bin file)))
142 '("svm-train"
143 "svm-predict"
144 "svm-scale")))
145 #t)))))
146 (home-page "http://www.csie.ntu.edu.tw/~cjlin/libsvm/")
147 (synopsis "Library for Support Vector Machines")
148 (description
149 "LIBSVM is a machine learning library for support vector
150 classification (C-SVC, nu-SVC), regression (epsilon-SVR, nu-SVR) and
151 distribution estimation (one-class SVM). It supports multi-class
152 classification.")
153 (license license:bsd-3)))
154
155 (define-public python-libsvm
156 (package (inherit libsvm)
157 (name "python-libsvm")
158 (build-system gnu-build-system)
159 (arguments
160 `(#:tests? #f ; no "check" target
161 #:make-flags '("-C" "python")
162 #:phases
163 (modify-phases %standard-phases
164 (delete 'configure)
165 (replace
166 'install ; no ‘install’ target
167 (lambda* (#:key inputs outputs #:allow-other-keys)
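;; Presumably the expression below derives the Python "major.minor"
;; version from the tail of the python input's store path,
;; e.g. ".../python-3.7.4" -> "3.7.4" -> "3.7".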
168 (let ((site (string-append (assoc-ref outputs "out")
169 "/lib/python"
170 (string-take
171 (string-take-right
172 (assoc-ref inputs "python") 5) 3)
173 "/site-packages/")))
174 (substitute* "python/svm.py"
175 (("../libsvm.so.2") "libsvm.so.2"))
176 (mkdir-p site)
177 (for-each (lambda (file)
178 (copy-file file (string-append site (basename file))))
179 (find-files "python" "\\.py"))
180 (copy-file "libsvm.so.2"
181 (string-append site "libsvm.so.2")))
182 #t)))))
183 (inputs
184 `(("python" ,python)))
185     (synopsis "Python bindings for LIBSVM")))
186
187 (define-public ghmm
188 ;; The latest release candidate is several years old, and a couple of fixes
189 ;; have been published since.  This is why we download the sources from the
190 ;; SVN repository.
191 (let ((svn-revision 2341))
192 (package
193 (name "ghmm")
194 (version (string-append "0.9-rc3-0." (number->string svn-revision)))
195 (source (origin
196 (method svn-fetch)
197 (uri (svn-reference
198 (url "http://svn.code.sf.net/p/ghmm/code/trunk")
199 (revision svn-revision)))
200 (file-name (string-append name "-" version "-checkout"))
201 (sha256
202 (base32
203 "0qbq1rqp94l530f043qzp8aw5lj7dng9wq0miffd7spd1ff638wq"))))
204 (build-system gnu-build-system)
205 (arguments
206 `(#:imported-modules (,@%gnu-build-system-modules
207 (guix build python-build-system))
208 #:phases
209 (modify-phases %standard-phases
210 (add-after 'unpack 'enter-dir
211 (lambda _ (chdir "ghmm") #t))
212 (delete 'check)
213 (add-after 'install 'check
214 (assoc-ref %standard-phases 'check))
215 (add-before 'check 'fix-PYTHONPATH
216 (lambda* (#:key inputs outputs #:allow-other-keys)
217 (let ((python-version (python-version
218 (assoc-ref inputs "python"))))
219 (setenv "PYTHONPATH"
220 (string-append (getenv "PYTHONPATH")
221 ":" (assoc-ref outputs "out")
222 "/lib/python" python-version
223 "/site-packages")))
224 #t))
225 (add-after 'enter-dir 'fix-runpath
226 (lambda* (#:key outputs #:allow-other-keys)
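;; Presumably this injects "-Wl,-rpath=<out>/lib" into the extension
;; module's link flags so that the installed Python wrapper can find
;; libghmm at run time.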
227 (substitute* "ghmmwrapper/setup.py"
228 (("^(.*)extra_compile_args = \\[" line indent)
229 (string-append indent
230 "extra_link_args = [\"-Wl,-rpath="
231 (assoc-ref outputs "out") "/lib\"],\n"
232 line
233 "\"-Wl,-rpath="
234 (assoc-ref outputs "out")
235 "/lib\", ")))
236 #t))
237 (add-after 'enter-dir 'disable-broken-tests
238 (lambda _
239 (substitute* "tests/Makefile.am"
240 ;; GHMM_SILENT_TESTS is assumed to be a command.
241 (("TESTS_ENVIRONMENT.*") "")
242 ;; Do not build broken tests.
243 (("chmm .*") "")
244 (("read_fa .*") "")
245 (("mcmc .*") "")
246 (("label_higher_order_test.*$")
247 "label_higher_order_test\n"))
248
249 ;; These Python unittests are broken as there is no gato.
250 ;; See https://sourceforge.net/p/ghmm/support-requests/3/
251 (substitute* "ghmmwrapper/ghmmunittests.py"
252 (("^(.*)def (testNewXML|testMultipleTransitionClasses|testNewXML)"
253 line indent)
254 (string-append indent
255 "@unittest.skip(\"Disabled by Guix\")\n"
256 line)))
257 #t)))))
258 (inputs
259 `(("python" ,python-2) ; only Python 2 is supported
260 ("libxml2" ,libxml2)))
261 (native-inputs
262 `(("pkg-config" ,pkg-config)
263 ("dejagnu" ,dejagnu)
264 ("swig" ,swig)
265 ("autoconf" ,autoconf)
266 ("automake" ,automake)
267 ("libtool" ,libtool)))
268 (home-page "http://ghmm.org")
269 (synopsis "Hidden Markov Model library")
270 (description
271 "The General Hidden Markov Model library (GHMM) is a C library with
272 additional Python bindings implementing a wide range of types of @dfn{Hidden
273 Markov Models} (HMM) and algorithms: discrete, continuous emissions, basic
274 training, HMM clustering, HMM mixtures.")
275 (license license:lgpl2.0+))))
276
277 (define-public mcl
278 (package
279 (name "mcl")
280 (version "14.137")
281 (source (origin
282 (method url-fetch)
283 (uri (string-append
284 "http://micans.org/mcl/src/mcl-"
285 (string-replace-substring version "." "-")
286 ".tar.gz"))
287 (sha256
288 (base32
289 "15xlax3z31lsn62vlg94hkm75nm40q4679amnfg13jm8m2bnhy5m"))))
290 (build-system gnu-build-system)
291 (arguments
292 `(#:configure-flags (list "--enable-blast")))
293 (inputs
294 `(("perl" ,perl)))
295 (home-page "http://micans.org/mcl/")
296 (synopsis "Clustering algorithm for graphs")
297 (description
298 "The MCL algorithm is short for the @dfn{Markov Cluster Algorithm}, a
299 fast and scalable unsupervised clustering algorithm for graphs (also known as
300 networks) based on simulation of (stochastic) flow in graphs.")
301 ;; In the LICENCE file and web page it says "The software is licensed
302 ;; under the GNU General Public License, version 3.", but in several of
303 ;; the source code files it suggests GPL3 or later.
304 ;; http://listserver.ebi.ac.uk/pipermail/mcl-users/2016/000376.html
305 (license license:gpl3)))
306
307 (define-public ocaml-mcl
308 (package
309 (name "ocaml-mcl")
310 (version "12-068oasis4")
311 (source
312 (origin
313 (method git-fetch)
314 (uri (git-reference
315 (url "https://github.com/fhcrc/mcl.git")
316 (commit version)))
317 (file-name (git-file-name name version))
318 (sha256
319 (base32
320 "0009dc3h2jp3qg5val452wngpqnbfyhbcxylghq0mrjqxx0jdq5p"))))
321 (build-system ocaml-build-system)
322 (arguments
323 `(#:phases
324 (modify-phases %standard-phases
325 (add-before 'configure 'patch-paths
326 (lambda _
327 (substitute* "configure"
328 (("/bin/sh") (which "sh")))
329 (substitute* "setup.ml"
330 (("LDFLAGS=-fPIC")
331 (string-append "LDFLAGS=-fPIC\"; \"SHELL=" (which "sh")))
332 (("-std=c89") "-std=gnu99")
333
334 ;; This is a mutable string, which is no longer supported. Use
335 ;; a byte buffer instead.
336 (("String.make \\(String.length s\\)")
337 "Bytes.make (String.length s)")
338
339 ;; These two belong together.
340 (("OASISString.replace_chars")
341 "Bytes.to_string (OASISString.replace_chars")
342 ((" s;")
343 " s);"))
344 (substitute* "myocamlbuild.ml"
345 (("std=c89") "std=gnu99"))
346 ;; Since we build with a more recent OCaml, we have to use C99 or
347 ;; later. This causes problems with the old C code.
348 (substitute* "src/impala/matrix.c"
349 (("restrict") "restrict_"))
350 #t)))))
351 (native-inputs
352 `(("ocamlbuild" ,ocamlbuild)))
353 (home-page "https://github.com/fhcrc/mcl")
354 (synopsis "OCaml wrappers around MCL")
355 (description
356 "This package provides OCaml bindings for the MCL graph clustering
357 algorithm.")
358 (license license:gpl3)))
359
360 (define-public randomjungle
361 (package
362 (name "randomjungle")
363 (version "2.1.0")
364 (source
365 (origin
366 (method url-fetch)
367 (uri (string-append
368 "https://www.imbs.uni-luebeck.de/fileadmin/files/Software"
369 "/randomjungle/randomjungle-" version ".tar_.gz"))
370 (patches (search-patches "randomjungle-disable-static-build.patch"))
371 (sha256
372 (base32
373 "12c8rf30cla71swx2mf4ww9mfd8jbdw5lnxd7dxhyw1ygrvg6y4w"))))
374 (build-system gnu-build-system)
375 (arguments
376 `(#:configure-flags
377 (list "--disable-static"
378 (string-append "--with-boost="
379 (assoc-ref %build-inputs "boost")))
380 #:phases
381 (modify-phases %standard-phases
382 (add-before
383 'configure 'set-CXXFLAGS
384 (lambda _
385 (setenv "CXXFLAGS" "-fpermissive ")
386 #t)))))
387 (inputs
388 `(("boost" ,boost)
389 ("gsl" ,gsl)
390 ("libxml2" ,libxml2)
391 ("zlib" ,zlib)))
392 (native-inputs
393 `(("gfortran" ,gfortran)
394 ("gfortran:lib" ,gfortran "lib")))
395     ;; Non-portable assembly instructions are used, so building fails on
396 ;; platforms other than x86_64 or i686.
397 (supported-systems '("x86_64-linux" "i686-linux"))
398 (home-page "https://www.imbs.uni-luebeck.de/forschung/software/details.html#c224")
399 (synopsis "Implementation of the Random Forests machine learning method")
400 (description
401 "Random Jungle is an implementation of Random Forests. It is supposed to
402 analyse high dimensional data. In genetics, it can be used for analysing big
403 Genome Wide Association (GWA) data. Random Forests is a powerful machine
404 learning method. Most interesting features are variable selection, missing
405 value imputation, classifier creation, generalization error estimation and
406 sample proximities between pairs of cases.")
407 (license license:gpl3+)))
408
409 (define-public openfst
410 (package
411 (name "openfst")
412 (version "1.7.2")
413 (source (origin
414 (method url-fetch)
415 (uri (string-append "http://www.openfst.org/twiki/pub/FST/"
416 "FstDownload/openfst-" version ".tar.gz"))
417 (sha256
418 (base32
419 "0fqgk8195kz21is09gwzwnrg7fr9526bi9mh4apyskapz27pbhr1"))))
420 (build-system gnu-build-system)
421 (home-page "http://www.openfst.org")
422 (synopsis "Library for weighted finite-state transducers")
423 (description "OpenFst is a library for constructing, combining,
424 optimizing, and searching weighted finite-state transducers (FSTs).")
425 (license license:asl2.0)))
426
427 (define-public shogun
428 (package
429 (name "shogun")
430 (version "6.1.3")
431 (source
432 (origin
433 (method url-fetch)
434 (uri (string-append
435 "ftp://shogun-toolbox.org/shogun/releases/"
436 (version-major+minor version)
437 "/sources/shogun-" version ".tar.bz2"))
438 (sha256
439 (base32
440 "1rn9skm3nw6hr7mr3lgp2gfqhi7ii0lyxck7qmqnf8avq349s5jp"))
441 (modules '((guix build utils)
442 (ice-9 rdelim)))
443 (snippet
444 '(begin
445 ;; Remove non-free sources and files referencing them
446 (for-each delete-file
447 (find-files "src/shogun/classifier/svm/"
448 "SVMLight\\.(cpp|h)"))
449 (for-each delete-file
450 (find-files "examples/undocumented/libshogun/"
451 (string-append
452 "(classifier_.*svmlight.*|"
453 "evaluation_cross_validation_locked_comparison).cpp")))
454 ;; Remove non-free functions.
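;; Note: the loop in delete-ifdefs keeps the "#ifdef USE_SVMLIGHT" and
;; "#endif //USE_SVMLIGHT" marker lines themselves but drops every line
;; in between, leaving empty conditional blocks behind.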
455 (define (delete-ifdefs file)
456 (with-atomic-file-replacement file
457 (lambda (in out)
458 (let loop ((line (read-line in 'concat))
459 (skipping? #f))
460 (if (eof-object? line)
461 #t
462 (let ((skip-next?
463 (or (and skipping?
464 (not (string-prefix?
465 "#endif //USE_SVMLIGHT" line)))
466 (string-prefix?
467 "#ifdef USE_SVMLIGHT" line))))
468 (when (or (not skipping?)
469 (and skipping? (not skip-next?)))
470 (display line out))
471 (loop (read-line in 'concat) skip-next?)))))))
472 (for-each delete-ifdefs
473 (append
474 (find-files "src/shogun/classifier/mkl"
475 "^MKLClassification\\.cpp")
476 (find-files "src/shogun/classifier/svm"
477 "^SVMLightOneClass\\.(cpp|h)")
478 (find-files "src/shogun/multiclass"
479 "^ScatterSVM\\.(cpp|h)")
480 (find-files "src/shogun/kernel/"
481 "^(Kernel|CombinedKernel|ProductKernel)\\.(cpp|h)")
482 (find-files "src/shogun/regression/svr"
483 "^(MKLRegression|SVRLight)\\.(cpp|h)")
484 (find-files "src/shogun/transfer/domain_adaptation"
485 "^DomainAdaptationSVM\\.(cpp|h)")))
486 #t))))
487 (build-system cmake-build-system)
488 (arguments
489 '(#:tests? #f ;no check target
490 #:phases
491 (modify-phases %standard-phases
492 (add-after 'unpack 'delete-broken-symlinks
493 (lambda _
494 (for-each delete-file '("applications/arts/data"
495 "applications/asp/data"
496 "applications/easysvm/data"
497 "applications/msplicer/data"
498 "applications/ocr/data"
499 "examples/meta/data"
500 "examples/undocumented/data"))
501 #t))
502 (add-after 'unpack 'change-R-target-path
503 (lambda* (#:key outputs #:allow-other-keys)
504 (substitute* '("src/interfaces/r/CMakeLists.txt"
505 "examples/meta/r/CMakeLists.txt")
506 (("\\$\\{R_COMPONENT_LIB_PATH\\}")
507 (string-append (assoc-ref outputs "out")
508 "/lib/R/library/")))
509 #t))
510 (add-after 'unpack 'fix-octave-modules
511 (lambda* (#:key outputs #:allow-other-keys)
512 (substitute* "src/interfaces/octave/CMakeLists.txt"
513 (("^include_directories\\(\\$\\{OCTAVE_INCLUDE_DIRS\\}")
514 "include_directories(${OCTAVE_INCLUDE_DIRS} ${OCTAVE_INCLUDE_DIRS}/octave")
515 ;; change target directory
516 (("\\$\\{OCTAVE_OCT_LOCAL_API_FILE_DIR\\}")
517 (string-append (assoc-ref outputs "out")
518 "/share/octave/packages")))
519 (substitute* '("src/interfaces/octave/swig_typemaps.i"
520 "src/interfaces/octave/sg_print_functions.cpp")
521 ;; "octave/config.h" and "octave/oct-obj.h" deprecated in Octave.
522 (("octave/config\\.h") "octave/octave-config.h")
523 (("octave/oct-obj.h") "octave/ovl.h"))
524 #t))
525 (add-after 'unpack 'move-rxcpp
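;; Shogun's build apparently looks for rxcpp under its own
;; third-party directory, so copy the headers there from the rxcpp
;; input instead of letting CMake download them.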
526 (lambda* (#:key inputs #:allow-other-keys)
527 (let ((rxcpp-dir "shogun/third-party/rxcpp"))
528 (mkdir-p rxcpp-dir)
529 (install-file (assoc-ref inputs "rxcpp") rxcpp-dir)
530 #t)))
531 (add-before 'build 'set-HOME
532 ;; $HOME needs to be set at some point during the build phase
533 (lambda _ (setenv "HOME" "/tmp") #t)))
534 #:configure-flags
535 (list "-DCMAKE_BUILD_WITH_INSTALL_RPATH=TRUE"
536 "-DUSE_SVMLIGHT=OFF" ;disable proprietary SVMLIGHT
537 "-DBUILD_META_EXAMPLES=OFF" ;requires unpackaged ctags
538 ;;"-DINTERFACE_JAVA=ON" ;requires unpackaged jblas
539 ;;"-DINTERFACE_RUBY=ON" ;requires unpackaged ruby-narray
540 ;;"-DINTERFACE_PERL=ON" ;"FindPerlLibs" does not exist
541 ;;"-DINTERFACE_LUA=ON" ;fails because lua doesn't build pkgconfig file
542 "-DINTERFACE_OCTAVE=ON"
543 "-DINTERFACE_PYTHON=ON"
544 "-DINTERFACE_R=ON")))
545 (inputs
546 `(("python" ,python)
547 ("numpy" ,python-numpy)
548 ("r-minimal" ,r-minimal)
549 ("octave" ,octave-cli)
550 ("swig" ,swig)
551 ("eigen" ,eigen)
552 ("hdf5" ,hdf5)
553 ("atlas" ,atlas)
554 ("arpack" ,arpack-ng)
555 ("lapack" ,lapack)
556 ("glpk" ,glpk)
557 ("libxml2" ,libxml2)
558 ("lzo" ,lzo)
559 ("zlib" ,zlib)))
560 (native-inputs
561 `(("pkg-config" ,pkg-config)
562 ("rxcpp" ,rxcpp)))
563     ;; Non-portable SSE instructions are used, so building fails on platforms
564 ;; other than x86_64.
565 (supported-systems '("x86_64-linux"))
566 (home-page "http://shogun-toolbox.org/")
567 (synopsis "Machine learning toolbox")
568 (description
569 "The Shogun Machine learning toolbox provides a wide range of unified and
570 efficient Machine Learning (ML) methods.  The toolbox seamlessly allows one to
571 combine multiple data representations, algorithm classes, and general purpose
572 tools. This enables both rapid prototyping of data pipelines and extensibility
573 in terms of new algorithms.")
574 (license license:gpl3+)))
575
576 (define-public rxcpp
577 (package
578 (name "rxcpp")
579 (version "4.1.0")
580 (source
581 (origin
582 (method git-fetch)
583 (uri (git-reference
584 (url "https://github.com/ReactiveX/RxCpp.git")
585 (commit (string-append "v" version))))
586 (sha256
587 (base32 "1rdpa3jlc181jd08nk437aar085h28i45s6nzrv65apb3xyyz0ij"))
588 (file-name (git-file-name name version))))
589 (build-system cmake-build-system)
590 (arguments
591 `(#:phases
592 (modify-phases %standard-phases
593 (add-after 'unpack 'remove-werror
594 (lambda _
595 (substitute* (find-files ".")
596 (("-Werror") ""))
597 #t))
598 (replace 'check
599 (lambda _
600 (invoke "ctest"))))))
601 (native-inputs
602 `(("catch" ,catch-framework)))
603 (home-page "http://reactivex.io/")
604 (synopsis "Reactive Extensions for C++")
605 (description
606 "The Reactive Extensions for C++ (RxCpp) is a library of algorithms for
607 values-distributed-in-time. ReactiveX is a library for composing asynchronous
608 and event-based programs by using observable sequences.
609
610 It extends the observer pattern to support sequences of data and/or events and
611 adds operators that allow you to compose sequences together declaratively while
612 abstracting away concerns about things like low-level threading,
613 synchronization, thread-safety, concurrent data structures, and non-blocking
614 I/O.")
615 (license license:asl2.0)))
616
617 (define-public r-adaptivesparsity
618 (package
619 (name "r-adaptivesparsity")
620 (version "1.6")
621 (source (origin
622 (method url-fetch)
623 (uri (cran-uri "AdaptiveSparsity" version))
624 (sha256
625 (base32
626 "0imr5m8mll9j6n4icsv6z9rl5kbnwsp9wvzrg7n90nnmcxq2cz91"))))
627 (properties
628 `((upstream-name . "AdaptiveSparsity")))
629 (build-system r-build-system)
630 (arguments
631 `(#:phases
632 (modify-phases %standard-phases
633 (add-after 'unpack 'link-against-armadillo
634 (lambda _
635 (substitute* "src/Makevars"
636 (("PKG_LIBS=" prefix)
637 (string-append prefix "-larmadillo"))))))))
638 (propagated-inputs
639 `(("r-mass" ,r-mass)
640 ("r-matrix" ,r-matrix)
641 ("r-rcpp" ,r-rcpp)
642 ("r-rcpparmadillo" ,r-rcpparmadillo)))
643 (inputs
644 `(("armadillo" ,armadillo)))
645 (home-page "https://cran.r-project.org/web/packages/AdaptiveSparsity")
646 (synopsis "Adaptive sparsity models")
647 (description
648 "This package implements the Figueiredo machine learning algorithm for
649 adaptive sparsity and the Wong algorithm for adaptively sparse Gaussian
650 geometric models.")
651 (license license:lgpl3+)))
652
653 (define-public gemmlowp-for-tensorflow
654 ;; The commit hash is taken from "tensorflow/workspace.bzl".
655 (let ((commit "38ebac7b059e84692f53e5938f97a9943c120d98")
656 (revision "2"))
657 (package
658 (name "gemmlowp")
659 (version (git-version "0" revision commit))
660 (source (origin
661 (method url-fetch)
662 (uri (string-append "https://mirror.bazel.build/"
663 "github.com/google/gemmlowp/archive/"
664 commit ".zip"))
665 (file-name (string-append "gemmlowp-" version ".zip"))
666 (sha256
667 (base32
668 "0n56s2g8hrssm4w8qj1v58gfm56a04n9v992ixkmvk6zjiralzxq"))))
669 (build-system cmake-build-system)
670 (arguments
671 `(#:configure-flags
672 (list ,@(match (%current-system)
673 ((or "x86_64-linux" "i686-linux")
674 '("-DCMAKE_CXX_FLAGS=-msse2"))
675 (_ '())))
676 #:phases
677 (modify-phases %standard-phases
678 ;; This directory contains the CMakeLists.txt.
679 (add-after 'unpack 'chdir
680 (lambda _ (chdir "contrib") #t))
681 ;; There is no install target
682 (replace 'install
683 (lambda* (#:key outputs #:allow-other-keys)
684 (let* ((out (assoc-ref outputs "out"))
685 (lib (string-append out "/lib/"))
686 (inc (string-append out "/include/")))
687 (install-file "../build/libeight_bit_int_gemm.so" lib)
688 (for-each (lambda (dir)
689 (let ((target (string-append inc "/" dir)))
690 (mkdir-p target)
691 (for-each (lambda (h)
692 (install-file h target))
693 (find-files (string-append "../" dir)
694 "\\.h$"))))
695 '("meta" "profiling" "public" "fixedpoint"
696 "eight_bit_int_gemm" "internal"))
697 #t))))))
698 (native-inputs
699 `(("unzip" ,unzip)))
700 (home-page "https://github.com/google/gemmlowp")
701 (synopsis "Small self-contained low-precision GEMM library")
702 (description
703 "This is a small self-contained low-precision @dfn{general matrix
704 multiplication} (GEMM) library. It is not a full linear algebra library.
705 Low-precision means that the input and output matrix entries are integers on
706 at most 8 bits. To avoid overflow, results are internally accumulated on more
707 than 8 bits, and at the end only some significant 8 bits are kept.")
708 (license license:asl2.0))))
709
710 (define-public dlib
711 (package
712 (name "dlib")
713 (version "19.7")
714 (source (origin
715 (method url-fetch)
716 (uri (string-append
717 "http://dlib.net/files/dlib-" version ".tar.bz2"))
718 (sha256
719 (base32
720 "1mljz02kwkrbggyncxv5fpnyjdybw2qihaacb3js8yfkw12vwpc2"))
721 (modules '((guix build utils)))
722 (snippet
723 '(begin
724 ;; Delete ~13MB of bundled dependencies.
725 (delete-file-recursively "dlib/external")
726 (delete-file-recursively "docs/dlib/external")
727 #t))))
728 (build-system cmake-build-system)
729 (arguments
730 `(#:phases
731 (modify-phases %standard-phases
732 (add-after 'unpack 'disable-asserts
733 (lambda _
734 ;; config.h recommends explicitly enabling or disabling asserts
735 ;; when building as a shared library. By default neither is set.
736 (substitute* "dlib/config.h"
737 (("^//#define DLIB_DISABLE_ASSERTS") "#define DLIB_DISABLE_ASSERTS"))
738 #t))
739 (add-after 'disable-asserts 'disable-failing-tests
740 (lambda _
741 ;; One test times out on MIPS, so we need to disable it.
742 ;; Others are flaky on some platforms.
743 (let* ((system ,(or (%current-target-system)
744 (%current-system)))
745 (disabled-tests (cond
746 ((string-prefix? "mips64" system)
747 '("object_detector" ; timeout
748 "data_io"))
749 ((string-prefix? "armhf" system)
750 '("learning_to_track"))
751 ((string-prefix? "i686" system)
752 '("optimization"))
753 (else '()))))
754 (for-each
755 (lambda (test)
756 (substitute* "dlib/test/makefile"
757 (((string-append "SRC \\+= " test "\\.cpp")) "")))
758 disabled-tests)
759 #t)))
760 (replace 'check
761 (lambda _
762 ;; No test target, so we build and run the unit tests here.
763 (let ((test-dir (string-append "../dlib-" ,version "/dlib/test")))
764 (with-directory-excursion test-dir
765 (invoke "make" "-j" (number->string (parallel-job-count)))
766 (invoke "./dtest" "--runall"))
767 #t)))
768 (add-after 'install 'delete-static-library
769 (lambda* (#:key outputs #:allow-other-keys)
770 (delete-file (string-append (assoc-ref outputs "out")
771 "/lib/libdlib.a"))
772 #t)))))
773 (native-inputs
774 `(("pkg-config" ,pkg-config)
775 ;; For tests.
776 ("libnsl" ,libnsl)))
777 (inputs
778 `(("giflib" ,giflib)
779 ("lapack" ,lapack)
780 ("libjpeg" ,libjpeg)
781 ("libpng" ,libpng)
782 ("libx11" ,libx11)
783 ("openblas" ,openblas)
784 ("zlib" ,zlib)))
785 (synopsis
786 "Toolkit for making machine learning and data analysis applications in C++")
787 (description
788 "Dlib is a modern C++ toolkit containing machine learning algorithms and
789 tools. It is used in both industry and academia in a wide range of domains
790 including robotics, embedded devices, mobile phones, and large high performance
791 computing environments.")
792 (home-page "http://dlib.net")
793 (license license:boost1.0)))
794
795 (define-public python-scikit-learn
796 (package
797 (name "python-scikit-learn")
798 (version "0.20.4")
799 (source
800 (origin
801 (method git-fetch)
802 (uri (git-reference
803 (url "https://github.com/scikit-learn/scikit-learn.git")
804 (commit version)))
805 (file-name (git-file-name name version))
806 (sha256
807 (base32
808 "08zbzi8yx5wdlxfx9jap61vg1malc9ajf576w7a0liv6jvvrxlpj"))))
809 (build-system python-build-system)
810 (arguments
811 `(#:phases
812 (modify-phases %standard-phases
813 (add-after 'build 'build-ext
814 (lambda _ (invoke "python" "setup.py" "build_ext" "--inplace") #t))
815 (replace 'check
816 (lambda _
817 ;; Restrict OpenBLAS threads to prevent segfaults while testing!
818 (setenv "OPENBLAS_NUM_THREADS" "1")
819
820 ;; Some tests require write access to $HOME.
821 (setenv "HOME" "/tmp")
822
823 (invoke "pytest" "sklearn" "-m" "not network")))
824 (add-before 'reset-gzip-timestamps 'make-files-writable
825 (lambda* (#:key outputs #:allow-other-keys)
826 ;; Make sure .gz files are writable so that the
827 ;; 'reset-gzip-timestamps' phase can do its work.
828 (let ((out (assoc-ref outputs "out")))
829 (for-each make-file-writable
830 (find-files out "\\.gz$"))
831 #t))))))
832 (inputs
833 `(("openblas" ,openblas)))
834 (native-inputs
835 `(("python-pytest" ,python-pytest)
836 ("python-pandas" ,python-pandas) ;for tests
837 ("python-cython" ,python-cython)))
838 (propagated-inputs
839 `(("python-numpy" ,python-numpy)
840 ("python-scipy" ,python-scipy)))
841 (home-page "http://scikit-learn.org/")
842 (synopsis "Machine Learning in Python")
843 (description
844 "Scikit-learn provides simple and efficient tools for data mining and
845 data analysis.")
846 (license license:bsd-3)))
847
848 (define-public python2-scikit-learn
849 (package-with-python2 python-scikit-learn))
850
851 (define-public python-autograd
852 (let* ((commit "442205dfefe407beffb33550846434baa90c4de7")
853 (revision "0")
854 (version (git-version "0.0.0" revision commit)))
855 (package
856 (name "python-autograd")
857 (home-page "https://github.com/HIPS/autograd")
858 (source (origin
859 (method git-fetch)
860 (uri (git-reference
861 (url home-page)
862 (commit commit)))
863 (sha256
864 (base32
865 "189sv2xb0mwnjawa9z7mrgdglc1miaq93pnck26r28fi1jdwg0z4"))
866 (file-name (git-file-name name version))))
867 (version version)
868 (build-system python-build-system)
869 (native-inputs
870 `(("python-nose" ,python-nose)
871 ("python-pytest" ,python-pytest)))
872 (propagated-inputs
873 `(("python-future" ,python-future)
874 ("python-numpy" ,python-numpy)))
875 (arguments
876 `(#:phases (modify-phases %standard-phases
877 (replace 'check
878 (lambda _
879 (invoke "py.test" "-v"))))))
880 (synopsis "Efficiently computes derivatives of NumPy code")
881 (description "Autograd can automatically differentiate native Python and
882 NumPy code. It can handle a large subset of Python's features, including loops,
883 ifs, recursion and closures, and it can even take derivatives of derivatives
884 of derivatives. It supports reverse-mode differentiation
885 (a.k.a. backpropagation), which means it can efficiently take gradients of
886 scalar-valued functions with respect to array-valued arguments, as well as
887 forward-mode differentiation, and the two can be composed arbitrarily. The
888 main intended application of Autograd is gradient-based optimization.")
889 (license license:expat))))
890
891 (define-public python2-autograd
892 (package-with-python2 python-autograd))
893
894 (define-public lightgbm
895 (package
896 (name "lightgbm")
897 (version "2.0.12")
898 (source (origin
899 (method url-fetch)
900 (uri (string-append
901 "https://github.com/Microsoft/LightGBM/archive/v"
902 version ".tar.gz"))
903 (sha256
904 (base32
905 "132zf0yk0545mg72hyzxm102g3hpb6ixx9hnf8zd2k55gas6cjj1"))
906 (file-name (string-append name "-" version ".tar.gz"))))
907 (native-inputs
908 `(("python-pytest" ,python-pytest)
909 ("python-nose" ,python-nose)))
910 (inputs
911 `(("openmpi" ,openmpi)))
912 (propagated-inputs
913 `(("python-numpy" ,python-numpy)
914 ("python-scipy" ,python-scipy)))
915 (arguments
916 `(#:configure-flags
917 '("-DUSE_MPI=ON")
918 #:phases
919 (modify-phases %standard-phases
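;; The C API tests appear to be run from the unpacked source tree
;; rather than the separate CMake build directory, hence the
;; directory excursion below.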
920 (replace 'check
921 (lambda* (#:key outputs #:allow-other-keys)
922 (with-directory-excursion ,(string-append "../LightGBM-" version)
923 (invoke "pytest" "tests/c_api_test/test_.py")))))))
924 (build-system cmake-build-system)
925 (home-page "https://github.com/Microsoft/LightGBM")
926 (synopsis "Gradient boosting framework based on decision tree algorithms")
927 (description "LightGBM is a gradient boosting framework that uses tree
928 based learning algorithms. It is designed to be distributed and efficient with
929 the following advantages:
930
931 @itemize
932 @item Faster training speed and higher efficiency
933 @item Lower memory usage
934 @item Better accuracy
935 @item Parallel and GPU learning supported (not enabled in this package)
936 @item Capable of handling large-scale data
937 @end itemize\n")
938 (license license:expat)))
939
940 (define-public vowpal-wabbit
941 ;; Language bindings not included.
942 (package
943 (name "vowpal-wabbit")
944 (version "8.5.0")
945 (source (origin
946 (method url-fetch)
947 (uri (string-append
948 "https://github.com/JohnLangford/vowpal_wabbit/archive/"
949 version ".tar.gz"))
950 (sha256
951 (base32
952 "0clp2kb7rk5sckhllxjr5a651awf4s8dgzg4659yh4hf5cqnf0gr"))
953 (file-name (string-append name "-" version ".tar.gz"))))
954 (inputs
955 `(("boost" ,boost)
956 ("zlib" ,zlib)))
957 (arguments
958 `(#:configure-flags
959 (list (string-append "--with-boost="
960 (assoc-ref %build-inputs "boost")))))
961 (build-system gnu-build-system)
962 (home-page "https://github.com/JohnLangford/vowpal_wabbit")
963 (synopsis "Fast machine learning library for online learning")
964 (description "Vowpal Wabbit is a machine learning system with techniques
965 such as online learning, hashing, allreduce, reductions, learning2search,
966 active learning, and interactive learning.")
967 (license license:bsd-3)))
968
969 (define-public python2-fastlmm
970 (package
971 (name "python2-fastlmm")
972 (version "0.2.21")
973 (source
974 (origin
975 (method url-fetch)
976 (uri (pypi-uri "fastlmm" version ".zip"))
977 (sha256
978 (base32
979 "1q8c34rpmwkfy3r4d5172pzdkpfryj561897z9r3x22gq7813x1m"))))
980 (build-system python-build-system)
981 (arguments
982 `(#:tests? #f ; some test files are missing
983 #:python ,python-2)) ; only Python 2.7 is supported
984 (propagated-inputs
985 `(("python2-numpy" ,python2-numpy)
986 ("python2-scipy" ,python2-scipy)
987 ("python2-matplotlib" ,python2-matplotlib)
988 ("python2-pandas" ,python2-pandas)
989 ("python2-scikit-learn" ,python2-scikit-learn)
990 ("python2-pysnptools" ,python2-pysnptools)))
991 (native-inputs
992 `(("unzip" ,unzip)
993 ("python2-cython" ,python2-cython)
994 ("python2-mock" ,python2-mock)
995 ("python2-nose" ,python2-nose)))
996 (home-page "http://research.microsoft.com/en-us/um/redmond/projects/mscompbio/fastlmm/")
997 (synopsis "Perform genome-wide association studies on large data sets")
998 (description
999 "FaST-LMM, which stands for Factored Spectrally Transformed Linear Mixed
1000 Models, is a program for performing both single-SNP and SNP-set genome-wide
1001 association studies (GWAS) on extremely large data sets.")
1002 (license license:asl2.0)))
1003
1004 ;; There have been no proper releases yet.
1005 (define-public kaldi
1006 (let ((commit "2f95609f0bb085bd3a1dc5eb0a39f3edea59e606")
1007 (revision "1"))
1008 (package
1009 (name "kaldi")
1010 (version (git-version "0" revision commit))
1011 (source (origin
1012 (method git-fetch)
1013 (uri (git-reference
1014 (url "https://github.com/kaldi-asr/kaldi.git")
1015 (commit commit)))
1016 (file-name (git-file-name name version))
1017 (sha256
1018 (base32
1019 "082qh3pfi7hvncylp4xsmkfahbd7gb0whdfa4rwrx7fxk9rdh3kz"))))
1020 (build-system gnu-build-system)
1021 (arguments
1022 `(#:test-target "test"
1023 #:phases
1024 (modify-phases %standard-phases
1025 (add-after 'unpack 'chdir
1026 (lambda _ (chdir "src") #t))
1027 (replace 'configure
1028 (lambda* (#:key build system inputs outputs #:allow-other-keys)
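;; The SSE flags in linux_openblas.mk are only valid on x86, so drop
;; them when building for any other architecture.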
1029 (when (not (or (string-prefix? "x86_64" system)
1030 (string-prefix? "i686" system)))
1031 (substitute* "makefiles/linux_openblas.mk"
1032 (("-msse -msse2") "")))
1033 (substitute* "makefiles/default_rules.mk"
1034 (("/bin/bash") (which "bash")))
1035 (substitute* "Makefile"
1036 (("ext_depend: check_portaudio")
1037 "ext_depend:"))
1038 (substitute* '("online/Makefile"
1039 "onlinebin/Makefile"
1040 "gst-plugin/Makefile")
1041 (("../../tools/portaudio/install")
1042 (assoc-ref inputs "portaudio")))
1043
1044 ;; This `configure' script doesn't support variables passed as
1045 ;; arguments, nor does it support "prefix".
1046 (let ((out (assoc-ref outputs "out"))
1047 (openblas (assoc-ref inputs "openblas"))
1048 (openfst (assoc-ref inputs "openfst")))
1049 (substitute* "configure"
1050 (("check_for_slow_expf;") "")
1051 ;; This affects the RPATH and also serves as the installation
1052 ;; directory.
1053 (("KALDILIBDIR=`pwd`/lib")
1054 (string-append "KALDILIBDIR=" out "/lib")))
1055 (mkdir-p out) ; must exist
1056 (setenv "CONFIG_SHELL" (which "bash"))
1057 (setenv "OPENFST_VER" ,(package-version openfst))
1058 (invoke "./configure"
1059 "--use-cuda=no"
1060 "--shared"
1061 (string-append "--openblas-root=" openblas)
1062 (string-append "--fst-root=" openfst)))))
1063 (add-after 'build 'build-ext-and-gstreamer-plugin
1064 (lambda _
1065 (invoke "make" "-C" "online" "depend")
1066 (invoke "make" "-C" "online")
1067 (invoke "make" "-C" "onlinebin" "depend")
1068 (invoke "make" "-C" "onlinebin")
1069 (invoke "make" "-C" "gst-plugin" "depend")
1070 (invoke "make" "-C" "gst-plugin")
1071 #t))
1072 ;; TODO: also install the executables.
1073 (replace 'install
1074 (lambda* (#:key outputs #:allow-other-keys)
1075 (let* ((out (assoc-ref outputs "out"))
1076 (inc (string-append out "/include"))
1077 (lib (string-append out "/lib")))
1078 (mkdir-p lib)
1079 ;; The build phase installed symlinks to the actual
1080 ;; libraries. Install the actual targets.
1081 (for-each (lambda (file)
1082 (let ((target (readlink file)))
1083 (delete-file file)
1084 (install-file target lib)))
1085 (find-files lib "\\.so"))
1086 ;; Install headers
1087 (for-each (lambda (file)
1088 (let ((target-dir (string-append inc "/" (dirname file))))
1089 (install-file file target-dir)))
1090 (find-files "." "\\.h"))
1091 (install-file "gst-plugin/libgstonlinegmmdecodefaster.so"
1092 (string-append lib "/gstreamer-1.0"))
1093 #t))))))
1094 (inputs
1095 `(("alsa-lib" ,alsa-lib)
1096 ("gfortran" ,gfortran "lib")
1097 ("glib" ,glib)
1098 ("gstreamer" ,gstreamer)
1099 ("jack" ,jack-1)
1100 ("openblas" ,openblas)
1101 ("openfst" ,openfst)
1102 ("portaudio" ,portaudio)
1103 ("python" ,python)))
1104 (native-inputs
1105 `(("glib" ,glib "bin") ; glib-genmarshal
1106 ("grep" ,grep)
1107 ("sed" ,sed)
1108 ("pkg-config" ,pkg-config)
1109 ("which" ,which)))
1110 (home-page "https://kaldi-asr.org/")
1111 (synopsis "Speech recognition toolkit")
1112 (description "Kaldi is an extensible toolkit for speech recognition
1113 written in C++.")
1114 (license license:asl2.0))))
1115
1116 (define-public gst-kaldi-nnet2-online
1117 (let ((commit "617e43e73c7cc45eb9119028c02bd4178f738c4a")
1118 (revision "1"))
1119 (package
1120 (name "gst-kaldi-nnet2-online")
1121 (version (git-version "0" revision commit))
1122 (source (origin
1123 (method git-fetch)
1124 (uri (git-reference
1125 (url "https://github.com/alumae/gst-kaldi-nnet2-online.git")
1126 (commit commit)))
1127 (file-name (git-file-name name version))
1128 (sha256
1129 (base32
1130 "0xh3w67b69818s6ib02ara4lw7wamjdmh4jznvkpzrs4skbs9jx9"))))
1131 (build-system gnu-build-system)
1132 (arguments
1133 `(#:tests? #f ; there are none
1134 #:make-flags
1135 (list (string-append "SHELL="
1136 (assoc-ref %build-inputs "bash") "/bin/bash")
1137 (string-append "KALDI_ROOT="
1138 (assoc-ref %build-inputs "kaldi-src"))
1139 (string-append "KALDILIBDIR="
1140 (assoc-ref %build-inputs "kaldi") "/lib")
1141 "KALDI_FLAVOR=dynamic")
1142 #:phases
1143 (modify-phases %standard-phases
1144 (add-after 'unpack 'chdir
1145 (lambda _ (chdir "src") #t))
1146 (replace 'configure
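;; Apparently there is no configure script; the phase below sets the
;; include paths by hand and stubs out the Makefile bits that expect a
;; full Kaldi source tree (the kaldi.mk include and the "Cannot find"
;; error).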
1147 (lambda* (#:key inputs #:allow-other-keys)
1148 (let ((glib (assoc-ref inputs "glib")))
1149 (setenv "CXXFLAGS" "-fPIC")
1150 (setenv "CPLUS_INCLUDE_PATH"
1151 (string-append glib "/include/glib-2.0:"
1152 glib "/lib/glib-2.0/include:"
1153 (assoc-ref inputs "gstreamer")
1154 "/include/gstreamer-1.0")))
1155 (substitute* "Makefile"
1156 (("include \\$\\(KALDI_ROOT\\)/src/kaldi.mk") "")
1157 (("\\$\\(error Cannot find") "#"))
1158 #t))
1159 (add-before 'build 'build-depend
1160 (lambda* (#:key make-flags #:allow-other-keys)
1161 (apply invoke "make" "depend" make-flags)))
1162 (replace 'install
1163 (lambda* (#:key outputs #:allow-other-keys)
1164 (let* ((out (assoc-ref outputs "out"))
1165 (lib (string-append out "/lib/gstreamer-1.0")))
1166 (install-file "libgstkaldinnet2onlinedecoder.so" lib)
1167 #t))))))
1168 (inputs
1169 `(("glib" ,glib)
1170 ("gstreamer" ,gstreamer)
1171 ("jansson" ,jansson)
1172 ("openfst" ,openfst)
1173 ("kaldi" ,kaldi)))
1174 (native-inputs
1175 `(("bash" ,bash)
1176 ("glib:bin" ,glib "bin") ; glib-genmarshal
1177 ("kaldi-src" ,(package-source kaldi))
1178 ("pkg-config" ,pkg-config)))
1179 (home-page "https://kaldi-asr.org/")
1180 (synopsis "Gstreamer plugin for decoding speech")
1181 (description "This package provides a GStreamer plugin that wraps
1182 Kaldi's @code{SingleUtteranceNnet2Decoder}. It requires iVector-adapted DNN
1183 acoustic models. The iVectors are adapted to the current audio stream
1184 automatically.")
1185 (license license:asl2.0))))
1186
1187 (define-public kaldi-gstreamer-server
1188 (let ((commit "1735ba49c5dc0ebfc184e45105fc600cd9f1f508")
1189 (revision "1"))
1190 (package
1191 (name "kaldi-gstreamer-server")
1192 (version (git-version "0" revision commit))
1193 (source (origin
1194 (method git-fetch)
1195 (uri (git-reference
1196 (url "https://github.com/alumae/kaldi-gstreamer-server.git")
1197 (commit commit)))
1198 (file-name (git-file-name name version))
1199 (sha256
1200 (base32
1201 "0j701m7lbwmzqxsfanj882v7881hrbmpqybbczbxqpcbg8q34w0k"))))
1202 (build-system gnu-build-system)
1203 (arguments
1204 `(#:tests? #f ; there are no tests that can be run automatically
1205 #:modules ((guix build utils)
1206 (guix build gnu-build-system)
1207 (srfi srfi-26))
1208 #:phases
1209 (modify-phases %standard-phases
1210 (delete 'configure)
1211 (replace 'build
1212 (lambda* (#:key outputs #:allow-other-keys)
1213 ;; Disable hash randomization to ensure the generated .pycs
1214 ;; are reproducible.
1215 (setenv "PYTHONHASHSEED" "0")
1216 (with-directory-excursion "kaldigstserver"
1217 (for-each (lambda (file)
1218 (apply invoke
1219 `("python"
1220 "-m" "compileall"
1221 "-f" ; force rebuild
1222 ,file)))
1223 (find-files "." "\\.py$")))
1224 #t))
1225 (replace 'install
1226 (lambda* (#:key inputs outputs #:allow-other-keys)
1227 (let* ((out (assoc-ref outputs "out"))
1228 (bin (string-append out "/bin"))
1229 (share (string-append out "/share/kaldi-gstreamer-server/")))
1230 ;; Install Python files
1231 (with-directory-excursion "kaldigstserver"
1232 (for-each (cut install-file <> share)
1233 (find-files "." ".*")))
1234
1235 ;; Install sample configuration files
1236 (for-each (cut install-file <> share)
1237 (find-files "." "\\.yaml"))
1238
1239 ;; Install executables
1240 (mkdir-p bin)
1241 (let* ((server (string-append bin "/kaldi-gst-server"))
1242 (client (string-append bin "/kaldi-gst-client"))
1243 (worker (string-append bin "/kaldi-gst-worker"))
1244 (PYTHONPATH (getenv "PYTHONPATH"))
1245 (GST_PLUGIN_PATH (string-append
1246 (assoc-ref inputs "gst-kaldi-nnet2-online")
1247 "/lib/gstreamer-1.0:${GST_PLUGIN_PATH}"))
1248 (wrap (lambda (wrapper what)
1249 (with-output-to-file wrapper
1250 (lambda _
1251 (format #t
1252 "#!~a
1253 export PYTHONPATH=~a
1254 export GST_PLUGIN_PATH=~a
1255 exec ~a ~a/~a \"$@\"~%"
1256 (which "bash") PYTHONPATH GST_PLUGIN_PATH
1257 (which "python") share what)))
1258 (chmod wrapper #o555))))
1259 (for-each wrap
1260 (list server client worker)
1261 (list "master_server.py"
1262 "client.py"
1263 "worker.py")))
1264 #t))))))
1265 (inputs
1266 `(("gst-kaldi-nnet2-online" ,gst-kaldi-nnet2-online)
1267 ("python2" ,python-2)
1268 ("python2-futures" ,python2-futures)
1269 ("python2-pygobject" ,python2-pygobject)
1270 ("python2-pyyaml" ,python2-pyyaml)
1271 ("python2-tornado" ,python2-tornado)
1272 ("python2-ws4py" ,python2-ws4py-for-kaldi-gstreamer-server)))
1273 (home-page "https://github.com/alumae/kaldi-gstreamer-server")
1274 (synopsis "Real-time full-duplex speech recognition server")
1275 (description "This is a real-time full-duplex speech recognition server,
1276 based on the Kaldi toolkit and the GStreamer framework and implemented in
1277 Python.")
1278 (license license:bsd-2))))
1279
1280 (define-public grpc
1281 (package
1282 (name "grpc")
1283 (version "1.16.1")
1284 (source (origin
1285 (method git-fetch)
1286 (uri (git-reference
1287 (url "https://github.com/grpc/grpc.git")
1288 (commit (string-append "v" version))))
1289 (file-name (git-file-name name version))
1290 (sha256
1291 (base32
1292 "1jimqz3115f9pli5w6ik9wi7mjc7ix6y7yrq4a1ab9fc3dalj7p2"))))
1293 (build-system cmake-build-system)
1294 (arguments
1295 `(#:tests? #f ; no test target
1296 #:configure-flags
1297 (list "-DgRPC_ZLIB_PROVIDER=package"
1298 "-DgRPC_CARES_PROVIDER=package"
1299 "-DgRPC_SSL_PROVIDER=package"
1300 "-DgRPC_PROTOBUF_PROVIDER=package")))
1301 (inputs
1302 `(("c-ares" ,c-ares/cmake)
1303 ("openssl" ,openssl)
1304 ("zlib" ,zlib)))
1305 (native-inputs
1306 `(("protobuf" ,protobuf)
1307 ("python" ,python-wrapper)))
1308 (home-page "https://grpc.io")
1309 (synopsis "High performance universal RPC framework")
1310 (description "gRPC is a modern high performance @dfn{Remote Procedure Call}
1311 (RPC) framework that can run in any environment. It can efficiently connect
1312 services in and across data centers with pluggable support for load balancing,
1313 tracing, health checking and authentication.  It is also applicable in the last
1314 mile of distributed computing to connect devices, mobile applications and
1315 browsers to backend services.")
1316 (license license:asl2.0)))
1317
1318 ;; Note that Tensorflow includes a "third_party" directory, which seems to
1319 ;; contain not only modified subsets of upstream library source code, but also
1320 ;; adapter headers provided by Google (such as the fft.h header, which is not
1321 ;; part of the upstream project code). The Tensorflow code includes headers
1322 ;; from the "third_party" directory. It does not look like we can replace
1323 ;; these headers with unmodified upstream files, so we keep them.
1324 (define-public tensorflow
1325 (package
1326 (name "tensorflow")
1327 (version "1.9.0")
1328 (source
1329 (origin
1330 (method git-fetch)
1331 (uri (git-reference
1332 (url "https://github.com/tensorflow/tensorflow.git")
1333 (commit (string-append "v" version))))
1334 (file-name (string-append "tensorflow-" version "-checkout"))
1335 (sha256
1336 (base32
1337 "0a9kwha395g3wgxfwln5j8vn9nkspmd75xldrlqdq540w996g8xa"))))
1338 (build-system cmake-build-system)
1339 (arguments
1340 `(#:tests? #f ; no "check" target
1341 #:build-type "Release"
1342 #:configure-flags
1343 (let ((protobuf (assoc-ref %build-inputs "protobuf"))
1344 (protobuf:native (assoc-ref %build-inputs "protobuf:native"))
1345 (jsoncpp (assoc-ref %build-inputs "jsoncpp"))
1346 (snappy (assoc-ref %build-inputs "snappy"))
1347 (sqlite (assoc-ref %build-inputs "sqlite")))
1348 (list
1349 ;; Use protobuf from Guix
1350 (string-append "-Dprotobuf_STATIC_LIBRARIES="
1351 protobuf "/lib/libprotobuf.so")
1352 (string-append "-DPROTOBUF_PROTOC_EXECUTABLE="
1353 protobuf:native "/bin/protoc")
1354
1355 ;; Use snappy from Guix
1356 (string-append "-Dsnappy_STATIC_LIBRARIES="
1357 snappy "/lib/libsnappy.so")
1358 ;; Yes, this is not actually the include directory but a prefix...
1359 (string-append "-Dsnappy_INCLUDE_DIR=" snappy)
1360
1361 ;; Use jsoncpp from Guix
1362 (string-append "-Djsoncpp_STATIC_LIBRARIES="
1363 jsoncpp "/lib/libjsoncpp.so")
1364 ;; Yes, this is not actually the include directory but a prefix...
1365 (string-append "-Djsoncpp_INCLUDE_DIR=" jsoncpp)
1366
1367 ;; Use sqlite from Guix
1368 (string-append "-Dsqlite_STATIC_LIBRARIES="
1369 sqlite "/lib/libsqlite.a")
1370
1371 ;; Use system libraries wherever possible. Currently, this
1372 ;; only affects zlib.
1373 "-Dsystemlib_ALL=ON"
1374 "-Dtensorflow_ENABLE_POSITION_INDEPENDENT_CODE=ON"
1375 "-Dtensorflow_BUILD_SHARED_LIB=ON"
1376 "-Dtensorflow_OPTIMIZE_FOR_NATIVE_ARCH=OFF"
1377 "-Dtensorflow_ENABLE_SSL_SUPPORT=OFF"
1378 "-Dtensorflow_BUILD_CONTRIB_KERNELS=OFF"))
1379 #:make-flags
1380 (list "CC=gcc")
1381 #:modules ((ice-9 ftw)
1382 (guix build utils)
1383 (guix build cmake-build-system))
1384 #:phases
1385 (modify-phases %standard-phases
1386 (add-after 'unpack 'set-source-file-times-to-1980
1387 ;; At the end of the tf_python_build_pip_package target, a ZIP
1388 ;; archive should be generated via bdist_wheel, but it fails with
1389 ;; "ZIP does not support timestamps before 1980". Luckily,
1390 ;; SOURCE_DATE_EPOCH is respected, which we set to some time in
1391 ;; 1980.
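;; 315532800 corresponds to 1980-01-01T00:00:00 UTC.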
1392 (lambda _ (setenv "SOURCE_DATE_EPOCH" "315532800") #t))
1393 ;; See https://github.com/tensorflow/tensorflow/issues/20517#issuecomment-406373913
1394 (add-after 'unpack 'python3.7-compatibility
1395 (lambda _
1396 (substitute* '("tensorflow/python/eager/pywrap_tfe_src.cc"
1397 "tensorflow/python/lib/core/ndarray_tensor.cc"
1398 "tensorflow/python/lib/core/py_func.cc")
1399 (("PyUnicode_AsUTF8") "(char *)PyUnicode_AsUTF8"))
1400 (substitute* "tensorflow/c/eager/c_api.h"
1401 (("unsigned char async")
1402 "unsigned char is_async"))
1403
1404 ;; Remove dependency on tensorboard, a complicated but probably
1405 ;; optional package.
1406 (substitute* "tensorflow/tools/pip_package/setup.py"
1407 ((".*'tensorboard >.*") ""))
1408 #t))
1409 (add-after 'python3.7-compatibility 'chdir
1410 (lambda _ (chdir "tensorflow/contrib/cmake") #t))
1411 (add-after 'chdir 'disable-downloads
1412 (lambda* (#:key inputs #:allow-other-keys)
1413 (substitute* (find-files "external" "\\.cmake$")
1414 (("GIT_REPOSITORY.*") "")
1415 (("GIT_TAG.*") "")
1416 (("PREFIX ")
1417 "DOWNLOAD_COMMAND \"\"\nPREFIX "))
1418
1419 ;; Use packages from Guix
1420 (let ((grpc (assoc-ref inputs "grpc")))
1421 (substitute* "CMakeLists.txt"
1422 ;; Sqlite
1423 (("include\\(sqlite\\)") "")
1424 (("\\$\\{sqlite_STATIC_LIBRARIES\\}")
1425 (string-append (assoc-ref inputs "sqlite")
1426 "/lib/libsqlite3.so"))
1427 (("sqlite_copy_headers_to_destination") "")
1428
1429 ;; PNG
1430 (("include\\(png\\)") "")
1431 (("\\$\\{png_STATIC_LIBRARIES\\}")
1432 (string-append (assoc-ref inputs "libpng")
1433 "/lib/libpng16.so"))
1434 (("png_copy_headers_to_destination") "")
1435
1436 ;; JPEG
1437 (("include\\(jpeg\\)") "")
1438 (("\\$\\{jpeg_STATIC_LIBRARIES\\}")
1439 (string-append (assoc-ref inputs "libjpeg")
1440 "/lib/libjpeg.so"))
1441 (("jpeg_copy_headers_to_destination") "")
1442
1443 ;; GIF
1444 (("include\\(gif\\)") "")
1445 (("\\$\\{gif_STATIC_LIBRARIES\\}")
1446 (string-append (assoc-ref inputs "giflib")
1447 "/lib/libgif.so"))
1448 (("gif_copy_headers_to_destination") "")
1449
1450 ;; lmdb
1451 (("include\\(lmdb\\)") "")
1452 (("\\$\\{lmdb_STATIC_LIBRARIES\\}")
1453 (string-append (assoc-ref inputs "lmdb")
1454 "/lib/liblmdb.so"))
1455 (("lmdb_copy_headers_to_destination") "")
1456
1457 ;; Protobuf
1458 (("include\\(protobuf\\)") "")
1459 (("protobuf_copy_headers_to_destination") "")
1460 (("^ +protobuf") "")
1461
1462 ;; gRPC
1463 (("include\\(grpc\\)")
1464 "find_package(grpc REQUIRED NAMES gRPC)")
1465 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES grpc\\)") "")
1466
1467 ;; Eigen
1468 (("include\\(eigen\\)")
1469 (string-append "find_package(eigen REQUIRED NAMES Eigen3)
1470 set(eigen_INCLUDE_DIRS ${CMAKE_CURRENT_BINARY_DIR}/external/eigen_archive "
1471 (assoc-ref inputs "eigen") "/include/eigen3)"))
1472 (("^ +eigen") "")
1473
1474 ;; snappy
1475 (("include\\(snappy\\)")
1476 "add_definitions(-DTF_USE_SNAPPY)")
1477 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES snappy\\)") "")
1478
1479 ;; jsoncpp
1480 (("include\\(jsoncpp\\)") "")
1481 (("^ +jsoncpp") ""))
1482
1483 (substitute* "tf_core_framework.cmake"
1484 ((" grpc") "")
1485 (("\\$\\{GRPC_BUILD\\}/grpc_cpp_plugin")
1486 (which "grpc_cpp_plugin"))
1487 ;; Link with gRPC libraries
1488 (("add_library\\(tf_protos_cc.*" m)
1489 (string-append m
1490 (format #f "\ntarget_link_libraries(tf_protos_cc PRIVATE \
1491 ~a/lib/libgrpc++_unsecure.a \
1492 ~a/lib/libgrpc_unsecure.a \
1493 ~a/lib/libaddress_sorting.a \
1494 ~a/lib/libgpr.a \
1495 ~a/lib/libcares.so
1496 )\n"
1497 grpc grpc grpc grpc
1498 (assoc-ref inputs "c-ares"))))))
1499 (substitute* "tf_tools.cmake"
1500 (("add_dependencies\\(\\$\\{proto_text.*") ""))
1501 ;; Remove dependency on bundled grpc
1502 (substitute* "tf_core_distributed_runtime.cmake"
1503 (("tf_core_cpu grpc") "tf_core_cpu"))
1504
1505 ;; This directory is a dependency of many targets.
1506 (mkdir-p "protobuf")
1507 #t))
1508 (add-after 'configure 'unpack-third-party-sources
1509 (lambda* (#:key inputs #:allow-other-keys)
1510 ;; This is needed to configure bundled packages properly.
1511 (setenv "CONFIG_SHELL" (which "bash"))
1512 (for-each
1513 (lambda (name)
1514 (let* ((what (assoc-ref inputs (string-append name "-src")))
1515 (name* (string-map (lambda (c)
1516 (if (char=? c #\-)
1517 #\_ c)) name))
1518 (where (string-append "../build/" name* "/src/" name*)))
1519 (cond
1520 ((string-suffix? ".zip" what)
1521 (mkdir-p where)
1522 (with-directory-excursion where
1523 (invoke "unzip" what)))
1524 ((string-suffix? ".tar.gz" what)
1525 (mkdir-p where)
1526 (invoke "tar" "xf" what
1527 "-C" where "--strip-components=1"))
1528 (else
1529 (let ((parent (dirname where)))
1530 (mkdir-p parent)
1531 (with-directory-excursion parent
1532 (when (file-exists? name*)
1533 (delete-file-recursively name*))
1534 (copy-recursively what name*)
1535 (map make-file-writable
1536 (find-files name* ".*"))))))))
1537 (list "boringssl"
1538 "cub"
1539 "double-conversion"
1540 "farmhash"
1541 "fft2d"
1542 "highwayhash"
1543 "nsync"
1544 "re2"))
1545
1546 (rename-file "../build/cub/src/cub/cub-1.8.0/"
1547 "../build/cub/src/cub/cub/")
1548 #t))
1549 (add-after 'unpack 'fix-python-build
1550 (lambda* (#:key inputs outputs #:allow-other-keys)
1551 (mkdir-p "protobuf-src")
1552 (invoke "tar" "xf" (assoc-ref inputs "protobuf:src")
1553 "-C" "protobuf-src" "--strip-components=1")
1554 (mkdir-p "eigen-src")
1555 (invoke "tar" "xf" (assoc-ref inputs "eigen:src")
1556 "-C" "eigen-src" "--strip-components=1")
1557
1558 (substitute* "tensorflow/contrib/cmake/tf_python.cmake"
1559 ;; Ensure that all Python dependencies can be found at build time.
1560 (("PYTHONPATH=\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/tf_python" m)
1561 (string-append m ":" (getenv "PYTHONPATH")))
1562 ;; Take protobuf source files from our source package.
1563 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/protobuf/src/protobuf/src/google")
1564 (string-append (getcwd) "/protobuf-src/src/google")))
1565
1566 (substitute* '("tensorflow/contrib/cmake/tf_shared_lib.cmake"
1567 "tensorflow/contrib/cmake/tf_python.cmake")
1568 ;; Take Eigen source files from our source package.
1569 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/eigen/src/eigen/")
1570 (string-append (getcwd) "/eigen-src/"))
1571 ;; Take Eigen headers from our own package.
1572 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/external/eigen_archive")
1573 (string-append (assoc-ref inputs "eigen") "/include/eigen3")))
1574
1575 ;; Correct the RUNPATH of ops libraries generated for Python.
1576 ;; TODO: this doesn't work :(
1577 ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
1578 ;; warning: RUNPATH contains bogus entries: ("/tmp/guix-build-tensorflow-1.9.0.drv-0/source/tensorflow/contrib/build")
1579 ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
1580 ;; error: depends on 'libpywrap_tensorflow_internal.so', which
1581 ;; cannot be found in RUNPATH ...
1582 (substitute* "tensorflow/contrib/cmake/tf_cc_ops.cmake"
1583 (("set_target_properties.*")
1584 (string-append "set_target_properties(${_AT_TARGET} PROPERTIES \
1585 COMPILE_FLAGS ${target_compile_flags} \
1586 INSTALL_RPATH_USE_LINK_PATH TRUE \
1587 INSTALL_RPATH " (assoc-ref outputs "out") "/lib)\n")))
1588 #t))
1589 (add-after 'build 'build-pip-package
1590 (lambda* (#:key outputs #:allow-other-keys)
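;; Link the pip package's native libraries with an RPATH pointing at
;; the installed libraries (assumption: this is what the RUNPATH fix-ups
;; above rely on).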
1591 (setenv "LDFLAGS"
1592 (string-append "-Wl,-rpath="
1593 (assoc-ref outputs "out") "/lib"))
1594 (invoke "make" "tf_python_build_pip_package")
1595 #t))
1596 (add-after 'build-pip-package 'install-python
1597 (lambda* (#:key outputs #:allow-other-keys)
1598 (let ((out (assoc-ref outputs "out"))
1599 (wheel (car (find-files "../build/tf_python/dist/" "\\.whl$"))))
1600 (invoke "python" "-m" "pip" "install" wheel
1601 (string-append "--prefix=" out))
1602
1603 ;; XXX: broken RUNPATH, see fix-python-build phase.
1604 (delete-file
1605 (string-append
1606 out "/lib/python3.7/site-packages/tensorflow/contrib/"
1607 "seq2seq/python/ops/lib_beam_search_ops.so"))
1608 #t))))))
1609 (native-inputs
1610 `(("pkg-config" ,pkg-config)
1611 ("protobuf:native" ,protobuf-3.6) ; protoc
1612 ("protobuf:src" ,(package-source protobuf-3.6))
1613 ("eigen:src" ,(package-source eigen-for-tensorflow))
1614 ;; install_pip_packages.sh wants setuptools 39.1.0 specifically.
1615 ("python-setuptools" ,python-setuptools-for-tensorflow)
1616
1617 ;; The commit hashes and URLs for third-party source code are taken
1618 ;; from "tensorflow/workspace.bzl".
1619 ("boringssl-src"
1620 ,(let ((commit "ee7aa02")
1621 (revision "1"))
1622 (origin
1623 (method git-fetch)
1624 (uri (git-reference
1625 (url "https://boringssl.googlesource.com/boringssl")
1626 (commit commit)))
1627 (file-name (string-append "boringssl-0-" revision
1628 (string-take commit 7)
1629 "-checkout"))
1630 (sha256
1631 (base32
1632 "1jf693q0nw0adsic6cgmbdx6g7wr4rj4vxa8j1hpn792fqhd8wgw")))))
1633 ("cub-src"
1634 ,(let ((version "1.8.0"))
1635 (origin
1636 (method url-fetch)
1637 (uri (string-append "https://mirror.bazel.build/github.com/NVlabs/"
1638 "cub/archive/" version ".zip"))
1639 (file-name (string-append "cub-" version ".zip"))
1640 (sha256
1641 (base32
1642 "1hsqikqridb90dkxkjr2918dcry6pfh46ccnwrzawl56aamhdykb")))))
1643 ("double-conversion-src"
1644 ,(let ((commit "5664746")
1645 (revision "1"))
1646 (origin
1647 (method git-fetch)
1648 (uri (git-reference
1649 (url "https://github.com/google/double-conversion.git")
1650 (commit commit)))
1651 (file-name
1652 (git-file-name "double-conversion"
1653 (string-append "0-" revision "."
1654 (string-take commit 7))))
1655 (sha256
1656 (base32
1657 "1h5lppqqxcvdg5jq42i5msgwx20ryij3apvmndflngrgdpc04gn1")))))
1658 ("farmhash-src"
1659 ,(let ((commit "816a4ae622e964763ca0862d9dbd19324a1eaf45"))
1660 (origin
1661 (method url-fetch)
1662 (uri (string-append
1663 "https://mirror.bazel.build/github.com/google/farmhash/archive/"
1664 commit ".tar.gz"))
1665 (file-name (string-append "farmhash-0-" (string-take commit 7)
1666 ".tar.gz"))
1667 (sha256
1668 (base32
1669 "185b2xdxl4d4cnsnv6abg8s22gxvx8673jq2yaq85bz4cdy58q35")))))
1670 ;; The license notice on the home page at
1671 ;; http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html says:
1672 ;; Copyright Takuya OOURA, 1996-2001
1673 ;;
1674 ;; You may use, copy, modify and distribute this code for any purpose
1675 ;; (include commercial use) and without fee. Please refer to this
1676 ;; package when you modify this code.
1677 ;;
1678 ;; We take the identical tarball from the Bazel mirror, because the URL
1679 ;; at the home page is not versioned and might change.
1680 ("fft2d-src"
1681 ,(origin
1682 (method url-fetch)
1683 (uri "https://mirror.bazel.build/www.kurims.kyoto-u.ac.jp/~ooura/fft.tgz")
1684 (file-name "fft2d.tar.gz")
1685 (sha256
1686 (base32
1687 "15jjkfvhqvl2c0753d2di8hz0pyzn598g74wqy79awdrf1y67fsj"))))
1688 ("highwayhash-src"
1689 ,(let ((commit "be5edafc2e1a455768e260ccd68ae7317b6690ee")
1690 (revision "1"))
1691 (origin
1692 (method git-fetch)
1693 (uri (git-reference
1694 (url "https://github.com/google/highwayhash.git")
1695 (commit commit)))
1696 (file-name (string-append "highwayhash-0-" revision
1697 (string-take commit 7)
1698 "-checkout"))
1699 (sha256
1700 (base32
1701 "154jwf98cyy54hldr94pgjn85zynly3abpnc1avmb8a18lzwjyb6")))))
1702 ("nsync-src"
1703 ,(let ((commit "0559ce013feac8db639ee1bf776aca0325d28777")
1704 (revision "1"))
1705 (origin
1706 (method url-fetch)
1707 (uri (string-append "https://mirror.bazel.build/"
1708 "github.com/google/nsync/archive/"
1709 commit ".tar.gz"))
1710 (file-name (string-append "nsync-0." revision
1711 "-" (string-take commit 7)
1712 ".tar.gz"))
1713 (sha256
1714 (base32
1715 "0qdkyqym34x739mmzv97ah5r7ph462v5xkxqxvidmcfqbi64b132")))))
1716 ("re2-src"
1717 ,(let ((commit "e7efc48")
1718 (revision "1"))
1719 (origin
1720 (method git-fetch)
1721 (uri (git-reference
1722 (url "https://github.com/google/re2")
1723 (commit commit)))
1724 (file-name (string-append "re2-0-" revision
1725 (string-take commit 7)
1726 "-checkout"))
1727 (sha256
1728 (base32
1729 "161g9841rjfsy5pn52fcis0s9hdr7rxvb06pad38j5rppfihvign")))))
1730 ("googletest" ,googletest)
1731 ("swig" ,swig)
1732 ("unzip" ,unzip)))
1733 (propagated-inputs
1734 `(("python-absl-py" ,python-absl-py)
1735 ("python-astor" ,python-astor)
1736 ("python-gast" ,python-gast)
1737 ("python-grpcio" ,python-grpcio)
1738 ("python-numpy" ,python-numpy)
1739 ("python-protobuf" ,python-protobuf-3.6)
1740 ("python-six" ,python-six)
1741 ("python-termcolo" ,python-termcolor)
1742 ("python-wheel" ,python-wheel)))
1743 (inputs
1744 `(("c-ares" ,c-ares)
1745 ("eigen" ,eigen-for-tensorflow)
1746 ("gemmlowp" ,gemmlowp-for-tensorflow)
1747 ("lmdb" ,lmdb)
1748 ("libjpeg" ,libjpeg)
1749 ("libpng" ,libpng)
1750 ("giflib" ,giflib)
1751 ("grpc" ,grpc)
1752 ("jsoncpp" ,jsoncpp-for-tensorflow)
1753 ("snappy" ,snappy)
1754 ("sqlite" ,sqlite)
1755 ("protobuf" ,protobuf-3.6)
1756 ("python" ,python-wrapper)
1757 ("zlib" ,zlib)))
1758 (home-page "https://tensorflow.org")
1759 (synopsis "Machine learning framework")
1760 (description
1761 "TensorFlow is a flexible platform for building and training machine
1762 learning models. It provides a library for high performance numerical
1763 computation and includes high level Python APIs, including both a sequential
1764 API for beginners that allows users to build models quickly by plugging
1765 together building blocks and a subclassing API with an imperative style for
1766 advanced research.")
1767 (license license:asl2.0)))
1768
1769 (define-public python-iml
1770 (package
1771 (name "python-iml")
1772 (version "0.6.2")
1773 (source
1774 (origin
1775 (method url-fetch)
1776 (uri (pypi-uri "iml" version))
1777 (sha256
1778 (base32
1779 "1k8szlpm19rcwcxdny9qdm3gmaqq8akb4xlvrzyz8c2d679aak6l"))))
1780 (build-system python-build-system)
1781 (propagated-inputs
1782 `(("ipython" ,python-ipython)
1783 ("nose" ,python-nose)
1784 ("numpy" ,python-numpy)
1785 ("pandas" ,python-pandas)
1786 ("scipy" ,python-scipy)))
1787 (home-page "http://github.com/interpretable-ml/iml")
1788 (synopsis "Interpretable Machine Learning (iML) package")
1789 (description "Interpretable ML (iML) is a set of data type objects,
1790 visualizations, and interfaces that can be used by any method designed to
1791 explain the predictions of machine learning models (or really the output of
1792 any function). It currently contains the interface and IO code from the Shap
1793 project, and it will potentially also do the same for the Lime project.")
1794 (license license:expat)))
1795
1796 (define-public python-keras-applications
1797 (package
1798 (name "python-keras-applications")
1799 (version "1.0.8")
1800 (source
1801 (origin
1802 (method url-fetch)
1803 (uri (pypi-uri "Keras_Applications" version))
1804 (sha256
1805 (base32
1806 "1rcz31ca4axa6kzhjx4lwqxbg4wvlljkj8qj9a7p9sfd5fhzjyam"))))
1807 (build-system python-build-system)
1808 ;; The tests require Keras, but this package is needed to build Keras.
1809 (arguments '(#:tests? #f))
1810 (propagated-inputs
1811 `(("python-h5py" ,python-h5py)
1812 ("python-numpy" ,python-numpy)))
1813 (native-inputs
1814 `(("python-pytest" ,python-pytest)
1815 ("python-pytest-cov" ,python-pytest-cov)
1816 ("python-pytest-pep8" ,python-pytest-pep8)
1817 ("python-pytest-xdist" ,python-pytest-xdist)))
1818 (home-page "https://github.com/keras-team/keras-applications")
1819 (synopsis "Reference implementations of popular deep learning models")
1820 (description
1821 "This package provides reference implementations of popular deep learning
1822 models for use with the Keras deep learning framework.")
1823 (license license:expat)))
1824
1825 (define-public python-keras-preprocessing
1826 (package
1827 (name "python-keras-preprocessing")
1828 (version "1.1.0")
1829 (source
1830 (origin
1831 (method url-fetch)
1832 (uri (pypi-uri "Keras_Preprocessing" version))
1833 (sha256
1834 (base32
1835 "1r98nm4k1svsqjyaqkfk23i31bl1kcfcyp7094yyj3c43phfp3as"))))
1836 (build-system python-build-system)
1837 (propagated-inputs
1838 `(("python-numpy" ,python-numpy)
1839 ("python-six" ,python-six)))
1840 (native-inputs
1841 `(("python-pandas" ,python-pandas)
1842 ("python-pillow" ,python-pillow)
1843 ("python-pytest" ,python-pytest)
1844 ("python-pytest-cov" ,python-pytest-cov)
1845 ("python-pytest-xdist" ,python-pytest-xdist)
1846 ("tensorflow" ,tensorflow)))
1847 (home-page "https://github.com/keras-team/keras-preprocessing/")
1848 (synopsis "Data preprocessing and augmentation for deep learning models")
1849 (description
1850 "Keras Preprocessing is the data preprocessing and data augmentation
1851 module of the Keras deep learning library. It provides utilities for working
1852 with image data, text data, and sequence data.")
1853 (license license:expat)))
1854
1855 (define-public python-keras
1856 (package
1857 (name "python-keras")
1858 (version "2.2.4")
1859 (source
1860 (origin
1861 (method url-fetch)
1862 (uri (pypi-uri "Keras" version))
1863 (patches (search-patches "python-keras-integration-test.patch"))
1864 (sha256
1865 (base32
1866 "1j8bsqzh49vjdxy6l1k4iwax5vpjzniynyd041xjavdzvfii1dlh"))))
1867 (build-system python-build-system)
1868 (arguments
1869 `(#:phases
1870 (modify-phases %standard-phases
1871 (add-after 'unpack 'remove-tests-for-unavailable-features
1872 (lambda _
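;; Keep only the TensorFlow backend: remove the Theano and CNTK
;; backends and the test that compares backends.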
1873 (delete-file "keras/backend/theano_backend.py")
1874 (delete-file "keras/backend/cntk_backend.py")
1875 (delete-file "tests/keras/backend/backend_test.py")
1876
1877 ;; FIXME: This doesn't work because Tensorflow is missing the
1878 ;; coder ops library.
1879 (delete-file "tests/keras/test_callbacks.py")
1880 #t))
1881 (replace 'check
1882 (lambda _
1883 ;; These tests attempt to download data files from the internet.
1884 (delete-file "tests/integration_tests/test_datasets.py")
1885 (delete-file "tests/integration_tests/imagenet_utils_test.py")
1886
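;; Make the built Keras modules visible to the test suite.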
1887 (setenv "PYTHONPATH"
1888 (string-append (getcwd) "/build/lib:"
1889 (getenv "PYTHONPATH")))
1890 (invoke "py.test" "-v"
1891 "-p" "no:cacheprovider"
1892 "--ignore" "keras/utils"))))))
1893 (propagated-inputs
1894 `(("python-h5py" ,python-h5py)
1895 ("python-keras-applications" ,python-keras-applications)
1896 ("python-keras-preprocessing" ,python-keras-preprocessing)
1897 ("python-numpy" ,python-numpy)
1898 ("python-pydot" ,python-pydot)
1899 ("python-pyyaml" ,python-pyyaml)
1900 ("python-scipy" ,python-scipy)
1901 ("python-six" ,python-six)
1902 ("tensorflow" ,tensorflow)
1903 ("graphviz" ,graphviz)))
1904 (native-inputs
1905 `(("python-pandas" ,python-pandas)
1906 ("python-pytest" ,python-pytest)
1907 ("python-pytest-cov" ,python-pytest-cov)
1908 ("python-pytest-pep8" ,python-pytest-pep8)
1909 ("python-pytest-timeout" ,python-pytest-timeout)
1910 ("python-pytest-xdist" ,python-pytest-xdist)
1911 ("python-sphinx" ,python-sphinx)
1912 ("python-requests" ,python-requests)))
1913 (home-page "https://github.com/keras-team/keras")
1914 (synopsis "High-level deep learning framework")
1915 (description "Keras is a high-level neural networks API, written in Python
1916 and capable of running on top of TensorFlow. It was developed with a focus on
1917 enabling fast experimentation. Use Keras if you need a deep learning library
1918 that:
1919
1920 @itemize
1921 @item Allows for easy and fast prototyping (through user friendliness,
1922 modularity, and extensibility).
1923 @item Supports both convolutional networks and recurrent networks, as well as
1924 combinations of the two.
1925 @item Runs seamlessly on CPU and GPU.
1926 @end itemize\n")
1927 (license license:expat)))
1928
1929 (define-public sbcl-cl-libsvm-format
1930 (let ((commit "3300f84fd8d9f5beafc114f543f9d83417c742fb")
1931 (revision "0"))
1932 (package
1933 (name "sbcl-cl-libsvm-format")
1934 (version (git-version "0.1.0" revision commit))
1935 (source
1936 (origin
1937 (method git-fetch)
1938 (uri (git-reference
1939 (url "https://github.com/masatoi/cl-libsvm-format.git")
1940 (commit commit)))
1941 (file-name (git-file-name name version))
1942 (sha256
1943 (base32
1944 "0284aj84xszhkhlivaigf9qj855fxad3mzmv3zfr0qzb5k0nzwrg"))))
1945 (build-system asdf-build-system/sbcl)
1946 (native-inputs
1947 `(("prove" ,sbcl-prove)
1948 ("prove-asdf" ,sbcl-prove-asdf)))
1949 (inputs
1950 `(("alexandria" ,sbcl-alexandria)))
1951 (synopsis "LibSVM data format reader for Common Lisp")
1952 (description
1953 "This Common Lisp library provides a fast reader for data in LibSVM
1954 format.")
1955 (home-page "https://github.com/masatoi/cl-libsvm-format")
1956 (license license:expat))))
1957
1958 (define-public cl-libsvm-format
1959 (sbcl-package->cl-source-package sbcl-cl-libsvm-format))
1960
1961 (define-public ecl-cl-libsvm-format
1962 (sbcl-package->ecl-package sbcl-cl-libsvm-format))
1963
1964 (define-public sbcl-cl-online-learning
1965 (let ((commit "fc7a34f4f161cd1c7dd747d2ed8f698947781423")
1966 (revision "0"))
1967 (package
1968 (name "sbcl-cl-online-learning")
1969 (version (git-version "0.5" revision commit))
1970 (source
1971 (origin
1972 (method git-fetch)
1973 (uri (git-reference
1974 (url "https://github.com/masatoi/cl-online-learning.git")
1975 (commit commit)))
1976 (file-name (git-file-name name version))
1977 (sha256
1978 (base32
1979 "14x95rlg80ay5hv645ki57pqvy12v28hz4k1w0f6bsfi2rmpxchq"))))
1980 (build-system asdf-build-system/sbcl)
1981 (native-inputs
1982 `(("prove" ,sbcl-prove)
1983 ("prove-asdf" ,sbcl-prove-asdf)))
1984 (inputs
1985 `(("cl-libsvm-format" ,sbcl-cl-libsvm-format)
1986 ("cl-store" ,sbcl-cl-store)))
1987 (arguments
1988 `(;; FIXME: Tests pass but then the check phase crashes
1989 #:tests? #f))
1990 (synopsis "Online Machine Learning for Common Lisp")
1991 (description
1992 "This library contains a collection of machine learning algorithms for
1993 online linear classification written in Common Lisp.")
1994 (home-page "https://github.com/masatoi/cl-online-learning")
1995 (license license:expat))))
1996
1997 (define-public cl-online-learning
1998 (sbcl-package->cl-source-package sbcl-cl-online-learning))
1999
2000 (define-public ecl-cl-online-learning
2001 (sbcl-package->ecl-package sbcl-cl-online-learning))
2002
2003 (define-public sbcl-cl-random-forest
2004 (let ((commit "85fbdd4596d40e824f70f1b7cf239cf544e49d51")
2005 (revision "0"))
2006 (package
2007 (name "sbcl-cl-random-forest")
2008 (version (git-version "0.1" revision commit))
2009 (source
2010 (origin
2011 (method git-fetch)
2012 (uri (git-reference
2013 (url "https://github.com/masatoi/cl-random-forest.git")
2014 (commit commit)))
2015 (file-name (git-file-name name version))
2016 (sha256
2017 (base32
2018 "097xv60i1ndz68sg9p4pc7c5gvyp9i1xgw966b4wwfq3x6hbz421"))))
2019 (build-system asdf-build-system/sbcl)
2020 (native-inputs
2021 `(("prove" ,sbcl-prove)
2022 ("prove-asdf" ,sbcl-prove-asdf)
2023 ("trivial-garbage" ,sbcl-trivial-garbage)))
2024 (inputs
2025 `(("alexandria" ,sbcl-alexandria)
2026 ("cl-libsvm-format" ,sbcl-cl-libsvm-format)
2027 ("cl-online-learning" ,sbcl-cl-online-learning)
2028 ("lparallel" ,sbcl-lparallel)))
2029 (arguments
2030 `(;; The tests download data from the Internet
2031 #:tests? #f
2032 #:phases
2033 (modify-phases %standard-phases
2034 (add-after 'unpack 'add-sb-cltl2-dependency
2035 (lambda _
2036 ;; sb-cltl2 is required by lparallel when using sbcl, but it is
2037 ;; not loaded automatically.
2038 (substitute* "cl-random-forest.asd"
2039 (("\\(in-package :cl-user\\)")
2040 "(in-package :cl-user) #+sbcl (require :sb-cltl2)"))
2041 #t)))))
2042 (synopsis "Random Forest and Global Refinement for Common Lisp")
2043 (description
2044 "CL-random-forest is an implementation of Random Forest for multiclass
2045 classification and univariate regression written in Common Lisp. It also
2046 includes an implementation of Global Refinement of Random Forest.")
2047 (home-page "https://github.com/masatoi/cl-random-forest")
2048 (license license:expat))))
2049
2050 (define-public cl-random-forest
2051 (sbcl-package->cl-source-package sbcl-cl-random-forest))
2052
2053 (define-public ecl-cl-random-forest
2054 (sbcl-package->ecl-package sbcl-cl-random-forest))
2055
2056 (define-public gloo
2057 (let ((version "0.0.0") ; no proper version tag
2058 (commit "ca528e32fea9ca8f2b16053cff17160290fc84ce")
2059 (revision "0"))
2060 (package
2061 (name "gloo")
2062 (version (git-version version revision commit))
2063 (source
2064 (origin
2065 (method git-fetch)
2066 (uri (git-reference
2067 (url "https://github.com/facebookincubator/gloo.git")
2068 (commit commit)))
2069 (file-name (git-file-name name version))
2070 (sha256
2071 (base32
2072 "1q9f80zy75f6njrzrqkmhc0g3qxs4gskr7ns2jdqanxa2ww7a99w"))))
2073 (build-system cmake-build-system)
2074 (native-inputs
2075 `(("googletest" ,googletest)))
2076 (arguments
2077 `(#:configure-flags '("-DBUILD_TEST=1")
2078 #:phases
2079 (modify-phases %standard-phases
2080 (replace 'check
2081 (lambda _
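;; Build the test suite enabled by -DBUILD_TEST=1 above.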
2082 (invoke "make" "gloo_test")
2083 #t)))))
2084 (synopsis "Collective communications library")
2085 (description
2086 "Gloo is a collective communications library. It comes with a
2087 number of collective algorithms useful for machine learning applications.
2088 These include a barrier, broadcast, and allreduce.")
2089 (home-page "https://github.com/facebookincubator/gloo")
2090 (license license:bsd-3))))