;;; gnu/packages/machine-learning.scm (merge of branch 'staging' into core-updates)
1 ;;; GNU Guix --- Functional package management for GNU
2 ;;; Copyright © 2015, 2016, 2017, 2018, 2019 Ricardo Wurmus <rekado@elephly.net>
3 ;;; Copyright © 2016 Efraim Flashner <efraim@flashner.co.il>
4 ;;; Copyright © 2016, 2017 Marius Bakke <mbakke@fastmail.com>
5 ;;; Copyright © 2016 Hartmut Goebel <h.goebel@crazy-compilers.com>
6 ;;; Copyright © 2018, 2019 Tobias Geerinckx-Rice <me@tobias.gr>
7 ;;; Copyright © 2018 Kei Kebreau <kkebreau@posteo.net>
8 ;;; Copyright © 2018 Mark Meyer <mark@ofosos.org>
9 ;;; Copyright © 2018 Ben Woodcroft <donttrustben@gmail.com>
10 ;;; Copyright © 2018 Fis Trivial <ybbs.daans@hotmail.com>
11 ;;; Copyright © 2018 Julien Lepiller <julien@lepiller.eu>
12 ;;; Copyright © 2018 Björn Höfling <bjoern.hoefling@bjoernhoefling.de>
13 ;;; Copyright © 2019 Nicolas Goaziou <mail@nicolasgoaziou.fr>
14 ;;; Copyright © 2019 Guillaume Le Vaillant <glv@posteo.net>
15 ;;; Copyright © 2019 Brett Gilio <brettg@gnu.org>
16 ;;;
17 ;;; This file is part of GNU Guix.
18 ;;;
19 ;;; GNU Guix is free software; you can redistribute it and/or modify it
20 ;;; under the terms of the GNU General Public License as published by
21 ;;; the Free Software Foundation; either version 3 of the License, or (at
22 ;;; your option) any later version.
23 ;;;
24 ;;; GNU Guix is distributed in the hope that it will be useful, but
25 ;;; WITHOUT ANY WARRANTY; without even the implied warranty of
26 ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
27 ;;; GNU General Public License for more details.
28 ;;;
29 ;;; You should have received a copy of the GNU General Public License
30 ;;; along with GNU Guix. If not, see <http://www.gnu.org/licenses/>.
31
;; Module declaration.  The (gnu packages ...) imports are kept in
;; alphabetical order ("sqlite" was previously misfiled after "statistics").
(define-module (gnu packages machine-learning)
  #:use-module ((guix licenses) #:prefix license:)
  #:use-module (guix packages)
  #:use-module (guix utils)
  #:use-module (guix download)
  #:use-module (guix svn-download)
  #:use-module (guix build-system asdf)
  #:use-module (guix build-system cmake)
  #:use-module (guix build-system gnu)
  #:use-module (guix build-system ocaml)
  #:use-module (guix build-system python)
  #:use-module (guix build-system r)
  #:use-module (guix git-download)
  #:use-module (gnu packages)
  #:use-module (gnu packages adns)
  #:use-module (gnu packages algebra)
  #:use-module (gnu packages audio)
  #:use-module (gnu packages autotools)
  #:use-module (gnu packages base)
  #:use-module (gnu packages bash)
  #:use-module (gnu packages boost)
  #:use-module (gnu packages check)
  #:use-module (gnu packages compression)
  #:use-module (gnu packages cran)
  #:use-module (gnu packages databases)
  #:use-module (gnu packages dejagnu)
  #:use-module (gnu packages gcc)
  #:use-module (gnu packages glib)
  #:use-module (gnu packages graphviz)
  #:use-module (gnu packages gstreamer)
  #:use-module (gnu packages image)
  #:use-module (gnu packages linux)
  #:use-module (gnu packages lisp-xyz)
  #:use-module (gnu packages maths)
  #:use-module (gnu packages mpi)
  #:use-module (gnu packages ocaml)
  #:use-module (gnu packages onc-rpc)
  #:use-module (gnu packages perl)
  #:use-module (gnu packages pkg-config)
  #:use-module (gnu packages protobuf)
  #:use-module (gnu packages python)
  #:use-module (gnu packages python-science)
  #:use-module (gnu packages python-web)
  #:use-module (gnu packages python-xyz)
  #:use-module (gnu packages serialization)
  #:use-module (gnu packages sphinx)
  #:use-module (gnu packages sqlite)
  #:use-module (gnu packages statistics)
  #:use-module (gnu packages swig)
  #:use-module (gnu packages tls)
  #:use-module (gnu packages web)
  #:use-module (gnu packages xml)
  #:use-module (gnu packages xorg)
  #:use-module (ice-9 match))
86
(define-public fann
  ;; The last release is >100 commits behind, so we package from git.
  (let ((commit "d71d54788bee56ba4cf7522801270152da5209d7"))
    (package
      (name "fann")
      (version (string-append "2.2.0-1." (string-take commit 8)))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/libfann/fann.git")
                      (commit commit)))
                (file-name (string-append name "-" version "-checkout"))
                (sha256
                 (base32
                  "0ibwpfrjs6q2lijs8slxjgzb2llcl6rk3v2ski4r6215g5jjhg3x"))))
      (build-system cmake-build-system)
      (arguments
       `(#:phases
         (modify-phases %standard-phases
           (replace 'check
             ;; There is no "check" target; run the test binary directly
             ;; from the build tree.  (The previous version bound the "out"
             ;; output to a local that was never used.)
             (lambda _
               (with-directory-excursion "tests"
                 (invoke "./fann_tests")))))))
      (home-page "http://leenissen.dk/fann/wp/")
      (synopsis "Fast Artificial Neural Network")
      (description
       "FANN is a neural network library, which implements multilayer
artificial neural networks in C with support for both fully connected and
sparsely connected networks.")
      (license license:lgpl2.1))))
118
(define-public libsvm
  (package
    (name "libsvm")
    (version "3.23")
    (source
     (origin
       (method url-fetch)
       (uri (string-append "https://www.csie.ntu.edu.tw/~cjlin/libsvm/"
                           name "-" version ".tar.gz"))
       (sha256
        (base32 "0jpaq0rr92x38p4nk3gjan79ip67m6p80anb28z1d8601miysyi5"))))
    (build-system gnu-build-system)
    (arguments
     `(#:tests? #f                      ;no "check" target
       #:phases
       (modify-phases %standard-phases
         (delete 'configure)            ;no configure script
         ;; The Makefile has no ‘install’ target either, so copy the three
         ;; command-line tools into place by hand.
         (replace 'install
           (lambda* (#:key outputs #:allow-other-keys)
             (let* ((out (assoc-ref outputs "out"))
                    (bin (string-append out "/bin/")))
               (mkdir-p bin)
               (for-each (lambda (file)
                           (copy-file file (string-append bin file)))
                         '("svm-train"
                           "svm-predict"
                           "svm-scale")))
             #t)))))
    (home-page "http://www.csie.ntu.edu.tw/~cjlin/libsvm/")
    (synopsis "Library for Support Vector Machines")
    (description
     "LIBSVM is a machine learning library for support vector
classification, (C-SVC, nu-SVC), regression (epsilon-SVR, nu-SVR) and
distribution estimation (one-class SVM). It supports multi-class
classification.")
    (license license:bsd-3)))
155
(define-public python-libsvm
  (package (inherit libsvm)
    (name "python-libsvm")
    (build-system gnu-build-system)
    (arguments
     `(#:tests? #f                      ;no "check" target
       #:make-flags '("-C" "python")    ;build only the Python bindings
       #:phases
       (modify-phases %standard-phases
         (delete 'configure)
         ;; There is no ‘install’ target; install the modules by hand.
         (replace 'install
           (lambda* (#:key inputs outputs #:allow-other-keys)
             ;; Derive ".../lib/pythonX.Y/site-packages/" from the store
             ;; file name of the "python" input: its last five characters
             ;; are the "X.Y.Z" version, of which we keep "X.Y".
             ;; NOTE(review): brittle if the version string ever grows a
             ;; digit — verify against the python-version helper.
             (let ((site (string-append (assoc-ref outputs "out")
                                        "/lib/python"
                                        (string-take
                                         (string-take-right
                                          (assoc-ref inputs "python") 5) 3)
                                        "/site-packages/")))
               ;; Let the ctypes loader find the library on the search path
               ;; instead of via a relative file name.
               (substitute* "python/svm.py"
                 (("../libsvm.so.2") "libsvm.so.2"))
               (mkdir-p site)
               (for-each (lambda (file)
                           (copy-file file (string-append site (basename file))))
                         (find-files "python" "\\.py"))
               (copy-file "libsvm.so.2"
                          (string-append site "libsvm.so.2")))
             #t)))))
    (inputs
     `(("python" ,python)))
    (synopsis "Python bindings of libSVM")))
187
(define-public ghmm
  ;; The latest release candidate is several years old and a couple of fixes
  ;; have been published since.  This is why we download the sources from the
  ;; SVN repository.
  (let ((svn-revision 2341))
    (package
      (name "ghmm")
      (version (string-append "0.9-rc3-0." (number->string svn-revision)))
      (source (origin
                (method svn-fetch)
                (uri (svn-reference
                      (url "http://svn.code.sf.net/p/ghmm/code/trunk")
                      (revision svn-revision)))
                (file-name (string-append name "-" version "-checkout"))
                (sha256
                 (base32
                  "0qbq1rqp94l530f043qzp8aw5lj7dng9wq0miffd7spd1ff638wq"))))
      (build-system gnu-build-system)
      (arguments
       `(#:imported-modules (,@%gnu-build-system-modules
                             (guix build python-build-system))
         #:phases
         (modify-phases %standard-phases
           (add-after 'unpack 'enter-dir
             (lambda _ (chdir "ghmm") #t))
           ;; Tests need the installed Python modules, so run them after
           ;; 'install instead of before it.
           (delete 'check)
           (add-after 'install 'check
             (assoc-ref %standard-phases 'check))
           (add-before 'check 'fix-PYTHONPATH
             (lambda* (#:key inputs outputs #:allow-other-keys)
               (let ((python-version (python-version
                                      (assoc-ref inputs "python"))))
                 (setenv "PYTHONPATH"
                         (string-append (getenv "PYTHONPATH")
                                        ":" (assoc-ref outputs "out")
                                        "/lib/python" python-version
                                        "/site-packages")))
               #t))
           (add-after 'enter-dir 'fix-runpath
             (lambda* (#:key outputs #:allow-other-keys)
               (substitute* "ghmmwrapper/setup.py"
                 (("^(.*)extra_compile_args = \\[" line indent)
                  (string-append indent
                                 "extra_link_args = [\"-Wl,-rpath="
                                 (assoc-ref outputs "out") "/lib\"],\n"
                                 line
                                 "\"-Wl,-rpath="
                                 (assoc-ref outputs "out")
                                 "/lib\", ")))
               #t))
           (add-after 'enter-dir 'disable-broken-tests
             (lambda _
               (substitute* "tests/Makefile.am"
                 ;; GHMM_SILENT_TESTS is assumed to be a command.
                 (("TESTS_ENVIRONMENT.*") "")
                 ;; Do not build broken tests.
                 (("chmm .*") "")
                 (("read_fa .*") "")
                 (("mcmc .*") "")
                 (("label_higher_order_test.*$")
                  "label_higher_order_test\n"))

               ;; These Python unittests are broken as there is no gato.
               ;; See https://sourceforge.net/p/ghmm/support-requests/3/
               ;; (The alternation previously listed "testNewXML" twice; the
               ;; duplicate alternative has been dropped.)
               (substitute* "ghmmwrapper/ghmmunittests.py"
                 (("^(.*)def (testNewXML|testMultipleTransitionClasses)"
                   line indent)
                  (string-append indent
                                 "@unittest.skip(\"Disabled by Guix\")\n"
                                 line)))
               #t)))))
      (inputs
       `(("python" ,python-2) ; only Python 2 is supported
         ("libxml2" ,libxml2)))
      (native-inputs
       `(("pkg-config" ,pkg-config)
         ("dejagnu" ,dejagnu)
         ("swig" ,swig)
         ("autoconf" ,autoconf)
         ("automake" ,automake)
         ("libtool" ,libtool)))
      (home-page "http://ghmm.org")
      (synopsis "Hidden Markov Model library")
      (description
       "The General Hidden Markov Model library (GHMM) is a C library with
additional Python bindings implementing a wide range of types of @dfn{Hidden
Markov Models} (HMM) and algorithms: discrete, continuous emissions, basic
training, HMM clustering, HMM mixtures.")
      (license license:lgpl2.0+))))
277
(define-public mcl
  (package
    (name "mcl")
    (version "14.137")
    (source (origin
              (method url-fetch)
              ;; Upstream tarballs use dashes in place of the version dots.
              (uri (string-append
                    "http://micans.org/mcl/src/mcl-"
                    (string-replace-substring version "." "-")
                    ".tar.gz"))
              (sha256
               (base32
                "15xlax3z31lsn62vlg94hkm75nm40q4679amnfg13jm8m2bnhy5m"))))
    (build-system gnu-build-system)
    (arguments
     ;; Build the optional BLAST support.
     `(#:configure-flags (list "--enable-blast")))
    (inputs
     `(("perl" ,perl)))
    (home-page "http://micans.org/mcl/")
    (synopsis "Clustering algorithm for graphs")
    (description
     "The MCL algorithm is short for the @dfn{Markov Cluster Algorithm}, a
fast and scalable unsupervised cluster algorithm for graphs (also known as
networks) based on simulation of (stochastic) flow in graphs.")
    ;; In the LICENCE file and web page it says "The software is licensed
    ;; under the GNU General Public License, version 3.", but in several of
    ;; the source code files it suggests GPL3 or later.
    ;; http://listserver.ebi.ac.uk/pipermail/mcl-users/2016/000376.html
    (license license:gpl3)))
307
(define-public ocaml-mcl
  (package
    (name "ocaml-mcl")
    (version "12-068oasis4")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/fhcrc/mcl.git")
             (commit version)))
       (file-name (git-file-name name version))
       (sha256
        (base32
         "0009dc3h2jp3qg5val452wngpqnbfyhbcxylghq0mrjqxx0jdq5p"))))
    (build-system ocaml-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         ;; Patch shell paths and port the oasis-generated build files to a
         ;; modern OCaml before configuring.
         (add-before 'configure 'patch-paths
           (lambda _
             (substitute* "configure"
               (("/bin/sh") (which "sh")))
             (substitute* "setup.ml"
               (("LDFLAGS=-fPIC")
                (string-append "LDFLAGS=-fPIC\"; \"SHELL=" (which "sh")))
               (("-std=c89") "-std=gnu99")

               ;; This is a mutable string, which is no longer supported.
               ;; Use a byte buffer instead.
               (("String.make \\(String.length s\\)")
                "Bytes.make (String.length s)")

               ;; These two substitutions belong together: they wrap the
               ;; replace_chars call in Bytes.to_string.
               (("OASISString.replace_chars")
                "Bytes.to_string (OASISString.replace_chars")
               ((" s;")
                " s);"))
             (substitute* "myocamlbuild.ml"
               (("std=c89") "std=gnu99"))
             ;; Since we build with a more recent OCaml, we have to use C99
             ;; or later, which conflicts with the old C code's use of the
             ;; "restrict" identifier.
             (substitute* "src/impala/matrix.c"
               (("restrict") "restrict_"))
             #t)))))
    (native-inputs
     `(("ocamlbuild" ,ocamlbuild)))
    (home-page "https://github.com/fhcrc/mcl")
    (synopsis "OCaml wrappers around MCL")
    (description
     "This package provides OCaml bindings for the MCL graph clustering
algorithm.")
    (license license:gpl3)))
360
(define-public randomjungle
  (package
    (name "randomjungle")
    (version "2.1.0")
    (source
     (origin
       (method url-fetch)
       (uri (string-append
             "https://www.imbs.uni-luebeck.de/fileadmin/files/Software"
             "/randomjungle/randomjungle-" version ".tar_.gz"))
       (patches (search-patches "randomjungle-disable-static-build.patch"))
       (sha256
        (base32
         "12c8rf30cla71swx2mf4ww9mfd8jbdw5lnxd7dxhyw1ygrvg6y4w"))))
    (build-system gnu-build-system)
    (arguments
     `(#:configure-flags
       (list "--disable-static"
             (string-append "--with-boost="
                            (assoc-ref %build-inputs "boost")))
       #:phases
       (modify-phases %standard-phases
         ;; The aging C++ sources need -fpermissive to compile with a
         ;; modern GCC.
         (add-before 'configure 'set-CXXFLAGS
           (lambda _
             (setenv "CXXFLAGS" "-fpermissive ")
             #t)))))
    (inputs
     `(("boost" ,boost)
       ("gsl" ,gsl)
       ("libxml2" ,libxml2)
       ("zlib" ,zlib)))
    (native-inputs
     `(("gfortran" ,gfortran)
       ("gfortran:lib" ,gfortran "lib")))
    ;; Non-portable assembly instructions are used so building fails on
    ;; platforms other than x86_64 or i686.
    (supported-systems '("x86_64-linux" "i686-linux"))
    (home-page "https://www.imbs.uni-luebeck.de/forschung/software/details.html#c224")
    (synopsis "Implementation of the Random Forests machine learning method")
    (description
     "Random Jungle is an implementation of Random Forests. It is supposed to
analyse high dimensional data. In genetics, it can be used for analysing big
Genome Wide Association (GWA) data. Random Forests is a powerful machine
learning method. Most interesting features are variable selection, missing
value imputation, classifier creation, generalization error estimation and
sample proximities between pairs of cases.")
    (license license:gpl3+)))
409
(define-public openfst
  (package
    (name "openfst")
    (version "1.7.2")
    (source (origin
              (method url-fetch)
              (uri (string-append "http://www.openfst.org/twiki/pub/FST/"
                                  "FstDownload/openfst-" version ".tar.gz"))
              (sha256
               (base32
                "0fqgk8195kz21is09gwzwnrg7fr9526bi9mh4apyskapz27pbhr1"))))
    ;; A plain autotools build; the default phases suffice.
    (build-system gnu-build-system)
    (home-page "http://www.openfst.org")
    (synopsis "Library for weighted finite-state transducers")
    (description "OpenFst is a library for constructing, combining,
optimizing, and searching weighted finite-state transducers (FSTs).")
    (license license:asl2.0)))
427
(define-public shogun
  (package
    (name "shogun")
    (version "6.1.3")
    (source
     (origin
       (method url-fetch)
       (uri (string-append
             "ftp://shogun-toolbox.org/shogun/releases/"
             (version-major+minor version)
             "/sources/shogun-" version ".tar.bz2"))
       (sha256
        (base32
         "1rn9skm3nw6hr7mr3lgp2gfqhi7ii0lyxck7qmqnf8avq349s5jp"))
       (modules '((guix build utils)
                  (ice-9 rdelim)))
       (snippet
        '(begin
           ;; Remove non-free sources and files referencing them.
           (for-each delete-file
                     (find-files "src/shogun/classifier/svm/"
                                 "SVMLight\\.(cpp|h)"))
           (for-each delete-file
                     (find-files "examples/undocumented/libshogun/"
                                 (string-append
                                  "(classifier_.*svmlight.*|"
                                  "evaluation_cross_validation_locked_comparison).cpp")))
           ;; Strip non-free functions: copy FILE line by line, dropping
           ;; everything between "#ifdef USE_SVMLIGHT" and the matching
           ;; "#endif //USE_SVMLIGHT" (inclusive).
           (define (delete-ifdefs file)
             (with-atomic-file-replacement file
               (lambda (in out)
                 (let loop ((line (read-line in 'concat))
                            (skipping? #f))
                   (if (eof-object? line)
                       #t
                       (let ((skip-next?
                              (or (and skipping?
                                       (not (string-prefix?
                                             "#endif //USE_SVMLIGHT" line)))
                                  (string-prefix?
                                   "#ifdef USE_SVMLIGHT" line))))
                         (when (or (not skipping?)
                                   (and skipping? (not skip-next?)))
                           (display line out))
                         (loop (read-line in 'concat) skip-next?)))))))
           (for-each delete-ifdefs
                     (append
                      (find-files "src/shogun/classifier/mkl"
                                  "^MKLClassification\\.cpp")
                      (find-files "src/shogun/classifier/svm"
                                  "^SVMLightOneClass\\.(cpp|h)")
                      (find-files "src/shogun/multiclass"
                                  "^ScatterSVM\\.(cpp|h)")
                      (find-files "src/shogun/kernel/"
                                  "^(Kernel|CombinedKernel|ProductKernel)\\.(cpp|h)")
                      (find-files "src/shogun/regression/svr"
                                  "^(MKLRegression|SVRLight)\\.(cpp|h)")
                      (find-files "src/shogun/transfer/domain_adaptation"
                                  "^DomainAdaptationSVM\\.(cpp|h)")))
           #t))))
    (build-system cmake-build-system)
    (arguments
     '(#:tests? #f ;no check target
       #:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'delete-broken-symlinks
           (lambda _
             (for-each delete-file '("applications/arts/data"
                                     "applications/asp/data"
                                     "applications/easysvm/data"
                                     "applications/msplicer/data"
                                     "applications/ocr/data"
                                     "examples/meta/data"
                                     "examples/undocumented/data"))
             #t))
         ;; Point the R interface at this package's R library directory.
         (add-after 'unpack 'change-R-target-path
           (lambda* (#:key outputs #:allow-other-keys)
             (substitute* '("src/interfaces/r/CMakeLists.txt"
                            "examples/meta/r/CMakeLists.txt")
               (("\\$\\{R_COMPONENT_LIB_PATH\\}")
                (string-append (assoc-ref outputs "out")
                               "/lib/R/library/")))
             #t))
         (add-after 'unpack 'fix-octave-modules
           (lambda* (#:key outputs #:allow-other-keys)
             (substitute* "src/interfaces/octave/CMakeLists.txt"
               (("^include_directories\\(\\$\\{OCTAVE_INCLUDE_DIRS\\}")
                "include_directories(${OCTAVE_INCLUDE_DIRS} ${OCTAVE_INCLUDE_DIRS}/octave")
               ;; change target directory
               (("\\$\\{OCTAVE_OCT_LOCAL_API_FILE_DIR\\}")
                (string-append (assoc-ref outputs "out")
                               "/share/octave/packages")))
             (substitute* '("src/interfaces/octave/swig_typemaps.i"
                            "src/interfaces/octave/sg_print_functions.cpp")
               ;; "octave/config.h" and "octave/oct-obj.h" deprecated in Octave.
               (("octave/config\\.h") "octave/octave-config.h")
               (("octave/oct-obj.h") "octave/ovl.h"))
             #t))
         ;; The build expects a vendored copy of rxcpp; provide our input.
         (add-after 'unpack 'move-rxcpp
           (lambda* (#:key inputs #:allow-other-keys)
             (let ((rxcpp-dir "shogun/third-party/rxcpp"))
               (mkdir-p rxcpp-dir)
               (install-file (assoc-ref inputs "rxcpp") rxcpp-dir)
               #t)))
         (add-before 'build 'set-HOME
           ;; $HOME needs to be set at some point during the build phase
           (lambda _ (setenv "HOME" "/tmp") #t)))
       #:configure-flags
       (list "-DCMAKE_BUILD_WITH_INSTALL_RPATH=TRUE"
             "-DUSE_SVMLIGHT=OFF" ;disable proprietary SVMLIGHT
             "-DBUILD_META_EXAMPLES=OFF" ;requires unpackaged ctags
             ;;"-DINTERFACE_JAVA=ON" ;requires unpackaged jblas
             ;;"-DINTERFACE_RUBY=ON" ;requires unpackaged ruby-narray
             ;;"-DINTERFACE_PERL=ON" ;"FindPerlLibs" does not exist
             ;;"-DINTERFACE_LUA=ON"  ;fails because lua doesn't build pkgconfig file
             "-DINTERFACE_OCTAVE=ON"
             "-DINTERFACE_PYTHON=ON"
             "-DINTERFACE_R=ON")))
    (inputs
     `(("python" ,python)
       ("numpy" ,python-numpy)
       ("r-minimal" ,r-minimal)
       ("octave" ,octave-cli)
       ("swig" ,swig)
       ("eigen" ,eigen)
       ("hdf5" ,hdf5)
       ("atlas" ,atlas)
       ("arpack" ,arpack-ng)
       ("lapack" ,lapack)
       ("glpk" ,glpk)
       ("libxml2" ,libxml2)
       ("lzo" ,lzo)
       ("zlib" ,zlib)))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("rxcpp" ,rxcpp)))
    ;; Non-portable SSE instructions are used so building fails on platforms
    ;; other than x86_64.
    (supported-systems '("x86_64-linux"))
    (home-page "http://shogun-toolbox.org/")
    (synopsis "Machine learning toolbox")
    (description
     "The Shogun Machine learning toolbox provides a wide range of unified and
efficient Machine Learning (ML) methods. The toolbox seamlessly allows to
combine multiple data representations, algorithm classes, and general purpose
tools. This enables both rapid prototyping of data pipelines and extensibility
in terms of new algorithms.")
    (license license:gpl3+)))
576
(define-public rxcpp
  (package
    (name "rxcpp")
    (version "4.1.0")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/ReactiveX/RxCpp.git")
             (commit (string-append "v" version))))
       (sha256
        (base32 "1rdpa3jlc181jd08nk437aar085h28i45s6nzrv65apb3xyyz0ij"))
       (file-name (git-file-name name version))))
    (build-system cmake-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         ;; Keep compiler warnings from failing the build.
         (add-after 'unpack 'remove-werror
           (lambda _
             (substitute* (find-files ".")
               (("-Werror") ""))
             #t))
         ;; Drive the test suite through ctest.
         (replace 'check
           (lambda _
             (invoke "ctest"))))))
    (native-inputs
     `(("catch" ,catch-framework)))
    (home-page "http://reactivex.io/")
    (synopsis "Reactive Extensions for C++")
    (description
     "The Reactive Extensions for C++ (RxCpp) is a library of algorithms for
values-distributed-in-time. ReactiveX is a library for composing asynchronous
and event-based programs by using observable sequences.

It extends the observer pattern to support sequences of data and/or events and
adds operators that allow you to compose sequences together declaratively while
abstracting away concerns about things like low-level threading,
synchronization, thread-safety, concurrent data structures, and non-blocking
I/O.")
    (license license:asl2.0)))
617
(define-public r-adaptivesparsity
  (package
    (name "r-adaptivesparsity")
    (version "1.6")
    (source (origin
              (method url-fetch)
              (uri (cran-uri "AdaptiveSparsity" version))
              (sha256
               (base32
                "0imr5m8mll9j6n4icsv6z9rl5kbnwsp9wvzrg7n90nnmcxq2cz91"))))
    (properties
     `((upstream-name . "AdaptiveSparsity")))
    (build-system r-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         ;; The package does not link against the Armadillo library it
         ;; uses; add it to PKG_LIBS.
         (add-after 'unpack 'link-against-armadillo
           (lambda _
             (substitute* "src/Makevars"
               (("PKG_LIBS=" prefix)
                (string-append prefix "-larmadillo")))
             ;; Explicitly return #t, as every other phase in this file
             ;; does; substitute*'s return value is unspecified.
             #t)))))
    (propagated-inputs
     `(("r-mass" ,r-mass)
       ("r-matrix" ,r-matrix)
       ("r-rcpp" ,r-rcpp)
       ("r-rcpparmadillo" ,r-rcpparmadillo)))
    (inputs
     `(("armadillo" ,armadillo)))
    (home-page "https://cran.r-project.org/web/packages/AdaptiveSparsity")
    (synopsis "Adaptive sparsity models")
    (description
     "This package implements the Figueiredo machine learning algorithm for
adaptive sparsity and the Wong algorithm for adaptively sparse gaussian
geometric models.")
    (license license:lgpl3+)))
653
(define-public gemmlowp-for-tensorflow
  ;; The commit hash is taken from "tensorflow/workspace.bzl".
  (let ((commit "38ebac7b059e84692f53e5938f97a9943c120d98")
        (revision "2"))
    (package
      (name "gemmlowp")
      (version (git-version "0" revision commit))
      (source (origin
                (method url-fetch)
                (uri (string-append "https://mirror.bazel.build/"
                                    "github.com/google/gemmlowp/archive/"
                                    commit ".zip"))
                (file-name (string-append "gemmlowp-" version ".zip"))
                (sha256
                 (base32
                  "0n56s2g8hrssm4w8qj1v58gfm56a04n9v992ixkmvk6zjiralzxq"))))
      (build-system cmake-build-system)
      (arguments
       `(#:configure-flags
         ;; SSE2 is only meaningful on x86; leave the flags empty elsewhere.
         (list ,@(match (%current-system)
                   ((or "x86_64-linux" "i686-linux")
                    '("-DCMAKE_CXX_FLAGS=-msse2"))
                   (_ '())))
         #:phases
         (modify-phases %standard-phases
           ;; This directory contains the CMakeLists.txt.
           (add-after 'unpack 'chdir
             (lambda _ (chdir "contrib") #t))
           ;; There is no install target; copy the shared library and the
           ;; public headers into place by hand.
           (replace 'install
             (lambda* (#:key outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (lib (string-append out "/lib/"))
                      (inc (string-append out "/include/")))
                 (install-file "../build/libeight_bit_int_gemm.so" lib)
                 (for-each (lambda (dir)
                             (let ((target (string-append inc "/" dir)))
                               (mkdir-p target)
                               (for-each (lambda (h)
                                           (install-file h target))
                                         (find-files (string-append "../" dir)
                                                     "\\.h$"))))
                           '("meta" "profiling" "public" "fixedpoint"
                             "eight_bit_int_gemm" "internal"))
                 #t))))))
      (native-inputs
       `(("unzip" ,unzip)))
      (home-page "https://github.com/google/gemmlowp")
      (synopsis "Small self-contained low-precision GEMM library")
      (description
       "This is a small self-contained low-precision @dfn{general matrix
multiplication} (GEMM) library. It is not a full linear algebra library.
Low-precision means that the input and output matrix entries are integers on
at most 8 bits. To avoid overflow, results are internally accumulated on more
than 8 bits, and at the end only some significant 8 bits are kept.")
      (license license:asl2.0))))
710
(define-public dlib
  (package
    (name "dlib")
    (version "19.7")
    (source (origin
              (method url-fetch)
              (uri (string-append
                    "http://dlib.net/files/dlib-" version ".tar.bz2"))
              (sha256
               (base32
                "1mljz02kwkrbggyncxv5fpnyjdybw2qihaacb3js8yfkw12vwpc2"))
              (modules '((guix build utils)))
              (snippet
               '(begin
                  ;; Delete ~13MB of bundled dependencies.
                  (delete-file-recursively "dlib/external")
                  (delete-file-recursively "docs/dlib/external")
                  #t))))
    (build-system cmake-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'disable-asserts
           (lambda _
             ;; config.h recommends explicitly enabling or disabling asserts
             ;; when building as a shared library.  By default neither is set.
             (substitute* "dlib/config.h"
               (("^//#define DLIB_DISABLE_ASSERTS") "#define DLIB_DISABLE_ASSERTS"))
             #t))
         (add-after 'disable-asserts 'disable-failing-tests
           (lambda _
             ;; One test times out on MIPS, so we need to disable it.
             ;; Others are flaky on some platforms.
             (let* ((system ,(or (%current-target-system)
                                 (%current-system)))
                    (disabled-tests (cond
                                     ((string-prefix? "mips64" system)
                                      '("object_detector" ; timeout
                                        "data_io"))
                                     ((string-prefix? "armhf" system)
                                      '("learning_to_track"))
                                     ((string-prefix? "i686" system)
                                      '("optimization"))
                                     (else '()))))
               ;; Drop each disabled test from the test-suite makefile.
               (for-each
                (lambda (test)
                  (substitute* "dlib/test/makefile"
                    (((string-append "SRC \\+= " test "\\.cpp")) "")))
                disabled-tests)
               #t)))
         (replace 'check
           (lambda _
             ;; No test target, so we build and run the unit tests here.
             (let ((test-dir (string-append "../dlib-" ,version "/dlib/test")))
               (with-directory-excursion test-dir
                 (invoke "make" "-j" (number->string (parallel-job-count)))
                 (invoke "./dtest" "--runall"))
               #t)))
         (add-after 'install 'delete-static-library
           (lambda* (#:key outputs #:allow-other-keys)
             (delete-file (string-append (assoc-ref outputs "out")
                                         "/lib/libdlib.a"))
             #t)))))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ;; For tests.
       ("libnsl" ,libnsl)))
    (inputs
     `(("giflib" ,giflib)
       ("lapack" ,lapack)
       ("libjpeg" ,libjpeg)
       ("libpng" ,libpng)
       ("libx11" ,libx11)
       ("openblas" ,openblas)
       ("zlib" ,zlib)))
    (synopsis
     "Toolkit for making machine learning and data analysis applications in C++")
    (description
     "Dlib is a modern C++ toolkit containing machine learning algorithms and
tools. It is used in both industry and academia in a wide range of domains
including robotics, embedded devices, mobile phones, and large high performance
computing environments.")
    (home-page "http://dlib.net")
    (license license:boost1.0)))
795
(define-public python-scikit-learn
  (package
    (name "python-scikit-learn")
    (version "0.20.4")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/scikit-learn/scikit-learn.git")
             (commit version)))
       (file-name (git-file-name name version))
       (sha256
        (base32
         "08zbzi8yx5wdlxfx9jap61vg1malc9ajf576w7a0liv6jvvrxlpj"))))
    (build-system python-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         ;; Compile the Cython extensions in place so that the test suite
         ;; can import them.
         (add-after 'build 'build-ext
           (lambda _ (invoke "python" "setup.py" "build_ext" "--inplace") #t))
         (replace 'check
           (lambda _
             ;; Restrict OpenBLAS threads to prevent segfaults while testing!
             (setenv "OPENBLAS_NUM_THREADS" "1")

             ;; Some tests require write access to $HOME.
             (setenv "HOME" "/tmp")

             (invoke "pytest" "sklearn" "-m" "not network")))
         (add-before 'reset-gzip-timestamps 'make-files-writable
           (lambda* (#:key outputs #:allow-other-keys)
             ;; Make sure .gz files are writable so that the
             ;; 'reset-gzip-timestamps' phase can do its work.
             (let ((out (assoc-ref outputs "out")))
               (for-each make-file-writable
                         (find-files out "\\.gz$"))
               #t))))))
    (inputs
     `(("openblas" ,openblas)))
    (native-inputs
     `(("python-pytest" ,python-pytest)
       ("python-pandas" ,python-pandas) ;for tests
       ("python-cython" ,python-cython)))
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-scipy" ,python-scipy)))
    (home-page "http://scikit-learn.org/")
    (synopsis "Machine Learning in Python")
    (description
     "Scikit-learn provides simple and efficient tools for data mining and
data analysis.")
    (license license:bsd-3)))
848
;; Python 2 variant of python-scikit-learn, derived automatically.
(define-public python2-scikit-learn
  (package-with-python2 python-scikit-learn))
851
(define-public python-autograd
  ;; No release has been tagged, so package a recent commit.
  (let* ((commit "442205dfefe407beffb33550846434baa90c4de7")
         (revision "0")
         (version (git-version "0.0.0" revision commit)))
    (package
      (name "python-autograd")
      (home-page "https://github.com/HIPS/autograd")
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url home-page)
                      (commit commit)))
                (sha256
                 (base32
                  "189sv2xb0mwnjawa9z7mrgdglc1miaq93pnck26r28fi1jdwg0z4"))
                (file-name (git-file-name name version))))
      (version version)
      (build-system python-build-system)
      (native-inputs
       `(("python-nose" ,python-nose)
         ("python-pytest" ,python-pytest)))
      (propagated-inputs
       `(("python-future" ,python-future)
         ("python-numpy" ,python-numpy)))
      (arguments
       `(#:phases (modify-phases %standard-phases
                    ;; Run the test suite with pytest.
                    (replace 'check
                      (lambda _
                        (invoke "py.test" "-v"))))))
      (synopsis "Efficiently computes derivatives of NumPy code")
      (description "Autograd can automatically differentiate native Python and
NumPy code. It can handle a large subset of Python's features, including loops,
ifs, recursion and closures, and it can even take derivatives of derivatives
of derivatives. It supports reverse-mode differentiation
(a.k.a. backpropagation), which means it can efficiently take gradients of
scalar-valued functions with respect to array-valued arguments, as well as
forward-mode differentiation, and the two can be composed arbitrarily. The
main intended application of Autograd is gradient-based optimization.")
      (license license:expat))))
891
;; Python 2 variant of python-autograd, derived automatically.
(define-public python2-autograd
  (package-with-python2 python-autograd))
894
(define-public lightgbm
  (package
    (name "lightgbm")
    (version "2.0.12")
    (source (origin
              (method url-fetch)
              (uri (string-append
                    "https://github.com/Microsoft/LightGBM/archive/v"
                    version ".tar.gz"))
              (sha256
               (base32
                "132zf0yk0545mg72hyzxm102g3hpb6ixx9hnf8zd2k55gas6cjj1"))
              (file-name (string-append name "-" version ".tar.gz"))))
    (build-system cmake-build-system)
    (arguments
     `(#:configure-flags
       '("-DUSE_MPI=ON")
       #:phases
       (modify-phases %standard-phases
         ;; There is no "check" target; run the C API tests from the
         ;; unpacked source tree.
         (replace 'check
           (lambda* (#:key outputs #:allow-other-keys)
             (with-directory-excursion ,(string-append "../LightGBM-" version)
               (invoke "pytest" "tests/c_api_test/test_.py")))))))
    (native-inputs
     `(("python-pytest" ,python-pytest)
       ("python-nose" ,python-nose)))
    (inputs
     `(("openmpi" ,openmpi)))
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-scipy" ,python-scipy)))
    (home-page "https://github.com/Microsoft/LightGBM")
    (synopsis "Gradient boosting framework based on decision tree algorithms")
    (description "LightGBM is a gradient boosting framework that uses tree
based learning algorithms. It is designed to be distributed and efficient with
the following advantages:

@itemize
@item Faster training speed and higher efficiency
@item Lower memory usage
@item Better accuracy
@item Parallel and GPU learning supported (not enabled in this package)
@item Capable of handling large-scale data
@end itemize\n")
    (license license:expat)))
940
(define-public vowpal-wabbit
  ;; Only the core tools are built; the language bindings are not included.
  (package
    (name "vowpal-wabbit")
    (version "8.5.0")
    (source
     (origin
       (method url-fetch)
       (uri (string-append
             "https://github.com/JohnLangford/vowpal_wabbit/archive/"
             version ".tar.gz"))
       (file-name (string-append name "-" version ".tar.gz"))
       (sha256
        (base32
         "0clp2kb7rk5sckhllxjr5a651awf4s8dgzg4659yh4hf5cqnf0gr"))))
    (build-system gnu-build-system)
    (arguments
     ;; Point the configure script at the Boost headers and libraries.
     `(#:configure-flags
       (list (string-append "--with-boost="
                            (assoc-ref %build-inputs "boost")))))
    (inputs
     `(("boost" ,boost)
       ("zlib" ,zlib)))
    (home-page "https://github.com/JohnLangford/vowpal_wabbit")
    (synopsis "Fast machine learning library for online learning")
    (description "Vowpal Wabbit is a machine learning system with techniques
such as online, hashing, allreduce, reductions, learning2search, active, and
interactive learning.")
    (license license:bsd-3)))
969
(define-public python2-fastlmm
  (package
    (name "python2-fastlmm")
    (version "0.2.21")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "fastlmm" version ".zip"))
       (sha256
        (base32
         "1q8c34rpmwkfy3r4d5172pzdkpfryj561897z9r3x22gq7813x1m"))))
    (build-system python-build-system)
    (arguments
     `(#:python ,python-2 ; only Python 2.7 is supported
       #:tests? #f))      ; some test files are missing
    (native-inputs
     `(("python2-cython" ,python2-cython)
       ("python2-mock" ,python2-mock)
       ("python2-nose" ,python2-nose)
       ("unzip" ,unzip)))
    (propagated-inputs
     `(("python2-matplotlib" ,python2-matplotlib)
       ("python2-numpy" ,python2-numpy)
       ("python2-pandas" ,python2-pandas)
       ("python2-pysnptools" ,python2-pysnptools)
       ("python2-scikit-learn" ,python2-scikit-learn)
       ("python2-scipy" ,python2-scipy)))
    (home-page "http://research.microsoft.com/en-us/um/redmond/projects/mscompbio/fastlmm/")
    (synopsis "Perform genome-wide association studies on large data sets")
    (description
     "FaST-LMM, which stands for Factored Spectrally Transformed Linear Mixed
Models, is a program for performing both single-SNP and SNP-set genome-wide
association studies (GWAS) on extremely large data sets.")
    (license license:asl2.0)))
1004
;; There have been no proper releases yet.
(define-public kaldi
  (let ((commit "2f95609f0bb085bd3a1dc5eb0a39f3edea59e606")
        (revision "1"))
    (package
      (name "kaldi")
      (version (git-version "0" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/kaldi-asr/kaldi.git")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "082qh3pfi7hvncylp4xsmkfahbd7gb0whdfa4rwrx7fxk9rdh3kz"))))
      (build-system gnu-build-system)
      (arguments
       `(#:test-target "test"
         #:phases
         (modify-phases %standard-phases
           ;; All the build machinery lives in the "src" sub-directory.
           (add-after 'unpack 'chdir
             (lambda _ (chdir "src") #t))
           (replace 'configure
             (lambda* (#:key build system inputs outputs #:allow-other-keys)
               ;; Drop SSE compiler flags on non-x86 systems.
               (when (not (or (string-prefix? "x86_64" system)
                              (string-prefix? "i686" system)))
                 (substitute* "makefiles/linux_openblas.mk"
                   (("-msse -msse2") "")))
               (substitute* "makefiles/default_rules.mk"
                 (("/bin/bash") (which "bash")))
               ;; Skip the portaudio check; portaudio comes from an input
               ;; rather than the in-tree "tools" directory.
               (substitute* "Makefile"
                 (("ext_depend: check_portaudio")
                  "ext_depend:"))
               (substitute* '("online/Makefile"
                              "onlinebin/Makefile"
                              "gst-plugin/Makefile")
                 (("../../tools/portaudio/install")
                  (assoc-ref inputs "portaudio")))

               ;; This `configure' script doesn't support variables passed as
               ;; arguments, nor does it support "prefix".
               (let ((out (assoc-ref outputs "out"))
                     (openblas (assoc-ref inputs "openblas"))
                     (openfst (assoc-ref inputs "openfst")))
                 (substitute* "configure"
                   (("check_for_slow_expf;") "")
                   ;; This affects the RPATH and also serves as the installation
                   ;; directory.
                   (("KALDILIBDIR=`pwd`/lib")
                    (string-append "KALDILIBDIR=" out "/lib")))
                 (mkdir-p out) ; must exist
                 (setenv "CONFIG_SHELL" (which "bash"))
                 ;; The OpenFst version is spliced in at package-definition
                 ;; time via unquote.
                 (setenv "OPENFST_VER" ,(package-version openfst))
                 (invoke "./configure"
                         "--use-cuda=no"
                         "--shared"
                         (string-append "--openblas-root=" openblas)
                         (string-append "--fst-root=" openfst)))))
           ;; These extra components are not built by the default target.
           (add-after 'build 'build-ext-and-gstreamer-plugin
             (lambda _
               (invoke "make" "-C" "online" "depend")
               (invoke "make" "-C" "online")
               (invoke "make" "-C" "onlinebin" "depend")
               (invoke "make" "-C" "onlinebin")
               (invoke "make" "-C" "gst-plugin" "depend")
               (invoke "make" "-C" "gst-plugin")
               #t))
           ;; TODO: also install the executables.
           (replace 'install
             (lambda* (#:key outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (inc (string-append out "/include"))
                      (lib (string-append out "/lib")))
                 (mkdir-p lib)
                 ;; The build phase installed symlinks to the actual
                 ;; libraries.  Install the actual targets.
                 (for-each (lambda (file)
                             (let ((target (readlink file)))
                               (delete-file file)
                               (install-file target lib)))
                           (find-files lib "\\.so"))
                 ;; Install headers
                 (for-each (lambda (file)
                             (let ((target-dir (string-append inc "/" (dirname file))))
                               (install-file file target-dir)))
                           (find-files "." "\\.h"))
                 (install-file "gst-plugin/libgstonlinegmmdecodefaster.so"
                               (string-append lib "/gstreamer-1.0"))
                 #t))))))
      (inputs
       `(("alsa-lib" ,alsa-lib)
         ("gfortran" ,gfortran "lib")
         ("glib" ,glib)
         ("gstreamer" ,gstreamer)
         ("jack" ,jack-1)
         ("openblas" ,openblas)
         ("openfst" ,openfst)
         ("portaudio" ,portaudio)
         ("python" ,python)))
      (native-inputs
       `(("glib" ,glib "bin") ; glib-genmarshal
         ("grep" ,grep)
         ("sed" ,sed)
         ("pkg-config" ,pkg-config)
         ("which" ,which)))
      (home-page "https://kaldi-asr.org/")
      (synopsis "Speech recognition toolkit")
      (description "Kaldi is an extensible toolkit for speech recognition
written in C++.")
      (license license:asl2.0))))
1116
(define-public gst-kaldi-nnet2-online
  (let ((commit "617e43e73c7cc45eb9119028c02bd4178f738c4a")
        (revision "1"))
    (package
      (name "gst-kaldi-nnet2-online")
      (version (git-version "0" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/alumae/gst-kaldi-nnet2-online.git")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "0xh3w67b69818s6ib02ara4lw7wamjdmh4jznvkpzrs4skbs9jx9"))))
      (build-system gnu-build-system)
      (arguments
       `(#:tests? #f ; there are none
         ;; The plugin is built against the Kaldi *source* tree ("kaldi-src")
         ;; for headers, but linked against the libraries installed by the
         ;; "kaldi" package.
         #:make-flags
         (list (string-append "SHELL="
                              (assoc-ref %build-inputs "bash") "/bin/bash")
               (string-append "KALDI_ROOT="
                              (assoc-ref %build-inputs "kaldi-src"))
               (string-append "KALDILIBDIR="
                              (assoc-ref %build-inputs "kaldi") "/lib")
               "KALDI_FLAVOR=dynamic")
         #:phases
         (modify-phases %standard-phases
           ;; The Makefile lives in the "src" sub-directory.
           (add-after 'unpack 'chdir
             (lambda _ (chdir "src") #t))
           (replace 'configure
             (lambda* (#:key inputs #:allow-other-keys)
               (let ((glib (assoc-ref inputs "glib")))
                 (setenv "CXXFLAGS" "-fPIC")
                 (setenv "CPLUS_INCLUDE_PATH"
                         (string-append glib "/include/glib-2.0:"
                                        glib "/lib/glib-2.0/include:"
                                        (assoc-ref inputs "gstreamer")
                                        "/include/gstreamer-1.0")))
               ;; Don't pull in kaldi.mk from the source checkout, and
               ;; neutralize the corresponding $(error ...) guard.
               (substitute* "Makefile"
                 (("include \\$\\(KALDI_ROOT\\)/src/kaldi.mk") "")
                 (("\\$\\(error Cannot find") "#"))
               #t))
           ;; Generate dependency files before the main build.
           (add-before 'build 'build-depend
             (lambda* (#:key make-flags #:allow-other-keys)
               (apply invoke "make" "depend" make-flags)))
           ;; There is no "install" target; copy the plugin manually into
           ;; the GStreamer 1.0 plugin directory.
           (replace 'install
             (lambda* (#:key outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (lib (string-append out "/lib/gstreamer-1.0")))
                 (install-file "libgstkaldinnet2onlinedecoder.so" lib)
                 #t))))))
      (inputs
       `(("glib" ,glib)
         ("gstreamer" ,gstreamer)
         ("jansson" ,jansson)
         ("openfst" ,openfst)
         ("kaldi" ,kaldi)))
      (native-inputs
       `(("bash" ,bash)
         ("glib:bin" ,glib "bin") ; glib-genmarshal
         ("kaldi-src" ,(package-source kaldi))
         ("pkg-config" ,pkg-config)))
      (home-page "https://kaldi-asr.org/")
      (synopsis "Gstreamer plugin for decoding speech")
      (description "This package provides a GStreamer plugin that wraps
Kaldi's @code{SingleUtteranceNnet2Decoder}. It requires iVector-adapted DNN
acoustic models. The iVectors are adapted to the current audio stream
automatically.")
      (license license:asl2.0))))
1187
(define-public kaldi-gstreamer-server
  (let ((commit "1735ba49c5dc0ebfc184e45105fc600cd9f1f508")
        (revision "1"))
    (package
      (name "kaldi-gstreamer-server")
      (version (git-version "0" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/alumae/kaldi-gstreamer-server.git")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "0j701m7lbwmzqxsfanj882v7881hrbmpqybbczbxqpcbg8q34w0k"))))
      (build-system gnu-build-system)
      (arguments
       `(#:tests? #f ; there are no tests that can be run automatically
         ;; (srfi srfi-26) provides CUT, used in the install phase.
         #:modules ((guix build utils)
                    (guix build gnu-build-system)
                    (srfi srfi-26))
         #:phases
         (modify-phases %standard-phases
           ;; Pure Python; there is nothing to configure.
           (delete 'configure)
           ;; "Building" here means byte-compiling the Python sources.
           (replace 'build
             (lambda* (#:key outputs #:allow-other-keys)
               ;; Disable hash randomization to ensure the generated .pycs
               ;; are reproducible.
               (setenv "PYTHONHASHSEED" "0")
               (with-directory-excursion "kaldigstserver"
                 (for-each (lambda (file)
                             (apply invoke
                                    `("python"
                                      "-m" "compileall"
                                      "-f" ; force rebuild
                                      ,file)))
                           (find-files "." "\\.py$")))
               #t))
           (replace 'install
             (lambda* (#:key inputs outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (bin (string-append out "/bin"))
                      (share (string-append out "/share/kaldi-gstreamer-server/")))
                 ;; Install Python files
                 (with-directory-excursion "kaldigstserver"
                   (for-each (cut install-file <> share)
                             (find-files "." ".*")))

                 ;; Install sample configuration files
                 (for-each (cut install-file <> share)
                           (find-files "." "\\.yaml"))

                 ;; Install executables
                 (mkdir-p bin)
                 (let* ((server (string-append bin "/kaldi-gst-server"))
                        (client (string-append bin "/kaldi-gst-client"))
                        (worker (string-append bin "/kaldi-gst-worker"))
                        (PYTHONPATH (getenv "PYTHONPATH"))
                        (GST_PLUGIN_PATH (string-append
                                          (assoc-ref inputs "gst-kaldi-nnet2-online")
                                          "/lib/gstreamer-1.0:${GST_PLUGIN_PATH}"))
                        ;; WRAP writes a small shell script that sets up the
                        ;; environment and then runs the given Python file
                        ;; from the share directory.
                        (wrap (lambda (wrapper what)
                                (with-output-to-file wrapper
                                  (lambda _
                                    (format #t
                                            "#!~a
export PYTHONPATH=~a
export GST_PLUGIN_PATH=~a
exec ~a ~a/~a \"$@\"~%"
                                            (which "bash") PYTHONPATH GST_PLUGIN_PATH
                                            (which "python") share what)))
                                (chmod wrapper #o555))))
                   (for-each wrap
                             (list server client worker)
                             (list "master_server.py"
                                   "client.py"
                                   "worker.py")))
                 #t))))))
      (inputs
       `(("gst-kaldi-nnet2-online" ,gst-kaldi-nnet2-online)
         ("python2" ,python-2)
         ("python2-futures" ,python2-futures)
         ("python2-pygobject" ,python2-pygobject)
         ("python2-pyyaml" ,python2-pyyaml)
         ("python2-tornado" ,python2-tornado)
         ("python2-ws4py" ,python2-ws4py-for-kaldi-gstreamer-server)))
      (home-page "https://github.com/alumae/kaldi-gstreamer-server")
      (synopsis "Real-time full-duplex speech recognition server")
      (description "This is a real-time full-duplex speech recognition server,
based on the Kaldi toolkit and the GStreamer framework and implemented in
Python.")
      (license license:bsd-2))))
1280
(define-public grpc
  (package
    (name "grpc")
    (version "1.16.1")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/grpc/grpc.git")
             (commit (string-append "v" version))))
       (file-name (git-file-name name version))
       (sha256
        (base32
         "1jimqz3115f9pli5w6ik9wi7mjc7ix6y7yrq4a1ab9fc3dalj7p2"))))
    (build-system cmake-build-system)
    (arguments
     `(#:tests? #f ; no test target
       ;; Build against Guix-provided libraries rather than bundled copies.
       #:configure-flags
       (list "-DgRPC_ZLIB_PROVIDER=package"
             "-DgRPC_CARES_PROVIDER=package"
             "-DgRPC_SSL_PROVIDER=package"
             "-DgRPC_PROTOBUF_PROVIDER=package")))
    (inputs
     `(("c-ares" ,c-ares/cmake)
       ("openssl" ,openssl)
       ("zlib" ,zlib)))
    (native-inputs
     `(("protobuf" ,protobuf)
       ("python" ,python-wrapper)))
    (home-page "https://grpc.io")
    (synopsis "High performance universal RPC framework")
    (description "gRPC is a modern high performance @dfn{Remote Procedure Call}
(RPC) framework that can run in any environment. It can efficiently connect
services in and across data centers with pluggable support for load balancing,
tracing, health checking and authentication. It is also applicable in last
mile of distributed computing to connect devices, mobile applications and
browsers to backend services.")
    (license license:asl2.0)))
1318
;; Note that Tensorflow includes a "third_party" directory, which seems to not
;; only contain modified subsets of upstream library source code, but also
;; adapter headers provided by Google (such as the fft.h header, which is not
;; part of the upstream project code). The Tensorflow code includes headers
;; from the "third_party" directory. It does not look like we can replace
;; these headers with unmodified upstream files, so we keep them.
(define-public tensorflow
  (package
    (name "tensorflow")
    (version "1.9.0")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/tensorflow/tensorflow.git")
             (commit (string-append "v" version))))
       (file-name (string-append "tensorflow-" version "-checkout"))
       (sha256
        (base32
         "0a9kwha395g3wgxfwln5j8vn9nkspmd75xldrlqdq540w996g8xa"))))
    (build-system cmake-build-system)
    (arguments
     `(#:tests? #f                      ; no "check" target
       #:build-type "Release"
       #:configure-flags
       (let ((protobuf (assoc-ref %build-inputs "protobuf"))
             (protobuf:native (assoc-ref %build-inputs "protobuf:native"))
             (jsoncpp (assoc-ref %build-inputs "jsoncpp"))
             (snappy (assoc-ref %build-inputs "snappy"))
             (sqlite (assoc-ref %build-inputs "sqlite")))
         (list
          ;; Use protobuf from Guix
          (string-append "-Dprotobuf_STATIC_LIBRARIES="
                         protobuf "/lib/libprotobuf.so")
          (string-append "-DPROTOBUF_PROTOC_EXECUTABLE="
                         protobuf:native "/bin/protoc")

          ;; Use snappy from Guix
          (string-append "-Dsnappy_STATIC_LIBRARIES="
                         snappy "/lib/libsnappy.so")
          ;; Yes, this is not actually the include directory but a prefix...
          (string-append "-Dsnappy_INCLUDE_DIR=" snappy)

          ;; Use jsoncpp from Guix
          (string-append "-Djsoncpp_STATIC_LIBRARIES="
                         jsoncpp "/lib/libjsoncpp.so")
          ;; Yes, this is not actually the include directory but a prefix...
          (string-append "-Djsoncpp_INCLUDE_DIR=" jsoncpp)

          ;; Use sqlite from Guix
          (string-append "-Dsqlite_STATIC_LIBRARIES="
                         sqlite "/lib/libsqlite.a")

          ;; Use system libraries wherever possible. Currently, this
          ;; only affects zlib.
          "-Dsystemlib_ALL=ON"
          "-Dtensorflow_ENABLE_POSITION_INDEPENDENT_CODE=ON"
          "-Dtensorflow_BUILD_SHARED_LIB=ON"
          "-Dtensorflow_OPTIMIZE_FOR_NATIVE_ARCH=OFF"
          "-Dtensorflow_ENABLE_SSL_SUPPORT=OFF"
          "-Dtensorflow_BUILD_CONTRIB_KERNELS=OFF"))
       #:make-flags
       (list "CC=gcc")
       #:modules ((ice-9 ftw)
                  (guix build utils)
                  (guix build cmake-build-system))
       #:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'set-source-file-times-to-1980
           ;; At the end of the tf_python_build_pip_package target, a ZIP
           ;; archive should be generated via bdist_wheel, but it fails with
           ;; "ZIP does not support timestamps before 1980". Luckily,
           ;; SOURCE_DATE_EPOCH is respected, which we set to some time in
           ;; 1980.
           (lambda _ (setenv "SOURCE_DATE_EPOCH" "315532800") #t))
         ;; See https://github.com/tensorflow/tensorflow/issues/20517#issuecomment-406373913
         (add-after 'unpack 'python3.7-compatibility
           (lambda _
             (substitute* '("tensorflow/python/eager/pywrap_tfe_src.cc"
                            "tensorflow/python/lib/core/ndarray_tensor.cc"
                            "tensorflow/python/lib/core/py_func.cc")
               (("PyUnicode_AsUTF8") "(char *)PyUnicode_AsUTF8"))
             ;; "async" is a reserved word in Python 3.7.
             (substitute* "tensorflow/c/eager/c_api.h"
               (("unsigned char async")
                "unsigned char is_async"))

             ;; Remove dependency on tensorboard, a complicated but probably
             ;; optional package.
             (substitute* "tensorflow/tools/pip_package/setup.py"
               ((".*'tensorboard >.*") ""))
             #t))
         (add-after 'python3.7-compatibility 'chdir
           (lambda _ (chdir "tensorflow/contrib/cmake") #t))
         (add-after 'chdir 'disable-downloads
           (lambda* (#:key inputs #:allow-other-keys)
             ;; Neutralize the download machinery of the bundled external
             ;; projects; their sources are provided as inputs instead.
             (substitute* (find-files "external" "\\.cmake$")
               (("GIT_REPOSITORY.*") "")
               (("GIT_TAG.*") "")
               (("PREFIX ")
                "DOWNLOAD_COMMAND \"\"\nPREFIX "))

             ;; Use packages from Guix
             (let ((grpc (assoc-ref inputs "grpc")))
               (substitute* "CMakeLists.txt"
                 ;; Sqlite
                 (("include\\(sqlite\\)") "")
                 (("\\$\\{sqlite_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "sqlite")
                                 "/lib/libsqlite3.so"))
                 (("sqlite_copy_headers_to_destination") "")

                 ;; PNG
                 (("include\\(png\\)") "")
                 (("\\$\\{png_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "libpng")
                                 "/lib/libpng16.so"))
                 (("png_copy_headers_to_destination") "")

                 ;; JPEG
                 (("include\\(jpeg\\)") "")
                 (("\\$\\{jpeg_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "libjpeg")
                                 "/lib/libjpeg.so"))
                 (("jpeg_copy_headers_to_destination") "")

                 ;; GIF
                 (("include\\(gif\\)") "")
                 (("\\$\\{gif_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "giflib")
                                 "/lib/libgif.so"))
                 (("gif_copy_headers_to_destination") "")

                 ;; lmdb
                 (("include\\(lmdb\\)") "")
                 (("\\$\\{lmdb_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "lmdb")
                                 "/lib/liblmdb.so"))
                 (("lmdb_copy_headers_to_destination") "")

                 ;; Protobuf
                 (("include\\(protobuf\\)") "")
                 (("protobuf_copy_headers_to_destination") "")
                 (("^ +protobuf") "")

                 ;; gRPC
                 (("include\\(grpc\\)")
                  "find_package(grpc REQUIRED NAMES gRPC)")
                 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES grpc\\)") "")

                 ;; Eigen
                 (("include\\(eigen\\)")
                  (string-append "find_package(eigen REQUIRED NAMES Eigen3)
set(eigen_INCLUDE_DIRS ${CMAKE_CURRENT_BINARY_DIR}/external/eigen_archive "
                                 (assoc-ref inputs "eigen") "/include/eigen3)"))
                 (("^ +eigen") "")

                 ;; snappy
                 (("include\\(snappy\\)")
                  "add_definitions(-DTF_USE_SNAPPY)")
                 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES snappy\\)") "")

                 ;; jsoncpp
                 (("include\\(jsoncpp\\)") "")
                 (("^ +jsoncpp") ""))

               (substitute* "tf_core_framework.cmake"
                 ((" grpc") "")
                 (("\\$\\{GRPC_BUILD\\}/grpc_cpp_plugin")
                  (which "grpc_cpp_plugin"))
                 ;; Link with gRPC libraries
                 (("add_library\\(tf_protos_cc.*" m)
                  (string-append m
                                 (format #f "\ntarget_link_libraries(tf_protos_cc PRIVATE \
~a/lib/libgrpc++_unsecure.a \
~a/lib/libgrpc_unsecure.a \
~a/lib/libaddress_sorting.a \
~a/lib/libgpr.a \
~a/lib/libcares.so
)\n"
                                         grpc grpc grpc grpc
                                         (assoc-ref inputs "c-ares"))))))
             (substitute* "tf_tools.cmake"
               (("add_dependencies\\(\\$\\{proto_text.*") ""))
             ;; Remove dependency on bundled grpc
             (substitute* "tf_core_distributed_runtime.cmake"
               (("tf_core_cpu grpc") "tf_core_cpu"))

             ;; This directory is a dependency of many targets.
             (mkdir-p "protobuf")
             #t))
         (add-after 'configure 'unpack-third-party-sources
           (lambda* (#:key inputs #:allow-other-keys)
             ;; This is needed to configure bundled packages properly.
             (setenv "CONFIG_SHELL" (which "bash"))
             ;; Unpack each "<name>-src" input into the directory where the
             ;; CMake external-project machinery expects to find it, mapping
             ;; "-" to "_" in directory names.
             (for-each
              (lambda (name)
                (let* ((what  (assoc-ref inputs (string-append name "-src")))
                       (name* (string-map (lambda (c)
                                            (if (char=? c #\-)
                                                #\_ c)) name))
                       (where (string-append "../build/" name* "/src/" name*)))
                  (cond
                   ((string-suffix? ".zip" what)
                    (mkdir-p where)
                    (with-directory-excursion where
                      (invoke "unzip" what)))
                   ((string-suffix? ".tar.gz" what)
                    (mkdir-p where)
                    (invoke "tar" "xf" what
                            "-C" where "--strip-components=1"))
                   (else
                    ;; A checkout directory: copy it and make it writable so
                    ;; the build can patch files in place.
                    (let ((parent (dirname where)))
                      (mkdir-p parent)
                      (with-directory-excursion parent
                        (when (file-exists? name*)
                          (delete-file-recursively name*))
                        (copy-recursively what name*)
                        (for-each make-file-writable
                                  (find-files name* ".*"))))))))
              (list "boringssl"
                    "cub"
                    "double-conversion"
                    "farmhash"
                    "fft2d"
                    "highwayhash"
                    "nsync"
                    "re2"))

             (rename-file "../build/cub/src/cub/cub-1.8.0/"
                          "../build/cub/src/cub/cub/")
             #t))
         (add-after 'unpack 'fix-python-build
           (lambda* (#:key inputs outputs #:allow-other-keys)
             (mkdir-p "protobuf-src")
             (invoke "tar" "xf" (assoc-ref inputs "protobuf:src")
                     "-C" "protobuf-src" "--strip-components=1")
             (mkdir-p "eigen-src")
             (invoke "tar" "xf" (assoc-ref inputs "eigen:src")
                     "-C" "eigen-src" "--strip-components=1")

             (substitute* "tensorflow/contrib/cmake/tf_python.cmake"
               ;; Ensure that all Python dependencies can be found at build time.
               (("PYTHONPATH=\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/tf_python" m)
                (string-append m ":" (getenv "PYTHONPATH")))
               ;; Take protobuf source files from our source package.
               (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/protobuf/src/protobuf/src/google")
                (string-append (getcwd) "/protobuf-src/src/google")))

             (substitute* '("tensorflow/contrib/cmake/tf_shared_lib.cmake"
                            "tensorflow/contrib/cmake/tf_python.cmake")
               ;; Take Eigen source files from our source package.
               (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/eigen/src/eigen/")
                (string-append (getcwd) "/eigen-src/"))
               ;; Take Eigen headers from our own package.
               (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/external/eigen_archive")
                (string-append (assoc-ref inputs "eigen") "/include/eigen3")))

             ;; Correct the RUNPATH of ops libraries generated for Python.
             ;; TODO: this doesn't work :(
             ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
             ;; warning: RUNPATH contains bogus entries: ("/tmp/guix-build-tensorflow-1.9.0.drv-0/source/tensorflow/contrib/build")
             ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
             ;; error: depends on 'libpywrap_tensorflow_internal.so', which
             ;; cannot be found in RUNPATH ...
             (substitute* "tensorflow/contrib/cmake/tf_cc_ops.cmake"
               (("set_target_properties.*")
                (string-append "set_target_properties(${_AT_TARGET} PROPERTIES \
COMPILE_FLAGS ${target_compile_flags} \
INSTALL_RPATH_USE_LINK_PATH TRUE \
INSTALL_RPATH " (assoc-ref outputs "out") "/lib)\n")))
             #t))
         (add-after 'build 'build-pip-package
           (lambda* (#:key outputs #:allow-other-keys)
             (setenv "LDFLAGS"
                     (string-append "-Wl,-rpath="
                                    (assoc-ref outputs "out") "/lib"))
             (invoke "make" "tf_python_build_pip_package")
             #t))
         (add-after 'build-pip-package 'install-python
           (lambda* (#:key outputs #:allow-other-keys)
             (let ((out (assoc-ref outputs "out"))
                   (wheel (car (find-files "../build/tf_python/dist/" "\\.whl$"))))
               (invoke "python" "-m" "pip" "install" wheel
                       (string-append "--prefix=" out))

               ;; XXX: broken RUNPATH, see fix-python-build phase.
               (delete-file
                (string-append
                 out "/lib/python3.7/site-packages/tensorflow/contrib/"
                 "seq2seq/python/ops/lib_beam_search_ops.so"))
               #t))))))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("protobuf:native" ,protobuf-3.6) ; protoc
       ("protobuf:src" ,(package-source protobuf-3.6))
       ("eigen:src" ,(package-source eigen-for-tensorflow))
       ;; install_pip_packages.sh wants setuptools 39.1.0 specifically.
       ("python-setuptools" ,python-setuptools-for-tensorflow)

       ;; The commit hashes and URLs for third-party source code are taken
       ;; from "tensorflow/workspace.bzl".
       ("boringssl-src"
        ,(let ((commit "ee7aa02")
               (revision "1"))
           (origin
             (method git-fetch)
             (uri (git-reference
                   (url "https://boringssl.googlesource.com/boringssl")
                   (commit commit)))
             (file-name (string-append "boringssl-0-" revision
                                       (string-take commit 7)
                                       "-checkout"))
             (sha256
              (base32
               "1jf693q0nw0adsic6cgmbdx6g7wr4rj4vxa8j1hpn792fqhd8wgw")))))
       ("cub-src"
        ,(let ((version "1.8.0"))
           (origin
             (method url-fetch)
             (uri (string-append "https://mirror.bazel.build/github.com/NVlabs/"
                                 "cub/archive/" version ".zip"))
             (file-name (string-append "cub-" version ".zip"))
             (sha256
              (base32
               "1hsqikqridb90dkxkjr2918dcry6pfh46ccnwrzawl56aamhdykb")))))
       ("double-conversion-src"
        ,(let ((commit "5664746")
               (revision "1"))
           (origin
             (method git-fetch)
             (uri (git-reference
                   (url "https://github.com/google/double-conversion.git")
                   (commit commit)))
             (file-name
              (git-file-name "double-conversion"
                             (string-append "0-" revision "."
                                            (string-take commit 7))))
             (sha256
              (base32
               "1h5lppqqxcvdg5jq42i5msgwx20ryij3apvmndflngrgdpc04gn1")))))
       ("farmhash-src"
        ,(let ((commit "816a4ae622e964763ca0862d9dbd19324a1eaf45"))
           (origin
             (method url-fetch)
             (uri (string-append
                   "https://mirror.bazel.build/github.com/google/farmhash/archive/"
                   commit ".tar.gz"))
             (file-name (string-append "farmhash-0-" (string-take commit 7)
                                       ".tar.gz"))
             (sha256
              (base32
               "185b2xdxl4d4cnsnv6abg8s22gxvx8673jq2yaq85bz4cdy58q35")))))
       ;; The license notice on the home page at
       ;; http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html says:
       ;; Copyright Takuya OOURA, 1996-2001
       ;;
       ;; You may use, copy, modify and distribute this code for any purpose
       ;; (include commercial use) and without fee. Please refer to this
       ;; package when you modify this code.
       ;;
       ;; We take the identical tarball from the Bazel mirror, because the URL
       ;; at the home page is not versioned and might change.
       ("fft2d-src"
        ,(origin
           (method url-fetch)
           (uri "https://mirror.bazel.build/www.kurims.kyoto-u.ac.jp/~ooura/fft.tgz")
           (file-name "fft2d.tar.gz")
           (sha256
            (base32
             "15jjkfvhqvl2c0753d2di8hz0pyzn598g74wqy79awdrf1y67fsj"))))
       ("highwayhash-src"
        ,(let ((commit "be5edafc2e1a455768e260ccd68ae7317b6690ee")
               (revision "1"))
           (origin
             (method git-fetch)
             (uri (git-reference
                   (url "https://github.com/google/highwayhash.git")
                   (commit commit)))
             (file-name (string-append "highwayhash-0-" revision
                                       (string-take commit 7)
                                       "-checkout"))
             (sha256
              (base32
               "154jwf98cyy54hldr94pgjn85zynly3abpnc1avmb8a18lzwjyb6")))))
       ("nsync-src"
        ,(let ((version "0559ce013feac8db639ee1bf776aca0325d28777")
               (revision "1"))
           (origin
             (method url-fetch)
             (uri (string-append "https://mirror.bazel.build/"
                                 "github.com/google/nsync/archive/"
                                 version ".tar.gz"))
             (file-name (string-append "nsync-0." revision
                                       "-" (string-take version 7)
                                       ".tar.gz"))
             (sha256
              (base32
               "0qdkyqym34x739mmzv97ah5r7ph462v5xkxqxvidmcfqbi64b132")))))
       ("re2-src"
        ,(let ((commit "e7efc48")
               (revision "1"))
           (origin
             (method git-fetch)
             (uri (git-reference
                   (url "https://github.com/google/re2")
                   (commit commit)))
             (file-name (string-append "re2-0-" revision
                                       (string-take commit 7)
                                       "-checkout"))
             (sha256
              (base32
               "161g9841rjfsy5pn52fcis0s9hdr7rxvb06pad38j5rppfihvign")))))
       ("googletest" ,googletest)
       ("swig" ,swig)
       ("unzip" ,unzip)))
    (propagated-inputs
     `(("python-absl-py" ,python-absl-py)
       ("python-astor" ,python-astor)
       ("python-gast" ,python-gast)
       ("python-grpcio" ,python-grpcio)
       ("python-numpy" ,python-numpy)
       ("python-protobuf" ,python-protobuf-3.6)
       ("python-six" ,python-six)
       ("python-termcolor" ,python-termcolor)
       ("python-wheel" ,python-wheel)))
    (inputs
     `(("c-ares" ,c-ares)
       ("eigen" ,eigen-for-tensorflow)
       ("gemmlowp" ,gemmlowp-for-tensorflow)
       ("lmdb" ,lmdb)
       ("libjpeg" ,libjpeg)
       ("libpng" ,libpng)
       ("giflib" ,giflib)
       ("grpc" ,grpc)
       ("jsoncpp" ,jsoncpp-for-tensorflow)
       ("snappy" ,snappy)
       ("sqlite" ,sqlite)
       ("protobuf" ,protobuf-3.6)
       ("python" ,python-wrapper)
       ("zlib" ,zlib)))
    (home-page "https://tensorflow.org")
    (synopsis "Machine learning framework")
    (description
     "TensorFlow is a flexible platform for building and training machine
learning models. It provides a library for high performance numerical
computation and includes high level Python APIs, including both a sequential
API for beginners that allows users to build models quickly by plugging
together building blocks and a subclassing API with an imperative style for
advanced research.")
    (license license:asl2.0)))
1769
(define-public python-iml
  (package
    (name "python-iml")
    (version "0.6.2")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "iml" version))
       (sha256
        (base32
         "1k8szlpm19rcwcxdny9qdm3gmaqq8akb4xlvrzyz8c2d679aak6l"))))
    (build-system python-build-system)
    (propagated-inputs
     ;; Labels follow the "python-*" convention used throughout this file.
     `(("python-ipython" ,python-ipython)
       ("python-nose" ,python-nose)
       ("python-numpy" ,python-numpy)
       ("python-pandas" ,python-pandas)
       ("python-scipy" ,python-scipy)))
    ;; Use HTTPS; GitHub redirects plain HTTP anyway.
    (home-page "https://github.com/interpretable-ml/iml")
    (synopsis "Interpretable Machine Learning (iML) package")
    (description "Interpretable ML (iML) is a set of data type objects,
visualizations, and interfaces that can be used by any method designed to
explain the predictions of machine learning models (or really the output of
any function). It currently contains the interface and IO code from the Shap
project, and it will potentially also do the same for the Lime project.")
    (license license:expat)))
1796
(define-public python-keras-applications
  (package
    (name "python-keras-applications")
    (version "1.0.8")
    (source (origin
              (method url-fetch)
              (uri (pypi-uri "Keras_Applications" version))
              (sha256
               (base32
                "1rcz31ca4axa6kzhjx4lwqxbg4wvlljkj8qj9a7p9sfd5fhzjyam"))))
    (build-system python-build-system)
    ;; The tests require Keras, but this package is needed to build Keras.
    (arguments '(#:tests? #f))
    (propagated-inputs
     `(("python-h5py" ,python-h5py)
       ("python-numpy" ,python-numpy)))
    (native-inputs
     `(("python-pytest" ,python-pytest)
       ("python-pytest-cov" ,python-pytest-cov)
       ("python-pytest-pep8" ,python-pytest-pep8)
       ("python-pytest-xdist" ,python-pytest-xdist)))
    (synopsis "Reference implementations of popular deep learning models")
    (description
     "This package provides reference implementations of popular deep learning
models for use with the Keras deep learning framework.")
    (home-page "https://github.com/keras-team/keras-applications")
    (license license:expat)))
1825
(define-public python-keras-preprocessing
  (package
    (name "python-keras-preprocessing")
    (version "1.1.0")
    (source (origin
              (method url-fetch)
              (uri (pypi-uri "Keras_Preprocessing" version))
              (sha256
               (base32
                "1r98nm4k1svsqjyaqkfk23i31bl1kcfcyp7094yyj3c43phfp3as"))))
    (build-system python-build-system)
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-six" ,python-six)))
    ;; These inputs are only needed at build time, for the test suite.
    (native-inputs
     `(("python-pandas" ,python-pandas)
       ("python-pillow" ,python-pillow)
       ("python-pytest" ,python-pytest)
       ("python-pytest-cov" ,python-pytest-cov)
       ("python-pytest-xdist" ,python-pytest-xdist)
       ("tensorflow" ,tensorflow)))
    (synopsis "Data preprocessing and augmentation for deep learning models")
    (description
     "Keras Preprocessing is the data preprocessing and data augmentation
module of the Keras deep learning library. It provides utilities for working
with image data, text data, and sequence data.")
    (home-page "https://github.com/keras-team/keras-preprocessing/")
    (license license:expat)))
1855
(define-public python-keras
  (package
    (name "python-keras")
    (version "2.2.4")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "Keras" version))
       (patches (search-patches "python-keras-integration-test.patch"))
       (sha256
        (base32
         "1j8bsqzh49vjdxy6l1k4iwax5vpjzniynyd041xjavdzvfii1dlh"))))
    (build-system python-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'remove-tests-for-unavailable-features
           (lambda _
             ;; Drop the Theano and CNTK backends along with the generic
             ;; backend tests that would exercise them.
             (delete-file "keras/backend/theano_backend.py")
             (delete-file "keras/backend/cntk_backend.py")
             (delete-file "tests/keras/backend/backend_test.py")

             ;; FIXME: This doesn't work because Tensorflow is missing the
             ;; coder ops library.
             (delete-file "tests/keras/test_callbacks.py")
             #t))
         (replace 'check
           (lambda _
             ;; These tests attempt to download data files from the internet.
             (delete-file "tests/integration_tests/test_datasets.py")
             (delete-file "tests/integration_tests/imagenet_utils_test.py")

             ;; Make the freshly built modules importable by the test suite.
             ;; Guard against PYTHONPATH being unset: GETENV returns #f in
             ;; that case, which STRING-APPEND would reject.
             (setenv "PYTHONPATH"
                     (string-append (getcwd) "/build/lib:"
                                    (or (getenv "PYTHONPATH") "")))
             (invoke "py.test" "-v"
                     "-p" "no:cacheprovider"
                     "--ignore" "keras/utils"))))))
    (propagated-inputs
     `(("python-h5py" ,python-h5py)
       ("python-keras-applications" ,python-keras-applications)
       ("python-keras-preprocessing" ,python-keras-preprocessing)
       ("python-numpy" ,python-numpy)
       ("python-pydot" ,python-pydot)
       ("python-pyyaml" ,python-pyyaml)
       ("python-scipy" ,python-scipy)
       ("python-six" ,python-six)
       ("tensorflow" ,tensorflow)
       ("graphviz" ,graphviz)))
    (native-inputs
     `(("python-pandas" ,python-pandas)
       ("python-pytest" ,python-pytest)
       ("python-pytest-cov" ,python-pytest-cov)
       ("python-pytest-pep8" ,python-pytest-pep8)
       ("python-pytest-timeout" ,python-pytest-timeout)
       ("python-pytest-xdist" ,python-pytest-xdist)
       ("python-sphinx" ,python-sphinx)
       ("python-requests" ,python-requests)))
    (home-page "https://github.com/keras-team/keras")
    (synopsis "High-level deep learning framework")
    (description "Keras is a high-level neural networks API, written in Python
and capable of running on top of TensorFlow. It was developed with a focus on
enabling fast experimentation. Use Keras if you need a deep learning library
that:

@itemize
@item Allows for easy and fast prototyping (through user friendliness,
modularity, and extensibility).
@item Supports both convolutional networks and recurrent networks, as well as
combinations of the two.
@item Runs seamlessly on CPU and GPU.
@end itemize\n")
    (license license:expat)))
1929
(define-public sbcl-cl-libsvm-format
  ;; No release yet; pin a specific upstream commit.
  (let ((commit "3300f84fd8d9f5beafc114f543f9d83417c742fb")
        (revision "0"))
    (package
      (name "sbcl-cl-libsvm-format")
      (version (git-version "0.1.0" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/masatoi/cl-libsvm-format.git")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "0284aj84xszhkhlivaigf9qj855fxad3mzmv3zfr0qzb5k0nzwrg"))))
      (build-system asdf-build-system/sbcl)
      (inputs
       `(("alexandria" ,sbcl-alexandria)))
      ;; Prove is only needed to run the tests.
      (native-inputs
       `(("prove" ,sbcl-prove)
         ("prove-asdf" ,sbcl-prove-asdf)))
      (home-page "https://github.com/masatoi/cl-libsvm-format")
      (synopsis "LibSVM data format reader for Common Lisp")
      (description
       "This Common Lisp library provides a fast reader for data in LibSVM
format.")
      (license license:expat))))
1958
;; Source-only variant of sbcl-cl-libsvm-format, loadable with any Common
;; Lisp implementation through ASDF.
(define-public cl-libsvm-format
  (sbcl-package->cl-source-package sbcl-cl-libsvm-format))
1961
;; Variant of sbcl-cl-libsvm-format built with ECL instead of SBCL.
(define-public ecl-cl-libsvm-format
  (sbcl-package->ecl-package sbcl-cl-libsvm-format))
1964
(define-public sbcl-cl-online-learning
  ;; No release yet; pin a specific upstream commit.
  (let ((commit "fc7a34f4f161cd1c7dd747d2ed8f698947781423")
        (revision "0"))
    (package
      (name "sbcl-cl-online-learning")
      (version (git-version "0.5" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/masatoi/cl-online-learning.git")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "14x95rlg80ay5hv645ki57pqvy12v28hz4k1w0f6bsfi2rmpxchq"))))
      (build-system asdf-build-system/sbcl)
      (arguments
       ;; FIXME: the tests themselves pass, but afterwards the check phase
       ;; crashes, so disable it.
       `(#:tests? #f))
      (inputs
       `(("cl-libsvm-format" ,sbcl-cl-libsvm-format)
         ("cl-store" ,sbcl-cl-store)))
      (native-inputs
       `(("prove" ,sbcl-prove)
         ("prove-asdf" ,sbcl-prove-asdf)))
      (home-page "https://github.com/masatoi/cl-online-learning")
      (synopsis "Online Machine Learning for Common Lisp")
      (description
       "This library contains a collection of machine learning algorithms for
online linear classification written in Common Lisp.")
      (license license:expat))))
1997
;; Source-only variant of sbcl-cl-online-learning, loadable with any Common
;; Lisp implementation through ASDF.
(define-public cl-online-learning
  (sbcl-package->cl-source-package sbcl-cl-online-learning))
2000
;; Variant of sbcl-cl-online-learning built with ECL instead of SBCL.
(define-public ecl-cl-online-learning
  (sbcl-package->ecl-package sbcl-cl-online-learning))
2003
(define-public sbcl-cl-random-forest
  ;; No release yet; pin a specific upstream commit.
  (let ((commit "85fbdd4596d40e824f70f1b7cf239cf544e49d51")
        (revision "0"))
    (package
      (name "sbcl-cl-random-forest")
      (version (git-version "0.1" revision commit))
      (source
       (origin
         (method git-fetch)
         (uri (git-reference
               (url "https://github.com/masatoi/cl-random-forest.git")
               (commit commit)))
         (file-name (git-file-name name version))
         (sha256
          (base32
           "097xv60i1ndz68sg9p4pc7c5gvyp9i1xgw966b4wwfq3x6hbz421"))))
      (build-system asdf-build-system/sbcl)
      (native-inputs
       `(("prove" ,sbcl-prove)
         ("prove-asdf" ,sbcl-prove-asdf)
         ("trivial-garbage" ,sbcl-trivial-garbage)))
      (inputs
       `(("alexandria" ,sbcl-alexandria)
         ("cl-libsvm-format" ,sbcl-cl-libsvm-format)
         ("cl-online-learning" ,sbcl-cl-online-learning)
         ("lparallel" ,sbcl-lparallel)))
      (arguments
       `(;; The tests download data from the Internet
         #:tests? #f
         #:phases
         (modify-phases %standard-phases
           (add-after 'unpack 'add-sb-cltl2-dependency
             (lambda _
               ;; sb-cltl2 is required by lparallel when using sbcl, but it is
               ;; not loaded automatically.
               ;; Splice a '(require :sb-cltl2)' into the system definition,
               ;; right after its IN-PACKAGE form, guarded by #+sbcl so other
               ;; implementations are unaffected.
               (substitute* "cl-random-forest.asd"
                 (("\\(in-package :cl-user\\)")
                  "(in-package :cl-user) #+sbcl (require :sb-cltl2)"))
               #t)))))
      (synopsis "Random Forest and Global Refinement for Common Lisp")
      (description
       "CL-random-forest is an implementation of Random Forest for multiclass
classification and univariate regression written in Common Lisp. It also
includes an implementation of Global Refinement of Random Forest.")
      (home-page "https://github.com/masatoi/cl-random-forest")
      (license license:expat))))
2050
;; Source-only variant of sbcl-cl-random-forest, loadable with any Common
;; Lisp implementation through ASDF.
(define-public cl-random-forest
  (sbcl-package->cl-source-package sbcl-cl-random-forest))
2053
;; Variant of sbcl-cl-random-forest built with ECL instead of SBCL.
(define-public ecl-cl-random-forest
  (sbcl-package->ecl-package sbcl-cl-random-forest))
2056
(define-public gloo
  (let ((version "0.0.0")               ; upstream provides no version tag
        (commit "ca528e32fea9ca8f2b16053cff17160290fc84ce")
        (revision "0"))
    (package
      (name "gloo")
      (version (git-version version revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/facebookincubator/gloo.git")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "1q9f80zy75f6njrzrqkmhc0g3qxs4gskr7ns2jdqanxa2ww7a99w"))))
      (build-system cmake-build-system)
      (arguments
       `(#:configure-flags '("-DBUILD_TEST=1")
         #:phases
         (modify-phases %standard-phases
           ;; Exercise the dedicated "gloo_test" target for the check phase.
           (replace 'check
             (lambda _
               (invoke "make" "gloo_test")
               #t)))))
      (native-inputs
       `(("googletest" ,googletest)))
      (home-page "https://github.com/facebookincubator/gloo")
      (synopsis "Collective communications library")
      (description
       "Gloo is a collective communications library. It comes with a
number of collective algorithms useful for machine learning applications.
These include a barrier, broadcast, and allreduce.")
      (license license:bsd-3))))
2092
(define-public python-umap-learn
  (package
    (name "python-umap-learn")
    (version "0.3.10")
    (source (origin
              (method url-fetch)
              (uri (pypi-uri "umap-learn" version))
              (sha256
               (base32
                "02ada2yy6km6zgk2836kg1c97yrcpalvan34p8c57446finnpki1"))))
    (build-system python-build-system)
    (propagated-inputs
     `(("python-numba" ,python-numba)
       ("python-numpy" ,python-numpy)
       ("python-scikit-learn" ,python-scikit-learn)
       ("python-scipy" ,python-scipy)))
    ;; Only needed at build time, for the test suite.
    (native-inputs
     `(("python-nose" ,python-nose)))
    (home-page "https://github.com/lmcinnes/umap")
    (synopsis "Uniform Manifold Approximation and Projection")
    (description
     "Uniform Manifold Approximation and Projection is a dimension reduction
technique that can be used for visualisation similarly to t-SNE, but also for
general non-linear dimension reduction.")
    (license license:bsd-3)))