;;; Commit: gnu: Add python-scikit-rebate.
;;; File: gnu/packages/machine-learning.scm (from jackhill/guix/guix.git)
;;; GNU Guix --- Functional package management for GNU
;;; Copyright © 2015, 2016, 2017, 2018, 2019 Ricardo Wurmus <rekado@elephly.net>
;;; Copyright © 2016, 2020 Efraim Flashner <efraim@flashner.co.il>
;;; Copyright © 2016, 2017, 2020 Marius Bakke <mbakke@fastmail.com>
;;; Copyright © 2016 Hartmut Goebel <h.goebel@crazy-compilers.com>
;;; Copyright © 2018, 2019 Tobias Geerinckx-Rice <me@tobias.gr>
;;; Copyright © 2018 Kei Kebreau <kkebreau@posteo.net>
;;; Copyright © 2018 Mark Meyer <mark@ofosos.org>
;;; Copyright © 2018 Ben Woodcroft <donttrustben@gmail.com>
;;; Copyright © 2018 Fis Trivial <ybbs.daans@hotmail.com>
;;; Copyright © 2018 Julien Lepiller <julien@lepiller.eu>
;;; Copyright © 2018 Björn Höfling <bjoern.hoefling@bjoernhoefling.de>
;;; Copyright © 2019 Nicolas Goaziou <mail@nicolasgoaziou.fr>
;;; Copyright © 2019 Guillaume Le Vaillant <glv@posteo.net>
;;; Copyright © 2019 Brett Gilio <brettg@gnu.org>
;;;
;;; This file is part of GNU Guix.
;;;
;;; GNU Guix is free software; you can redistribute it and/or modify it
;;; under the terms of the GNU General Public License as published by
;;; the Free Software Foundation; either version 3 of the License, or (at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with GNU Guix. If not, see <http://www.gnu.org/licenses/>.

;; Module declaration for the machine-learning package collection.  The
;; #:use-module clauses are kept alphabetical within each group (guix
;; modules first, then gnu packages, then misc).
(define-module (gnu packages machine-learning)
  #:use-module ((guix licenses) #:prefix license:)
  #:use-module (guix packages)
  #:use-module (guix utils)
  #:use-module (guix download)
  #:use-module (guix svn-download)
  #:use-module (guix build-system asdf)
  #:use-module (guix build-system cmake)
  #:use-module (guix build-system gnu)
  #:use-module (guix build-system ocaml)
  #:use-module (guix build-system python)
  #:use-module (guix build-system r)
  #:use-module (guix git-download)
  #:use-module (gnu packages)
  #:use-module (gnu packages adns)
  #:use-module (gnu packages algebra)
  #:use-module (gnu packages audio)
  #:use-module (gnu packages autotools)
  #:use-module (gnu packages base)
  #:use-module (gnu packages bash)
  #:use-module (gnu packages boost)
  #:use-module (gnu packages check)
  #:use-module (gnu packages compression)
  #:use-module (gnu packages cran)
  #:use-module (gnu packages databases)
  #:use-module (gnu packages dejagnu)
  #:use-module (gnu packages gcc)
  #:use-module (gnu packages glib)
  #:use-module (gnu packages graphviz)
  #:use-module (gnu packages gstreamer)
  #:use-module (gnu packages image)
  #:use-module (gnu packages linux)
  #:use-module (gnu packages lisp-xyz)
  #:use-module (gnu packages maths)
  #:use-module (gnu packages mpi)
  #:use-module (gnu packages ocaml)
  #:use-module (gnu packages onc-rpc)
  #:use-module (gnu packages perl)
  #:use-module (gnu packages pkg-config)
  #:use-module (gnu packages protobuf)
  #:use-module (gnu packages python)
  #:use-module (gnu packages python-science)
  #:use-module (gnu packages python-web)
  #:use-module (gnu packages python-xyz)
  #:use-module (gnu packages rpc)
  #:use-module (gnu packages serialization)
  #:use-module (gnu packages sphinx)
  #:use-module (gnu packages sqlite)
  #:use-module (gnu packages statistics)
  #:use-module (gnu packages swig)
  #:use-module (gnu packages web)
  #:use-module (gnu packages xml)
  #:use-module (gnu packages xorg)
  #:use-module (ice-9 match))
86
(define-public fann
  ;; The last release is >100 commits behind, so we package from git.
  (let ((commit "d71d54788bee56ba4cf7522801270152da5209d7"))
    (package
      (name "fann")
      (version (string-append "2.2.0-1." (string-take commit 8)))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/libfann/fann.git")
                      (commit commit)))
                (file-name (string-append name "-" version "-checkout"))
                (sha256
                 (base32
                  "0ibwpfrjs6q2lijs8slxjgzb2llcl6rk3v2ski4r6215g5jjhg3x"))))
      (build-system cmake-build-system)
      (arguments
       `(#:phases
         (modify-phases %standard-phases
           (replace 'check
             ;; There is no "check" target; run the test binary directly
             ;; from the build tree.
             (lambda _
               (with-directory-excursion (string-append (getcwd) "/tests")
                 (invoke "./fann_tests")))))))
      (home-page "http://leenissen.dk/fann/wp/")
      (synopsis "Fast Artificial Neural Network")
      (description
       "FANN is a neural network library, which implements multilayer
artificial neural networks in C with support for both fully connected and
sparsely connected networks.")
      (license license:lgpl2.1))))
118
(define-public libsvm
  (package
    (name "libsvm")
    (version "3.23")
    (source
     (origin
       (method url-fetch)
       (uri (string-append "https://www.csie.ntu.edu.tw/~cjlin/libsvm/"
                           name "-" version ".tar.gz"))
       (sha256
        (base32 "0jpaq0rr92x38p4nk3gjan79ip67m6p80anb28z1d8601miysyi5"))))
    (build-system gnu-build-system)
    (arguments
     `(#:tests? #f                      ; no "check" target
       #:phases (modify-phases %standard-phases
                  (delete 'configure)
                  (replace 'install     ; no "install" target either
                    (lambda* (#:key outputs #:allow-other-keys)
                      ;; Copy the three command-line tools into $out/bin.
                      (let* ((out (assoc-ref outputs "out"))
                             (bin (string-append out "/bin/")))
                        (mkdir-p bin)
                        (for-each (lambda (file)
                                    (copy-file file (string-append bin file)))
                                  '("svm-train"
                                    "svm-predict"
                                    "svm-scale")))
                      #t)))))
    (home-page "https://www.csie.ntu.edu.tw/~cjlin/libsvm/")
    (synopsis "Library for Support Vector Machines")
    (description
     "LIBSVM is a machine learning library for support vector
classification, (C-SVC, nu-SVC), regression (epsilon-SVR, nu-SVR) and
distribution estimation (one-class SVM).  It supports multi-class
classification.")
    (license license:bsd-3)))
155
(define-public python-libsvm
  (package (inherit libsvm)
    (name "python-libsvm")
    (build-system gnu-build-system)
    (arguments
     `(#:tests? #f                      ; no "check" target
       #:make-flags '("-C" "python")
       #:phases
       (modify-phases %standard-phases
         (delete 'configure)
         (replace 'install              ; no "install" target
           (lambda* (#:key inputs outputs #:allow-other-keys)
             ;; Compute the site-packages directory from the Python input's
             ;; store path, e.g. ".../lib/python3.7/site-packages/".
             (let ((site (string-append (assoc-ref outputs "out")
                                        "/lib/python"
                                        (string-take
                                         (string-take-right
                                          (assoc-ref inputs "python") 5) 3)
                                        "/site-packages/")))
               ;; Load the shared library from the library search path
               ;; instead of a relative location.
               (substitute* "python/svm.py"
                 (("../libsvm.so.2") "libsvm.so.2"))
               (mkdir-p site)
               (for-each (lambda (file)
                           (copy-file file (string-append site (basename file))))
                         (find-files "python" "\\.py"))
               (copy-file "libsvm.so.2"
                          (string-append site "libsvm.so.2")))
             #t)))))
    (inputs
     `(("python" ,python)))
    (synopsis "Python bindings of libSVM")))
187
(define-public ghmm
  ;; The latest release candidate is several years old and a couple of fixes
  ;; have been published since.  This is why we download the sources from the
  ;; SVN repository.
  (let ((svn-revision 2341))
    (package
      (name "ghmm")
      (version (string-append "0.9-rc3-0." (number->string svn-revision)))
      (source (origin
                (method svn-fetch)
                (uri (svn-reference
                      (url "http://svn.code.sf.net/p/ghmm/code/trunk")
                      (revision svn-revision)))
                (file-name (string-append name "-" version "-checkout"))
                (sha256
                 (base32
                  "0qbq1rqp94l530f043qzp8aw5lj7dng9wq0miffd7spd1ff638wq"))))
      (build-system gnu-build-system)
      (arguments
       `(#:imported-modules (,@%gnu-build-system-modules
                             (guix build python-build-system))
         #:modules ((guix build python-build-system)
                    ,@%gnu-build-system-modules)
         #:phases
         (modify-phases %standard-phases
           ;; The sources of interest live in the "ghmm" sub-directory of
           ;; the SVN checkout.
           (add-after 'unpack 'enter-dir
             (lambda _ (chdir "ghmm") #t))
           ;; Run the test suite after installation so the installed Python
           ;; bindings can be imported (see 'fix-PYTHONPATH below).
           (delete 'check)
           (add-after 'install 'check
             (assoc-ref %standard-phases 'check))
           (add-before 'check 'fix-PYTHONPATH
             (lambda* (#:key inputs outputs #:allow-other-keys)
               (let ((python-version (python-version
                                      (assoc-ref inputs "python"))))
                 (setenv "PYTHONPATH"
                         (string-append (getenv "PYTHONPATH")
                                        ":" (assoc-ref outputs "out")
                                        "/lib/python" python-version
                                        "/site-packages")))
               #t))
           ;; Make the Python extension link with an RUNPATH pointing at the
           ;; installed libghmm, so it can be loaded without LD_LIBRARY_PATH.
           (add-after 'enter-dir 'fix-runpath
             (lambda* (#:key outputs #:allow-other-keys)
               (substitute* "ghmmwrapper/setup.py"
                 (("^(.*)extra_compile_args = \\[" line indent)
                  (string-append indent
                                 "extra_link_args = [\"-Wl,-rpath="
                                 (assoc-ref outputs "out") "/lib\"],\n"
                                 line
                                 "\"-Wl,-rpath="
                                 (assoc-ref outputs "out")
                                 "/lib\", ")))
               #t))
           (add-after 'enter-dir 'disable-broken-tests
             (lambda _
               (substitute* "tests/Makefile.am"
                 ;; GHMM_SILENT_TESTS is assumed to be a command.
                 (("TESTS_ENVIRONMENT.*") "")
                 ;; Do not build broken tests.
                 (("chmm .*") "")
                 (("read_fa .*") "")
                 (("mcmc .*") "")
                 (("label_higher_order_test.*$")
                  "label_higher_order_test\n"))

               ;; These Python unittests are broken as there is no gato.
               ;; See https://sourceforge.net/p/ghmm/support-requests/3/
               (substitute* "ghmmwrapper/ghmmunittests.py"
                 (("^(.*)def (testNewXML|testMultipleTransitionClasses)"
                   line indent)
                  (string-append indent
                                 "@unittest.skip(\"Disabled by Guix\")\n"
                                 line)))
               #t)))))
      (inputs
       `(("python" ,python-2) ; only Python 2 is supported
         ("libxml2" ,libxml2)))
      (native-inputs
       `(("pkg-config" ,pkg-config)
         ("dejagnu" ,dejagnu)
         ("swig" ,swig)
         ("autoconf" ,autoconf)
         ("automake" ,automake)
         ("libtool" ,libtool)))
      (home-page "http://ghmm.org")
      (synopsis "Hidden Markov Model library")
      (description
       "The General Hidden Markov Model library (GHMM) is a C library with
additional Python bindings implementing a wide range of types of @dfn{Hidden
Markov Models} (HMM) and algorithms: discrete, continuous emissions, basic
training, HMM clustering, HMM mixtures.")
      (license license:lgpl2.0+))))
279
(define-public mcl
  (package
    (name "mcl")
    (version "14.137")
    (source (origin
              (method url-fetch)
              ;; Upstream encodes the version with dashes in the tarball
              ;; name, e.g. "mcl-14-137.tar.gz".
              (uri (string-append
                    "http://micans.org/mcl/src/mcl-"
                    (string-replace-substring version "." "-")
                    ".tar.gz"))
              (sha256
               (base32
                "15xlax3z31lsn62vlg94hkm75nm40q4679amnfg13jm8m2bnhy5m"))))
    (build-system gnu-build-system)
    (arguments
     `(#:configure-flags (list "--enable-blast")))
    (inputs
     `(("perl" ,perl)))
    (home-page "http://micans.org/mcl/")
    (synopsis "Clustering algorithm for graphs")
    (description
     "The MCL algorithm is short for the @dfn{Markov Cluster Algorithm}, a
fast and scalable unsupervised cluster algorithm for graphs (also known as
networks) based on simulation of (stochastic) flow in graphs.")
    ;; In the LICENCE file and web page it says "The software is licensed
    ;; under the GNU General Public License, version 3.", but in several of
    ;; the source code files it suggests GPL3 or later.
    ;; http://listserver.ebi.ac.uk/pipermail/mcl-users/2016/000376.html
    (license license:gpl3)))
309
(define-public ocaml-mcl
  (package
    (name "ocaml-mcl")
    (version "12-068oasis4")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/fhcrc/mcl.git")
             (commit version)))
       (file-name (git-file-name name version))
       (sha256
        (base32
         "0009dc3h2jp3qg5val452wngpqnbfyhbcxylghq0mrjqxx0jdq5p"))))
    (build-system ocaml-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         (add-before 'configure 'patch-paths
           (lambda _
             (substitute* "configure"
               (("/bin/sh") (which "sh")))
             (substitute* "setup.ml"
               (("LDFLAGS=-fPIC")
                (string-append "LDFLAGS=-fPIC\"; \"SHELL=" (which "sh")))
               (("-std=c89") "-std=gnu99")

               ;; This is a mutable string, which is no longer supported.  Use
               ;; a byte buffer instead.
               (("String.make \\(String.length s\\)")
                "Bytes.make (String.length s)")

               ;; These two belong together.
               (("OASISString.replace_chars")
                "Bytes.to_string (OASISString.replace_chars")
               ((" s;")
                " s);"))
             (substitute* "myocamlbuild.ml"
               (("std=c89") "std=gnu99"))
             ;; Since we build with a more recent OCaml, we have to use C99 or
             ;; later.  This causes problems with the old C code.
             (substitute* "src/impala/matrix.c"
               (("restrict") "restrict_"))
             #t)))))
    (native-inputs
     `(("ocamlbuild" ,ocamlbuild)))
    (home-page "https://github.com/fhcrc/mcl")
    (synopsis "OCaml wrappers around MCL")
    (description
     "This package provides OCaml bindings for the MCL graph clustering
algorithm.")
    (license license:gpl3)))
362
(define-public randomjungle
  (package
    (name "randomjungle")
    (version "2.1.0")
    (source
     (origin
       (method url-fetch)
       (uri (string-append
             "https://www.imbs.uni-luebeck.de/fileadmin/files/Software"
             "/randomjungle/randomjungle-" version ".tar_.gz"))
       (patches (search-patches "randomjungle-disable-static-build.patch"))
       (sha256
        (base32
         "12c8rf30cla71swx2mf4ww9mfd8jbdw5lnxd7dxhyw1ygrvg6y4w"))))
    (build-system gnu-build-system)
    (arguments
     `(#:configure-flags
       (list "--disable-static"
             (string-append "--with-boost="
                            (assoc-ref %build-inputs "boost")))
       #:phases
       (modify-phases %standard-phases
         (add-before 'configure 'set-CXXFLAGS
           ;; The old C++ code does not build with a strict compiler.
           (lambda _
             (setenv "CXXFLAGS" "-fpermissive ")
             #t)))))
    (inputs
     `(("boost" ,boost)
       ("gsl" ,gsl)
       ("libxml2" ,libxml2)
       ("zlib" ,zlib)))
    (native-inputs
     `(("gfortran" ,gfortran)
       ("gfortran:lib" ,gfortran "lib")))
    ;; Non-portable assembly instructions are used so building fails on
    ;; platforms other than x86_64 or i686.
    (supported-systems '("x86_64-linux" "i686-linux"))
    (home-page "https://www.imbs.uni-luebeck.de/forschung/software/details.html#c224")
    (synopsis "Implementation of the Random Forests machine learning method")
    (description
     "Random Jungle is an implementation of Random Forests.  It is supposed to
analyse high dimensional data.  In genetics, it can be used for analysing big
Genome Wide Association (GWA) data.  Random Forests is a powerful machine
learning method.  Most interesting features are variable selection, missing
value imputation, classifier creation, generalization error estimation and
sample proximities between pairs of cases.")
    (license license:gpl3+)))
411
(define-public openfst
  (package
    (name "openfst")
    (version "1.7.2")
    (source (origin
              (method url-fetch)
              (uri (string-append "http://www.openfst.org/twiki/pub/FST/"
                                  "FstDownload/openfst-" version ".tar.gz"))
              (sha256
               (base32
                "0fqgk8195kz21is09gwzwnrg7fr9526bi9mh4apyskapz27pbhr1"))))
    (build-system gnu-build-system)
    (home-page "http://www.openfst.org")
    (synopsis "Library for weighted finite-state transducers")
    (description "OpenFst is a library for constructing, combining,
optimizing, and searching weighted finite-state transducers (FSTs).")
    (license license:asl2.0)))
429
(define-public shogun
  (package
    (name "shogun")
    (version "6.1.3")
    (source
     (origin
       (method url-fetch)
       (uri (string-append
             "ftp://shogun-toolbox.org/shogun/releases/"
             (version-major+minor version)
             "/sources/shogun-" version ".tar.bz2"))
       (sha256
        (base32
         "1rn9skm3nw6hr7mr3lgp2gfqhi7ii0lyxck7qmqnf8avq349s5jp"))
       (modules '((guix build utils)
                  (ice-9 rdelim)))
       (snippet
        '(begin
           ;; Remove non-free sources and files referencing them
           (for-each delete-file
                     (find-files "src/shogun/classifier/svm/"
                                 "SVMLight\\.(cpp|h)"))
           (for-each delete-file
                     (find-files "examples/undocumented/libshogun/"
                                 (string-append
                                  "(classifier_.*svmlight.*|"
                                  "evaluation_cross_validation_locked_comparison).cpp")))
           ;; Remove non-free functions: drop every region delimited by
           ;; "#ifdef USE_SVMLIGHT" ... "#endif //USE_SVMLIGHT" from FILE.
           (define (delete-ifdefs file)
             (with-atomic-file-replacement file
               (lambda (in out)
                 (let loop ((line (read-line in 'concat))
                            (skipping? #f))
                   (if (eof-object? line)
                       #t
                       (let ((skip-next?
                              (or (and skipping?
                                       (not (string-prefix?
                                             "#endif //USE_SVMLIGHT" line)))
                                  (string-prefix?
                                   "#ifdef USE_SVMLIGHT" line))))
                         (when (or (not skipping?)
                                   (and skipping? (not skip-next?)))
                           (display line out))
                         (loop (read-line in 'concat) skip-next?)))))))
           (for-each delete-ifdefs
                     (append
                      (find-files "src/shogun/classifier/mkl"
                                  "^MKLClassification\\.cpp")
                      (find-files "src/shogun/classifier/svm"
                                  "^SVMLightOneClass\\.(cpp|h)")
                      (find-files "src/shogun/multiclass"
                                  "^ScatterSVM\\.(cpp|h)")
                      (find-files "src/shogun/kernel/"
                                  "^(Kernel|CombinedKernel|ProductKernel)\\.(cpp|h)")
                      (find-files "src/shogun/regression/svr"
                                  "^(MKLRegression|SVRLight)\\.(cpp|h)")
                      (find-files "src/shogun/transfer/domain_adaptation"
                                  "^DomainAdaptationSVM\\.(cpp|h)")))
           #t))))
    (build-system cmake-build-system)
    (arguments
     '(#:tests? #f ;no check target
       #:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'delete-broken-symlinks
           (lambda _
             (for-each delete-file '("applications/arts/data"
                                     "applications/asp/data"
                                     "applications/easysvm/data"
                                     "applications/msplicer/data"
                                     "applications/ocr/data"
                                     "examples/meta/data"
                                     "examples/undocumented/data"))
             #t))
         (add-after 'unpack 'change-R-target-path
           (lambda* (#:key outputs #:allow-other-keys)
             (substitute* '("src/interfaces/r/CMakeLists.txt"
                            "examples/meta/r/CMakeLists.txt")
               (("\\$\\{R_COMPONENT_LIB_PATH\\}")
                (string-append (assoc-ref outputs "out")
                               "/lib/R/library/")))
             #t))
         (add-after 'unpack 'fix-octave-modules
           (lambda* (#:key outputs #:allow-other-keys)
             (substitute* "src/interfaces/octave/CMakeLists.txt"
               (("^include_directories\\(\\$\\{OCTAVE_INCLUDE_DIRS\\}")
                "include_directories(${OCTAVE_INCLUDE_DIRS} ${OCTAVE_INCLUDE_DIRS}/octave")
               ;; change target directory
               (("\\$\\{OCTAVE_OCT_LOCAL_API_FILE_DIR\\}")
                (string-append (assoc-ref outputs "out")
                               "/share/octave/packages")))
             (substitute* '("src/interfaces/octave/swig_typemaps.i"
                            "src/interfaces/octave/sg_print_functions.cpp")
               ;; "octave/config.h" and "octave/oct-obj.h" deprecated in Octave.
               (("octave/config\\.h") "octave/octave-config.h")
               (("octave/oct-obj.h") "octave/ovl.h"))
             #t))
         ;; The build expects a bundled copy of rxcpp; provide our input.
         (add-after 'unpack 'move-rxcpp
           (lambda* (#:key inputs #:allow-other-keys)
             (let ((rxcpp-dir "shogun/third-party/rxcpp"))
               (mkdir-p rxcpp-dir)
               (install-file (assoc-ref inputs "rxcpp") rxcpp-dir)
               #t)))
         (add-before 'build 'set-HOME
           ;; $HOME needs to be set at some point during the build phase
           (lambda _ (setenv "HOME" "/tmp") #t)))
       #:configure-flags
       (list "-DCMAKE_BUILD_WITH_INSTALL_RPATH=TRUE"
             "-DUSE_SVMLIGHT=OFF" ;disable proprietary SVMLIGHT
             "-DBUILD_META_EXAMPLES=OFF" ;requires unpackaged ctags
             ;;"-DINTERFACE_JAVA=ON" ;requires unpackaged jblas
             ;;"-DINTERFACE_RUBY=ON" ;requires unpackaged ruby-narray
             ;;"-DINTERFACE_PERL=ON" ;"FindPerlLibs" does not exist
             ;;"-DINTERFACE_LUA=ON" ;fails because lua doesn't build pkgconfig file
             "-DINTERFACE_OCTAVE=ON"
             "-DINTERFACE_PYTHON=ON"
             "-DINTERFACE_R=ON")))
    (inputs
     `(("python" ,python)
       ("numpy" ,python-numpy)
       ("r-minimal" ,r-minimal)
       ("octave" ,octave-cli)
       ("swig" ,swig)
       ("eigen" ,eigen)
       ("hdf5" ,hdf5)
       ("atlas" ,atlas)
       ("arpack" ,arpack-ng)
       ("lapack" ,lapack)
       ("glpk" ,glpk)
       ("libxml2" ,libxml2)
       ("lzo" ,lzo)
       ("zlib" ,zlib)))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("rxcpp" ,rxcpp)))
    ;; Non-portable SSE instructions are used so building fails on platforms
    ;; other than x86_64.
    (supported-systems '("x86_64-linux"))
    (home-page "https://shogun-toolbox.org/")
    (synopsis "Machine learning toolbox")
    (description
     "The Shogun Machine learning toolbox provides a wide range of unified and
efficient Machine Learning (ML) methods.  The toolbox seamlessly
combines multiple data representations, algorithm classes, and general purpose
tools.  This enables both rapid prototyping of data pipelines and extensibility
in terms of new algorithms.")
    (license license:gpl3+)))
578
(define-public rxcpp
  (package
    (name "rxcpp")
    (version "4.1.0")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/ReactiveX/RxCpp.git")
             (commit (string-append "v" version))))
       (sha256
        (base32 "1rdpa3jlc181jd08nk437aar085h28i45s6nzrv65apb3xyyz0ij"))
       (file-name (git-file-name name version))))
    (build-system cmake-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         ;; -Werror turns harmless warnings from our newer toolchain into
         ;; build failures; drop it everywhere.
         (add-after 'unpack 'remove-werror
           (lambda _
             (substitute* (find-files ".")
               (("-Werror") ""))
             #t))
         (replace 'check
           (lambda _
             (invoke "ctest"))))))
    (native-inputs
     `(("catch" ,catch-framework)))
    (home-page "http://reactivex.io/")
    (synopsis "Reactive Extensions for C++")
    (description
     "The Reactive Extensions for C++ (RxCpp) is a library of algorithms for
values-distributed-in-time.  ReactiveX is a library for composing asynchronous
and event-based programs by using observable sequences.

It extends the observer pattern to support sequences of data and/or events and
adds operators that allow you to compose sequences together declaratively while
abstracting away concerns about things like low-level threading,
synchronization, thread-safety, concurrent data structures, and non-blocking
I/O.")
    (license license:asl2.0)))
619
(define-public r-adaptivesparsity
  (package
    (name "r-adaptivesparsity")
    (version "1.6")
    (source (origin
              (method url-fetch)
              (uri (cran-uri "AdaptiveSparsity" version))
              (sha256
               (base32
                "0imr5m8mll9j6n4icsv6z9rl5kbnwsp9wvzrg7n90nnmcxq2cz91"))))
    (properties
     `((upstream-name . "AdaptiveSparsity")))
    (build-system r-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'link-against-armadillo
           (lambda _
             ;; The bundled Makevars does not link the shared library
             ;; against Armadillo, so add it explicitly.
             (substitute* "src/Makevars"
               (("PKG_LIBS=" prefix)
                (string-append prefix "-larmadillo")))
             #t)))))
    (propagated-inputs
     `(("r-mass" ,r-mass)
       ("r-matrix" ,r-matrix)
       ("r-rcpp" ,r-rcpp)
       ("r-rcpparmadillo" ,r-rcpparmadillo)))
    (inputs
     `(("armadillo" ,armadillo)))
    (home-page "https://cran.r-project.org/web/packages/AdaptiveSparsity")
    (synopsis "Adaptive sparsity models")
    (description
     "This package implements the Figueiredo machine learning algorithm for
adaptive sparsity and the Wong algorithm for adaptively sparse gaussian
geometric models.")
    (license license:lgpl3+)))
655
(define-public gemmlowp-for-tensorflow
  ;; The commit hash is taken from "tensorflow/workspace.bzl".
  (let ((commit "38ebac7b059e84692f53e5938f97a9943c120d98")
        (revision "2"))
    (package
      (name "gemmlowp")
      (version (git-version "0" revision commit))
      (source (origin
                (method url-fetch)
                (uri (string-append "https://mirror.bazel.build/"
                                    "github.com/google/gemmlowp/archive/"
                                    commit ".zip"))
                (file-name (string-append "gemmlowp-" version ".zip"))
                (sha256
                 (base32
                  "0n56s2g8hrssm4w8qj1v58gfm56a04n9v992ixkmvk6zjiralzxq"))))
      (build-system cmake-build-system)
      (arguments
       `(#:configure-flags
         (list ,@(match (%current-system)
                   ((or "x86_64-linux" "i686-linux")
                    '("-DCMAKE_CXX_FLAGS=-msse2"))
                   (_ '())))
         #:phases
         (modify-phases %standard-phases
           ;; This directory contains the CMakeLists.txt.
           (add-after 'unpack 'chdir
             (lambda _ (chdir "contrib") #t))
           ;; There is no install target
           (replace 'install
             (lambda* (#:key outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (lib (string-append out "/lib/"))
                      (inc (string-append out "/include/")))
                 (install-file "../build/libeight_bit_int_gemm.so" lib)
                 ;; Install the public headers, preserving the directory
                 ;; layout expected by consumers such as TensorFlow.
                 (for-each (lambda (dir)
                             (let ((target (string-append inc "/" dir)))
                               (mkdir-p target)
                               (for-each (lambda (h)
                                           (install-file h target))
                                         (find-files (string-append "../" dir)
                                                     "\\.h$"))))
                           '("meta" "profiling" "public" "fixedpoint"
                             "eight_bit_int_gemm" "internal"))
                 #t))))))
      (native-inputs
       `(("unzip" ,unzip)))
      (home-page "https://github.com/google/gemmlowp")
      (synopsis "Small self-contained low-precision GEMM library")
      (description
       "This is a small self-contained low-precision @dfn{general matrix
multiplication} (GEMM) library.  It is not a full linear algebra library.
Low-precision means that the input and output matrix entries are integers on
at most 8 bits.  To avoid overflow, results are internally accumulated on more
than 8 bits, and at the end only some significant 8 bits are kept.")
      (license license:asl2.0))))
712
(define-public dlib
  (package
    (name "dlib")
    (version "19.7")
    (source (origin
              (method url-fetch)
              (uri (string-append
                    "http://dlib.net/files/dlib-" version ".tar.bz2"))
              (sha256
               (base32
                "1mljz02kwkrbggyncxv5fpnyjdybw2qihaacb3js8yfkw12vwpc2"))
              (modules '((guix build utils)))
              (snippet
               '(begin
                  ;; Delete ~13MB of bundled dependencies.
                  (delete-file-recursively "dlib/external")
                  (delete-file-recursively "docs/dlib/external")
                  #t))))
    (build-system cmake-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'disable-asserts
           (lambda _
             ;; config.h recommends explicitly enabling or disabling asserts
             ;; when building as a shared library.  By default neither is set.
             (substitute* "dlib/config.h"
               (("^//#define DLIB_DISABLE_ASSERTS") "#define DLIB_DISABLE_ASSERTS"))
             #t))
         (add-after 'disable-asserts 'disable-failing-tests
           (lambda _
             ;; One test times out on MIPS, so we need to disable it.
             ;; Others are flaky on some platforms.
             (let* ((system ,(or (%current-target-system)
                                 (%current-system)))
                    (disabled-tests (cond
                                     ((string-prefix? "mips64" system)
                                      '("object_detector" ; timeout
                                        "data_io"))
                                     ((string-prefix? "armhf" system)
                                      '("learning_to_track"))
                                     ((string-prefix? "i686" system)
                                      '("optimization"))
                                     (else '()))))
               (for-each
                (lambda (test)
                  (substitute* "dlib/test/makefile"
                    (((string-append "SRC \\+= " test "\\.cpp")) "")))
                disabled-tests)
               #t)))
         (replace 'check
           (lambda _
             ;; No test target, so we build and run the unit tests here.
             (let ((test-dir (string-append "../dlib-" ,version "/dlib/test")))
               (with-directory-excursion test-dir
                 (invoke "make" "-j" (number->string (parallel-job-count)))
                 (invoke "./dtest" "--runall"))
               #t)))
         (add-after 'install 'delete-static-library
           (lambda* (#:key outputs #:allow-other-keys)
             (delete-file (string-append (assoc-ref outputs "out")
                                         "/lib/libdlib.a"))
             #t)))))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ;; For tests.
       ("libnsl" ,libnsl)))
    (inputs
     `(("giflib" ,giflib)
       ("lapack" ,lapack)
       ("libjpeg" ,libjpeg)
       ("libpng" ,libpng)
       ("libx11" ,libx11)
       ("openblas" ,openblas)
       ("zlib" ,zlib)))
    (synopsis
     "Toolkit for making machine learning and data analysis applications in C++")
    (description
     "Dlib is a modern C++ toolkit containing machine learning algorithms and
tools.  It is used in both industry and academia in a wide range of domains
including robotics, embedded devices, mobile phones, and large high performance
computing environments.")
    (home-page "http://dlib.net")
    (license license:boost1.0)))
797
(define-public python-scikit-learn
  (package
    (name "python-scikit-learn")
    (version "0.22.1")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/scikit-learn/scikit-learn.git")
             (commit version)))
       (file-name (git-file-name name version))
       (sha256
        (base32
         "1xqxv210gsmjw094vc5ghq2y9lmm74qkk22pq6flcjzj51b86jxf"))))
    (build-system python-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         ;; Build the Cython extensions in place so the test suite can be
         ;; run from the source tree.
         (add-after 'build 'build-ext
           (lambda _ (invoke "python" "setup.py" "build_ext" "--inplace") #t))
         (replace 'check
           (lambda _
             ;; Restrict OpenBLAS threads to prevent segfaults while testing!
             (setenv "OPENBLAS_NUM_THREADS" "1")

             ;; Some tests require write access to $HOME.
             (setenv "HOME" "/tmp")

             (invoke "pytest" "sklearn" "-m" "not network")))
         (add-before 'reset-gzip-timestamps 'make-files-writable
           (lambda* (#:key outputs #:allow-other-keys)
             ;; Make sure .gz files are writable so that the
             ;; 'reset-gzip-timestamps' phase can do its work.
             (let ((out (assoc-ref outputs "out")))
               (for-each make-file-writable
                         (find-files out "\\.gz$"))
               #t))))))
    (inputs
     `(("openblas" ,openblas)))
    (native-inputs
     `(("python-joblib" ,python-joblib)
       ("python-pytest" ,python-pytest)
       ("python-pandas" ,python-pandas) ;for tests
       ("python-cython" ,python-cython)))
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-scipy" ,python-scipy)))
    (home-page "https://scikit-learn.org/")
    (synopsis "Machine Learning in Python")
    (description
     "Scikit-learn provides simple and efficient tools for data mining and
data analysis.")
    (properties `((python2-variant . ,(delay python2-scikit-learn))))
    (license license:bsd-3)))
852
;; scikit-learn 0.22 and later only supports Python 3, so we stick with
;; an older version here.
(define-public python2-scikit-learn
  (let ((base (package-with-python2 (strip-python2-variant python-scikit-learn))))
    (package
      (inherit base)
      (version "0.20.4")
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/scikit-learn/scikit-learn.git")
                      (commit version)))
                (file-name (git-file-name "python-scikit-learn" version))
                (sha256
                 (base32
                  "08zbzi8yx5wdlxfx9jap61vg1malc9ajf576w7a0liv6jvvrxlpj")))))))
869
(define-public python-scikit-rebate
  (package
    (name "python-scikit-rebate")
    (version "0.6")
    (source (origin
              (method url-fetch)
              (uri (pypi-uri "skrebate" version))
              (sha256
               (base32
                "1h7qs9gjxpzqabzhb8rmpv3jpmi5iq41kqdibg48299h94iikiw7"))))
    (build-system python-build-system)
    ;; Pandas is only needed to run the tests.
    (native-inputs
     `(("python-pandas" ,python-pandas)))
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-scipy" ,python-scipy)
       ("python-scikit-learn" ,python-scikit-learn)
       ("python-joblib" ,python-joblib)))
    (home-page "https://epistasislab.github.io/scikit-rebate/")
    (synopsis "Relief-based feature selection algorithms for Python")
    (description "Scikit-rebate is a scikit-learn-compatible Python
implementation of ReBATE, a suite of Relief-based feature selection algorithms
for Machine Learning.  These algorithms excel at identifying features that are
predictive of the outcome in supervised learning problems, and are especially
good at identifying feature interactions that are normally overlooked by
standard feature selection algorithms.")
    (license license:expat)))
898
(define-public python-autograd
  ;; There are no proper releases; package a pinned commit from git.
  (let* ((commit "442205dfefe407beffb33550846434baa90c4de7")
         (revision "0")
         (version (git-version "0.0.0" revision commit)))
    (package
      (name "python-autograd")
      (home-page "https://github.com/HIPS/autograd")
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url home-page)
                      (commit commit)))
                (sha256
                 (base32
                  "189sv2xb0mwnjawa9z7mrgdglc1miaq93pnck26r28fi1jdwg0z4"))
                (file-name (git-file-name name version))))
      (version version)
      (build-system python-build-system)
      (native-inputs
       `(("python-nose" ,python-nose)
         ("python-pytest" ,python-pytest)))
      (propagated-inputs
       `(("python-future" ,python-future)
         ("python-numpy" ,python-numpy)))
      (arguments
       `(#:phases (modify-phases %standard-phases
                    (replace 'check
                      (lambda _
                        (invoke "py.test" "-v"))))))
      (synopsis "Efficiently computes derivatives of NumPy code")
      (description "Autograd can automatically differentiate native Python and
NumPy code.  It can handle a large subset of Python's features, including loops,
ifs, recursion and closures, and it can even take derivatives of derivatives
of derivatives.  It supports reverse-mode differentiation
(a.k.a. backpropagation), which means it can efficiently take gradients of
scalar-valued functions with respect to array-valued arguments, as well as
forward-mode differentiation, and the two can be composed arbitrarily.  The
main intended application of Autograd is gradient-based optimization.")
      (license license:expat))))
938
;; Automatically generated Python 2 variant of python-autograd.
(define-public python2-autograd
  (package-with-python2 python-autograd))
941
(define-public lightgbm
  (package
    (name "lightgbm")
    (version "2.0.12")
    (source (origin
              (method url-fetch)
              (uri (string-append
                    "https://github.com/Microsoft/LightGBM/archive/v"
                    version ".tar.gz"))
              (sha256
               (base32
                "132zf0yk0545mg72hyzxm102g3hpb6ixx9hnf8zd2k55gas6cjj1"))
              (file-name (string-append name "-" version ".tar.gz"))))
    ;; pytest and nose are used by the "check" phase only.
    (native-inputs
     `(("python-pytest" ,python-pytest)
       ("python-nose" ,python-nose)))
    (inputs
     `(("openmpi" ,openmpi)))
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-scipy" ,python-scipy)))
    (arguments
     `(#:configure-flags
       '("-DUSE_MPI=ON")
       #:phases
       (modify-phases %standard-phases
         (replace 'check
           (lambda* (#:key outputs #:allow-other-keys)
             ;; Run the C API tests from the unpacked source tree, which is
             ;; a sibling of the CMake build directory.  The directory name
             ;; is spliced in at package-definition time via unquote.
             (with-directory-excursion ,(string-append "../LightGBM-" version)
               (invoke "pytest" "tests/c_api_test/test_.py")))))))
    (build-system cmake-build-system)
    (home-page "https://github.com/Microsoft/LightGBM")
    (synopsis "Gradient boosting framework based on decision tree algorithms")
    (description "LightGBM is a gradient boosting framework that uses tree
based learning algorithms. It is designed to be distributed and efficient with
the following advantages:

@itemize
@item Faster training speed and higher efficiency
@item Lower memory usage
@item Better accuracy
@item Parallel and GPU learning supported (not enabled in this package)
@item Capable of handling large-scale data
@end itemize\n")
    (license license:expat)))
987
(define-public vowpal-wabbit
  ;; Language bindings not included.
  (package
    (name "vowpal-wabbit")
    (version "8.5.0")
    (source (origin
              (method git-fetch)
              (uri (git-reference
                    (url "https://github.com/JohnLangford/vowpal_wabbit")
                    (commit version)))
              (sha256
               (base32
                "04bwzk6ifgnz3fmzid8b7avxf9n5pnx9xcjm61nkjng1vv0bpj8x"))
              (file-name (git-file-name name version))))
    (inputs
     `(("boost" ,boost)
       ("zlib" ,zlib)))
    (arguments
     `(#:configure-flags
       (list (string-append "--with-boost="
                            (assoc-ref %build-inputs "boost")))
       #:phases
       (modify-phases %standard-phases
         ;; Make every file writable so later phases can modify files in
         ;; place.
         (add-after 'unpack 'make-files-writable
           (lambda _
             (for-each make-file-writable (find-files "." ".*")) #t)))))
    (build-system gnu-build-system)
    (home-page "https://github.com/JohnLangford/vowpal_wabbit")
    (synopsis "Fast machine learning library for online learning")
    (description "Vowpal Wabbit is a machine learning system with techniques
such as online, hashing, allreduce, reductions, learning2search, active, and
interactive learning.")
    (license license:bsd-3)))
1021
(define-public python2-fastlmm
  (package
    (name "python2-fastlmm")
    (version "0.2.21")
    (source
     (origin
       (method url-fetch)
       ;; Upstream distributes a zip archive on PyPI, hence the ".zip"
       ;; suffix and the "unzip" native input below.
       (uri (pypi-uri "fastlmm" version ".zip"))
       (sha256
        (base32
         "1q8c34rpmwkfy3r4d5172pzdkpfryj561897z9r3x22gq7813x1m"))))
    (build-system python-build-system)
    (arguments
     `(#:tests? #f ; some test files are missing
       #:python ,python-2)) ; only Python 2.7 is supported
    (propagated-inputs
     `(("python2-numpy" ,python2-numpy)
       ("python2-scipy" ,python2-scipy)
       ("python2-matplotlib" ,python2-matplotlib)
       ("python2-pandas" ,python2-pandas)
       ("python2-scikit-learn" ,python2-scikit-learn)
       ("python2-pysnptools" ,python2-pysnptools)))
    (native-inputs
     `(("unzip" ,unzip)
       ("python2-cython" ,python2-cython)
       ("python2-mock" ,python2-mock)
       ("python2-nose" ,python2-nose)))
    ;; NOTE(review): plain-HTTP home page; check whether an HTTPS (or
    ;; relocated) Microsoft Research URL exists before changing it.
    (home-page "http://research.microsoft.com/en-us/um/redmond/projects/mscompbio/fastlmm/")
    (synopsis "Perform genome-wide association studies on large data sets")
    (description
     "FaST-LMM, which stands for Factored Spectrally Transformed Linear Mixed
Models, is a program for performing both single-SNP and SNP-set genome-wide
association studies (GWAS) on extremely large data sets.")
    (license license:asl2.0)))
1056
1057 ;; There have been no proper releases yet.
(define-public kaldi
  ;; Build from a pinned commit; see the comment above this definition.
  (let ((commit "2f95609f0bb085bd3a1dc5eb0a39f3edea59e606")
        (revision "1"))
    (package
      (name "kaldi")
      (version (git-version "0" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/kaldi-asr/kaldi.git")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "082qh3pfi7hvncylp4xsmkfahbd7gb0whdfa4rwrx7fxk9rdh3kz"))))
      (build-system gnu-build-system)
      (arguments
       `(#:test-target "test"
         #:phases
         (modify-phases %standard-phases
           ;; The build system lives in the "src" sub-directory.
           (add-after 'unpack 'chdir
             (lambda _ (chdir "src") #t))
           (replace 'configure
             (lambda* (#:key build system inputs outputs #:allow-other-keys)
               ;; Drop the x86-only SSE compiler flags on other
               ;; architectures.
               (when (not (or (string-prefix? "x86_64" system)
                              (string-prefix? "i686" system)))
                 (substitute* "makefiles/linux_openblas.mk"
                   (("-msse -msse2") "")))
               (substitute* "makefiles/default_rules.mk"
                 (("/bin/bash") (which "bash")))
               ;; Do not require the bundled portaudio checkout for the
               ;; "ext_depend" target...
               (substitute* "Makefile"
                 (("ext_depend: check_portaudio")
                  "ext_depend:"))
               ;; ...and point the portaudio-using components at our
               ;; "portaudio" input instead of "tools/portaudio".
               (substitute* '("online/Makefile"
                              "onlinebin/Makefile"
                              "gst-plugin/Makefile")
                 (("../../tools/portaudio/install")
                  (assoc-ref inputs "portaudio")))

               ;; This `configure' script doesn't support variables passed as
               ;; arguments, nor does it support "prefix".
               (let ((out (assoc-ref outputs "out"))
                     (openblas (assoc-ref inputs "openblas"))
                     (openfst (assoc-ref inputs "openfst")))
                 (substitute* "configure"
                   (("check_for_slow_expf;") "")
                   ;; This affects the RPATH and also serves as the
                   ;; installation directory.
                   (("KALDILIBDIR=`pwd`/lib")
                    (string-append "KALDILIBDIR=" out "/lib")))
                 (mkdir-p out) ; must exist
                 (setenv "CONFIG_SHELL" (which "bash"))
                 ;; The OpenFst version string is spliced in at
                 ;; package-definition time via unquote.
                 (setenv "OPENFST_VER" ,(package-version openfst))
                 (invoke "./configure"
                         "--use-cuda=no"
                         "--shared"
                         (string-append "--openblas-root=" openblas)
                         (string-append "--fst-root=" openfst)))))
           ;; The default build target does not cover the online decoders
           ;; nor the GStreamer plugin; build them explicitly.
           (add-after 'build 'build-ext-and-gstreamer-plugin
             (lambda _
               (invoke "make" "-C" "online" "depend")
               (invoke "make" "-C" "online")
               (invoke "make" "-C" "onlinebin" "depend")
               (invoke "make" "-C" "onlinebin")
               (invoke "make" "-C" "gst-plugin" "depend")
               (invoke "make" "-C" "gst-plugin")
               #t))
           ;; TODO: also install the executables.
           (replace 'install
             (lambda* (#:key outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (inc (string-append out "/include"))
                      (lib (string-append out "/lib")))
                 (mkdir-p lib)
                 ;; The build phase installed symlinks to the actual
                 ;; libraries.  Install the actual targets.
                 (for-each (lambda (file)
                             (let ((target (readlink file)))
                               (delete-file file)
                               (install-file target lib)))
                           (find-files lib "\\.so"))
                 ;; Install headers, preserving their directory layout
                 ;; below "include".
                 (for-each (lambda (file)
                             (let ((target-dir (string-append inc "/" (dirname file))))
                               (install-file file target-dir)))
                           (find-files "." "\\.h"))
                 ;; Install the GStreamer plugin where GStreamer looks for
                 ;; plugins.
                 (install-file "gst-plugin/libgstonlinegmmdecodefaster.so"
                               (string-append lib "/gstreamer-1.0"))
                 #t))))))
      (inputs
       `(("alsa-lib" ,alsa-lib)
         ("gfortran" ,gfortran "lib")
         ("glib" ,glib)
         ("gstreamer" ,gstreamer)
         ("jack" ,jack-1)
         ("openblas" ,openblas)
         ("openfst" ,openfst)
         ("portaudio" ,portaudio)
         ("python" ,python)))
      (native-inputs
       `(("glib" ,glib "bin") ; glib-genmarshal
         ("grep" ,grep)
         ("sed" ,sed)
         ("pkg-config" ,pkg-config)
         ("which" ,which)))
      (home-page "https://kaldi-asr.org/")
      (synopsis "Speech recognition toolkit")
      (description "Kaldi is an extensible toolkit for speech recognition
written in C++.")
      (license license:asl2.0))))
1168
(define-public gst-kaldi-nnet2-online
  ;; No releases upstream; build from a pinned commit.
  (let ((commit "617e43e73c7cc45eb9119028c02bd4178f738c4a")
        (revision "1"))
    (package
      (name "gst-kaldi-nnet2-online")
      (version (git-version "0" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/alumae/gst-kaldi-nnet2-online.git")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "0xh3w67b69818s6ib02ara4lw7wamjdmh4jznvkpzrs4skbs9jx9"))))
      (build-system gnu-build-system)
      (arguments
       `(#:tests? #f ; there are none
         ;; The Makefile needs both the Kaldi *source* tree (KALDI_ROOT,
         ;; taken from the "kaldi-src" native input) and the directory
         ;; holding the installed Kaldi shared libraries (KALDILIBDIR).
         #:make-flags
         (list (string-append "SHELL="
                              (assoc-ref %build-inputs "bash") "/bin/bash")
               (string-append "KALDI_ROOT="
                              (assoc-ref %build-inputs "kaldi-src"))
               (string-append "KALDILIBDIR="
                              (assoc-ref %build-inputs "kaldi") "/lib")
               "KALDI_FLAVOR=dynamic")
         #:phases
         (modify-phases %standard-phases
           ;; The Makefile lives in the "src" sub-directory.
           (add-after 'unpack 'chdir
             (lambda _ (chdir "src") #t))
           ;; There is no configure script; set up the compiler environment
           ;; and neutralize Makefile checks that would otherwise abort the
           ;; build.
           (replace 'configure
             (lambda* (#:key inputs #:allow-other-keys)
               (let ((glib (assoc-ref inputs "glib")))
                 (setenv "CXXFLAGS" "-fPIC")
                 (setenv "CPLUS_INCLUDE_PATH"
                         (string-append glib "/include/glib-2.0:"
                                        glib "/lib/glib-2.0/include:"
                                        (assoc-ref inputs "gstreamer")
                                        "/include/gstreamer-1.0")))
               (substitute* "Makefile"
                 (("include \\$\\(KALDI_ROOT\\)/src/kaldi.mk") "")
                 (("\\$\\(error Cannot find") "#"))
               #t))
           ;; The "depend" target must run before the default build target.
           (add-before 'build 'build-depend
             (lambda* (#:key make-flags #:allow-other-keys)
               (apply invoke "make" "depend" make-flags)))
           ;; Install the single plugin library where GStreamer expects
           ;; plugins.
           (replace 'install
             (lambda* (#:key outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (lib (string-append out "/lib/gstreamer-1.0")))
                 (install-file "libgstkaldinnet2onlinedecoder.so" lib)
                 #t))))))
      (inputs
       `(("glib" ,glib)
         ("gstreamer" ,gstreamer)
         ("jansson" ,jansson)
         ("openfst" ,openfst)
         ("kaldi" ,kaldi)))
      (native-inputs
       `(("bash" ,bash)
         ("glib:bin" ,glib "bin") ; glib-genmarshal
         ("kaldi-src" ,(package-source kaldi))
         ("pkg-config" ,pkg-config)))
      (home-page "https://kaldi-asr.org/")
      (synopsis "Gstreamer plugin for decoding speech")
      (description "This package provides a GStreamer plugin that wraps
Kaldi's @code{SingleUtteranceNnet2Decoder}. It requires iVector-adapted DNN
acoustic models. The iVectors are adapted to the current audio stream
automatically.")
      (license license:asl2.0))))
1239
(define-public kaldi-gstreamer-server
  ;; No releases upstream; build from a pinned commit.
  (let ((commit "1735ba49c5dc0ebfc184e45105fc600cd9f1f508")
        (revision "1"))
    (package
      (name "kaldi-gstreamer-server")
      (version (git-version "0" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/alumae/kaldi-gstreamer-server.git")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "0j701m7lbwmzqxsfanj882v7881hrbmpqybbczbxqpcbg8q34w0k"))))
      (build-system gnu-build-system)
      (arguments
       `(#:tests? #f ; there are no tests that can be run automatically
         #:modules ((guix build utils)
                    (guix build gnu-build-system)
                    (srfi srfi-26))
         #:phases
         (modify-phases %standard-phases
           (delete 'configure)
           ;; There is nothing to compile; byte-compile the Python sources
           ;; instead.
           (replace 'build
             (lambda* (#:key outputs #:allow-other-keys)
               ;; Disable hash randomization to ensure the generated .pycs
               ;; are reproducible.
               (setenv "PYTHONHASHSEED" "0")
               (with-directory-excursion "kaldigstserver"
                 (for-each (lambda (file)
                             (apply invoke
                                    `("python"
                                      "-m" "compileall"
                                      "-f" ; force rebuild
                                      ,file)))
                           (find-files "." "\\.py$")))
               #t))
           (replace 'install
             (lambda* (#:key inputs outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (bin (string-append out "/bin"))
                      (share (string-append out "/share/kaldi-gstreamer-server/")))
                 ;; Install Python files
                 (with-directory-excursion "kaldigstserver"
                   (for-each (cut install-file <> share)
                             (find-files "." ".*")))

                 ;; Install sample configuration files
                 (for-each (cut install-file <> share)
                           (find-files "." "\\.yaml"))

                 ;; Install executables
                 (mkdir-p bin)
                 (let* ((server (string-append bin "/kaldi-gst-server"))
                        (client (string-append bin "/kaldi-gst-client"))
                        (worker (string-append bin "/kaldi-gst-worker"))
                        (PYTHONPATH (getenv "PYTHONPATH"))
                        ;; "${GST_PLUGIN_PATH}" is expanded by the shell at
                        ;; wrapper run time, not here.
                        (GST_PLUGIN_PATH (string-append
                                          (assoc-ref inputs "gst-kaldi-nnet2-online")
                                          "/lib/gstreamer-1.0:${GST_PLUGIN_PATH}"))
                        ;; Write a small shell wrapper that exports the
                        ;; environment and execs the given script from SHARE.
                        (wrap (lambda (wrapper what)
                                (with-output-to-file wrapper
                                  (lambda _
                                    (format #t
                                            "#!~a
export PYTHONPATH=~a
export GST_PLUGIN_PATH=~a
exec ~a ~a/~a \"$@\"~%"
                                            (which "bash") PYTHONPATH GST_PLUGIN_PATH
                                            (which "python") share what)))
                                (chmod wrapper #o555))))
                   (for-each wrap
                             (list server client worker)
                             (list "master_server.py"
                                   "client.py"
                                   "worker.py")))
                 #t))))))
      (inputs
       `(("gst-kaldi-nnet2-online" ,gst-kaldi-nnet2-online)
         ("python2" ,python-2)
         ("python2-futures" ,python2-futures)
         ("python2-pygobject" ,python2-pygobject)
         ("python2-pyyaml" ,python2-pyyaml)
         ("python2-tornado" ,python2-tornado)
         ("python2-ws4py" ,python2-ws4py-for-kaldi-gstreamer-server)))
      (home-page "https://github.com/alumae/kaldi-gstreamer-server")
      (synopsis "Real-time full-duplex speech recognition server")
      (description "This is a real-time full-duplex speech recognition server,
based on the Kaldi toolkit and the GStreamer framework and implemented in
Python.")
      (license license:bsd-2))))
1332
1333 ;; Note that Tensorflow includes a "third_party" directory, which seems to not
1334 ;; only contain modified subsets of upstream library source code, but also
1335 ;; adapter headers provided by Google (such as the fft.h header, which is not
1336 ;; part of the upstream project code). The Tensorflow code includes headers
1337 ;; from the "third_party" directory. It does not look like we can replace
1338 ;; these headers with unmodified upstream files, so we keep them.
(define-public tensorflow
  (package
    (name "tensorflow")
    (version "1.9.0")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/tensorflow/tensorflow.git")
             (commit (string-append "v" version))))
       (file-name (string-append "tensorflow-" version "-checkout"))
       (sha256
        (base32
         "0a9kwha395g3wgxfwln5j8vn9nkspmd75xldrlqdq540w996g8xa"))))
    (build-system cmake-build-system)
    (arguments
     `(#:tests? #f ; no "check" target
       #:build-type "Release"
       #:configure-flags
       (let ((protobuf (assoc-ref %build-inputs "protobuf"))
             (protobuf:native (assoc-ref %build-inputs "protobuf:native"))
             (jsoncpp (assoc-ref %build-inputs "jsoncpp"))
             (snappy (assoc-ref %build-inputs "snappy"))
             (sqlite (assoc-ref %build-inputs "sqlite")))
         (list
          ;; Use protobuf from Guix
          (string-append "-Dprotobuf_STATIC_LIBRARIES="
                         protobuf "/lib/libprotobuf.so")
          (string-append "-DPROTOBUF_PROTOC_EXECUTABLE="
                         protobuf:native "/bin/protoc")

          ;; Use snappy from Guix
          (string-append "-Dsnappy_STATIC_LIBRARIES="
                         snappy "/lib/libsnappy.so")
          ;; Yes, this is not actually the include directory but a prefix...
          (string-append "-Dsnappy_INCLUDE_DIR=" snappy)

          ;; Use jsoncpp from Guix
          (string-append "-Djsoncpp_STATIC_LIBRARIES="
                         jsoncpp "/lib/libjsoncpp.so")
          ;; Yes, this is not actually the include directory but a prefix...
          (string-append "-Djsoncpp_INCLUDE_DIR=" jsoncpp)

          ;; Use sqlite from Guix
          (string-append "-Dsqlite_STATIC_LIBRARIES="
                         sqlite "/lib/libsqlite.a")

          ;; Use system libraries wherever possible.  Currently, this
          ;; only affects zlib.
          "-Dsystemlib_ALL=ON"
          "-Dtensorflow_ENABLE_POSITION_INDEPENDENT_CODE=ON"
          "-Dtensorflow_BUILD_SHARED_LIB=ON"
          "-Dtensorflow_OPTIMIZE_FOR_NATIVE_ARCH=OFF"
          "-Dtensorflow_ENABLE_SSL_SUPPORT=OFF"
          "-Dtensorflow_BUILD_CONTRIB_KERNELS=OFF"))
       #:make-flags
       (list "CC=gcc")
       #:modules ((ice-9 ftw)
                  (guix build utils)
                  (guix build cmake-build-system))
       #:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'set-source-file-times-to-1980
           ;; At the end of the tf_python_build_pip_package target, a ZIP
           ;; archive should be generated via bdist_wheel, but it fails with
           ;; "ZIP does not support timestamps before 1980".  Luckily,
           ;; SOURCE_DATE_EPOCH is respected, which we set to some time in
           ;; 1980.
           (lambda _ (setenv "SOURCE_DATE_EPOCH" "315532800") #t))
         ;; See https://github.com/tensorflow/tensorflow/issues/20517#issuecomment-406373913
         (add-after 'unpack 'python3.7-compatibility
           (lambda _
             (substitute* '("tensorflow/python/eager/pywrap_tfe_src.cc"
                            "tensorflow/python/lib/core/ndarray_tensor.cc"
                            "tensorflow/python/lib/core/py_func.cc")
               (("PyUnicode_AsUTF8") "(char *)PyUnicode_AsUTF8"))
             ;; "async" is a reserved keyword as of Python 3.7.
             (substitute* "tensorflow/c/eager/c_api.h"
               (("unsigned char async")
                "unsigned char is_async"))

             ;; Remove dependency on tensorboard, a complicated but probably
             ;; optional package.
             (substitute* "tensorflow/tools/pip_package/setup.py"
               ((".*'tensorboard >.*") ""))
             #t))
         ;; The CMake-based build system lives in "tensorflow/contrib/cmake".
         (add-after 'python3.7-compatibility 'chdir
           (lambda _ (chdir "tensorflow/contrib/cmake") #t))
         (add-after 'chdir 'disable-downloads
           (lambda* (#:key inputs #:allow-other-keys)
             ;; Strip the GIT_REPOSITORY/GIT_TAG download directives so no
             ;; external project is fetched from the network at build time.
             (substitute* (find-files "external" "\\.cmake$")
               (("GIT_REPOSITORY.*") "")
               (("GIT_TAG.*") "")
               (("PREFIX ")
                "DOWNLOAD_COMMAND \"\"\nPREFIX "))

             ;; Use packages from Guix
             (let ((grpc (assoc-ref inputs "grpc")))
               (substitute* "CMakeLists.txt"
                 ;; Sqlite
                 (("include\\(sqlite\\)") "")
                 (("\\$\\{sqlite_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "sqlite")
                                 "/lib/libsqlite3.so"))
                 (("sqlite_copy_headers_to_destination") "")

                 ;; PNG
                 (("include\\(png\\)") "")
                 (("\\$\\{png_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "libpng")
                                 "/lib/libpng16.so"))
                 (("png_copy_headers_to_destination") "")

                 ;; JPEG
                 (("include\\(jpeg\\)") "")
                 (("\\$\\{jpeg_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "libjpeg")
                                 "/lib/libjpeg.so"))
                 (("jpeg_copy_headers_to_destination") "")

                 ;; GIF
                 (("include\\(gif\\)") "")
                 (("\\$\\{gif_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "giflib")
                                 "/lib/libgif.so"))
                 (("gif_copy_headers_to_destination") "")

                 ;; lmdb
                 (("include\\(lmdb\\)") "")
                 (("\\$\\{lmdb_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "lmdb")
                                 "/lib/liblmdb.so"))
                 (("lmdb_copy_headers_to_destination") "")

                 ;; Protobuf
                 (("include\\(protobuf\\)") "")
                 (("protobuf_copy_headers_to_destination") "")
                 (("^ +protobuf") "")

                 ;; gRPC
                 (("include\\(grpc\\)")
                  "find_package(grpc REQUIRED NAMES gRPC)")
                 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES grpc\\)") "")

                 ;; Eigen
                 (("include\\(eigen\\)")
                  (string-append "find_package(eigen REQUIRED NAMES Eigen3)
set(eigen_INCLUDE_DIRS ${CMAKE_CURRENT_BINARY_DIR}/external/eigen_archive "
                                 (assoc-ref inputs "eigen") "/include/eigen3)"))
                 (("^ +eigen") "")

                 ;; snappy
                 (("include\\(snappy\\)")
                  "add_definitions(-DTF_USE_SNAPPY)")
                 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES snappy\\)") "")

                 ;; jsoncpp
                 (("include\\(jsoncpp\\)") "")
                 (("^ +jsoncpp") ""))

               (substitute* "tf_core_framework.cmake"
                 ((" grpc") "")
                 (("\\$\\{GRPC_BUILD\\}/grpc_cpp_plugin")
                  (which "grpc_cpp_plugin"))
                 ;; Link with gRPC libraries
                 (("add_library\\(tf_protos_cc.*" m)
                  (string-append m
                                 (format #f "\ntarget_link_libraries(tf_protos_cc PRIVATE \
~a/lib/libgrpc++_unsecure.a \
~a/lib/libgrpc_unsecure.a \
~a/lib/libaddress_sorting.a \
~a/lib/libgpr.a \
~a//lib/libcares.so
)\n"
                                         grpc grpc grpc grpc
                                         (assoc-ref inputs "c-ares"))))))
             (substitute* "tf_tools.cmake"
               (("add_dependencies\\(\\$\\{proto_text.*") ""))
             ;; Remove dependency on bundled grpc
             (substitute* "tf_core_distributed_runtime.cmake"
               (("tf_core_cpu grpc") "tf_core_cpu"))

             ;; This directory is a dependency of many targets.
             (mkdir-p "protobuf")
             #t))
         (add-after 'configure 'unpack-third-party-sources
           (lambda* (#:key inputs #:allow-other-keys)
             ;; This is needed to configure bundled packages properly.
             (setenv "CONFIG_SHELL" (which "bash"))
             ;; Unpack each "NAME-src" native input into the location the
             ;; external-project machinery would have downloaded it to,
             ;; mapping dashes in NAME to underscores.
             (for-each
              (lambda (name)
                (let* ((what (assoc-ref inputs (string-append name "-src")))
                       (name* (string-map (lambda (c)
                                            (if (char=? c #\-)
                                                #\_ c)) name))
                       (where (string-append "../build/" name* "/src/" name*)))
                  (cond
                   ((string-suffix? ".zip" what)
                    (mkdir-p where)
                    (with-directory-excursion where
                      (invoke "unzip" what)))
                   ((string-suffix? ".tar.gz" what)
                    (mkdir-p where)
                    (invoke "tar" "xf" what
                            "-C" where "--strip-components=1"))
                   (else
                    ;; A plain checkout directory: copy it and make the
                    ;; copy writable so the build can patch it.
                    (let ((parent (dirname where)))
                      (mkdir-p parent)
                      (with-directory-excursion parent
                        (when (file-exists? name*)
                          (delete-file-recursively name*))
                        (copy-recursively what name*)
                        (map make-file-writable
                             (find-files name* ".*"))))))))
              (list "boringssl"
                    "cub"
                    "double-conversion"
                    "farmhash"
                    "fft2d"
                    "highwayhash"
                    "nsync"
                    "re2"))

             ;; The cub zip archive unpacks into a versioned sub-directory;
             ;; move it to the unversioned name the build expects.
             (rename-file "../build/cub/src/cub/cub-1.8.0/"
                          "../build/cub/src/cub/cub/")
             #t))
         (add-after 'unpack 'fix-python-build
           (lambda* (#:key inputs outputs #:allow-other-keys)
             (mkdir-p "protobuf-src")
             (invoke "tar" "xf" (assoc-ref inputs "protobuf:src")
                     "-C" "protobuf-src" "--strip-components=1")
             (mkdir-p "eigen-src")
             (invoke "tar" "xf" (assoc-ref inputs "eigen:src")
                     "-C" "eigen-src" "--strip-components=1")

             (substitute* "tensorflow/contrib/cmake/tf_python.cmake"
               ;; Ensure that all Python dependencies can be found at build time.
               (("PYTHONPATH=\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/tf_python" m)
                (string-append m ":" (getenv "PYTHONPATH")))
               ;; Take protobuf source files from our source package.
               (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/protobuf/src/protobuf/src/google")
                (string-append (getcwd) "/protobuf-src/src/google")))

             (substitute* '("tensorflow/contrib/cmake/tf_shared_lib.cmake"
                            "tensorflow/contrib/cmake/tf_python.cmake")
               ;; Take Eigen source files from our source package.
               (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/eigen/src/eigen/")
                (string-append (getcwd) "/eigen-src/"))
               ;; Take Eigen headers from our own package.
               (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/external/eigen_archive")
                (string-append (assoc-ref inputs "eigen") "/include/eigen3")))

             ;; Correct the RUNPATH of ops libraries generated for Python.
             ;; TODO: this doesn't work :(
             ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
             ;; warning: RUNPATH contains bogus entries: ("/tmp/guix-build-tensorflow-1.9.0.drv-0/source/tensorflow/contrib/build")
             ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
             ;; error: depends on 'libpywrap_tensorflow_internal.so', which
             ;; cannot be found in RUNPATH ...
             (substitute* "tensorflow/contrib/cmake/tf_cc_ops.cmake"
               (("set_target_properties.*")
                (string-append "set_target_properties(${_AT_TARGET} PROPERTIES \
COMPILE_FLAGS ${target_compile_flags} \
INSTALL_RPATH_USE_LINK_PATH TRUE \
INSTALL_RPATH " (assoc-ref outputs "out") "/lib)\n")))
             #t))
         (add-after 'build 'build-pip-package
           (lambda* (#:key outputs #:allow-other-keys)
             (setenv "LDFLAGS"
                     (string-append "-Wl,-rpath="
                                    (assoc-ref outputs "out") "/lib"))
             (invoke "make" "tf_python_build_pip_package")
             #t))
         (add-after 'build-pip-package 'install-python
           (lambda* (#:key outputs #:allow-other-keys)
             (let ((out (assoc-ref outputs "out"))
                   (wheel (car (find-files "../build/tf_python/dist/" "\\.whl$"))))
               (invoke "python" "-m" "pip" "install" wheel
                       (string-append "--prefix=" out))

               ;; XXX: broken RUNPATH, see fix-python-build phase.
               (delete-file
                (string-append
                 out "/lib/python3.7/site-packages/tensorflow/contrib/"
                 "seq2seq/python/ops/lib_beam_search_ops.so"))
               #t))))))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("protobuf:native" ,protobuf-3.6) ; protoc
       ("protobuf:src" ,(package-source protobuf-3.6))
       ("eigen:src" ,(package-source eigen-for-tensorflow))
       ;; install_pip_packages.sh wants setuptools 39.1.0 specifically.
       ("python-setuptools" ,python-setuptools-for-tensorflow)

       ;; The commit hashes and URLs for third-party source code are taken
       ;; from "tensorflow/workspace.bzl".
       ("boringssl-src"
        ,(let ((commit "ee7aa02")
               (revision "1"))
           (origin
             (method git-fetch)
             (uri (git-reference
                   (url "https://boringssl.googlesource.com/boringssl")
                   (commit commit)))
             (file-name (string-append "boringssl-0-" revision
                                       (string-take commit 7)
                                       "-checkout"))
             (sha256
              (base32
               "1jf693q0nw0adsic6cgmbdx6g7wr4rj4vxa8j1hpn792fqhd8wgw")))))
       ("cub-src"
        ,(let ((version "1.8.0"))
           (origin
             (method url-fetch)
             (uri (string-append "https://mirror.bazel.build/github.com/NVlabs/"
                                 "cub/archive/" version ".zip"))
             (file-name (string-append "cub-" version ".zip"))
             (sha256
              (base32
               "1hsqikqridb90dkxkjr2918dcry6pfh46ccnwrzawl56aamhdykb")))))
       ("double-conversion-src"
        ,(let ((commit "5664746")
               (revision "1"))
           (origin
             (method git-fetch)
             (uri (git-reference
                   (url "https://github.com/google/double-conversion.git")
                   (commit commit)))
             (file-name
              (git-file-name "double-conversion"
                             (string-append "0-" revision "."
                                            (string-take commit 7))))
             (sha256
              (base32
               "1h5lppqqxcvdg5jq42i5msgwx20ryij3apvmndflngrgdpc04gn1")))))
       ("farmhash-src"
        ,(let ((commit "816a4ae622e964763ca0862d9dbd19324a1eaf45"))
           (origin
             (method url-fetch)
             (uri (string-append
                   "https://mirror.bazel.build/github.com/google/farmhash/archive/"
                   commit ".tar.gz"))
             (file-name (string-append "farmhash-0-" (string-take commit 7)
                                       ".tar.gz"))
             (sha256
              (base32
               "185b2xdxl4d4cnsnv6abg8s22gxvx8673jq2yaq85bz4cdy58q35")))))
       ;; The license notice on the home page at
       ;; http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html says:
       ;;   Copyright Takuya OOURA, 1996-2001
       ;;
       ;;   You may use, copy, modify and distribute this code for any purpose
       ;;   (include commercial use) and without fee. Please refer to this
       ;;   package when you modify this code.
       ;;
       ;; We take the identical tarball from the Bazel mirror, because the URL
       ;; at the home page is not versioned and might change.
       ("fft2d-src"
        ,(origin
           (method url-fetch)
           (uri "https://mirror.bazel.build/www.kurims.kyoto-u.ac.jp/~ooura/fft.tgz")
           (file-name "fft2d.tar.gz")
           (sha256
            (base32
             "15jjkfvhqvl2c0753d2di8hz0pyzn598g74wqy79awdrf1y67fsj"))))
       ("highwayhash-src"
        ,(let ((commit "be5edafc2e1a455768e260ccd68ae7317b6690ee")
               (revision "1"))
           (origin
             (method git-fetch)
             (uri (git-reference
                   (url "https://github.com/google/highwayhash.git")
                   (commit commit)))
             (file-name (string-append "highwayhash-0-" revision
                                       (string-take commit 7)
                                       "-checkout"))
             (sha256
              (base32
               "154jwf98cyy54hldr94pgjn85zynly3abpnc1avmb8a18lzwjyb6")))))
       ("nsync-src"
        ,(let ((version "0559ce013feac8db639ee1bf776aca0325d28777")
               (revision "1"))
           (origin
             (method url-fetch)
             (uri (string-append "https://mirror.bazel.build/"
                                 "github.com/google/nsync/archive/"
                                 version ".tar.gz"))
             (file-name (string-append "nsync-0." revision
                                       "-" (string-take version 7)
                                       ".tar.gz"))
             (sha256
              (base32
               "0qdkyqym34x739mmzv97ah5r7ph462v5xkxqxvidmcfqbi64b132")))))
       ("re2-src"
        ,(let ((commit "e7efc48")
               (revision "1"))
           (origin
             (method git-fetch)
             (uri (git-reference
                   (url "https://github.com/google/re2")
                   (commit commit)))
             (file-name (string-append "re2-0-" revision
                                       (string-take commit 7)
                                       "-checkout"))
             (sha256
              (base32
               "161g9841rjfsy5pn52fcis0s9hdr7rxvb06pad38j5rppfihvign")))))
       ("googletest" ,googletest)
       ("swig" ,swig)
       ("unzip" ,unzip)))
    ;; Python run-time dependencies of the generated pip package; they are
    ;; propagated so they end up on PYTHONPATH alongside tensorflow.
    (propagated-inputs
     `(("python-absl-py" ,python-absl-py)
       ("python-astor" ,python-astor)
       ("python-gast" ,python-gast)
       ("python-grpcio" ,python-grpcio)
       ("python-numpy" ,python-numpy)
       ("python-protobuf" ,python-protobuf-3.6)
       ("python-six" ,python-six)
       ("python-termcolor" ,python-termcolor)
       ("python-wheel" ,python-wheel)))
    (inputs
     `(("c-ares" ,c-ares)
       ("eigen" ,eigen-for-tensorflow)
       ("gemmlowp" ,gemmlowp-for-tensorflow)
       ("lmdb" ,lmdb)
       ("libjpeg" ,libjpeg)
       ("libpng" ,libpng)
       ("giflib" ,giflib)
       ("grpc" ,grpc-1.16.1 "static")
       ("grpc:bin" ,grpc-1.16.1)
       ("jsoncpp" ,jsoncpp-for-tensorflow)
       ("snappy" ,snappy)
       ("sqlite" ,sqlite)
       ("protobuf" ,protobuf-3.6)
       ("python" ,python-wrapper)
       ("zlib" ,zlib)))
    (home-page "https://tensorflow.org")
    (synopsis "Machine learning framework")
    (description
     "TensorFlow is a flexible platform for building and training machine
learning models. It provides a library for high performance numerical
computation and includes high level Python APIs, including both a sequential
API for beginners that allows users to build models quickly by plugging
together building blocks and a subclassing API with an imperative style for
advanced research.")
    (license license:asl2.0)))
1784
;; Interpretable ML helper library, a dependency of SHAP-style explainers.
(define-public python-iml
  (package
    (name "python-iml")
    (version "0.6.2")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "iml" version))
       (sha256
        (base32
         "1k8szlpm19rcwcxdny9qdm3gmaqq8akb4xlvrzyz8c2d679aak6l"))))
    (build-system python-build-system)
    (propagated-inputs
     `(("ipython" ,python-ipython)
       ("nose" ,python-nose)
       ("numpy" ,python-numpy)
       ("pandas" ,python-pandas)
       ("scipy" ,python-scipy)))
    ;; Use HTTPS: the plain http:// URL is a permanent redirect, which
    ;; 'guix lint' reports.
    (home-page "https://github.com/interpretable-ml/iml")
    (synopsis "Interpretable Machine Learning (iML) package")
    (description "Interpretable ML (iML) is a set of data type objects,
visualizations, and interfaces that can be used by any method designed to
explain the predictions of machine learning models (or really the output of
any function). It currently contains the interface and IO code from the Shap
project, and it will potentially also do the same for the Lime project.")
    (license license:expat)))
1811
;; Pretrained/reference model architectures split out of Keras proper.
(define-public python-keras-applications
  (package
    (name "python-keras-applications")
    (version "1.0.8")
    (source
     (origin
       (method url-fetch)
       ;; Note: the PyPI project name uses an underscore.
       (uri (pypi-uri "Keras_Applications" version))
       (sha256
        (base32
         "1rcz31ca4axa6kzhjx4lwqxbg4wvlljkj8qj9a7p9sfd5fhzjyam"))))
    (build-system python-build-system)
    ;; The tests require Keras, but this package is needed to build Keras.
    (arguments '(#:tests? #f))
    (propagated-inputs
     `(("python-h5py" ,python-h5py)
       ("python-numpy" ,python-numpy)))
    ;; Test-only tools; currently unused because #:tests? is #f, kept for
    ;; when the dependency cycle with Keras can be broken.
    (native-inputs
     `(("python-pytest" ,python-pytest)
       ("python-pytest-cov" ,python-pytest-cov)
       ("python-pytest-pep8" ,python-pytest-pep8)
       ("python-pytest-xdist" ,python-pytest-xdist)))
    (home-page "https://github.com/keras-team/keras-applications")
    (synopsis "Reference implementations of popular deep learning models")
    (description
     "This package provides reference implementations of popular deep learning
models for use with the Keras deep learning framework.")
    (license license:expat)))
1840
;; Data loading/augmentation utilities split out of Keras proper.
(define-public python-keras-preprocessing
  (package
    (name "python-keras-preprocessing")
    (version "1.1.0")
    (source
     (origin
       (method url-fetch)
       ;; Note: the PyPI project name uses an underscore.
       (uri (pypi-uri "Keras_Preprocessing" version))
       (sha256
        (base32
         "1r98nm4k1svsqjyaqkfk23i31bl1kcfcyp7094yyj3c43phfp3as"))))
    (build-system python-build-system)
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-six" ,python-six)))
    ;; Tests exercise the image/text/sequence helpers against a real
    ;; backend, hence the tensorflow native input.
    (native-inputs
     `(("python-pandas" ,python-pandas)
       ("python-pillow" ,python-pillow)
       ("python-pytest" ,python-pytest)
       ("python-pytest-cov" ,python-pytest-cov)
       ("python-pytest-xdist" ,python-pytest-xdist)
       ("tensorflow" ,tensorflow)))
    (home-page "https://github.com/keras-team/keras-preprocessing/")
    (synopsis "Data preprocessing and augmentation for deep learning models")
    (description
     "Keras Preprocessing is the data preprocessing and data augmentation
module of the Keras deep learning library. It provides utilities for working
with image data, text data, and sequence data.")
    (license license:expat)))
1870
;; Keras proper, built against the TensorFlow backend only (the Theano and
;; CNTK backends are removed below since those packages are unavailable).
(define-public python-keras
  (package
    (name "python-keras")
    (version "2.2.4")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "Keras" version))
       (patches (search-patches "python-keras-integration-test.patch"))
       (sha256
        (base32
         "1j8bsqzh49vjdxy6l1k4iwax5vpjzniynyd041xjavdzvfii1dlh"))))
    (build-system python-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'remove-tests-for-unavailable-features
           (lambda _
             ;; Only the TensorFlow backend is packaged; drop the others
             ;; and the backend-comparison test that needs all of them.
             (delete-file "keras/backend/theano_backend.py")
             (delete-file "keras/backend/cntk_backend.py")
             (delete-file "tests/keras/backend/backend_test.py")

             ;; FIXME: This doesn't work because Tensorflow is missing the
             ;; coder ops library.
             (delete-file "tests/keras/test_callbacks.py")
             #t))
         (replace 'check
           (lambda _
             ;; These tests attempt to download data files from the internet.
             (delete-file "tests/integration_tests/test_datasets.py")
             (delete-file "tests/integration_tests/imagenet_utils_test.py")

             ;; Make the freshly built modules importable by the test suite.
             (setenv "PYTHONPATH"
                     (string-append (getcwd) "/build/lib:"
                                    (getenv "PYTHONPATH")))
             (invoke "py.test" "-v"
                     "-p" "no:cacheprovider"
                     "--ignore" "keras/utils"))))))
    (propagated-inputs
     `(("python-h5py" ,python-h5py)
       ("python-keras-applications" ,python-keras-applications)
       ("python-keras-preprocessing" ,python-keras-preprocessing)
       ("python-numpy" ,python-numpy)
       ("python-pydot" ,python-pydot)
       ("python-pyyaml" ,python-pyyaml)
       ("python-scipy" ,python-scipy)
       ("python-six" ,python-six)
       ("tensorflow" ,tensorflow)
       ("graphviz" ,graphviz)))
    (native-inputs
     `(("python-pandas" ,python-pandas)
       ("python-pytest" ,python-pytest)
       ("python-pytest-cov" ,python-pytest-cov)
       ("python-pytest-pep8" ,python-pytest-pep8)
       ("python-pytest-timeout" ,python-pytest-timeout)
       ("python-pytest-xdist" ,python-pytest-xdist)
       ("python-sphinx" ,python-sphinx)
       ("python-requests" ,python-requests)))
    (home-page "https://github.com/keras-team/keras")
    (synopsis "High-level deep learning framework")
    (description "Keras is a high-level neural networks API, written in Python
and capable of running on top of TensorFlow. It was developed with a focus on
enabling fast experimentation. Use Keras if you need a deep learning library
that:

@itemize
@item Allows for easy and fast prototyping (through user friendliness,
modularity, and extensibility).
@item Supports both convolutional networks and recurrent networks, as well as
combinations of the two.
@item Runs seamlessly on CPU and GPU.
@end itemize\n")
    (license license:expat)))
1944
;; Upstream has no release tags, so the package is pinned to a commit and
;; versioned with 'git-version'.
(define-public sbcl-cl-libsvm-format
  (let ((commit "3300f84fd8d9f5beafc114f543f9d83417c742fb")
        (revision "0"))
    (package
      (name "sbcl-cl-libsvm-format")
      (version (git-version "0.1.0" revision commit))
      (source
       (origin
         (method git-fetch)
         (uri (git-reference
               (url "https://github.com/masatoi/cl-libsvm-format.git")
               (commit commit)))
         (file-name (git-file-name name version))
         (sha256
          (base32
           "0284aj84xszhkhlivaigf9qj855fxad3mzmv3zfr0qzb5k0nzwrg"))))
      (build-system asdf-build-system/sbcl)
      ;; Test framework only.
      (native-inputs
       `(("prove" ,sbcl-prove)
         ("prove-asdf" ,sbcl-prove-asdf)))
      (inputs
       `(("alexandria" ,sbcl-alexandria)))
      (synopsis "LibSVM data format reader for Common Lisp")
      (description
       "This Common Lisp library provides a fast reader for data in LibSVM
format.")
      (home-page "https://github.com/masatoi/cl-libsvm-format")
      (license license:expat))))
1973
;; Source-only (cl-*) variant derived from the SBCL package above.
(define-public cl-libsvm-format
  (sbcl-package->cl-source-package sbcl-cl-libsvm-format))
1976
;; ECL build derived from the SBCL package above.
(define-public ecl-cl-libsvm-format
  (sbcl-package->ecl-package sbcl-cl-libsvm-format))
1979
;; Pinned to a commit: upstream has no release tags past 0.5.
(define-public sbcl-cl-online-learning
  (let ((commit "fc7a34f4f161cd1c7dd747d2ed8f698947781423")
        (revision "0"))
    (package
      (name "sbcl-cl-online-learning")
      (version (git-version "0.5" revision commit))
      (source
       (origin
         (method git-fetch)
         (uri (git-reference
               (url "https://github.com/masatoi/cl-online-learning.git")
               (commit commit)))
         (file-name (git-file-name name version))
         (sha256
          (base32
           "14x95rlg80ay5hv645ki57pqvy12v28hz4k1w0f6bsfi2rmpxchq"))))
      (build-system asdf-build-system/sbcl)
      ;; Test framework only.
      (native-inputs
       `(("prove" ,sbcl-prove)
         ("prove-asdf" ,sbcl-prove-asdf)))
      (inputs
       `(("cl-libsvm-format" ,sbcl-cl-libsvm-format)
         ("cl-store" ,sbcl-cl-store)))
      (arguments
       `(;; FIXME: Tests pass but then the check phase crashes
         #:tests? #f))
      (synopsis "Online Machine Learning for Common Lisp")
      (description
       "This library contains a collection of machine learning algorithms for
online linear classification written in Common Lisp.")
      (home-page "https://github.com/masatoi/cl-online-learning")
      (license license:expat))))
2012
;; Source-only (cl-*) variant derived from the SBCL package above.
(define-public cl-online-learning
  (sbcl-package->cl-source-package sbcl-cl-online-learning))
2015
;; ECL build derived from the SBCL package above.
(define-public ecl-cl-online-learning
  (sbcl-package->ecl-package sbcl-cl-online-learning))
2018
;; Pinned to a commit: upstream has no release tags past 0.1.
(define-public sbcl-cl-random-forest
  (let ((commit "85fbdd4596d40e824f70f1b7cf239cf544e49d51")
        (revision "0"))
    (package
      (name "sbcl-cl-random-forest")
      (version (git-version "0.1" revision commit))
      (source
       (origin
         (method git-fetch)
         (uri (git-reference
               (url "https://github.com/masatoi/cl-random-forest.git")
               (commit commit)))
         (file-name (git-file-name name version))
         (sha256
          (base32
           "097xv60i1ndz68sg9p4pc7c5gvyp9i1xgw966b4wwfq3x6hbz421"))))
      (build-system asdf-build-system/sbcl)
      ;; Test framework and test-time helpers only.
      (native-inputs
       `(("prove" ,sbcl-prove)
         ("prove-asdf" ,sbcl-prove-asdf)
         ("trivial-garbage" ,sbcl-trivial-garbage)))
      (inputs
       `(("alexandria" ,sbcl-alexandria)
         ("cl-libsvm-format" ,sbcl-cl-libsvm-format)
         ("cl-online-learning" ,sbcl-cl-online-learning)
         ("lparallel" ,sbcl-lparallel)))
      (arguments
       `(;; The tests download data from the Internet
         #:tests? #f
         #:phases
         (modify-phases %standard-phases
           (add-after 'unpack 'add-sb-cltl2-dependency
             (lambda _
               ;; sb-cltl2 is required by lparallel when using sbcl, but it is
               ;; not loaded automatically.
               (substitute* "cl-random-forest.asd"
                 (("\\(in-package :cl-user\\)")
                  "(in-package :cl-user) #+sbcl (require :sb-cltl2)"))
               #t)))))
      (synopsis "Random Forest and Global Refinement for Common Lisp")
      (description
       "CL-random-forest is an implementation of Random Forest for multiclass
classification and univariate regression written in Common Lisp. It also
includes an implementation of Global Refinement of Random Forest.")
      (home-page "https://github.com/masatoi/cl-random-forest")
      (license license:expat))))
2065
;; Source-only (cl-*) variant derived from the SBCL package above.
(define-public cl-random-forest
  (sbcl-package->cl-source-package sbcl-cl-random-forest))
2068
;; ECL build derived from the SBCL package above.
(define-public ecl-cl-random-forest
  (sbcl-package->ecl-package sbcl-cl-random-forest))
2071
;; Facebook's collective-communication primitives; pinned to a commit since
;; upstream publishes no release tags.
(define-public gloo
  (let ((version "0.0.0") ; no proper version tag
        (commit "ca528e32fea9ca8f2b16053cff17160290fc84ce")
        (revision "0"))
    (package
      (name "gloo")
      (version (git-version version revision commit))
      (source
       (origin
         (method git-fetch)
         (uri (git-reference
               (url "https://github.com/facebookincubator/gloo.git")
               (commit commit)))
         (file-name (git-file-name name version))
         (sha256
          (base32
           "1q9f80zy75f6njrzrqkmhc0g3qxs4gskr7ns2jdqanxa2ww7a99w"))))
      (build-system cmake-build-system)
      (native-inputs
       `(("googletest" ,googletest)))
      (arguments
       `(#:configure-flags '("-DBUILD_TEST=1")
         #:phases
         (modify-phases %standard-phases
           ;; There is no conventional "check" target; build the test driver
           ;; instead.  Honor #:tests? so tests can be disabled, e.g. with
           ;; "--without-tests" or when cross-compiling.
           (replace 'check
             (lambda* (#:key tests? #:allow-other-keys)
               (when tests?
                 (invoke "make" "gloo_test"))
               #t)))))
      (synopsis "Collective communications library")
      (description
       "Gloo is a collective communications library. It comes with a
number of collective algorithms useful for machine learning applications.
These include a barrier, broadcast, and allreduce.")
      (home-page "https://github.com/facebookincubator/gloo")
      (license license:bsd-3))))
2107
;; UMAP dimension-reduction library (numba-accelerated).
(define-public python-umap-learn
  (package
    (name "python-umap-learn")
    (version "0.3.10")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "umap-learn" version))
       (sha256
        (base32
         "02ada2yy6km6zgk2836kg1c97yrcpalvan34p8c57446finnpki1"))))
    (build-system python-build-system)
    ;; Test-only dependencies.
    (native-inputs
     `(("python-joblib" ,python-joblib)
       ("python-nose" ,python-nose)))
    (propagated-inputs
     `(("python-numba" ,python-numba)
       ("python-numpy" ,python-numpy)
       ("python-scikit-learn" ,python-scikit-learn)
       ("python-scipy" ,python-scipy)))
    (home-page "https://github.com/lmcinnes/umap")
    (synopsis
     "Uniform Manifold Approximation and Projection")
    (description
     "Uniform Manifold Approximation and Projection is a dimension reduction
technique that can be used for visualisation similarly to t-SNE, but also for
general non-linear dimension reduction.")
    (license license:bsd-3)))