gnu: Add bats.
[jackhill/guix/guix.git] / gnu / packages / machine-learning.scm
1 ;;; GNU Guix --- Functional package management for GNU
2 ;;; Copyright © 2015, 2016, 2017, 2018, 2019, 2020 Ricardo Wurmus <rekado@elephly.net>
3 ;;; Copyright © 2016, 2020 Efraim Flashner <efraim@flashner.co.il>
4 ;;; Copyright © 2016, 2017, 2020 Marius Bakke <mbakke@fastmail.com>
5 ;;; Copyright © 2016 Hartmut Goebel <h.goebel@crazy-compilers.com>
6 ;;; Copyright © 2018, 2019, 2020 Tobias Geerinckx-Rice <me@tobias.gr>
7 ;;; Copyright © 2018 Kei Kebreau <kkebreau@posteo.net>
8 ;;; Copyright © 2018 Mark Meyer <mark@ofosos.org>
9 ;;; Copyright © 2018 Ben Woodcroft <donttrustben@gmail.com>
10 ;;; Copyright © 2018 Fis Trivial <ybbs.daans@hotmail.com>
11 ;;; Copyright © 2018 Julien Lepiller <julien@lepiller.eu>
12 ;;; Copyright © 2018 Björn Höfling <bjoern.hoefling@bjoernhoefling.de>
13 ;;; Copyright © 2019 Nicolas Goaziou <mail@nicolasgoaziou.fr>
14 ;;; Copyright © 2019, 2020 Guillaume Le Vaillant <glv@posteo.net>
15 ;;; Copyright © 2019 Brett Gilio <brettg@gnu.org>
16 ;;; Copyright © 2020 Konrad Hinsen <konrad.hinsen@fastmail.net>
17 ;;; Copyright © 2020 Edouard Klein <edk@beaver-labs.com>
18 ;;;
19 ;;; This file is part of GNU Guix.
20 ;;;
21 ;;; GNU Guix is free software; you can redistribute it and/or modify it
22 ;;; under the terms of the GNU General Public License as published by
23 ;;; the Free Software Foundation; either version 3 of the License, or (at
24 ;;; your option) any later version.
25 ;;;
26 ;;; GNU Guix is distributed in the hope that it will be useful, but
27 ;;; WITHOUT ANY WARRANTY; without even the implied warranty of
28 ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
29 ;;; GNU General Public License for more details.
30 ;;;
31 ;;; You should have received a copy of the GNU General Public License
32 ;;; along with GNU Guix. If not, see <http://www.gnu.org/licenses/>.
33
34 (define-module (gnu packages machine-learning)
35 #:use-module ((guix licenses) #:prefix license:)
36 #:use-module (guix packages)
37 #:use-module (guix utils)
38 #:use-module (guix download)
39 #:use-module (guix svn-download)
40 #:use-module (guix build-system asdf)
41 #:use-module (guix build-system cmake)
42 #:use-module (guix build-system gnu)
43 #:use-module (guix build-system ocaml)
44 #:use-module (guix build-system python)
45 #:use-module (guix build-system r)
46 #:use-module (guix git-download)
47 #:use-module (gnu packages)
48 #:use-module (gnu packages adns)
49 #:use-module (gnu packages algebra)
50 #:use-module (gnu packages audio)
51 #:use-module (gnu packages autotools)
52 #:use-module (gnu packages base)
53 #:use-module (gnu packages bash)
54 #:use-module (gnu packages boost)
55 #:use-module (gnu packages check)
56 #:use-module (gnu packages compression)
57 #:use-module (gnu packages cran)
58 #:use-module (gnu packages databases)
59 #:use-module (gnu packages dejagnu)
60 #:use-module (gnu packages gcc)
61 #:use-module (gnu packages glib)
62 #:use-module (gnu packages graphviz)
63 #:use-module (gnu packages gstreamer)
64 #:use-module (gnu packages image)
65 #:use-module (gnu packages linux)
66 #:use-module (gnu packages lisp-xyz)
67 #:use-module (gnu packages maths)
68 #:use-module (gnu packages mpi)
69 #:use-module (gnu packages ocaml)
70 #:use-module (gnu packages onc-rpc)
71 #:use-module (gnu packages perl)
72 #:use-module (gnu packages pkg-config)
73 #:use-module (gnu packages protobuf)
74 #:use-module (gnu packages python)
75 #:use-module (gnu packages python-science)
76 #:use-module (gnu packages python-web)
77 #:use-module (gnu packages python-xyz)
78 #:use-module (gnu packages rpc)
79 #:use-module (gnu packages serialization)
80 #:use-module (gnu packages sphinx)
81 #:use-module (gnu packages statistics)
82 #:use-module (gnu packages sqlite)
83 #:use-module (gnu packages swig)
84 #:use-module (gnu packages web)
85 #:use-module (gnu packages xml)
86 #:use-module (gnu packages xorg)
87 #:use-module (ice-9 match))
88
89 (define-public fann
90 ;; The last release is >100 commits behind, so we package from git.
91 (let ((commit "d71d54788bee56ba4cf7522801270152da5209d7"))
92 (package
93 (name "fann")
94 (version (string-append "2.2.0-1." (string-take commit 8)))
95 (source (origin
96 (method git-fetch)
97 (uri (git-reference
98 (url "https://github.com/libfann/fann.git")
99 (commit commit)))
100 (file-name (string-append name "-" version "-checkout"))
101 (sha256
102 (base32
103 "0ibwpfrjs6q2lijs8slxjgzb2llcl6rk3v2ski4r6215g5jjhg3x"))))
104 (build-system cmake-build-system)
105 (arguments
106 `(#:phases
107 (modify-phases %standard-phases
108 (replace 'check
109 (lambda* (#:key outputs #:allow-other-keys)
110 (let* ((out (assoc-ref outputs "out")))
111 (with-directory-excursion (string-append (getcwd) "/tests")
112 (invoke "./fann_tests"))))))))
113 (home-page "http://leenissen.dk/fann/wp/")
114 (synopsis "Fast Artificial Neural Network")
115 (description
116 "FANN is a neural network library, which implements multilayer
117 artificial neural networks in C with support for both fully connected and
118 sparsely connected networks.")
119 (license license:lgpl2.1))))
120
121 (define-public libsvm
122 (package
123 (name "libsvm")
124 (version "3.23")
125 (source
126 (origin
127 (method url-fetch)
128 (uri (string-append "https://www.csie.ntu.edu.tw/~cjlin/libsvm/"
129 name "-" version ".tar.gz"))
130 (sha256
131 (base32 "0jpaq0rr92x38p4nk3gjan79ip67m6p80anb28z1d8601miysyi5"))))
132 (build-system gnu-build-system)
133 (arguments
134 `(#:tests? #f ; no "check" target
135 #:phases (modify-phases %standard-phases
136 (delete 'configure)
137 (replace
138 'install ; no ‘install’ target
139 (lambda* (#:key outputs #:allow-other-keys)
140 (let* ((out (assoc-ref outputs "out"))
141 (bin (string-append out "/bin/")))
142 (mkdir-p bin)
143 (for-each (lambda (file)
144 (copy-file file (string-append bin file)))
145 '("svm-train"
146 "svm-predict"
147 "svm-scale")))
148 #t)))))
149 (home-page "https://www.csie.ntu.edu.tw/~cjlin/libsvm/")
150 (synopsis "Library for Support Vector Machines")
151 (description
152 "LIBSVM is a machine learning library for support vector
153 classification, (C-SVC, nu-SVC), regression (epsilon-SVR, nu-SVR) and
154 distribution estimation (one-class SVM). It supports multi-class
155 classification.")
156 (license license:bsd-3)))
157
158 (define-public python-libsvm
159 (package (inherit libsvm)
160 (name "python-libsvm")
161 (build-system gnu-build-system)
162 (arguments
163 `(#:tests? #f ; no "check" target
164 #:make-flags '("-C" "python")
165 #:phases
166 (modify-phases %standard-phases
167 (delete 'configure)
168 (replace
169 'install ; no ‘install’ target
170 (lambda* (#:key inputs outputs #:allow-other-keys)
171 (let ((site (string-append (assoc-ref outputs "out")
172 "/lib/python"
173 (string-take
174 (string-take-right
175 (assoc-ref inputs "python") 5) 3)
176 "/site-packages/")))
177 (substitute* "python/svm.py"
178 (("../libsvm.so.2") "libsvm.so.2"))
179 (mkdir-p site)
180 (for-each (lambda (file)
181 (copy-file file (string-append site (basename file))))
182 (find-files "python" "\\.py"))
183 (copy-file "libsvm.so.2"
184 (string-append site "libsvm.so.2")))
185 #t)))))
186 (inputs
187 `(("python" ,python)))
188 (synopsis "Python bindings of libSVM")))
189
190 (define-public ghmm
191 ;; The latest release candidate is several years and a couple of fixes have
192 ;; been published since. This is why we download the sources from the SVN
193 ;; repository.
194 (let ((svn-revision 2341))
195 (package
196 (name "ghmm")
197 (version (string-append "0.9-rc3-0." (number->string svn-revision)))
198 (source (origin
199 (method svn-fetch)
200 (uri (svn-reference
201 (url "http://svn.code.sf.net/p/ghmm/code/trunk")
202 (revision svn-revision)))
203 (file-name (string-append name "-" version "-checkout"))
204 (sha256
205 (base32
206 "0qbq1rqp94l530f043qzp8aw5lj7dng9wq0miffd7spd1ff638wq"))))
207 (build-system gnu-build-system)
208 (arguments
209 `(#:imported-modules (,@%gnu-build-system-modules
210 (guix build python-build-system))
211 #:modules ((guix build python-build-system)
212 ,@%gnu-build-system-modules)
213 #:phases
214 (modify-phases %standard-phases
215 (add-after 'unpack 'enter-dir
216 (lambda _ (chdir "ghmm") #t))
217 (delete 'check)
218 (add-after 'install 'check
219 (assoc-ref %standard-phases 'check))
220 (add-before 'check 'fix-PYTHONPATH
221 (lambda* (#:key inputs outputs #:allow-other-keys)
222 (let ((python-version (python-version
223 (assoc-ref inputs "python"))))
224 (setenv "PYTHONPATH"
225 (string-append (getenv "PYTHONPATH")
226 ":" (assoc-ref outputs "out")
227 "/lib/python" python-version
228 "/site-packages")))
229 #t))
230 (add-after 'enter-dir 'fix-runpath
231 (lambda* (#:key outputs #:allow-other-keys)
232 (substitute* "ghmmwrapper/setup.py"
233 (("^(.*)extra_compile_args = \\[" line indent)
234 (string-append indent
235 "extra_link_args = [\"-Wl,-rpath="
236 (assoc-ref outputs "out") "/lib\"],\n"
237 line
238 "\"-Wl,-rpath="
239 (assoc-ref outputs "out")
240 "/lib\", ")))
241 #t))
242 (add-after 'enter-dir 'disable-broken-tests
243 (lambda _
244 (substitute* "tests/Makefile.am"
245 ;; GHMM_SILENT_TESTS is assumed to be a command.
246 (("TESTS_ENVIRONMENT.*") "")
247 ;; Do not build broken tests.
248 (("chmm .*") "")
249 (("read_fa .*") "")
250 (("mcmc .*") "")
251 (("label_higher_order_test.*$")
252 "label_higher_order_test\n"))
253
254 ;; These Python unittests are broken as there is no gato.
255 ;; See https://sourceforge.net/p/ghmm/support-requests/3/
256 (substitute* "ghmmwrapper/ghmmunittests.py"
257 (("^(.*)def (testNewXML|testMultipleTransitionClasses|testNewXML)"
258 line indent)
259 (string-append indent
260 "@unittest.skip(\"Disabled by Guix\")\n"
261 line)))
262 #t)))))
263 (inputs
264 `(("python" ,python-2) ; only Python 2 is supported
265 ("libxml2" ,libxml2)))
266 (native-inputs
267 `(("pkg-config" ,pkg-config)
268 ("dejagnu" ,dejagnu)
269 ("swig" ,swig)
270 ("autoconf" ,autoconf)
271 ("automake" ,automake)
272 ("libtool" ,libtool)))
273 (home-page "http://ghmm.org")
274 (synopsis "Hidden Markov Model library")
275 (description
276 "The General Hidden Markov Model library (GHMM) is a C library with
277 additional Python bindings implementing a wide range of types of @dfn{Hidden
278 Markov Models} (HMM) and algorithms: discrete, continuous emissions, basic
279 training, HMM clustering, HMM mixtures.")
280 (license license:lgpl2.0+))))
281
282 (define-public mcl
283 (package
284 (name "mcl")
285 (version "14.137")
286 (source (origin
287 (method url-fetch)
288 (uri (string-append
289 "http://micans.org/mcl/src/mcl-"
290 (string-replace-substring version "." "-")
291 ".tar.gz"))
292 (sha256
293 (base32
294 "15xlax3z31lsn62vlg94hkm75nm40q4679amnfg13jm8m2bnhy5m"))))
295 (build-system gnu-build-system)
296 (arguments
297 `(#:configure-flags (list "--enable-blast")))
298 (inputs
299 `(("perl" ,perl)))
300 (home-page "http://micans.org/mcl/")
301 (synopsis "Clustering algorithm for graphs")
302 (description
303 "The MCL algorithm is short for the @dfn{Markov Cluster Algorithm}, a
304 fast and scalable unsupervised cluster algorithm for graphs (also known as
305 networks) based on simulation of (stochastic) flow in graphs.")
306 ;; In the LICENCE file and web page it says "The software is licensed
307 ;; under the GNU General Public License, version 3.", but in several of
308 ;; the source code files it suggests GPL3 or later.
309 ;; http://listserver.ebi.ac.uk/pipermail/mcl-users/2016/000376.html
310 (license license:gpl3)))
311
312 (define-public ocaml-mcl
313 (package
314 (name "ocaml-mcl")
315 (version "12-068oasis4")
316 (source
317 (origin
318 (method git-fetch)
319 (uri (git-reference
320 (url "https://github.com/fhcrc/mcl.git")
321 (commit version)))
322 (file-name (git-file-name name version))
323 (sha256
324 (base32
325 "0009dc3h2jp3qg5val452wngpqnbfyhbcxylghq0mrjqxx0jdq5p"))))
326 (build-system ocaml-build-system)
327 (arguments
328 `(#:phases
329 (modify-phases %standard-phases
330 (add-before 'configure 'patch-paths
331 (lambda _
332 (substitute* "configure"
333 (("/bin/sh") (which "sh")))
334 (substitute* "setup.ml"
335 (("LDFLAGS=-fPIC")
336 (string-append "LDFLAGS=-fPIC\"; \"SHELL=" (which "sh")))
337 (("-std=c89") "-std=gnu99")
338
339 ;; This is a mutable string, which is no longer supported. Use
340 ;; a byte buffer instead.
341 (("String.make \\(String.length s\\)")
342 "Bytes.make (String.length s)")
343
344 ;; These two belong together.
345 (("OASISString.replace_chars")
346 "Bytes.to_string (OASISString.replace_chars")
347 ((" s;")
348 " s);"))
349 (substitute* "myocamlbuild.ml"
350 (("std=c89") "std=gnu99"))
351 ;; Since we build with a more recent OCaml, we have to use C99 or
352 ;; later. This causes problems with the old C code.
353 (substitute* "src/impala/matrix.c"
354 (("restrict") "restrict_"))
355 #t)))))
356 (native-inputs
357 `(("ocamlbuild" ,ocamlbuild)))
358 (home-page "https://github.com/fhcrc/mcl")
359 (synopsis "OCaml wrappers around MCL")
360 (description
361 "This package provides OCaml bindings for the MCL graph clustering
362 algorithm.")
363 (license license:gpl3)))
364
365 (define-public randomjungle
366 (package
367 (name "randomjungle")
368 (version "2.1.0")
369 (source
370 (origin
371 (method url-fetch)
372 (uri (string-append
373 "https://www.imbs.uni-luebeck.de/fileadmin/files/Software"
374 "/randomjungle/randomjungle-" version ".tar_.gz"))
375 (patches (search-patches "randomjungle-disable-static-build.patch"))
376 (sha256
377 (base32
378 "12c8rf30cla71swx2mf4ww9mfd8jbdw5lnxd7dxhyw1ygrvg6y4w"))))
379 (build-system gnu-build-system)
380 (arguments
381 `(#:configure-flags
382 (list "--disable-static"
383 (string-append "--with-boost="
384 (assoc-ref %build-inputs "boost")))
385 #:phases
386 (modify-phases %standard-phases
387 (add-before
388 'configure 'set-CXXFLAGS
389 (lambda _
390 (setenv "CXXFLAGS" "-fpermissive ")
391 #t)))))
392 (inputs
393 `(("boost" ,boost)
394 ("gsl" ,gsl)
395 ("libxml2" ,libxml2)
396 ("zlib" ,zlib)))
397 (native-inputs
398 `(("gfortran" ,gfortran)
399 ("gfortran:lib" ,gfortran "lib")))
400 ;; Non-portable assembly instructions are used so building fails on
401 ;; platforms other than x86_64 or i686.
402 (supported-systems '("x86_64-linux" "i686-linux"))
403 (home-page "https://www.imbs.uni-luebeck.de/forschung/software/details.html#c224")
404 (synopsis "Implementation of the Random Forests machine learning method")
405 (description
406 "Random Jungle is an implementation of Random Forests. It is supposed to
407 analyse high dimensional data. In genetics, it can be used for analysing big
408 Genome Wide Association (GWA) data. Random Forests is a powerful machine
409 learning method. Most interesting features are variable selection, missing
410 value imputation, classifier creation, generalization error estimation and
411 sample proximities between pairs of cases.")
412 (license license:gpl3+)))
413
414 (define-public openfst
415 (package
416 (name "openfst")
417 (version "1.7.2")
418 (source (origin
419 (method url-fetch)
420 (uri (string-append "http://www.openfst.org/twiki/pub/FST/"
421 "FstDownload/openfst-" version ".tar.gz"))
422 (sha256
423 (base32
424 "0fqgk8195kz21is09gwzwnrg7fr9526bi9mh4apyskapz27pbhr1"))))
425 (build-system gnu-build-system)
426 (home-page "http://www.openfst.org")
427 (synopsis "Library for weighted finite-state transducers")
428 (description "OpenFst is a library for constructing, combining,
429 optimizing, and searching weighted finite-state transducers (FSTs).")
430 (license license:asl2.0)))
431
432 (define-public shogun
433 (package
434 (name "shogun")
435 (version "6.1.3")
436 (source
437 (origin
438 (method url-fetch)
439 (uri (string-append
440 "ftp://shogun-toolbox.org/shogun/releases/"
441 (version-major+minor version)
442 "/sources/shogun-" version ".tar.bz2"))
443 (sha256
444 (base32
445 "1rn9skm3nw6hr7mr3lgp2gfqhi7ii0lyxck7qmqnf8avq349s5jp"))
446 (modules '((guix build utils)
447 (ice-9 rdelim)))
448 (snippet
449 '(begin
450 ;; Remove non-free sources and files referencing them
451 (for-each delete-file
452 (find-files "src/shogun/classifier/svm/"
453 "SVMLight\\.(cpp|h)"))
454 (for-each delete-file
455 (find-files "examples/undocumented/libshogun/"
456 (string-append
457 "(classifier_.*svmlight.*|"
458 "evaluation_cross_validation_locked_comparison).cpp")))
459 ;; Remove non-free functions.
460 (define (delete-ifdefs file)
461 (with-atomic-file-replacement file
462 (lambda (in out)
463 (let loop ((line (read-line in 'concat))
464 (skipping? #f))
465 (if (eof-object? line)
466 #t
467 (let ((skip-next?
468 (or (and skipping?
469 (not (string-prefix?
470 "#endif //USE_SVMLIGHT" line)))
471 (string-prefix?
472 "#ifdef USE_SVMLIGHT" line))))
473 (when (or (not skipping?)
474 (and skipping? (not skip-next?)))
475 (display line out))
476 (loop (read-line in 'concat) skip-next?)))))))
477 (for-each delete-ifdefs
478 (append
479 (find-files "src/shogun/classifier/mkl"
480 "^MKLClassification\\.cpp")
481 (find-files "src/shogun/classifier/svm"
482 "^SVMLightOneClass\\.(cpp|h)")
483 (find-files "src/shogun/multiclass"
484 "^ScatterSVM\\.(cpp|h)")
485 (find-files "src/shogun/kernel/"
486 "^(Kernel|CombinedKernel|ProductKernel)\\.(cpp|h)")
487 (find-files "src/shogun/regression/svr"
488 "^(MKLRegression|SVRLight)\\.(cpp|h)")
489 (find-files "src/shogun/transfer/domain_adaptation"
490 "^DomainAdaptationSVM\\.(cpp|h)")))
491 #t))))
492 (build-system cmake-build-system)
493 (arguments
494 '(#:tests? #f ;no check target
495 #:phases
496 (modify-phases %standard-phases
497 (add-after 'unpack 'delete-broken-symlinks
498 (lambda _
499 (for-each delete-file '("applications/arts/data"
500 "applications/asp/data"
501 "applications/easysvm/data"
502 "applications/msplicer/data"
503 "applications/ocr/data"
504 "examples/meta/data"
505 "examples/undocumented/data"))
506 #t))
507 (add-after 'unpack 'change-R-target-path
508 (lambda* (#:key outputs #:allow-other-keys)
509 (substitute* '("src/interfaces/r/CMakeLists.txt"
510 "examples/meta/r/CMakeLists.txt")
511 (("\\$\\{R_COMPONENT_LIB_PATH\\}")
512 (string-append (assoc-ref outputs "out")
513 "/lib/R/library/")))
514 #t))
515 (add-after 'unpack 'fix-octave-modules
516 (lambda* (#:key outputs #:allow-other-keys)
517 (substitute* "src/interfaces/octave/CMakeLists.txt"
518 (("^include_directories\\(\\$\\{OCTAVE_INCLUDE_DIRS\\}")
519 "include_directories(${OCTAVE_INCLUDE_DIRS} ${OCTAVE_INCLUDE_DIRS}/octave")
520 ;; change target directory
521 (("\\$\\{OCTAVE_OCT_LOCAL_API_FILE_DIR\\}")
522 (string-append (assoc-ref outputs "out")
523 "/share/octave/packages")))
524 (substitute* '("src/interfaces/octave/swig_typemaps.i"
525 "src/interfaces/octave/sg_print_functions.cpp")
526 ;; "octave/config.h" and "octave/oct-obj.h" deprecated in Octave.
527 (("octave/config\\.h") "octave/octave-config.h")
528 (("octave/oct-obj.h") "octave/ovl.h"))
529 #t))
530 (add-after 'unpack 'move-rxcpp
531 (lambda* (#:key inputs #:allow-other-keys)
532 (let ((rxcpp-dir "shogun/third-party/rxcpp"))
533 (mkdir-p rxcpp-dir)
534 (install-file (assoc-ref inputs "rxcpp") rxcpp-dir)
535 #t)))
536 (add-before 'build 'set-HOME
537 ;; $HOME needs to be set at some point during the build phase
538 (lambda _ (setenv "HOME" "/tmp") #t)))
539 #:configure-flags
540 (list "-DCMAKE_BUILD_WITH_INSTALL_RPATH=TRUE"
541 "-DUSE_SVMLIGHT=OFF" ;disable proprietary SVMLIGHT
542 "-DBUILD_META_EXAMPLES=OFF" ;requires unpackaged ctags
543 ;;"-DINTERFACE_JAVA=ON" ;requires unpackaged jblas
544 ;;"-DINTERFACE_RUBY=ON" ;requires unpackaged ruby-narray
545 ;;"-DINTERFACE_PERL=ON" ;"FindPerlLibs" does not exist
546 ;;"-DINTERFACE_LUA=ON" ;fails because lua doesn't build pkgconfig file
547 "-DINTERFACE_OCTAVE=ON"
548 "-DINTERFACE_PYTHON=ON"
549 "-DINTERFACE_R=ON")))
550 (inputs
551 `(("python" ,python)
552 ("numpy" ,python-numpy)
553 ("r-minimal" ,r-minimal)
554 ("octave" ,octave-cli)
555 ("swig" ,swig)
556 ("eigen" ,eigen)
557 ("hdf5" ,hdf5)
558 ("atlas" ,atlas)
559 ("arpack" ,arpack-ng)
560 ("lapack" ,lapack)
561 ("glpk" ,glpk)
562 ("libxml2" ,libxml2)
563 ("lzo" ,lzo)
564 ("zlib" ,zlib)))
565 (native-inputs
566 `(("pkg-config" ,pkg-config)
567 ("rxcpp" ,rxcpp)))
568 ;; Non-portable SSE instructions are used so building fails on platforms
569 ;; other than x86_64.
570 (supported-systems '("x86_64-linux"))
571 (home-page "https://shogun-toolbox.org/")
572 (synopsis "Machine learning toolbox")
573 (description
574 "The Shogun Machine learning toolbox provides a wide range of unified and
575 efficient Machine Learning (ML) methods. The toolbox seamlessly
576 combines multiple data representations, algorithm classes, and general purpose
577 tools. This enables both rapid prototyping of data pipelines and extensibility
578 in terms of new algorithms.")
579 (license license:gpl3+)))
580
581 (define-public rxcpp
582 (package
583 (name "rxcpp")
584 (version "4.1.0")
585 (source
586 (origin
587 (method git-fetch)
588 (uri (git-reference
589 (url "https://github.com/ReactiveX/RxCpp.git")
590 (commit (string-append "v" version))))
591 (sha256
592 (base32 "1rdpa3jlc181jd08nk437aar085h28i45s6nzrv65apb3xyyz0ij"))
593 (file-name (git-file-name name version))))
594 (build-system cmake-build-system)
595 (arguments
596 `(#:phases
597 (modify-phases %standard-phases
598 (add-after 'unpack 'remove-werror
599 (lambda _
600 (substitute* (find-files ".")
601 (("-Werror") ""))
602 #t))
603 (replace 'check
604 (lambda _
605 (invoke "ctest"))))))
606 (native-inputs
607 `(("catch" ,catch-framework)))
608 (home-page "http://reactivex.io/")
609 (synopsis "Reactive Extensions for C++")
610 (description
611 "The Reactive Extensions for C++ (RxCpp) is a library of algorithms for
612 values-distributed-in-time. ReactiveX is a library for composing asynchronous
613 and event-based programs by using observable sequences.
614
615 It extends the observer pattern to support sequences of data and/or events and
616 adds operators that allow you to compose sequences together declaratively while
617 abstracting away concerns about things like low-level threading,
618 synchronization, thread-safety, concurrent data structures, and non-blocking
619 I/O.")
620 (license license:asl2.0)))
621
622 (define-public r-adaptivesparsity
623 (package
624 (name "r-adaptivesparsity")
625 (version "1.6")
626 (source (origin
627 (method url-fetch)
628 (uri (cran-uri "AdaptiveSparsity" version))
629 (sha256
630 (base32
631 "0imr5m8mll9j6n4icsv6z9rl5kbnwsp9wvzrg7n90nnmcxq2cz91"))))
632 (properties
633 `((upstream-name . "AdaptiveSparsity")))
634 (build-system r-build-system)
635 (arguments
636 `(#:phases
637 (modify-phases %standard-phases
638 (add-after 'unpack 'link-against-armadillo
639 (lambda _
640 (substitute* "src/Makevars"
641 (("PKG_LIBS=" prefix)
642 (string-append prefix "-larmadillo"))))))))
643 (propagated-inputs
644 `(("r-mass" ,r-mass)
645 ("r-matrix" ,r-matrix)
646 ("r-rcpp" ,r-rcpp)
647 ("r-rcpparmadillo" ,r-rcpparmadillo)))
648 (inputs
649 `(("armadillo" ,armadillo)))
650 (home-page "https://cran.r-project.org/web/packages/AdaptiveSparsity")
651 (synopsis "Adaptive sparsity models")
652 (description
653 "This package implements the Figueiredo machine learning algorithm for
654 adaptive sparsity and the Wong algorithm for adaptively sparse gaussian
655 geometric models.")
656 (license license:lgpl3+)))
657
658 (define-public gemmlowp-for-tensorflow
659 ;; The commit hash is taken from "tensorflow/workspace.bzl".
660 (let ((commit "38ebac7b059e84692f53e5938f97a9943c120d98")
661 (revision "2"))
662 (package
663 (name "gemmlowp")
664 (version (git-version "0" revision commit))
665 (source (origin
666 (method url-fetch)
667 (uri (string-append "https://mirror.bazel.build/"
668 "github.com/google/gemmlowp/archive/"
669 commit ".zip"))
670 (file-name (string-append "gemmlowp-" version ".zip"))
671 (sha256
672 (base32
673 "0n56s2g8hrssm4w8qj1v58gfm56a04n9v992ixkmvk6zjiralzxq"))))
674 (build-system cmake-build-system)
675 (arguments
676 `(#:configure-flags
677 (list ,@(match (%current-system)
678 ((or "x86_64-linux" "i686-linux")
679 '("-DCMAKE_CXX_FLAGS=-msse2"))
680 (_ '())))
681 #:phases
682 (modify-phases %standard-phases
683 ;; This directory contains the CMakeLists.txt.
684 (add-after 'unpack 'chdir
685 (lambda _ (chdir "contrib") #t))
686 ;; There is no install target
687 (replace 'install
688 (lambda* (#:key outputs #:allow-other-keys)
689 (let* ((out (assoc-ref outputs "out"))
690 (lib (string-append out "/lib/"))
691 (inc (string-append out "/include/")))
692 (install-file "../build/libeight_bit_int_gemm.so" lib)
693 (for-each (lambda (dir)
694 (let ((target (string-append inc "/" dir)))
695 (mkdir-p target)
696 (for-each (lambda (h)
697 (install-file h target))
698 (find-files (string-append "../" dir)
699 "\\.h$"))))
700 '("meta" "profiling" "public" "fixedpoint"
701 "eight_bit_int_gemm" "internal"))
702 #t))))))
703 (native-inputs
704 `(("unzip" ,unzip)))
705 (home-page "https://github.com/google/gemmlowp")
706 (synopsis "Small self-contained low-precision GEMM library")
707 (description
708 "This is a small self-contained low-precision @dfn{general matrix
709 multiplication} (GEMM) library. It is not a full linear algebra library.
710 Low-precision means that the input and output matrix entries are integers on
711 at most 8 bits. To avoid overflow, results are internally accumulated on more
712 than 8 bits, and at the end only some significant 8 bits are kept.")
713 (license license:asl2.0))))
714
715 (define-public dlib
716 (package
717 (name "dlib")
718 (version "19.20")
719 (source (origin
720 (method url-fetch)
721 (uri (string-append
722 "http://dlib.net/files/dlib-" version ".tar.bz2"))
723 (sha256
724 (base32
725 "139jyi19qz37wwmmy48gil9d1kkh2r3w3bwdzabha6ayxmba96nz"))
726 (modules '((guix build utils)))
727 (snippet
728 '(begin
729 ;; Delete ~13MB of bundled dependencies.
730 (delete-file-recursively "dlib/external")
731 (delete-file-recursively "docs/dlib/external")
732 #t))))
733 (build-system cmake-build-system)
734 (arguments
735 `(#:phases
736 (modify-phases %standard-phases
737 (add-after 'unpack 'disable-asserts
738 (lambda _
739 ;; config.h recommends explicitly enabling or disabling asserts
740 ;; when building as a shared library. By default neither is set.
741 (substitute* "dlib/config.h"
742 (("^//#define DLIB_DISABLE_ASSERTS") "#define DLIB_DISABLE_ASSERTS"))
743 #t))
744 (add-after 'disable-asserts 'disable-failing-tests
745 (lambda _
746 ;; One test times out on MIPS, so we need to disable it.
747 ;; Others are flaky on some platforms.
748 (let* ((system ,(or (%current-target-system)
749 (%current-system)))
750 (disabled-tests (cond
751 ((string-prefix? "mips64" system)
752 '("object_detector" ; timeout
753 "data_io"))
754 ((string-prefix? "armhf" system)
755 '("learning_to_track"))
756 ((string-prefix? "i686" system)
757 '("optimization"))
758 (else '()))))
759 (for-each
760 (lambda (test)
761 (substitute* "dlib/test/makefile"
762 (((string-append "SRC \\+= " test "\\.cpp")) "")))
763 disabled-tests)
764 #t)))
765 (replace 'check
766 (lambda _
767 ;; No test target, so we build and run the unit tests here.
768 (let ((test-dir (string-append "../dlib-" ,version "/dlib/test")))
769 (with-directory-excursion test-dir
770 (invoke "make" "-j" (number->string (parallel-job-count)))
771 (invoke "./dtest" "--runall"))
772 #t)))
773 (add-after 'install 'delete-static-library
774 (lambda* (#:key outputs #:allow-other-keys)
775 (delete-file (string-append (assoc-ref outputs "out")
776 "/lib/libdlib.a"))
777 #t)))))
778 (native-inputs
779 `(("pkg-config" ,pkg-config)
780 ;; For tests.
781 ("libnsl" ,libnsl)))
782 (inputs
783 `(("giflib" ,giflib)
784 ("lapack" ,lapack)
785 ("libjpeg" ,libjpeg-turbo)
786 ("libpng" ,libpng)
787 ("libx11" ,libx11)
788 ("openblas" ,openblas)
789 ("zlib" ,zlib)))
790 (synopsis
791 "Toolkit for making machine learning and data analysis applications in C++")
792 (description
793 "Dlib is a modern C++ toolkit containing machine learning algorithms and
794 tools. It is used in both industry and academia in a wide range of domains
795 including robotics, embedded devices, mobile phones, and large high performance
796 computing environments.")
797 (home-page "http://dlib.net")
798 (license license:boost1.0)))
799
800 (define-public python-scikit-learn
801 (package
802 (name "python-scikit-learn")
803 (version "0.22.1")
804 (source
805 (origin
806 (method git-fetch)
807 (uri (git-reference
808 (url "https://github.com/scikit-learn/scikit-learn.git")
809 (commit version)))
810 (file-name (git-file-name name version))
811 (sha256
812 (base32
813 "1xqxv210gsmjw094vc5ghq2y9lmm74qkk22pq6flcjzj51b86jxf"))))
814 (build-system python-build-system)
815 (arguments
816 `(#:phases
817 (modify-phases %standard-phases
818 (add-after 'build 'build-ext
819 (lambda _ (invoke "python" "setup.py" "build_ext" "--inplace") #t))
820 (replace 'check
821 (lambda _
822 ;; Restrict OpenBLAS threads to prevent segfaults while testing!
823 (setenv "OPENBLAS_NUM_THREADS" "1")
824
825 ;; Some tests require write access to $HOME.
826 (setenv "HOME" "/tmp")
827
828 (invoke "pytest" "sklearn" "-m" "not network")))
829 (add-before 'reset-gzip-timestamps 'make-files-writable
830 (lambda* (#:key outputs #:allow-other-keys)
831 ;; Make sure .gz files are writable so that the
832 ;; 'reset-gzip-timestamps' phase can do its work.
833 (let ((out (assoc-ref outputs "out")))
834 (for-each make-file-writable
835 (find-files out "\\.gz$"))
836 #t))))))
837 (inputs
838 `(("openblas" ,openblas)))
839 (native-inputs
840 `(("python-pytest" ,python-pytest)
841 ("python-pandas" ,python-pandas) ;for tests
842 ("python-cython" ,python-cython)))
843 (propagated-inputs
844 `(("python-numpy" ,python-numpy)
845 ("python-scipy" ,python-scipy)
846 ("python-joblib" ,python-joblib)))
847 (home-page "https://scikit-learn.org/")
848 (synopsis "Machine Learning in Python")
849 (description
850 "Scikit-learn provides simple and efficient tools for data mining and
851 data analysis.")
852 (properties `((python2-variant . ,(delay python2-scikit-learn))))
853 (license license:bsd-3)))
854
855 ;; scikit-learn 0.22 and later only supports Python 3, so we stick with
856 ;; an older version here.
857 (define-public python2-scikit-learn
858 (let ((base (package-with-python2 (strip-python2-variant python-scikit-learn))))
859 (package
860 (inherit base)
861 (version "0.20.4")
862 (source (origin
863 (method git-fetch)
864 (uri (git-reference
865 (url "https://github.com/scikit-learn/scikit-learn.git")
866 (commit version)))
867 (file-name (git-file-name "python-scikit-learn" version))
868 (sha256
869 (base32
870 "08zbzi8yx5wdlxfx9jap61vg1malc9ajf576w7a0liv6jvvrxlpj")))))))
871
872 (define-public python-scikit-rebate
873 (package
874 (name "python-scikit-rebate")
875 (version "0.6")
876 (source (origin
877 (method url-fetch)
878 (uri (pypi-uri "skrebate" version))
879 (sha256
880 (base32
881 "1h7qs9gjxpzqabzhb8rmpv3jpmi5iq41kqdibg48299h94iikiw7"))))
882 (build-system python-build-system)
883 ;; Pandas is only needed to run the tests.
884 (native-inputs
885 `(("python-pandas" ,python-pandas)))
886 (propagated-inputs
887 `(("python-numpy" ,python-numpy)
888 ("python-scipy" ,python-scipy)
889 ("python-scikit-learn" ,python-scikit-learn)
890 ("python-joblib" ,python-joblib)))
891 (home-page "https://epistasislab.github.io/scikit-rebate/")
892 (synopsis "Relief-based feature selection algorithms for Python")
893 (description "Scikit-rebate is a scikit-learn-compatible Python
894 implementation of ReBATE, a suite of Relief-based feature selection algorithms
895 for Machine Learning. These algorithms excel at identifying features that are
896 predictive of the outcome in supervised learning problems, and are especially
897 good at identifying feature interactions that are normally overlooked by
898 standard feature selection algorithms.")
899 (license license:expat)))
900
901 (define-public python-autograd
902 (let* ((commit "442205dfefe407beffb33550846434baa90c4de7")
903 (revision "0")
904 (version (git-version "0.0.0" revision commit)))
905 (package
906 (name "python-autograd")
907 (home-page "https://github.com/HIPS/autograd")
908 (source (origin
909 (method git-fetch)
910 (uri (git-reference
911 (url home-page)
912 (commit commit)))
913 (sha256
914 (base32
915 "189sv2xb0mwnjawa9z7mrgdglc1miaq93pnck26r28fi1jdwg0z4"))
916 (file-name (git-file-name name version))))
917 (version version)
918 (build-system python-build-system)
919 (native-inputs
920 `(("python-nose" ,python-nose)
921 ("python-pytest" ,python-pytest)))
922 (propagated-inputs
923 `(("python-future" ,python-future)
924 ("python-numpy" ,python-numpy)))
925 (arguments
926 `(#:phases (modify-phases %standard-phases
927 (replace 'check
928 (lambda _
929 (invoke "py.test" "-v"))))))
930 (synopsis "Efficiently computes derivatives of NumPy code")
931 (description "Autograd can automatically differentiate native Python and
932 NumPy code. It can handle a large subset of Python's features, including loops,
933 ifs, recursion and closures, and it can even take derivatives of derivatives
934 of derivatives. It supports reverse-mode differentiation
935 (a.k.a. backpropagation), which means it can efficiently take gradients of
936 scalar-valued functions with respect to array-valued arguments, as well as
937 forward-mode differentiation, and the two can be composed arbitrarily. The
938 main intended application of Autograd is gradient-based optimization.")
939 (license license:expat))))
940
941 (define-public python2-autograd
942 (package-with-python2 python-autograd))
943
944 (define-public lightgbm
945 (package
946 (name "lightgbm")
947 (version "2.0.12")
948 (source (origin
949 (method git-fetch)
950 (uri (git-reference
951 (url "https://github.com/Microsoft/LightGBM")
952 (commit (string-append "v" version))))
953 (sha256
954 (base32
955 "0jlvyn7k81dzrh9ij3zw576wbgiwmmr26rzpdxjn1dbpc3njpvzi"))
956 (file-name (git-file-name name version))))
957 (native-inputs
958 `(("python-pytest" ,python-pytest)
959 ("python-nose" ,python-nose)))
960 (inputs
961 `(("openmpi" ,openmpi)))
962 (propagated-inputs
963 `(("python-numpy" ,python-numpy)
964 ("python-scipy" ,python-scipy)))
965 (arguments
966 `(#:configure-flags
967 '("-DUSE_MPI=ON")
968 #:phases
969 (modify-phases %standard-phases
970 (replace 'check
971 (lambda _
972 (with-directory-excursion "../source"
973 (invoke "pytest" "tests/c_api_test/test_.py")))))))
974 (build-system cmake-build-system)
975 (home-page "https://github.com/Microsoft/LightGBM")
976 (synopsis "Gradient boosting framework based on decision tree algorithms")
977 (description "LightGBM is a gradient boosting framework that uses tree
978 based learning algorithms. It is designed to be distributed and efficient with
979 the following advantages:
980
981 @itemize
982 @item Faster training speed and higher efficiency
983 @item Lower memory usage
984 @item Better accuracy
985 @item Parallel and GPU learning supported (not enabled in this package)
986 @item Capable of handling large-scale data
987 @end itemize\n")
988 (license license:expat)))
989
990 (define-public vowpal-wabbit
991 ;; Language bindings not included.
992 (package
993 (name "vowpal-wabbit")
994 (version "8.5.0")
995 (source (origin
996 (method git-fetch)
997 (uri (git-reference
998 (url "https://github.com/JohnLangford/vowpal_wabbit")
999 (commit version)))
1000 (sha256
1001 (base32
1002 "04bwzk6ifgnz3fmzid8b7avxf9n5pnx9xcjm61nkjng1vv0bpj8x"))
1003 (file-name (git-file-name name version))))
1004 (inputs
1005 `(("boost" ,boost)
1006 ("zlib" ,zlib)))
1007 (arguments
1008 `(#:configure-flags
1009 (list (string-append "--with-boost="
1010 (assoc-ref %build-inputs "boost")))
1011 #:phases
1012 (modify-phases %standard-phases
1013 (add-after 'unpack 'make-files-writable
1014 (lambda _
1015 (for-each make-file-writable (find-files "." ".*")) #t)))))
1016 (build-system gnu-build-system)
1017 (home-page "https://github.com/JohnLangford/vowpal_wabbit")
1018 (synopsis "Fast machine learning library for online learning")
1019 (description "Vowpal Wabbit is a machine learning system with techniques
1020 such as online, hashing, allreduce, reductions, learning2search, active, and
1021 interactive learning.")
1022 (license license:bsd-3)))
1023
1024 (define-public python2-fastlmm
1025 (package
1026 (name "python2-fastlmm")
1027 (version "0.2.21")
1028 (source
1029 (origin
1030 (method url-fetch)
1031 (uri (pypi-uri "fastlmm" version ".zip"))
1032 (sha256
1033 (base32
1034 "1q8c34rpmwkfy3r4d5172pzdkpfryj561897z9r3x22gq7813x1m"))))
1035 (build-system python-build-system)
1036 (arguments
1037 `(#:tests? #f ; some test files are missing
1038 #:python ,python-2)) ; only Python 2.7 is supported
1039 (propagated-inputs
1040 `(("python2-numpy" ,python2-numpy)
1041 ("python2-scipy" ,python2-scipy)
1042 ("python2-matplotlib" ,python2-matplotlib)
1043 ("python2-pandas" ,python2-pandas)
1044 ("python2-scikit-learn" ,python2-scikit-learn)
1045 ("python2-pysnptools" ,python2-pysnptools)))
1046 (native-inputs
1047 `(("unzip" ,unzip)
1048 ("python2-cython" ,python2-cython)
1049 ("python2-mock" ,python2-mock)
1050 ("python2-nose" ,python2-nose)))
1051 (home-page "http://research.microsoft.com/en-us/um/redmond/projects/mscompbio/fastlmm/")
1052 (synopsis "Perform genome-wide association studies on large data sets")
1053 (description
1054 "FaST-LMM, which stands for Factored Spectrally Transformed Linear Mixed
1055 Models, is a program for performing both single-SNP and SNP-set genome-wide
1056 association studies (GWAS) on extremely large data sets.")
1057 (license license:asl2.0)))
1058
1059 ;; There have been no proper releases yet.
1060 (define-public kaldi
1061 (let ((commit "d4791c0f3fc1a09c042dac365e120899ee2ad21e")
1062 (revision "2"))
1063 (package
1064 (name "kaldi")
1065 (version (git-version "0" revision commit))
1066 (source (origin
1067 (method git-fetch)
1068 (uri (git-reference
1069 (url "https://github.com/kaldi-asr/kaldi.git")
1070 (commit commit)))
1071 (file-name (git-file-name name version))
1072 (sha256
1073 (base32
1074 "07k80my6f19mhrkwbzhjsnpf9871wmrwkl0ym468i830w67qyjrz"))))
1075 (build-system gnu-build-system)
1076 (arguments
1077 `(#:test-target "test"
1078 #:phases
1079 (modify-phases %standard-phases
1080 (add-after 'unpack 'chdir
1081 (lambda _ (chdir "src") #t))
1082 (replace 'configure
1083 (lambda* (#:key build system inputs outputs #:allow-other-keys)
1084 (when (not (or (string-prefix? "x86_64" system)
1085 (string-prefix? "i686" system)))
1086 (substitute* "makefiles/linux_openblas.mk"
1087 (("-msse -msse2") "")))
1088 (substitute* "makefiles/default_rules.mk"
1089 (("/bin/bash") (which "bash")))
1090 (substitute* "Makefile"
1091 (("ext_depend: check_portaudio")
1092 "ext_depend:"))
1093 (substitute* '("online/Makefile"
1094 "onlinebin/Makefile"
1095 "gst-plugin/Makefile")
1096 (("../../tools/portaudio/install")
1097 (assoc-ref inputs "portaudio")))
1098
1099 ;; This `configure' script doesn't support variables passed as
1100 ;; arguments, nor does it support "prefix".
1101 (let ((out (assoc-ref outputs "out"))
1102 (openblas (assoc-ref inputs "openblas"))
1103 (openfst (assoc-ref inputs "openfst")))
1104 (substitute* "configure"
1105 (("check_for_slow_expf;") "")
1106 ;; This affects the RPATH and also serves as the installation
1107 ;; directory.
1108 (("KALDILIBDIR=`pwd`/lib")
1109 (string-append "KALDILIBDIR=" out "/lib")))
1110 (mkdir-p out) ; must exist
1111 (setenv "CONFIG_SHELL" (which "bash"))
1112 (setenv "OPENFST_VER" ,(package-version openfst))
1113 (invoke "./configure"
1114 "--use-cuda=no"
1115 "--shared"
1116 (string-append "--openblas-root=" openblas)
1117 (string-append "--fst-root=" openfst)))))
1118 (add-after 'build 'build-ext-and-gstreamer-plugin
1119 (lambda _
1120 (invoke "make" "-C" "online" "depend")
1121 (invoke "make" "-C" "online")
1122 (invoke "make" "-C" "onlinebin" "depend")
1123 (invoke "make" "-C" "onlinebin")
1124 (invoke "make" "-C" "gst-plugin" "depend")
1125 (invoke "make" "-C" "gst-plugin")
1126 #t))
1127 ;; TODO: also install the executables.
1128 (replace 'install
1129 (lambda* (#:key outputs #:allow-other-keys)
1130 (let* ((out (assoc-ref outputs "out"))
1131 (inc (string-append out "/include"))
1132 (lib (string-append out "/lib")))
1133 (mkdir-p lib)
1134 ;; The build phase installed symlinks to the actual
1135 ;; libraries. Install the actual targets.
1136 (for-each (lambda (file)
1137 (let ((target (readlink file)))
1138 (delete-file file)
1139 (install-file target lib)))
1140 (find-files lib "\\.so"))
1141 ;; Install headers
1142 (for-each (lambda (file)
1143 (let ((target-dir (string-append inc "/" (dirname file))))
1144 (install-file file target-dir)))
1145 (find-files "." "\\.h"))
1146 (install-file "gst-plugin/libgstonlinegmmdecodefaster.so"
1147 (string-append lib "/gstreamer-1.0"))
1148 #t))))))
1149 (inputs
1150 `(("alsa-lib" ,alsa-lib)
1151 ("gfortran" ,gfortran "lib")
1152 ("glib" ,glib)
1153 ("gstreamer" ,gstreamer)
1154 ("jack" ,jack-1)
1155 ("openblas" ,openblas)
1156 ("openfst" ,openfst)
1157 ("portaudio" ,portaudio)
1158 ("python" ,python)))
1159 (native-inputs
1160 `(("glib" ,glib "bin") ; glib-genmarshal
1161 ("grep" ,grep)
1162 ("sed" ,sed)
1163 ("pkg-config" ,pkg-config)
1164 ("which" ,which)))
1165 (home-page "https://kaldi-asr.org/")
1166 (synopsis "Speech recognition toolkit")
1167 (description "Kaldi is an extensible toolkit for speech recognition
1168 written in C++.")
1169 (license license:asl2.0))))
1170
1171 (define-public gst-kaldi-nnet2-online
1172 (let ((commit "cb227ef43b66a9835c14eb0ad39e08ee03c210ad")
1173 (revision "2"))
1174 (package
1175 (name "gst-kaldi-nnet2-online")
1176 (version (git-version "0" revision commit))
1177 (source (origin
1178 (method git-fetch)
1179 (uri (git-reference
1180 (url "https://github.com/alumae/gst-kaldi-nnet2-online.git")
1181 (commit commit)))
1182 (file-name (git-file-name name version))
1183 (sha256
1184 (base32
1185 "1i6ffwiavxx07ri0lxix6s8q0r31x7i4xxvhys5jxkixf5q34w8g"))))
1186 (build-system gnu-build-system)
1187 (arguments
1188 `(#:tests? #f ; there are none
1189 #:make-flags
1190 (list (string-append "SHELL="
1191 (assoc-ref %build-inputs "bash") "/bin/bash")
1192 (string-append "KALDI_ROOT="
1193 (assoc-ref %build-inputs "kaldi-src"))
1194 (string-append "KALDILIBDIR="
1195 (assoc-ref %build-inputs "kaldi") "/lib")
1196 "KALDI_FLAVOR=dynamic")
1197 #:phases
1198 (modify-phases %standard-phases
1199 (add-after 'unpack 'chdir
1200 (lambda _ (chdir "src") #t))
1201 (replace 'configure
1202 (lambda* (#:key inputs #:allow-other-keys)
1203 (let ((glib (assoc-ref inputs "glib")))
1204 (setenv "CXXFLAGS" "-fPIC")
1205 (setenv "CPLUS_INCLUDE_PATH"
1206 (string-append glib "/include/glib-2.0:"
1207 glib "/lib/glib-2.0/include:"
1208 (assoc-ref inputs "gstreamer")
1209 "/include/gstreamer-1.0")))
1210 (substitute* "Makefile"
1211 (("include \\$\\(KALDI_ROOT\\)/src/kaldi.mk") "")
1212 (("\\$\\(error Cannot find") "#"))
1213 #t))
1214 (add-before 'build 'build-depend
1215 (lambda* (#:key make-flags #:allow-other-keys)
1216 (apply invoke "make" "depend" make-flags)))
1217 (replace 'install
1218 (lambda* (#:key outputs #:allow-other-keys)
1219 (let* ((out (assoc-ref outputs "out"))
1220 (lib (string-append out "/lib/gstreamer-1.0")))
1221 (install-file "libgstkaldinnet2onlinedecoder.so" lib)
1222 #t))))))
1223 (inputs
1224 `(("glib" ,glib)
1225 ("gstreamer" ,gstreamer)
1226 ("jansson" ,jansson)
1227 ("openfst" ,openfst)
1228 ("kaldi" ,kaldi)))
1229 (native-inputs
1230 `(("bash" ,bash)
1231 ("glib:bin" ,glib "bin") ; glib-genmarshal
1232 ("kaldi-src" ,(package-source kaldi))
1233 ("pkg-config" ,pkg-config)))
1234 (home-page "https://kaldi-asr.org/")
1235 (synopsis "Gstreamer plugin for decoding speech")
1236 (description "This package provides a GStreamer plugin that wraps
1237 Kaldi's @code{SingleUtteranceNnet2Decoder}. It requires iVector-adapted DNN
1238 acoustic models. The iVectors are adapted to the current audio stream
1239 automatically.")
1240 (license license:asl2.0))))
1241
1242 (define-public kaldi-gstreamer-server
1243 ;; This is the tip of the py3 branch
1244 (let ((commit "f68cab490be7eb0da2af1475fbc16655f50a60cb")
1245 (revision "2"))
1246 (package
1247 (name "kaldi-gstreamer-server")
1248 (version (git-version "0" revision commit))
1249 (source (origin
1250 (method git-fetch)
1251 (uri (git-reference
1252 (url "https://github.com/alumae/kaldi-gstreamer-server.git")
1253 (commit commit)))
1254 (file-name (git-file-name name version))
1255 (sha256
1256 (base32
1257 "17lh1368vkg8ngrcbn2phvigzlmalrqg6djx2gg61qq1a0nj87dm"))))
1258 (build-system gnu-build-system)
1259 (arguments
1260 `(#:tests? #f ; there are no tests that can be run automatically
1261 #:modules ((guix build utils)
1262 (guix build gnu-build-system)
1263 (srfi srfi-26))
1264 #:phases
1265 (modify-phases %standard-phases
1266 (delete 'configure)
1267 (replace 'build
1268 (lambda* (#:key outputs #:allow-other-keys)
1269 ;; Disable hash randomization to ensure the generated .pycs
1270 ;; are reproducible.
1271 (setenv "PYTHONHASHSEED" "0")
1272 (with-directory-excursion "kaldigstserver"
1273 ;; See https://github.com/alumae/kaldi-gstreamer-server/issues/232
1274 (substitute* "master_server.py"
1275 (("\\.replace\\('\\\\.*") ")"))
1276
1277 ;; This is a Python 2 file
1278 (delete-file "decoder_test.py")
1279 (delete-file "test-buffer.py")
1280
1281 (for-each (lambda (file)
1282 (apply invoke
1283 `("python"
1284 "-m" "compileall"
1285 "-f" ; force rebuild
1286 ,file)))
1287 (find-files "." "\\.py$")))
1288 #t))
1289 (replace 'install
1290 (lambda* (#:key inputs outputs #:allow-other-keys)
1291 (let* ((out (assoc-ref outputs "out"))
1292 (bin (string-append out "/bin"))
1293 (share (string-append out "/share/kaldi-gstreamer-server/")))
1294 ;; Install Python files
1295 (with-directory-excursion "kaldigstserver"
1296 (for-each (cut install-file <> share)
1297 (find-files "." ".*")))
1298
1299 ;; Install sample configuration files
1300 (for-each (cut install-file <> share)
1301 (find-files "." "\\.yaml"))
1302
1303 ;; Install executables
1304 (mkdir-p bin)
1305 (let* ((server (string-append bin "/kaldi-gst-server"))
1306 (client (string-append bin "/kaldi-gst-client"))
1307 (worker (string-append bin "/kaldi-gst-worker"))
1308 (PYTHONPATH (getenv "PYTHONPATH"))
1309 (GST_PLUGIN_PATH (string-append
1310 (assoc-ref inputs "gst-kaldi-nnet2-online")
1311 "/lib/gstreamer-1.0:${GST_PLUGIN_PATH}"))
1312 (wrap (lambda (wrapper what)
1313 (with-output-to-file wrapper
1314 (lambda _
1315 (format #t
1316 "#!~a
1317 export PYTHONPATH=~a
1318 export GST_PLUGIN_PATH=~a
1319 exec ~a ~a/~a \"$@\"~%"
1320 (which "bash") PYTHONPATH GST_PLUGIN_PATH
1321 (which "python") share what)))
1322 (chmod wrapper #o555))))
1323 (for-each wrap
1324 (list server client worker)
1325 (list "master_server.py"
1326 "client.py"
1327 "worker.py")))
1328 #t))))))
1329 (inputs
1330 `(("gst-kaldi-nnet2-online" ,gst-kaldi-nnet2-online)
1331 ("python" ,python-wrapper)
1332 ("python-pygobject" ,python-pygobject)
1333 ("python-pyyaml" ,python-pyyaml)
1334 ("python-tornado" ,python-tornado-6)))
1335 (home-page "https://github.com/alumae/kaldi-gstreamer-server")
1336 (synopsis "Real-time full-duplex speech recognition server")
1337 (description "This is a real-time full-duplex speech recognition server,
1338 based on the Kaldi toolkit and the GStreamer framework and implemented in
1339 Python.")
1340 (license license:bsd-2))))
1341
1342 ;; Note that Tensorflow includes a "third_party" directory, which seems to not
1343 ;; only contain modified subsets of upstream library source code, but also
1344 ;; adapter headers provided by Google (such as the fft.h header, which is not
1345 ;; part of the upstream project code). The Tensorflow code includes headers
1346 ;; from the "third_party" directory. It does not look like we can replace
1347 ;; these headers with unmodified upstream files, so we keep them.
1348 (define-public tensorflow
1349 (package
1350 (name "tensorflow")
1351 (version "1.9.0")
1352 (source
1353 (origin
1354 (method git-fetch)
1355 (uri (git-reference
1356 (url "https://github.com/tensorflow/tensorflow.git")
1357 (commit (string-append "v" version))))
1358 (file-name (string-append "tensorflow-" version "-checkout"))
1359 (sha256
1360 (base32
1361 "0a9kwha395g3wgxfwln5j8vn9nkspmd75xldrlqdq540w996g8xa"))))
1362 (build-system cmake-build-system)
1363 (arguments
1364 `(#:tests? #f ; no "check" target
1365 #:build-type "Release"
1366 #:configure-flags
1367 (let ((protobuf (assoc-ref %build-inputs "protobuf"))
1368 (protobuf:native (assoc-ref %build-inputs "protobuf:native"))
1369 (jsoncpp (assoc-ref %build-inputs "jsoncpp"))
1370 (snappy (assoc-ref %build-inputs "snappy"))
1371 (sqlite (assoc-ref %build-inputs "sqlite")))
1372 (list
1373 ;; Use protobuf from Guix
1374 (string-append "-Dprotobuf_STATIC_LIBRARIES="
1375 protobuf "/lib/libprotobuf.so")
1376 (string-append "-DPROTOBUF_PROTOC_EXECUTABLE="
1377 protobuf:native "/bin/protoc")
1378
1379 ;; Use snappy from Guix
1380 (string-append "-Dsnappy_STATIC_LIBRARIES="
1381 snappy "/lib/libsnappy.so")
1382 ;; Yes, this is not actually the include directory but a prefix...
1383 (string-append "-Dsnappy_INCLUDE_DIR=" snappy)
1384
1385 ;; Use jsoncpp from Guix
1386 (string-append "-Djsoncpp_STATIC_LIBRARIES="
1387 jsoncpp "/lib/libjsoncpp.so")
1388 ;; Yes, this is not actually the include directory but a prefix...
1389 (string-append "-Djsoncpp_INCLUDE_DIR=" jsoncpp)
1390
1391 ;; Use sqlite from Guix
1392 (string-append "-Dsqlite_STATIC_LIBRARIES="
1393 sqlite "/lib/libsqlite.a")
1394
1395 ;; Use system libraries wherever possible. Currently, this
1396 ;; only affects zlib.
1397 "-Dsystemlib_ALL=ON"
1398 "-Dtensorflow_ENABLE_POSITION_INDEPENDENT_CODE=ON"
1399 "-Dtensorflow_BUILD_SHARED_LIB=ON"
1400 "-Dtensorflow_OPTIMIZE_FOR_NATIVE_ARCH=OFF"
1401 "-Dtensorflow_ENABLE_SSL_SUPPORT=OFF"
1402 "-Dtensorflow_BUILD_CONTRIB_KERNELS=OFF"))
1403 #:make-flags
1404 (list "CC=gcc")
1405 #:modules ((ice-9 ftw)
1406 (guix build utils)
1407 (guix build cmake-build-system)
1408 ((guix build python-build-system)
1409 #:select (python-version)))
1410 #:imported-modules (,@%cmake-build-system-modules
1411 (guix build python-build-system))
1412 #:phases
1413 (modify-phases %standard-phases
1414 (add-after 'unpack 'set-source-file-times-to-1980
1415 ;; At the end of the tf_python_build_pip_package target, a ZIP
1416 ;; archive should be generated via bdist_wheel, but it fails with
1417 ;; "ZIP does not support timestamps before 1980". Luckily,
1418 ;; SOURCE_DATE_EPOCH is respected, which we set to some time in
1419 ;; 1980.
1420 (lambda _ (setenv "SOURCE_DATE_EPOCH" "315532800") #t))
1421 ;; See https://github.com/tensorflow/tensorflow/issues/20517#issuecomment-406373913
1422 (add-after 'unpack 'python3.7-compatibility
1423 (lambda _
1424 (substitute* '("tensorflow/python/eager/pywrap_tfe_src.cc"
1425 "tensorflow/python/lib/core/ndarray_tensor.cc"
1426 "tensorflow/python/lib/core/py_func.cc")
1427 (("PyUnicode_AsUTF8") "(char *)PyUnicode_AsUTF8"))
1428 (substitute* "tensorflow/c/eager/c_api.h"
1429 (("unsigned char async")
1430 "unsigned char is_async"))
1431
1432 ;; Remove dependency on tensorboard, a complicated but probably
1433 ;; optional package.
1434 (substitute* "tensorflow/tools/pip_package/setup.py"
1435 ((".*'tensorboard >.*") ""))
1436
1437 ;; Fix the build with python-3.8, taken from rejected upstream patch:
1438 ;; https://github.com/tensorflow/tensorflow/issues/34197
1439 (substitute* (find-files "tensorflow/python" ".*\\.cc$")
1440 (("(nullptr,)(\\ +/. tp_print)" _ _ tp_print)
1441 (string-append "NULL, " tp_print)))
1442 #t))
1443 (add-after 'python3.7-compatibility 'chdir
1444 (lambda _ (chdir "tensorflow/contrib/cmake") #t))
1445 (add-after 'chdir 'disable-downloads
1446 (lambda* (#:key inputs #:allow-other-keys)
1447 (substitute* (find-files "external" "\\.cmake$")
1448 (("GIT_REPOSITORY.*") "")
1449 (("GIT_TAG.*") "")
1450 (("PREFIX ")
1451 "DOWNLOAD_COMMAND \"\"\nPREFIX "))
1452
1453 ;; Use packages from Guix
1454 (let ((grpc (assoc-ref inputs "grpc")))
1455 (substitute* "CMakeLists.txt"
1456 ;; Sqlite
1457 (("include\\(sqlite\\)") "")
1458 (("\\$\\{sqlite_STATIC_LIBRARIES\\}")
1459 (string-append (assoc-ref inputs "sqlite")
1460 "/lib/libsqlite3.so"))
1461 (("sqlite_copy_headers_to_destination") "")
1462
1463 ;; PNG
1464 (("include\\(png\\)") "")
1465 (("\\$\\{png_STATIC_LIBRARIES\\}")
1466 (string-append (assoc-ref inputs "libpng")
1467 "/lib/libpng16.so"))
1468 (("png_copy_headers_to_destination") "")
1469
1470 ;; JPEG
1471 (("include\\(jpeg\\)") "")
1472 (("\\$\\{jpeg_STATIC_LIBRARIES\\}")
1473 (string-append (assoc-ref inputs "libjpeg")
1474 "/lib/libjpeg.so"))
1475 (("jpeg_copy_headers_to_destination") "")
1476
1477 ;; GIF
1478 (("include\\(gif\\)") "")
1479 (("\\$\\{gif_STATIC_LIBRARIES\\}")
1480 (string-append (assoc-ref inputs "giflib")
1481 "/lib/libgif.so"))
1482 (("gif_copy_headers_to_destination") "")
1483
1484 ;; lmdb
1485 (("include\\(lmdb\\)") "")
1486 (("\\$\\{lmdb_STATIC_LIBRARIES\\}")
1487 (string-append (assoc-ref inputs "lmdb")
1488 "/lib/liblmdb.so"))
1489 (("lmdb_copy_headers_to_destination") "")
1490
1491 ;; Protobuf
1492 (("include\\(protobuf\\)") "")
1493 (("protobuf_copy_headers_to_destination") "")
1494 (("^ +protobuf") "")
1495
1496 ;; gRPC
1497 (("include\\(grpc\\)")
1498 "find_package(grpc REQUIRED NAMES gRPC)")
1499 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES grpc\\)") "")
1500
1501 ;; Eigen
1502 (("include\\(eigen\\)")
1503 (string-append "find_package(eigen REQUIRED NAMES Eigen3)
1504 set(eigen_INCLUDE_DIRS ${CMAKE_CURRENT_BINARY_DIR}/external/eigen_archive "
1505 (assoc-ref inputs "eigen") "/include/eigen3)"))
1506 (("^ +eigen") "")
1507
1508 ;; snappy
1509 (("include\\(snappy\\)")
1510 "add_definitions(-DTF_USE_SNAPPY)")
1511 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES snappy\\)") "")
1512
1513 ;; jsoncpp
1514 (("include\\(jsoncpp\\)") "")
1515 (("^ +jsoncpp") ""))
1516
1517 (substitute* "tf_core_framework.cmake"
1518 ((" grpc") "")
1519 (("\\$\\{GRPC_BUILD\\}/grpc_cpp_plugin")
1520 (which "grpc_cpp_plugin"))
1521 ;; Link with gRPC libraries
1522 (("add_library\\(tf_protos_cc.*" m)
1523 (string-append m
1524 (format #f "\ntarget_link_libraries(tf_protos_cc PRIVATE \
1525 ~a/lib/libgrpc++_unsecure.a \
1526 ~a/lib/libgrpc_unsecure.a \
1527 ~a/lib/libaddress_sorting.a \
1528 ~a/lib/libgpr.a \
1529 ~a//lib/libcares.so
1530 )\n"
1531 grpc grpc grpc grpc
1532 (assoc-ref inputs "c-ares"))))))
1533 (substitute* "tf_tools.cmake"
1534 (("add_dependencies\\(\\$\\{proto_text.*") ""))
1535 ;; Remove dependency on bundled grpc
1536 (substitute* "tf_core_distributed_runtime.cmake"
1537 (("tf_core_cpu grpc") "tf_core_cpu"))
1538
1539 ;; This directory is a dependency of many targets.
1540 (mkdir-p "protobuf")
1541 #t))
1542 (add-after 'configure 'unpack-third-party-sources
1543 (lambda* (#:key inputs #:allow-other-keys)
1544 ;; This is needed to configure bundled packages properly.
1545 (setenv "CONFIG_SHELL" (which "bash"))
1546 (for-each
1547 (lambda (name)
1548 (let* ((what (assoc-ref inputs (string-append name "-src")))
1549 (name* (string-map (lambda (c)
1550 (if (char=? c #\-)
1551 #\_ c)) name))
1552 (where (string-append "../build/" name* "/src/" name*)))
1553 (cond
1554 ((string-suffix? ".zip" what)
1555 (mkdir-p where)
1556 (with-directory-excursion where
1557 (invoke "unzip" what)))
1558 ((string-suffix? ".tar.gz" what)
1559 (mkdir-p where)
1560 (invoke "tar" "xf" what
1561 "-C" where "--strip-components=1"))
1562 (else
1563 (let ((parent (dirname where)))
1564 (mkdir-p parent)
1565 (with-directory-excursion parent
1566 (when (file-exists? name*)
1567 (delete-file-recursively name*))
1568 (copy-recursively what name*)
1569 (map make-file-writable
1570 (find-files name* ".*"))))))))
1571 (list "boringssl"
1572 "cub"
1573 "double-conversion"
1574 "farmhash"
1575 "fft2d"
1576 "highwayhash"
1577 "nsync"
1578 "re2"))
1579
1580 (rename-file "../build/cub/src/cub/cub-1.8.0/"
1581 "../build/cub/src/cub/cub/")
1582 #t))
1583 (add-after 'unpack 'fix-python-build
1584 (lambda* (#:key inputs outputs #:allow-other-keys)
1585 (mkdir-p "protobuf-src")
1586 (invoke "tar" "xf" (assoc-ref inputs "protobuf:src")
1587 "-C" "protobuf-src" "--strip-components=1")
1588 (mkdir-p "eigen-src")
1589 (invoke "tar" "xf" (assoc-ref inputs "eigen:src")
1590 "-C" "eigen-src" "--strip-components=1")
1591
1592 (substitute* "tensorflow/contrib/cmake/tf_python.cmake"
1593 ;; Ensure that all Python dependencies can be found at build time.
1594 (("PYTHONPATH=\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/tf_python" m)
1595 (string-append m ":" (getenv "PYTHONPATH")))
1596 ;; Take protobuf source files from our source package.
1597 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/protobuf/src/protobuf/src/google")
1598 (string-append (getcwd) "/protobuf-src/src/google")))
1599
1600 (substitute* '("tensorflow/contrib/cmake/tf_shared_lib.cmake"
1601 "tensorflow/contrib/cmake/tf_python.cmake")
1602 ;; Take Eigen source files from our source package.
1603 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/eigen/src/eigen/")
1604 (string-append (getcwd) "/eigen-src/"))
1605 ;; Take Eigen headers from our own package.
1606 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/external/eigen_archive")
1607 (string-append (assoc-ref inputs "eigen") "/include/eigen3")))
1608
1609 ;; Correct the RUNPATH of ops libraries generated for Python.
1610 ;; TODO: this doesn't work :(
1611 ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
1612 ;; warning: RUNPATH contains bogus entries: ("/tmp/guix-build-tensorflow-1.9.0.drv-0/source/tensorflow/contrib/build")
1613 ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
1614 ;; error: depends on 'libpywrap_tensorflow_internal.so', which
1615 ;; cannot be found in RUNPATH ...
1616 (substitute* "tensorflow/contrib/cmake/tf_cc_ops.cmake"
1617 (("set_target_properties.*")
1618 (string-append "set_target_properties(${_AT_TARGET} PROPERTIES \
1619 COMPILE_FLAGS ${target_compile_flags} \
1620 INSTALL_RPATH_USE_LINK_PATH TRUE \
1621 INSTALL_RPATH " (assoc-ref outputs "out") "/lib)\n")))
1622 #t))
1623 (add-after 'build 'build-pip-package
1624 (lambda* (#:key outputs #:allow-other-keys)
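;; Link the libraries built for the pip package with an RPATH pointing
;; at this package's lib directory.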
1625 (setenv "LDFLAGS"
1626 (string-append "-Wl,-rpath="
1627 (assoc-ref outputs "out") "/lib"))
1628 (invoke "make" "tf_python_build_pip_package")
1629 #t))
1630 (add-after 'build-pip-package 'install-python
1631 (lambda* (#:key inputs outputs #:allow-other-keys)
1632 (let ((out (assoc-ref outputs "out"))
1633 (wheel (car (find-files "../build/tf_python/dist/" "\\.whl$")))
1634 (python-version (python-version
1635 (assoc-ref inputs "python"))))
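;; Install the wheel produced by the 'build-pip-package phase.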
1636 (invoke "python" "-m" "pip" "install" wheel
1637 (string-append "--prefix=" out))
1638
1639 ;; XXX: broken RUNPATH, see fix-python-build phase.
1640 (delete-file
1641 (string-append
1642 out "/lib/python" python-version
1643 "/site-packages/tensorflow/contrib/"
1644 "seq2seq/python/ops/lib_beam_search_ops.so"))
1645 #t))))))
1646 (native-inputs
1647 `(("pkg-config" ,pkg-config)
1648 ("protobuf:native" ,protobuf-3.6) ; protoc
1649 ("protobuf:src" ,(package-source protobuf-3.6))
1650 ("eigen:src" ,(package-source eigen-for-tensorflow))
1651 ;; install_pip_packages.sh wants setuptools 39.1.0 specifically.
1652 ("python-setuptools" ,python-setuptools-for-tensorflow)
1653
1654 ;; The commit hashes and URLs for third-party source code are taken
1655 ;; from "tensorflow/workspace.bzl".
1656 ("boringssl-src"
1657 ,(let ((commit "ee7aa02")
1658 (revision "1"))
1659 (origin
1660 (method git-fetch)
1661 (uri (git-reference
1662 (url "https://boringssl.googlesource.com/boringssl")
1663 (commit commit)))
1664 (file-name (string-append "boringssl-0-" revision
1665 (string-take commit 7)
1666 "-checkout"))
1667 (sha256
1668 (base32
1669 "1jf693q0nw0adsic6cgmbdx6g7wr4rj4vxa8j1hpn792fqhd8wgw")))))
1670 ("cub-src"
1671 ,(let ((version "1.8.0"))
1672 (origin
1673 (method url-fetch)
1674 (uri (string-append "https://mirror.bazel.build/github.com/NVlabs/"
1675 "cub/archive/" version ".zip"))
1676 (file-name (string-append "cub-" version ".zip"))
1677 (sha256
1678 (base32
1679 "1hsqikqridb90dkxkjr2918dcry6pfh46ccnwrzawl56aamhdykb")))))
1680 ("double-conversion-src"
1681 ,(let ((commit "5664746")
1682 (revision "1"))
1683 (origin
1684 (method git-fetch)
1685 (uri (git-reference
1686 (url "https://github.com/google/double-conversion.git")
1687 (commit commit)))
1688 (file-name
1689 (git-file-name "double-conversion"
1690 (string-append "0-" revision "."
1691 (string-take commit 7))))
1692 (sha256
1693 (base32
1694 "1h5lppqqxcvdg5jq42i5msgwx20ryij3apvmndflngrgdpc04gn1")))))
1695 ("farmhash-src"
1696 ,(let ((commit "816a4ae622e964763ca0862d9dbd19324a1eaf45"))
1697 (origin
1698 (method url-fetch)
1699 (uri (string-append
1700 "https://mirror.bazel.build/github.com/google/farmhash/archive/"
1701 commit ".tar.gz"))
1702 (file-name (string-append "farmhash-0-" (string-take commit 7)
1703 ".tar.gz"))
1704 (sha256
1705 (base32
1706 "185b2xdxl4d4cnsnv6abg8s22gxvx8673jq2yaq85bz4cdy58q35")))))
1707 ;; The license notice on the home page at
1708 ;; http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html says:
1709 ;; Copyright Takuya OOURA, 1996-2001
1710 ;;
1711 ;; You may use, copy, modify and distribute this code for any purpose
1712 ;; (include commercial use) and without fee. Please refer to this
1713 ;; package when you modify this code.
1714 ;;
1715 ;; We take the identical tarball from the Bazel mirror, because the URL
1716 ;; at the home page is not versioned and might change.
1717 ("fft2d-src"
1718 ,(origin
1719 (method url-fetch)
1720 (uri "https://mirror.bazel.build/www.kurims.kyoto-u.ac.jp/~ooura/fft.tgz")
1721 (file-name "fft2d.tar.gz")
1722 (sha256
1723 (base32
1724 "15jjkfvhqvl2c0753d2di8hz0pyzn598g74wqy79awdrf1y67fsj"))))
1725 ("highwayhash-src"
1726 ,(let ((commit "be5edafc2e1a455768e260ccd68ae7317b6690ee")
1727 (revision "1"))
1728 (origin
1729 (method git-fetch)
1730 (uri (git-reference
1731 (url "https://github.com/google/highwayhash.git")
1732 (commit commit)))
1733 (file-name (string-append "highwayhash-0-" revision
1734 (string-take commit 7)
1735 "-checkout"))
1736 (sha256
1737 (base32
1738 "154jwf98cyy54hldr94pgjn85zynly3abpnc1avmb8a18lzwjyb6")))))
1739 ("nsync-src"
1740 ,(let ((version "0559ce013feac8db639ee1bf776aca0325d28777")
1741 (revision "1"))
1742 (origin
1743 (method url-fetch)
1744 (uri (string-append "https://mirror.bazel.build/"
1745 "github.com/google/nsync/archive/"
1746 version ".tar.gz"))
1747 (file-name (string-append "nsync-0." revision
1748 "-" (string-take version 7)
1749 ".tar.gz"))
1750 (sha256
1751 (base32
1752 "0qdkyqym34x739mmzv97ah5r7ph462v5xkxqxvidmcfqbi64b132")))))
1753 ("re2-src"
1754 ,(let ((commit "e7efc48")
1755 (revision "1"))
1756 (origin
1757 (method git-fetch)
1758 (uri (git-reference
1759 (url "https://github.com/google/re2")
1760 (commit commit)))
1761 (file-name (string-append "re2-0-" revision
1762 (string-take commit 7)
1763 "-checkout"))
1764 (sha256
1765 (base32
1766 "161g9841rjfsy5pn52fcis0s9hdr7rxvb06pad38j5rppfihvign")))))
1767 ("googletest" ,googletest)
1768 ("swig" ,swig)
1769 ("unzip" ,unzip)))
1770 (propagated-inputs
1771 `(("python-absl-py" ,python-absl-py)
1772 ("python-astor" ,python-astor)
1773 ("python-gast" ,python-gast)
1774 ("python-grpcio" ,python-grpcio)
1775 ("python-numpy" ,python-numpy)
1776 ("python-protobuf" ,python-protobuf-3.6)
1777 ("python-six" ,python-six)
1778 ("python-termcolo" ,python-termcolor)
1779 ("python-wheel" ,python-wheel)))
1780 (inputs
1781 `(("c-ares" ,c-ares)
1782 ("eigen" ,eigen-for-tensorflow)
1783 ("gemmlowp" ,gemmlowp-for-tensorflow)
1784 ("lmdb" ,lmdb)
1785 ("libjpeg" ,libjpeg-turbo)
1786 ("libpng" ,libpng)
1787 ("giflib" ,giflib)
1788 ("grpc" ,grpc-1.16.1 "static")
1789 ("grpc:bin" ,grpc-1.16.1)
1790 ("jsoncpp" ,jsoncpp-for-tensorflow)
1791 ("snappy" ,snappy)
1792 ("sqlite" ,sqlite)
1793 ("protobuf" ,protobuf-3.6)
1794 ("python" ,python-wrapper)
1795 ("zlib" ,zlib)))
1796 (home-page "https://tensorflow.org")
1797 (synopsis "Machine learning framework")
1798 (description
1799 "TensorFlow is a flexible platform for building and training machine
1800 learning models. It provides a library for high performance numerical
1801 computation and includes high level Python APIs, including both a sequential
1802 API for beginners that allows users to build models quickly by plugging
1803 together building blocks and a subclassing API with an imperative style for
1804 advanced research.")
1805 (license license:asl2.0)))
1806
1807 (define-public python-iml
1808 (package
1809 (name "python-iml")
1810 (version "0.6.2")
1811 (source
1812 (origin
1813 (method url-fetch)
1814 (uri (pypi-uri "iml" version))
1815 (sha256
1816 (base32
1817 "1k8szlpm19rcwcxdny9qdm3gmaqq8akb4xlvrzyz8c2d679aak6l"))))
1818 (build-system python-build-system)
1819 (propagated-inputs
1820 `(("ipython" ,python-ipython)
1821 ("numpy" ,python-numpy)
1822 ("pandas" ,python-pandas)
1823 ("scipy" ,python-scipy)))
1824 (native-inputs
1825 `(("nose" ,python-nose)))
1826 (home-page "https://github.com/interpretable-ml/iml")
1827 (synopsis "Interpretable Machine Learning (iML) package")
1828 (description "Interpretable ML (iML) is a set of data type objects,
1829 visualizations, and interfaces that can be used by any method designed to
1830 explain the predictions of machine learning models (or really the output of
1831 any function). It currently contains the interface and IO code from the Shap
1832 project, and it will potentially also do the same for the Lime project.")
1833 (license license:expat)))
1834
1835 (define-public python-keras-applications
1836 (package
1837 (name "python-keras-applications")
1838 (version "1.0.8")
1839 (source
1840 (origin
1841 (method url-fetch)
1842 (uri (pypi-uri "Keras_Applications" version))
1843 (sha256
1844 (base32
1845 "1rcz31ca4axa6kzhjx4lwqxbg4wvlljkj8qj9a7p9sfd5fhzjyam"))))
1846 (build-system python-build-system)
1847 ;; The tests require Keras, but this package is needed to build Keras.
1848 (arguments '(#:tests? #f))
1849 (propagated-inputs
1850 `(("python-h5py" ,python-h5py)
1851 ("python-numpy" ,python-numpy)))
1852 (native-inputs
1853 `(("python-pytest" ,python-pytest)
1854 ("python-pytest-cov" ,python-pytest-cov)
1855 ("python-pytest-pep8" ,python-pytest-pep8)
1856 ("python-pytest-xdist" ,python-pytest-xdist)))
1857 (home-page "https://github.com/keras-team/keras-applications")
1858 (synopsis "Reference implementations of popular deep learning models")
1859 (description
1860 "This package provides reference implementations of popular deep learning
1861 models for use with the Keras deep learning framework.")
1862 (license license:expat)))
1863
1864 (define-public python-keras-preprocessing
1865 (package
1866 (name "python-keras-preprocessing")
1867 (version "1.1.0")
1868 (source
1869 (origin
1870 (method url-fetch)
1871 (uri (pypi-uri "Keras_Preprocessing" version))
1872 (sha256
1873 (base32
1874 "1r98nm4k1svsqjyaqkfk23i31bl1kcfcyp7094yyj3c43phfp3as"))))
1875 (build-system python-build-system)
1876 (propagated-inputs
1877 `(("python-numpy" ,python-numpy)
1878 ("python-six" ,python-six)))
1879 (native-inputs
1880 `(("python-pandas" ,python-pandas)
1881 ("python-pillow" ,python-pillow)
1882 ("python-pytest" ,python-pytest)
1883 ("python-pytest-cov" ,python-pytest-cov)
1884 ("python-pytest-xdist" ,python-pytest-xdist)
1885 ("tensorflow" ,tensorflow)))
1886 (home-page "https://github.com/keras-team/keras-preprocessing/")
1887 (synopsis "Data preprocessing and augmentation for deep learning models")
1888 (description
1889 "Keras Preprocessing is the data preprocessing and data augmentation
1890 module of the Keras deep learning library. It provides utilities for working
1891 with image data, text data, and sequence data.")
1892 (license license:expat)))
1893
1894 (define-public python-keras
1895 (package
1896 (name "python-keras")
1897 (version "2.2.4")
1898 (source
1899 (origin
1900 (method url-fetch)
1901 (uri (pypi-uri "Keras" version))
1902 (patches (search-patches "python-keras-integration-test.patch"))
1903 (sha256
1904 (base32
1905 "1j8bsqzh49vjdxy6l1k4iwax5vpjzniynyd041xjavdzvfii1dlh"))))
1906 (build-system python-build-system)
1907 (arguments
1908 `(#:phases
1909 (modify-phases %standard-phases
1910 (add-after 'unpack 'remove-tests-for-unavailable-features
1911 (lambda _
1912 (delete-file "keras/backend/theano_backend.py")
1913 (delete-file "keras/backend/cntk_backend.py")
1914 (delete-file "tests/keras/backend/backend_test.py")
1915
1916 ;; FIXME: This doesn't work because Tensorflow is missing the
1917 ;; coder ops library.
1918 (delete-file "tests/keras/test_callbacks.py")
1919 #t))
1920 (replace 'check
1921 (lambda _
1922 ;; These tests attempt to download data files from the internet.
1923 (delete-file "tests/integration_tests/test_datasets.py")
1924 (delete-file "tests/integration_tests/imagenet_utils_test.py")
1925
1926 (setenv "PYTHONPATH"
1927 (string-append (getcwd) "/build/lib:"
1928 (getenv "PYTHONPATH")))
1929 (invoke "py.test" "-v"
1930 "-p" "no:cacheprovider"
1931 "--ignore" "keras/utils"))))))
1932 (propagated-inputs
1933 `(("python-h5py" ,python-h5py)
1934 ("python-keras-applications" ,python-keras-applications)
1935 ("python-keras-preprocessing" ,python-keras-preprocessing)
1936 ("python-numpy" ,python-numpy)
1937 ("python-pydot" ,python-pydot)
1938 ("python-pyyaml" ,python-pyyaml)
1939 ("python-scipy" ,python-scipy)
1940 ("python-six" ,python-six)
1941 ("tensorflow" ,tensorflow)
1942 ("graphviz" ,graphviz)))
1943 (native-inputs
1944 `(("python-pandas" ,python-pandas)
1945 ("python-pytest" ,python-pytest)
1946 ("python-pytest-cov" ,python-pytest-cov)
1947 ("python-pytest-pep8" ,python-pytest-pep8)
1948 ("python-pytest-timeout" ,python-pytest-timeout)
1949 ("python-pytest-xdist" ,python-pytest-xdist)
1950 ("python-sphinx" ,python-sphinx)
1951 ("python-requests" ,python-requests)))
1952 (home-page "https://github.com/keras-team/keras")
1953 (synopsis "High-level deep learning framework")
1954 (description "Keras is a high-level neural networks API, written in Python
1955 and capable of running on top of TensorFlow. It was developed with a focus on
1956 enabling fast experimentation. Use Keras if you need a deep learning library
1957 that:
1958
1959 @itemize
1960 @item Allows for easy and fast prototyping (through user friendliness,
1961 modularity, and extensibility).
1962 @item Supports both convolutional networks and recurrent networks, as well as
1963 combinations of the two.
1964 @item Runs seamlessly on CPU and GPU.
1965 @end itemize\n")
1966 (license license:expat)))
1967
1968 (define-public sbcl-cl-libsvm-format
1969 (let ((commit "3300f84fd8d9f5beafc114f543f9d83417c742fb")
1970 (revision "0"))
1971 (package
1972 (name "sbcl-cl-libsvm-format")
1973 (version (git-version "0.1.0" revision commit))
1974 (source
1975 (origin
1976 (method git-fetch)
1977 (uri (git-reference
1978 (url "https://github.com/masatoi/cl-libsvm-format.git")
1979 (commit commit)))
1980 (file-name (git-file-name name version))
1981 (sha256
1982 (base32
1983 "0284aj84xszhkhlivaigf9qj855fxad3mzmv3zfr0qzb5k0nzwrg"))))
1984 (build-system asdf-build-system/sbcl)
1985 (native-inputs
1986 `(("prove" ,sbcl-prove)
1987 ("prove-asdf" ,sbcl-prove-asdf)))
1988 (inputs
1989 `(("alexandria" ,sbcl-alexandria)))
1990 (synopsis "LibSVM data format reader for Common Lisp")
1991 (description
1992 "This Common Lisp library provides a fast reader for data in LibSVM
1993 format.")
1994 (home-page "https://github.com/masatoi/cl-libsvm-format")
1995 (license license:expat))))
1996
1997 (define-public cl-libsvm-format
1998 (sbcl-package->cl-source-package sbcl-cl-libsvm-format))
1999
2000 (define-public ecl-cl-libsvm-format
2001 (sbcl-package->ecl-package sbcl-cl-libsvm-format))
2002
2003 (define-public sbcl-cl-online-learning
2004 (let ((commit "fc7a34f4f161cd1c7dd747d2ed8f698947781423")
2005 (revision "0"))
2006 (package
2007 (name "sbcl-cl-online-learning")
2008 (version (git-version "0.5" revision commit))
2009 (source
2010 (origin
2011 (method git-fetch)
2012 (uri (git-reference
2013 (url "https://github.com/masatoi/cl-online-learning.git")
2014 (commit commit)))
2015 (file-name (git-file-name name version))
2016 (sha256
2017 (base32
2018 "14x95rlg80ay5hv645ki57pqvy12v28hz4k1w0f6bsfi2rmpxchq"))))
2019 (build-system asdf-build-system/sbcl)
2020 (native-inputs
2021 `(("prove" ,sbcl-prove)
2022 ("prove-asdf" ,sbcl-prove-asdf)))
2023 (inputs
2024 `(("cl-libsvm-format" ,sbcl-cl-libsvm-format)
2025 ("cl-store" ,sbcl-cl-store)))
2026 (arguments
2027 `(;; FIXME: Tests pass but then the check phase crashes
2028 #:tests? #f))
2029 (synopsis "Online Machine Learning for Common Lisp")
2030 (description
2031 "This library contains a collection of machine learning algorithms for
2032 online linear classification written in Common Lisp.")
2033 (home-page "https://github.com/masatoi/cl-online-learning")
2034 (license license:expat))))
2035
2036 (define-public cl-online-learning
2037 (sbcl-package->cl-source-package sbcl-cl-online-learning))
2038
2039 (define-public ecl-cl-online-learning
2040 (sbcl-package->ecl-package sbcl-cl-online-learning))
2041
2042 (define-public sbcl-cl-random-forest
2043 (let ((commit "85fbdd4596d40e824f70f1b7cf239cf544e49d51")
2044 (revision "0"))
2045 (package
2046 (name "sbcl-cl-random-forest")
2047 (version (git-version "0.1" revision commit))
2048 (source
2049 (origin
2050 (method git-fetch)
2051 (uri (git-reference
2052 (url "https://github.com/masatoi/cl-random-forest.git")
2053 (commit commit)))
2054 (file-name (git-file-name name version))
2055 (sha256
2056 (base32
2057 "097xv60i1ndz68sg9p4pc7c5gvyp9i1xgw966b4wwfq3x6hbz421"))))
2058 (build-system asdf-build-system/sbcl)
2059 (native-inputs
2060 `(("prove" ,sbcl-prove)
2061 ("prove-asdf" ,sbcl-prove-asdf)
2062 ("trivial-garbage" ,sbcl-trivial-garbage)))
2063 (inputs
2064 `(("alexandria" ,sbcl-alexandria)
2065 ("cl-libsvm-format" ,sbcl-cl-libsvm-format)
2066 ("cl-online-learning" ,sbcl-cl-online-learning)
2067 ("lparallel" ,sbcl-lparallel)))
2068 (arguments
2069 `(#:tests? #f)) ; The tests download data from the Internet
2070 (synopsis "Random Forest and Global Refinement for Common Lisp")
2071 (description
2072 "CL-random-forest is an implementation of Random Forest for multiclass
2073 classification and univariate regression written in Common Lisp. It also
2074 includes an implementation of Global Refinement of Random Forest.")
2075 (home-page "https://github.com/masatoi/cl-random-forest")
2076 (license license:expat))))
2077
2078 (define-public cl-random-forest
2079 (sbcl-package->cl-source-package sbcl-cl-random-forest))
2080
2081 (define-public ecl-cl-random-forest
2082 (sbcl-package->ecl-package sbcl-cl-random-forest))
2083
2084 (define-public gloo
2085 (let ((version "0.0.0") ; no proper version tag
2086 (commit "ca528e32fea9ca8f2b16053cff17160290fc84ce")
2087 (revision "0"))
2088 (package
2089 (name "gloo")
2090 (version (git-version version revision commit))
2091 (source
2092 (origin
2093 (method git-fetch)
2094 (uri (git-reference
2095 (url "https://github.com/facebookincubator/gloo.git")
2096 (commit commit)))
2097 (file-name (git-file-name name version))
2098 (sha256
2099 (base32
2100 "1q9f80zy75f6njrzrqkmhc0g3qxs4gskr7ns2jdqanxa2ww7a99w"))))
2101 (build-system cmake-build-system)
2102 (native-inputs
2103 `(("googletest" ,googletest)))
2104 (arguments
2105 `(#:configure-flags '("-DBUILD_TEST=1")
2106 #:phases
2107 (modify-phases %standard-phases
2108 (replace 'check
2109 (lambda _
2110 (invoke "make" "gloo_test")
2111 #t)))))
2112 (synopsis "Collective communications library")
2113 (description
2114 "Gloo is a collective communications library. It comes with a
2115 number of collective algorithms useful for machine learning applications.
2116 These include a barrier, broadcast, and allreduce.")
2117 (home-page "https://github.com/facebookincubator/gloo")
2118 (license license:bsd-3))))
2119
2120 (define-public python-umap-learn
2121 (package
2122 (name "python-umap-learn")
2123 (version "0.3.10")
2124 (source
2125 (origin
2126 (method url-fetch)
2127 (uri (pypi-uri "umap-learn" version))
2128 (sha256
2129 (base32
2130 "02ada2yy6km6zgk2836kg1c97yrcpalvan34p8c57446finnpki1"))))
2131 (build-system python-build-system)
2132 (native-inputs
2133 `(("python-joblib" ,python-joblib)
2134 ("python-nose" ,python-nose)))
2135 (propagated-inputs
2136 `(("python-numba" ,python-numba)
2137 ("python-numpy" ,python-numpy)
2138 ("python-scikit-learn" ,python-scikit-learn)
2139 ("python-scipy" ,python-scipy)))
2140 (home-page "https://github.com/lmcinnes/umap")
2141 (synopsis
2142 "Uniform Manifold Approximation and Projection")
2143 (description
2144 "Uniform Manifold Approximation and Projection is a dimension reduction
2145 technique that can be used for visualisation similarly to t-SNE, but also for
2146 general non-linear dimension reduction.")
2147 (license license:bsd-3)))