;;; gnu/packages/machine-learning.scm --- from jackhill/guix/guix.git
1 ;;; GNU Guix --- Functional package management for GNU
2 ;;; Copyright © 2015, 2016, 2017, 2018, 2019, 2020 Ricardo Wurmus <rekado@elephly.net>
3 ;;; Copyright © 2016, 2020 Efraim Flashner <efraim@flashner.co.il>
4 ;;; Copyright © 2016, 2017, 2020 Marius Bakke <mbakke@fastmail.com>
5 ;;; Copyright © 2016 Hartmut Goebel <h.goebel@crazy-compilers.com>
6 ;;; Copyright © 2018, 2019, 2020 Tobias Geerinckx-Rice <me@tobias.gr>
7 ;;; Copyright © 2018 Kei Kebreau <kkebreau@posteo.net>
8 ;;; Copyright © 2018 Mark Meyer <mark@ofosos.org>
9 ;;; Copyright © 2018 Ben Woodcroft <donttrustben@gmail.com>
10 ;;; Copyright © 2018 Fis Trivial <ybbs.daans@hotmail.com>
11 ;;; Copyright © 2018 Julien Lepiller <julien@lepiller.eu>
12 ;;; Copyright © 2018 Björn Höfling <bjoern.hoefling@bjoernhoefling.de>
13 ;;; Copyright © 2019 Nicolas Goaziou <mail@nicolasgoaziou.fr>
14 ;;; Copyright © 2019, 2020 Guillaume Le Vaillant <glv@posteo.net>
15 ;;; Copyright © 2019 Brett Gilio <brettg@gnu.org>
16 ;;; Copyright © 2020 Konrad Hinsen <konrad.hinsen@fastmail.net>
17 ;;; Copyright © 2020 Edouard Klein <edk@beaver-labs.com>
18 ;;; Copyright © 2020 Vinicius Monego <monego@posteo.net>
19 ;;;
20 ;;; This file is part of GNU Guix.
21 ;;;
22 ;;; GNU Guix is free software; you can redistribute it and/or modify it
23 ;;; under the terms of the GNU General Public License as published by
24 ;;; the Free Software Foundation; either version 3 of the License, or (at
25 ;;; your option) any later version.
26 ;;;
27 ;;; GNU Guix is distributed in the hope that it will be useful, but
28 ;;; WITHOUT ANY WARRANTY; without even the implied warranty of
29 ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
30 ;;; GNU General Public License for more details.
31 ;;;
32 ;;; You should have received a copy of the GNU General Public License
33 ;;; along with GNU Guix. If not, see <http://www.gnu.org/licenses/>.
34
;;; Module definition for machine-learning packages.  The #:use-module
;;; clauses below pull in the build systems, download methods, and other
;;; package modules that the package definitions in this file reference.
;;; NOTE(review): the clause list is not fully alphabetized (e.g. cmake
;;; after compression, sqlite after statistics); left as-is because
;;; import order can affect duplicate-binding resolution in Guile.
(define-module (gnu packages machine-learning)
  #:use-module ((guix licenses) #:prefix license:)
  #:use-module (guix packages)
  #:use-module (guix utils)
  #:use-module (guix download)
  #:use-module (guix svn-download)
  #:use-module (guix build-system cmake)
  #:use-module (guix build-system gnu)
  #:use-module (guix build-system ocaml)
  #:use-module (guix build-system python)
  #:use-module (guix build-system r)
  #:use-module (guix git-download)
  #:use-module (gnu packages)
  #:use-module (gnu packages adns)
  #:use-module (gnu packages algebra)
  #:use-module (gnu packages audio)
  #:use-module (gnu packages autotools)
  #:use-module (gnu packages base)
  #:use-module (gnu packages bash)
  #:use-module (gnu packages boost)
  #:use-module (gnu packages check)
  #:use-module (gnu packages compression)
  #:use-module (gnu packages cmake)
  #:use-module (gnu packages cran)
  #:use-module (gnu packages databases)
  #:use-module (gnu packages dejagnu)
  #:use-module (gnu packages gcc)
  #:use-module (gnu packages glib)
  #:use-module (gnu packages graphviz)
  #:use-module (gnu packages gstreamer)
  #:use-module (gnu packages image)
  #:use-module (gnu packages linux)
  #:use-module (gnu packages maths)
  #:use-module (gnu packages mpi)
  #:use-module (gnu packages ocaml)
  #:use-module (gnu packages onc-rpc)
  #:use-module (gnu packages perl)
  #:use-module (gnu packages pkg-config)
  #:use-module (gnu packages protobuf)
  #:use-module (gnu packages python)
  #:use-module (gnu packages python-check)
  #:use-module (gnu packages python-science)
  #:use-module (gnu packages python-web)
  #:use-module (gnu packages python-xyz)
  #:use-module (gnu packages rpc)
  #:use-module (gnu packages serialization)
  #:use-module (gnu packages sphinx)
  #:use-module (gnu packages statistics)
  #:use-module (gnu packages sqlite)
  #:use-module (gnu packages swig)
  #:use-module (gnu packages web)
  #:use-module (gnu packages xml)
  #:use-module (gnu packages xorg)
  #:use-module (ice-9 match))
89
(define-public fann
  ;; The last release is >100 commits behind, so we package from git.
  (let ((commit "d71d54788bee56ba4cf7522801270152da5209d7"))
    (package
      (name "fann")
      ;; Guix convention for git snapshots: upstream version, revision
      ;; counter, then a short commit prefix.
      (version (string-append "2.2.0-1." (string-take commit 8)))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/libfann/fann")
                      (commit commit)))
                (file-name (string-append name "-" version "-checkout"))
                (sha256
                 (base32
                  "0ibwpfrjs6q2lijs8slxjgzb2llcl6rk3v2ski4r6215g5jjhg3x"))))
      (build-system cmake-build-system)
      (arguments
       `(#:phases
         (modify-phases %standard-phases
           ;; There is no CMake "test" target; run the compiled test
           ;; binary from the build tree directly.
           (replace 'check
             (lambda _
               ;; The phase runs from the build directory, so a relative
               ;; path suffices.  `invoke' raises an exception on failure.
               (with-directory-excursion "tests"
                 (invoke "./fann_tests"))
               #t)))))
      (home-page "http://leenissen.dk/fann/wp/")
      (synopsis "Fast Artificial Neural Network")
      (description
       "FANN is a neural network library, which implements multilayer
artificial neural networks in C with support for both fully connected and
sparsely connected networks.")
      (license license:lgpl2.1))))
121
;; LIBSVM: reference implementation of support vector machines.
;; The Makefile has neither a "check" nor an "install" target, hence the
;; custom install phase below.
(define-public libsvm
  (package
    (name "libsvm")
    (version "3.23")
    (source
     (origin
       (method url-fetch)
       (uri (string-append "https://www.csie.ntu.edu.tw/~cjlin/libsvm/"
                           name "-" version ".tar.gz"))
       (sha256
        (base32 "0jpaq0rr92x38p4nk3gjan79ip67m6p80anb28z1d8601miysyi5"))))
    (build-system gnu-build-system)
    (arguments
     `(#:tests? #f                      ; no "check" target
       #:phases (modify-phases %standard-phases
                  (delete 'configure)   ; no configure script
                  (replace
                   'install             ; no ‘install’ target
                   (lambda* (#:key outputs #:allow-other-keys)
                     (let* ((out (assoc-ref outputs "out"))
                            ;; `bin' ends in "/", so appending a file
                            ;; name below yields a full path.
                            (bin (string-append out "/bin/")))
                       (mkdir-p bin)
                       ;; Copy the three command-line tools built by make.
                       (for-each (lambda (file)
                                   (copy-file file (string-append bin file)))
                                 '("svm-train"
                                   "svm-predict"
                                   "svm-scale")))
                     #t)))))
    (home-page "https://www.csie.ntu.edu.tw/~cjlin/libsvm/")
    (synopsis "Library for Support Vector Machines")
    (description
     "LIBSVM is a machine learning library for support vector
classification, (C-SVC, nu-SVC), regression (epsilon-SVR, nu-SVR) and
distribution estimation (one-class SVM).  It supports multi-class
classification.")
    (license license:bsd-3)))
158
;; Python bindings for libsvm, built from the same source tree.  Inherits
;; everything from `libsvm' and only overrides what differs.
(define-public python-libsvm
  (package (inherit libsvm)
    (name "python-libsvm")
    (build-system gnu-build-system)
    (arguments
     `(#:tests? #f                      ; no "check" target
       ;; Build only the python/ subdirectory of the source tree.
       #:make-flags '("-C" "python")
       #:phases
       (modify-phases %standard-phases
         (delete 'configure)            ; no configure script
         (replace
          'install                      ; no ‘install’ target
          (lambda* (#:key inputs outputs #:allow-other-keys)
            ;; NOTE(review): the "python X.Y" version component is carved
            ;; out of the last 5 characters of the python input's store
            ;; path (e.g. ".../python-3.8.2" -> "3.8") — fragile; the
            ;; `python-version' helper used by ghmm below would be more
            ;; robust.  TODO confirm before changing.
            (let ((site (string-append (assoc-ref outputs "out")
                                       "/lib/python"
                                       (string-take
                                        (string-take-right
                                         (assoc-ref inputs "python") 5) 3)
                                       "/site-packages/")))
              ;; The wrapper loads the shared object by relative path;
              ;; make it rely on the runtime search path instead.
              (substitute* "python/svm.py"
                (("../libsvm.so.2") "libsvm.so.2"))
              (mkdir-p site)
              ;; Install the pure-Python modules...
              (for-each (lambda (file)
                          (copy-file file (string-append site (basename file))))
                        (find-files "python" "\\.py"))
              ;; ...and the shared library next to them.
              (copy-file "libsvm.so.2"
                         (string-append site "libsvm.so.2")))
            #t)))))
    (inputs
     `(("python" ,python)))
    (synopsis "Python bindings of libSVM")))
190
(define-public ghmm
  ;; The latest release candidate is several years and a couple of fixes have
  ;; been published since.  This is why we download the sources from the SVN
  ;; repository.
  (let ((svn-revision 2341))
    (package
      (name "ghmm")
      (version (string-append "0.9-rc3-0." (number->string svn-revision)))
      (source (origin
                (method svn-fetch)
                (uri (svn-reference
                      (url "http://svn.code.sf.net/p/ghmm/code/trunk")
                      (revision svn-revision)))
                (file-name (string-append name "-" version "-checkout"))
                (sha256
                 (base32
                  "0qbq1rqp94l530f043qzp8aw5lj7dng9wq0miffd7spd1ff638wq"))))
      (build-system gnu-build-system)
      (arguments
       ;; Pull in the python build-system helpers (for `python-version')
       ;; on top of the regular GNU build-system modules.
       `(#:imported-modules (,@%gnu-build-system-modules
                             (guix build python-build-system))
         #:modules ((guix build python-build-system)
                    ,@%gnu-build-system-modules)
         #:phases
         (modify-phases %standard-phases
           ;; The actual sources live in the "ghmm" subdirectory of the
           ;; SVN checkout.
           (add-after 'unpack 'enter-dir
             (lambda _ (chdir "ghmm") #t))
           ;; Run the test suite *after* installation: the Python unit
           ;; tests import the installed ghmm package, so the standard
           ;; pre-install check phase is deleted and re-added post-install.
           (delete 'check)
           (add-after 'install 'check
             (assoc-ref %standard-phases 'check))
           (add-before 'check 'fix-PYTHONPATH
             (lambda* (#:key inputs outputs #:allow-other-keys)
               ;; Make the freshly installed site-packages visible to the
               ;; test suite.
               (let ((python-version (python-version
                                      (assoc-ref inputs "python"))))
                 (setenv "PYTHONPATH"
                         (string-append (getenv "PYTHONPATH")
                                        ":" (assoc-ref outputs "out")
                                        "/lib/python" python-version
                                        "/site-packages")))
               #t))
           ;; Inject an RPATH pointing at the output's lib directory into
           ;; the extension's link flags, so the wrapper finds libghmm at
           ;; run time.
           (add-after 'enter-dir 'fix-runpath
             (lambda* (#:key outputs #:allow-other-keys)
               (substitute* "ghmmwrapper/setup.py"
                 (("^(.*)extra_compile_args = \\[" line indent)
                  (string-append indent
                                 "extra_link_args = [\"-Wl,-rpath="
                                 (assoc-ref outputs "out") "/lib\"],\n"
                                 line
                                 "\"-Wl,-rpath="
                                 (assoc-ref outputs "out")
                                 "/lib\", ")))
               #t))
           (add-after 'enter-dir 'disable-broken-tests
             (lambda _
               (substitute* "tests/Makefile.am"
                 ;; GHMM_SILENT_TESTS is assumed to be a command.
                 (("TESTS_ENVIRONMENT.*") "")
                 ;; Do not build broken tests.
                 (("chmm .*") "")
                 (("read_fa .*") "")
                 (("mcmc .*") "")
                 (("label_higher_order_test.*$")
                  "label_higher_order_test\n"))

               ;; These Python unittests are broken as there is no gato.
               ;; See https://sourceforge.net/p/ghmm/support-requests/3/
               (substitute* "ghmmwrapper/ghmmunittests.py"
                 (("^(.*)def (testNewXML|testMultipleTransitionClasses|testNewXML)"
                   line indent)
                  (string-append indent
                                 "@unittest.skip(\"Disabled by Guix\")\n"
                                 line)))
               #t)))))
      (inputs
       `(("python" ,python-2) ; only Python 2 is supported
         ("libxml2" ,libxml2)))
      (native-inputs
       `(("pkg-config" ,pkg-config)
         ("dejagnu" ,dejagnu)
         ("swig" ,swig)
         ("autoconf" ,autoconf)
         ("automake" ,automake)
         ("libtool" ,libtool)))
      (home-page "http://ghmm.org")
      (synopsis "Hidden Markov Model library")
      (description
       "The General Hidden Markov Model library (GHMM) is a C library with
additional Python bindings implementing a wide range of types of @dfn{Hidden
Markov Models} (HMM) and algorithms: discrete, continuous emissions, basic
training, HMM clustering, HMM mixtures.")
      (license license:lgpl2.0+))))
282
;; Markov Cluster algorithm for graphs.
(define-public mcl
  (package
    (name "mcl")
    (version "14.137")
    (source (origin
             (method url-fetch)
             ;; Upstream tarballs use dashes instead of dots in the
             ;; version ("mcl-14-137.tar.gz"), hence the substitution.
             (uri (string-append
                   "http://micans.org/mcl/src/mcl-"
                   (string-replace-substring version "." "-")
                   ".tar.gz"))
             (sha256
              (base32
               "15xlax3z31lsn62vlg94hkm75nm40q4679amnfg13jm8m2bnhy5m"))))
    (build-system gnu-build-system)
    (arguments
     ;; Enable the (Perl-based) BLAST support utilities.
     `(#:configure-flags (list "--enable-blast")))
    (inputs
     `(("perl" ,perl)))
    (home-page "http://micans.org/mcl/")
    (synopsis "Clustering algorithm for graphs")
    (description
     "The MCL algorithm is short for the @dfn{Markov Cluster Algorithm}, a
fast and scalable unsupervised cluster algorithm for graphs (also known as
networks) based on simulation of (stochastic) flow in graphs.")
    ;; In the LICENCE file and web page it says "The software is licensed
    ;; under the GNU General Public License, version 3.", but in several of
    ;; the source code files it suggests GPL3 or later.
    ;; http://listserver.ebi.ac.uk/pipermail/mcl-users/2016/000376.html
    (license license:gpl3)))
312
;; OCaml bindings around MCL.  The patch-paths phase below adapts the old
;; OASIS-generated build scripts to a modern OCaml compiler; the
;; substitutions are order-sensitive, so they are kept exactly as-is.
(define-public ocaml-mcl
  (package
    (name "ocaml-mcl")
    (version "12-068oasis4")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/fhcrc/mcl")
             (commit version)))
       (file-name (git-file-name name version))
       (sha256
        (base32
         "0009dc3h2jp3qg5val452wngpqnbfyhbcxylghq0mrjqxx0jdq5p"))))
    (build-system ocaml-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         (add-before 'configure 'patch-paths
           (lambda _
             ;; Use the store's shell instead of /bin/sh.
             (substitute* "configure"
               (("/bin/sh") (which "sh")))
             (substitute* "setup.ml"
               (("LDFLAGS=-fPIC")
                (string-append "LDFLAGS=-fPIC\"; \"SHELL=" (which "sh")))
               (("-std=c89") "-std=gnu99")

               ;; This is a mutable string, which is no longer supported.  Use
               ;; a byte buffer instead.
               (("String.make \\(String.length s\\)")
                "Bytes.make (String.length s)")

               ;; These two belong together.
               (("OASISString.replace_chars")
                "Bytes.to_string (OASISString.replace_chars")
               ((" s;")
                " s);"))
             (substitute* "myocamlbuild.ml"
               (("std=c89") "std=gnu99"))
             ;; Since we build with a more recent OCaml, we have to use C99 or
             ;; later.  This causes problems with the old C code.
             (substitute* "src/impala/matrix.c"
               (("restrict") "restrict_"))
             #t)))))
    (native-inputs
     `(("ocamlbuild" ,ocamlbuild)))
    (home-page "https://github.com/fhcrc/mcl")
    (synopsis "OCaml wrappers around MCL")
    (description
     "This package provides OCaml bindings for the MCL graph clustering
algorithm.")
    (license license:gpl3)))
365
;; Random Forests implementation aimed at high-dimensional (e.g. GWA) data.
(define-public randomjungle
  (package
    (name "randomjungle")
    (version "2.1.0")
    (source
     (origin
       (method url-fetch)
       ;; Note the unusual "tar_.gz" extension used by upstream.
       (uri (string-append
             "https://www.imbs.uni-luebeck.de/fileadmin/files/Software"
             "/randomjungle/randomjungle-" version ".tar_.gz"))
       (patches (search-patches "randomjungle-disable-static-build.patch"))
       (sha256
        (base32
         "12c8rf30cla71swx2mf4ww9mfd8jbdw5lnxd7dxhyw1ygrvg6y4w"))))
    (build-system gnu-build-system)
    (arguments
     `(#:configure-flags
       (list "--disable-static"
             (string-append "--with-boost="
                            (assoc-ref %build-inputs "boost")))
       #:phases
       (modify-phases %standard-phases
         (add-before
          'configure 'set-CXXFLAGS
          (lambda _
            ;; The old C++ code does not compile cleanly with a modern
            ;; compiler without -fpermissive.
            (setenv "CXXFLAGS" "-fpermissive ")
            #t)))))
    (inputs
     `(("boost" ,boost)
       ("gsl" ,gsl)
       ("libxml2" ,libxml2)
       ("zlib" ,zlib)))
    (native-inputs
     `(("gfortran" ,gfortran)
       ("gfortran:lib" ,gfortran "lib")))
    ;; Non-portable assembly instructions are used so building fails on
    ;; platforms other than x86_64 or i686.
    (supported-systems '("x86_64-linux" "i686-linux"))
    (home-page "https://www.imbs.uni-luebeck.de/forschung/software/details.html#c224")
    (synopsis "Implementation of the Random Forests machine learning method")
    (description
     "Random Jungle is an implementation of Random Forests.  It is supposed to
analyse high dimensional data.  In genetics, it can be used for analysing big
Genome Wide Association (GWA) data.  Random Forests is a powerful machine
learning method.  Most interesting features are variable selection, missing
value imputation, classifier creation, generalization error estimation and
sample proximities between pairs of cases.")
    (license license:gpl3+)))
414
;; Weighted finite-state transducer library; a plain autotools build with
;; no extra arguments needed.
(define-public openfst
  (package
    (name "openfst")
    (version "1.7.9")
    (source (origin
              (method url-fetch)
              (uri (string-append "http://www.openfst.org/twiki/pub/FST/"
                                  "FstDownload/openfst-" version ".tar.gz"))
              (sha256
               (base32
                "1pmx1yhn2gknj0an0zwqmzgwjaycapi896244np50a8y3nrsw6ck"))))
    (build-system gnu-build-system)
    (home-page "http://www.openfst.org")
    (synopsis "Library for weighted finite-state transducers")
    (description "OpenFst is a library for constructing, combining,
optimizing, and searching weighted finite-state transducers (FSTs).")
    (license license:asl2.0)))
432
;; Shogun ML toolbox.  The source snippet strips the non-free SVMLight
;; code (both whole files and #ifdef USE_SVMLIGHT regions) so the package
;; only contains freely licensed code.
(define-public shogun
  (package
    (name "shogun")
    (version "6.1.3")
    (source
     (origin
       (method url-fetch)
       (uri (string-append
             "ftp://shogun-toolbox.org/shogun/releases/"
             (version-major+minor version)
             "/sources/shogun-" version ".tar.bz2"))
       (sha256
        (base32
         "1rn9skm3nw6hr7mr3lgp2gfqhi7ii0lyxck7qmqnf8avq349s5jp"))
       (modules '((guix build utils)
                  (ice-9 rdelim)))
       (snippet
        '(begin
           ;; Remove non-free sources and files referencing them
           (for-each delete-file
                     (find-files "src/shogun/classifier/svm/"
                                 "SVMLight\\.(cpp|h)"))
           (for-each delete-file
                     (find-files "examples/undocumented/libshogun/"
                                 (string-append
                                  "(classifier_.*svmlight.*|"
                                  "evaluation_cross_validation_locked_comparison).cpp")))
           ;; Remove non-free functions.
           ;; Line filter: copy FILE, dropping every region between
           ;; "#ifdef USE_SVMLIGHT" and its matching
           ;; "#endif //USE_SVMLIGHT" (the #endif line itself is kept
           ;; out of the skipped region's continuation via `skip-next?').
           (define (delete-ifdefs file)
             (with-atomic-file-replacement file
               (lambda (in out)
                 (let loop ((line (read-line in 'concat))
                            (skipping? #f))
                   (if (eof-object? line)
                       #t
                       (let ((skip-next?
                              (or (and skipping?
                                       (not (string-prefix?
                                             "#endif //USE_SVMLIGHT" line)))
                                  (string-prefix?
                                   "#ifdef USE_SVMLIGHT" line))))
                         (when (or (not skipping?)
                                   (and skipping? (not skip-next?)))
                           (display line out))
                         (loop (read-line in 'concat) skip-next?)))))))
           (for-each delete-ifdefs
                     (append
                      (find-files "src/shogun/classifier/mkl"
                                  "^MKLClassification\\.cpp")
                      (find-files "src/shogun/classifier/svm"
                                  "^SVMLightOneClass\\.(cpp|h)")
                      (find-files "src/shogun/multiclass"
                                  "^ScatterSVM\\.(cpp|h)")
                      (find-files "src/shogun/kernel/"
                                  "^(Kernel|CombinedKernel|ProductKernel)\\.(cpp|h)")
                      (find-files "src/shogun/regression/svr"
                                  "^(MKLRegression|SVRLight)\\.(cpp|h)")
                      (find-files "src/shogun/transfer/domain_adaptation"
                                  "^DomainAdaptationSVM\\.(cpp|h)")))
           #t))))
    (build-system cmake-build-system)
    (arguments
     '(#:tests? #f ;no check target
       #:phases
       (modify-phases %standard-phases
         ;; These data symlinks point at a git submodule that is not part
         ;; of the release tarball.
         (add-after 'unpack 'delete-broken-symlinks
           (lambda _
             (for-each delete-file '("applications/arts/data"
                                     "applications/asp/data"
                                     "applications/easysvm/data"
                                     "applications/msplicer/data"
                                     "applications/ocr/data"
                                     "examples/meta/data"
                                     "examples/undocumented/data"))
             #t))
         ;; Install the R interface into this package's output instead of
         ;; the R input's library path.
         (add-after 'unpack 'change-R-target-path
           (lambda* (#:key outputs #:allow-other-keys)
             (substitute* '("src/interfaces/r/CMakeLists.txt"
                            "examples/meta/r/CMakeLists.txt")
               (("\\$\\{R_COMPONENT_LIB_PATH\\}")
                (string-append (assoc-ref outputs "out")
                               "/lib/R/library/")))
             #t))
         (add-after 'unpack 'fix-octave-modules
           (lambda* (#:key outputs #:allow-other-keys)
             (substitute* "src/interfaces/octave/CMakeLists.txt"
               (("^include_directories\\(\\$\\{OCTAVE_INCLUDE_DIRS\\}")
                "include_directories(${OCTAVE_INCLUDE_DIRS} ${OCTAVE_INCLUDE_DIRS}/octave")
               ;; change target directory
               (("\\$\\{OCTAVE_OCT_LOCAL_API_FILE_DIR\\}")
                (string-append (assoc-ref outputs "out")
                               "/share/octave/packages")))
             (substitute* '("src/interfaces/octave/swig_typemaps.i"
                            "src/interfaces/octave/sg_print_functions.cpp")
               ;; "octave/config.h" and "octave/oct-obj.h" deprecated in Octave.
               (("octave/config\\.h") "octave/octave-config.h")
               (("octave/oct-obj.h") "octave/ovl.h"))
             #t))
         ;; The build expects a bundled rxcpp checkout; drop in our copy.
         (add-after 'unpack 'move-rxcpp
           (lambda* (#:key inputs #:allow-other-keys)
             (let ((rxcpp-dir "shogun/third-party/rxcpp"))
               (mkdir-p rxcpp-dir)
               (install-file (assoc-ref inputs "rxcpp") rxcpp-dir)
               #t)))
         (add-before 'build 'set-HOME
           ;; $HOME needs to be set at some point during the build phase
           (lambda _ (setenv "HOME" "/tmp") #t)))
       #:configure-flags
       (list "-DCMAKE_BUILD_WITH_INSTALL_RPATH=TRUE"
             "-DUSE_SVMLIGHT=OFF" ;disable proprietary SVMLIGHT
             "-DBUILD_META_EXAMPLES=OFF" ;requires unpackaged ctags
             ;;"-DINTERFACE_JAVA=ON" ;requires unpackaged jblas
             ;;"-DINTERFACE_RUBY=ON" ;requires unpackaged ruby-narray
             ;;"-DINTERFACE_PERL=ON" ;"FindPerlLibs" does not exist
             ;;"-DINTERFACE_LUA=ON"  ;fails because lua doesn't build pkgconfig file
             "-DINTERFACE_OCTAVE=ON"
             "-DINTERFACE_PYTHON=ON"
             "-DINTERFACE_R=ON")))
    (inputs
     `(("python" ,python)
       ("numpy" ,python-numpy)
       ("r-minimal" ,r-minimal)
       ("octave" ,octave-cli)
       ("swig" ,swig)
       ("eigen" ,eigen)
       ("hdf5" ,hdf5)
       ("atlas" ,atlas)
       ("arpack" ,arpack-ng)
       ("lapack" ,lapack)
       ("glpk" ,glpk)
       ("libxml2" ,libxml2)
       ("lzo" ,lzo)
       ("zlib" ,zlib)))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("rxcpp" ,rxcpp)))
    ;; Non-portable SSE instructions are used so building fails on platforms
    ;; other than x86_64.
    (supported-systems '("x86_64-linux"))
    (home-page "https://shogun-toolbox.org/")
    (synopsis "Machine learning toolbox")
    (description
     "The Shogun Machine learning toolbox provides a wide range of unified and
efficient Machine Learning (ML) methods.  The toolbox seamlessly
combines multiple data representations, algorithm classes, and general purpose
tools.  This enables both rapid prototyping of data pipelines and extensibility
in terms of new algorithms.")
    (license license:gpl3+)))
581
;; ONNX model-interchange format, Python implementation.
(define-public python-onnx
  (package
    (name "python-onnx")
    (version "1.7.0")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "onnx" version))
       ;; ONNX will build googletest from a git checkout.  Patch CMake
       ;; to use googletest from Guix and enable tests by default.
       (patches (search-patches "python-onnx-use-system-googletest.patch"))
       (sha256
        (base32 "0j6rgfbhsw3a8id8pyg18y93k68lbjbj1kq6qia36h69f6pvlyjy"))))
    (build-system python-build-system)
    (native-inputs
     ;; cmake is needed because setup.py drives a CMake build for the
     ;; C++ core.
     `(("cmake" ,cmake)
       ("googletest" ,googletest)
       ("pybind11" ,pybind11)
       ("python-coverage" ,python-coverage)
       ("python-nbval" ,python-nbval)
       ("python-pytest" ,python-pytest)
       ("python-pytest-runner" ,python-pytest-runner)))
    (inputs
     `(("protobuf" ,protobuf)))
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-protobuf" ,python-protobuf)
       ("python-six" ,python-six)
       ("python-tabulate" ,python-tabulate)
       ("python-typing-extensions"
        ,python-typing-extensions)))
    (home-page "https://onnx.ai/")
    (synopsis "Open Neural Network Exchange")
    (description
     "Open Neural Network Exchange (ONNX) provides an open source format for
AI models, both deep learning and traditional ML.  It defines an extensible
computation graph model, as well as definitions of built-in operators and
standard data types.")
    (license license:expat)))
621
;; Header-only ReactiveX implementation for C++; also used as an input to
;; the shogun package above.
(define-public rxcpp
  (package
    (name "rxcpp")
    (version "4.1.0")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/ReactiveX/RxCpp")
             (commit (string-append "v" version))))
       (sha256
        (base32 "1rdpa3jlc181jd08nk437aar085h28i45s6nzrv65apb3xyyz0ij"))
       (file-name (git-file-name name version))))
    (build-system cmake-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         ;; Warnings from a newer compiler would otherwise abort the build.
         (add-after 'unpack 'remove-werror
           (lambda _
             (substitute* (find-files ".")
               (("-Werror") ""))
             #t))
         ;; There is no "check" make target; run ctest directly.
         (replace 'check
           (lambda _
             (invoke "ctest"))))))
    (native-inputs
     `(("catch" ,catch-framework)))
    (home-page "http://reactivex.io/")
    (synopsis "Reactive Extensions for C++")
    (description
     "The Reactive Extensions for C++ (RxCpp) is a library of algorithms for
values-distributed-in-time.  ReactiveX is a library for composing asynchronous
and event-based programs by using observable sequences.

It extends the observer pattern to support sequences of data and/or events and
adds operators that allow you to compose sequences together declaratively while
abstracting away concerns about things like low-level threading,
synchronization, thread-safety, concurrent data structures, and non-blocking
I/O.")
    (license license:asl2.0)))
662
(define-public gemmlowp-for-tensorflow
  ;; The commit hash is taken from "tensorflow/workspace.bzl".
  (let ((commit "38ebac7b059e84692f53e5938f97a9943c120d98")
        (revision "2"))
    (package
      (name "gemmlowp")
      (version (git-version "0" revision commit))
      (source (origin
                (method url-fetch)
                ;; Fetch from the Bazel mirror so the archive matches what
                ;; TensorFlow expects.
                (uri (string-append "https://mirror.bazel.build/"
                                    "github.com/google/gemmlowp/archive/"
                                    commit ".zip"))
                (file-name (string-append "gemmlowp-" version ".zip"))
                (sha256
                 (base32
                  "0n56s2g8hrssm4w8qj1v58gfm56a04n9v992ixkmvk6zjiralzxq"))))
      (build-system cmake-build-system)
      (arguments
       `(#:configure-flags
         ;; Enable SSE2 only on x86 targets; other systems build without it.
         (list ,@(match (%current-system)
                   ((or "x86_64-linux" "i686-linux")
                    '("-DCMAKE_CXX_FLAGS=-msse2"))
                   (_ '())))
         #:phases
         (modify-phases %standard-phases
           ;; This directory contains the CMakeLists.txt.
           (add-after 'unpack 'chdir
             (lambda _ (chdir "contrib") #t))
           ;; There is no install target
           (replace 'install
             (lambda* (#:key outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (lib (string-append out "/lib/"))
                      (inc (string-append out "/include/")))
                 ;; Install the shared library and the public headers,
                 ;; preserving the upstream directory layout.
                 (install-file "../build/libeight_bit_int_gemm.so" lib)
                 (for-each (lambda (dir)
                             (let ((target (string-append inc "/" dir)))
                               (mkdir-p target)
                               (for-each (lambda (h)
                                           (install-file h target))
                                         (find-files (string-append "../" dir)
                                                     "\\.h$"))))
                           '("meta" "profiling" "public" "fixedpoint"
                             "eight_bit_int_gemm" "internal"))
                 #t))))))
      (native-inputs
       `(("unzip" ,unzip)))
      (home-page "https://github.com/google/gemmlowp")
      (synopsis "Small self-contained low-precision GEMM library")
      (description
       "This is a small self-contained low-precision @dfn{general matrix
multiplication} (GEMM) library.  It is not a full linear algebra library.
Low-precision means that the input and output matrix entries are integers on
at most 8 bits.  To avoid overflow, results are internally accumulated on more
than 8 bits, and at the end only some significant 8 bits are kept.")
      (license license:asl2.0))))
719
;; dlib C++ machine-learning toolkit, built as a shared library with
;; bundled third-party code removed.
(define-public dlib
  (package
    (name "dlib")
    (version "19.20")
    (source (origin
              (method url-fetch)
              (uri (string-append
                    "http://dlib.net/files/dlib-" version ".tar.bz2"))
              (sha256
               (base32
                "139jyi19qz37wwmmy48gil9d1kkh2r3w3bwdzabha6ayxmba96nz"))
              (modules '((guix build utils)))
              (snippet
               '(begin
                  ;; Delete ~13MB of bundled dependencies.
                  (delete-file-recursively "dlib/external")
                  (delete-file-recursively "docs/dlib/external")
                  #t))))
    (build-system cmake-build-system)
    (arguments
     `(#:configure-flags '("-DBUILD_SHARED_LIBS=ON")
       #:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'disable-asserts
           (lambda _
             ;; config.h recommends explicitly enabling or disabling asserts
             ;; when building as a shared library. By default neither is set.
             (substitute* "dlib/config.h"
               (("^//#define DLIB_DISABLE_ASSERTS") "#define DLIB_DISABLE_ASSERTS"))
             #t))
         (add-after 'disable-asserts 'disable-failing-tests
           (lambda _
             ;; One test times out on MIPS, so we need to disable it.
             ;; Others are flaky on some platforms.
             ;; The platform is decided at package-build time (unquoted),
             ;; so the disabled-test list is baked into the build script.
             (let* ((system ,(or (%current-target-system)
                                 (%current-system)))
                    (disabled-tests (cond
                                     ((string-prefix? "mips64" system)
                                      '("object_detector" ; timeout
                                        "data_io"))
                                     ((string-prefix? "armhf" system)
                                      '("learning_to_track"))
                                     ((string-prefix? "i686" system)
                                      '("optimization"))
                                     (else '()))))
               ;; Drop each disabled test from the test makefile.
               (for-each
                (lambda (test)
                  (substitute* "dlib/test/makefile"
                    (((string-append "SRC \\+= " test "\\.cpp")) "")))
                disabled-tests)
               #t)))
         (replace 'check
           (lambda _
             ;; No test target, so we build and run the unit tests here.
             (let ((test-dir (string-append "../dlib-" ,version "/dlib/test")))
               (with-directory-excursion test-dir
                 (invoke "make" "-j" (number->string (parallel-job-count)))
                 (invoke "./dtest" "--runall"))
               #t))))))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ;; For tests.
       ("libnsl" ,libnsl)))
    (inputs
     `(("giflib" ,giflib)
       ("lapack" ,lapack)
       ("libjpeg" ,libjpeg-turbo)
       ("libpng" ,libpng)
       ("libx11" ,libx11)
       ("openblas" ,openblas)
       ("zlib" ,zlib)))
    (synopsis
     "Toolkit for making machine learning and data analysis applications in C++")
    (description
     "Dlib is a modern C++ toolkit containing machine learning algorithms and
tools.  It is used in both industry and academia in a wide range of domains
including robotics, embedded devices, mobile phones, and large high performance
computing environments.")
    (home-page "http://dlib.net")
    (license license:boost1.0)))
800
;; scikit-learn, fetched from git (the PyPI sdist lacks some test data).
(define-public python-scikit-learn
  (package
    (name "python-scikit-learn")
    (version "0.22.1")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/scikit-learn/scikit-learn")
             (commit version)))
       (file-name (git-file-name name version))
       (sha256
        (base32
         "1xqxv210gsmjw094vc5ghq2y9lmm74qkk22pq6flcjzj51b86jxf"))))
    (build-system python-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         ;; Build the Cython extensions in-place so the test suite can
         ;; import them from the source tree.
         (add-after 'build 'build-ext
           (lambda _ (invoke "python" "setup.py" "build_ext" "--inplace") #t))
         (replace 'check
           (lambda _
             ;; Restrict OpenBLAS threads to prevent segfaults while testing!
             (setenv "OPENBLAS_NUM_THREADS" "1")

             ;; Some tests require write access to $HOME.
             (setenv "HOME" "/tmp")

             ;; Skip tests marked as needing network access.
             (invoke "pytest" "sklearn" "-m" "not network")))
         (add-before 'reset-gzip-timestamps 'make-files-writable
           (lambda* (#:key outputs #:allow-other-keys)
             ;; Make sure .gz files are writable so that the
             ;; 'reset-gzip-timestamps' phase can do its work.
             (let ((out (assoc-ref outputs "out")))
               (for-each make-file-writable
                         (find-files out "\\.gz$"))
               #t))))))
    (inputs
     `(("openblas" ,openblas)))
    (native-inputs
     `(("python-pytest" ,python-pytest)
       ("python-pandas" ,python-pandas) ;for tests
       ("python-cython" ,python-cython)))
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-scipy" ,python-scipy)
       ("python-joblib" ,python-joblib)))
    (home-page "https://scikit-learn.org/")
    (synopsis "Machine Learning in Python")
    (description
     "Scikit-learn provides simple and efficient tools for data mining and
data analysis.")
    ;; The Python 2 variant is a distinct, older package (see below).
    (properties `((python2-variant . ,(delay python2-scikit-learn))))
    (license license:bsd-3)))
855
;; scikit-learn 0.22 and later only supports Python 3, so we stick with
;; an older version here.
(define-public python2-scikit-learn
  ;; Inherit everything from the Python 3 package, overriding only the
  ;; version and source (0.20.x is the last Python 2 compatible series).
  (let ((base (package-with-python2 (strip-python2-variant python-scikit-learn))))
    (package
      (inherit base)
      (version "0.20.4")
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/scikit-learn/scikit-learn")
                      (commit version)))
                (file-name (git-file-name "python-scikit-learn" version))
                (sha256
                 (base32
                  "08zbzi8yx5wdlxfx9jap61vg1malc9ajf576w7a0liv6jvvrxlpj")))))))
872
;; Relief-based feature-selection algorithms, scikit-learn compatible.
;; Note: the PyPI project name is "skrebate".
(define-public python-scikit-rebate
  (package
    (name "python-scikit-rebate")
    (version "0.6")
    (source (origin
              (method url-fetch)
              (uri (pypi-uri "skrebate" version))
              (sha256
               (base32
                "1h7qs9gjxpzqabzhb8rmpv3jpmi5iq41kqdibg48299h94iikiw7"))))
    (build-system python-build-system)
    ;; Pandas is only needed to run the tests.
    (native-inputs
     `(("python-pandas" ,python-pandas)))
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-scipy" ,python-scipy)
       ("python-scikit-learn" ,python-scikit-learn)
       ("python-joblib" ,python-joblib)))
    (home-page "https://epistasislab.github.io/scikit-rebate/")
    (synopsis "Relief-based feature selection algorithms for Python")
    (description "Scikit-rebate is a scikit-learn-compatible Python
implementation of ReBATE, a suite of Relief-based feature selection algorithms
for Machine Learning.  These algorithms excel at identifying features that are
predictive of the outcome in supervised learning problems, and are especially
good at identifying feature interactions that are normally overlooked by
standard feature selection algorithms.")
    (license license:expat)))
901
;; Automatic differentiation of native Python/NumPy code.  Upstream has made
;; no release, so we package a pinned commit via git-version.
(define-public python-autograd
  (let* ((commit "442205dfefe407beffb33550846434baa90c4de7")
         (revision "0")
         (version (git-version "0.0.0" revision commit)))
    (package
      (name "python-autograd")
      ;; NOTE: home-page is declared before source on purpose; the origin's
      ;; URL field refers to it.
      (home-page "https://github.com/HIPS/autograd")
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url home-page)
                      (commit commit)))
                (sha256
                 (base32
                  "189sv2xb0mwnjawa9z7mrgdglc1miaq93pnck26r28fi1jdwg0z4"))
                (file-name (git-file-name name version))))
      (version version)
      (build-system python-build-system)
      (native-inputs
       `(("python-nose" ,python-nose)
         ("python-pytest" ,python-pytest)))
      (propagated-inputs
       `(("python-future" ,python-future)
         ("python-numpy" ,python-numpy)))
      (arguments
       ;; Run the test suite with pytest instead of the default setup.py test.
       `(#:phases (modify-phases %standard-phases
                    (replace 'check
                      (lambda _
                        (invoke "py.test" "-v"))))))
      (synopsis "Efficiently computes derivatives of NumPy code")
      (description "Autograd can automatically differentiate native Python and
NumPy code. It can handle a large subset of Python's features, including loops,
ifs, recursion and closures, and it can even take derivatives of derivatives
of derivatives. It supports reverse-mode differentiation
(a.k.a. backpropagation), which means it can efficiently take gradients of
scalar-valued functions with respect to array-valued arguments, as well as
forward-mode differentiation, and the two can be composed arbitrarily. The
main intended application of Autograd is gradient-based optimization.")
      (license license:expat))))
941
;; Python 2 variant of python-autograd, derived automatically.
(define-public python2-autograd
  (package-with-python2 python-autograd))
944
;; Microsoft's gradient-boosting framework, built with MPI support but
;; without GPU acceleration.
(define-public lightgbm
  (package
    (name "lightgbm")
    (version "2.0.12")
    (source (origin
              (method git-fetch)
              (uri (git-reference
                    (url "https://github.com/Microsoft/LightGBM")
                    (commit (string-append "v" version))))
              (sha256
               (base32
                "0jlvyn7k81dzrh9ij3zw576wbgiwmmr26rzpdxjn1dbpc3njpvzi"))
              (file-name (git-file-name name version))))
    (build-system cmake-build-system)
    (native-inputs
     `(("python-pytest" ,python-pytest)
       ("python-nose" ,python-nose)))
    (inputs
     `(("openmpi" ,openmpi)))
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-scipy" ,python-scipy)))
    (arguments
     `(#:configure-flags
       '("-DUSE_MPI=ON")
       #:phases
       (modify-phases %standard-phases
         (replace 'check
           (lambda _
             ;; Tests live in the source tree, one level up from the
             ;; out-of-tree CMake build directory.
             (with-directory-excursion "../source"
               (invoke "pytest" "tests/c_api_test/test_.py")))))))
    (home-page "https://github.com/Microsoft/LightGBM")
    (synopsis "Gradient boosting framework based on decision tree algorithms")
    (description "LightGBM is a gradient boosting framework that uses tree
based learning algorithms. It is designed to be distributed and efficient with
the following advantages:

@itemize
@item Faster training speed and higher efficiency
@item Lower memory usage
@item Better accuracy
@item Parallel and GPU learning supported (not enabled in this package)
@item Capable of handling large-scale data
@end itemize\n")
    (license license:expat)))
990
(define-public vowpal-wabbit
  ;; Language bindings not included.
  (package
    (name "vowpal-wabbit")
    (version "8.5.0")
    (source (origin
              (method git-fetch)
              (uri (git-reference
                    (url "https://github.com/JohnLangford/vowpal_wabbit")
                    (commit version)))
              (sha256
               (base32
                "04bwzk6ifgnz3fmzid8b7avxf9n5pnx9xcjm61nkjng1vv0bpj8x"))
              (file-name (git-file-name name version))))
    (build-system gnu-build-system)
    (inputs
     `(("boost" ,boost)
       ("zlib" ,zlib)))
    (arguments
     `(#:configure-flags
       (list (string-append "--with-boost="
                            (assoc-ref %build-inputs "boost")))
       #:phases
       (modify-phases %standard-phases
         ;; The git checkout is read-only; later phases patch files in place.
         (add-after 'unpack 'make-files-writable
           (lambda _
             (for-each make-file-writable (find-files "." ".*")) #t))
         ;; "make install" does not ship the full set of headers; copy all
         ;; of them so dependents can build against the library.
         (add-after 'install 'install-more-headers
           (lambda* (#:key outputs #:allow-other-keys)
             (for-each
              (lambda (header)
                (install-file header (string-append
                                      (assoc-ref outputs "out")
                                      "/include/vowpalwabbit")))
              (find-files "vowpalwabbit" "\\.h$"))
             #t)))))
    (home-page "https://github.com/JohnLangford/vowpal_wabbit")
    (synopsis "Fast machine learning library for online learning")
    (description "Vowpal Wabbit is a machine learning system with techniques
such as online, hashing, allreduce, reductions, learning2search, active, and
interactive learning.")
    (license license:bsd-3)))
1033
;; FaST-LMM is Python-2-only upstream, hence the python2- package with no
;; Python 3 counterpart.
(define-public python2-fastlmm
  (package
    (name "python2-fastlmm")
    (version "0.2.21")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "fastlmm" version ".zip"))
       (sha256
        (base32
         "1q8c34rpmwkfy3r4d5172pzdkpfryj561897z9r3x22gq7813x1m"))))
    (build-system python-build-system)
    (arguments
     `(#:tests? #f ; some test files are missing
       #:python ,python-2)) ; only Python 2.7 is supported
    (propagated-inputs
     `(("python2-numpy" ,python2-numpy)
       ("python2-scipy" ,python2-scipy)
       ("python2-matplotlib" ,python2-matplotlib)
       ("python2-pandas" ,python2-pandas)
       ("python2-scikit-learn" ,python2-scikit-learn)
       ("python2-pysnptools" ,python2-pysnptools)))
    (native-inputs
     `(("unzip" ,unzip)
       ("python2-cython" ,python2-cython)
       ("python2-mock" ,python2-mock)
       ("python2-nose" ,python2-nose)))
    (home-page "http://research.microsoft.com/en-us/um/redmond/projects/mscompbio/fastlmm/")
    (synopsis "Perform genome-wide association studies on large data sets")
    (description
     "FaST-LMM, which stands for Factored Spectrally Transformed Linear Mixed
Models, is a program for performing both single-SNP and SNP-set genome-wide
association studies (GWAS) on extremely large data sets.")
    (license license:asl2.0)))
1068
;; There have been no proper releases yet.
;; Kaldi speech-recognition toolkit, built from a pinned commit since
;; upstream makes no releases.  The hand-written ./configure script needs
;; heavy patching (see the 'configure phase).
(define-public kaldi
  (let ((commit "d4791c0f3fc1a09c042dac365e120899ee2ad21e")
        (revision "2"))
    (package
      (name "kaldi")
      (version (git-version "0" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/kaldi-asr/kaldi")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "07k80my6f19mhrkwbzhjsnpf9871wmrwkl0ym468i830w67qyjrz"))))
      (build-system gnu-build-system)
      (arguments
       `(#:test-target "test"
         #:phases
         (modify-phases %standard-phases
           ;; All build machinery lives under src/.
           (add-after 'unpack 'chdir
             (lambda _ (chdir "src") #t))
           (replace 'configure
             (lambda* (#:key build system inputs outputs #:allow-other-keys)
               ;; SSE flags are only valid on x86; drop them elsewhere.
               (when (not (or (string-prefix? "x86_64" system)
                              (string-prefix? "i686" system)))
                 (substitute* "makefiles/linux_openblas.mk"
                   (("-msse -msse2") "")))
               (substitute* "makefiles/default_rules.mk"
                 (("/bin/bash") (which "bash")))
               ;; Don't require the bundled portaudio checkout...
               (substitute* "Makefile"
                 (("ext_depend: check_portaudio")
                  "ext_depend:"))
               ;; ...and point the extension Makefiles at our portaudio.
               (substitute* '("online/Makefile"
                              "onlinebin/Makefile"
                              "gst-plugin/Makefile")
                 (("../../tools/portaudio/install")
                  (assoc-ref inputs "portaudio")))

               ;; This `configure' script doesn't support variables passed as
               ;; arguments, nor does it support "prefix".
               (let ((out (assoc-ref outputs "out"))
                     (openblas (assoc-ref inputs "openblas"))
                     (openfst (assoc-ref inputs "openfst")))
                 (substitute* "configure"
                   (("check_for_slow_expf;") "")
                   ;; This affects the RPATH and also serves as the
                   ;; installation directory.
                   (("KALDILIBDIR=`pwd`/lib")
                    (string-append "KALDILIBDIR=" out "/lib")))
                 (mkdir-p out) ; must exist
                 (setenv "CONFIG_SHELL" (which "bash"))
                 (setenv "OPENFST_VER" ,(package-version openfst))
                 (invoke "./configure"
                         "--use-cuda=no"
                         "--shared"
                         (string-append "--openblas-root=" openblas)
                         (string-append "--fst-root=" openfst)))))
           ;; The online decoders and the GStreamer plugin are not part of
           ;; the default build; build them explicitly.
           (add-after 'build 'build-ext-and-gstreamer-plugin
             (lambda _
               (invoke "make" "-C" "online" "depend")
               (invoke "make" "-C" "online")
               (invoke "make" "-C" "onlinebin" "depend")
               (invoke "make" "-C" "onlinebin")
               (invoke "make" "-C" "gst-plugin" "depend")
               (invoke "make" "-C" "gst-plugin")
               #t))
           ;; TODO: also install the executables.
           (replace 'install
             (lambda* (#:key outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (inc (string-append out "/include"))
                      (lib (string-append out "/lib")))
                 (mkdir-p lib)
                 ;; The build phase installed symlinks to the actual
                 ;; libraries.  Replace each link with its target.
                 (for-each (lambda (file)
                             (let ((target (readlink file)))
                               (delete-file file)
                               (install-file target lib)))
                           (find-files lib "\\.so"))
                 ;; Install headers, mirroring the source tree layout.
                 (for-each (lambda (file)
                             (let ((target-dir (string-append inc "/" (dirname file))))
                               (install-file file target-dir)))
                           (find-files "." "\\.h"))
                 (install-file "gst-plugin/libgstonlinegmmdecodefaster.so"
                               (string-append lib "/gstreamer-1.0"))
                 #t))))))
      (inputs
       `(("alsa-lib" ,alsa-lib)
         ("gfortran" ,gfortran "lib")
         ("glib" ,glib)
         ("gstreamer" ,gstreamer)
         ("jack" ,jack-1)
         ("openblas" ,openblas)
         ("openfst" ,openfst)
         ("portaudio" ,portaudio)
         ("python" ,python)))
      (native-inputs
       `(("glib" ,glib "bin") ; glib-genmarshal
         ("grep" ,grep)
         ("sed" ,sed)
         ("pkg-config" ,pkg-config)
         ("which" ,which)))
      (home-page "https://kaldi-asr.org/")
      (synopsis "Speech recognition toolkit")
      (description "Kaldi is an extensible toolkit for speech recognition
written in C++.")
      (license license:asl2.0))))
1180
;; GStreamer decoder plugin built against the kaldi package above; it also
;; needs the Kaldi *source* tree for headers (kaldi-src native input).
(define-public gst-kaldi-nnet2-online
  (let ((commit "cb227ef43b66a9835c14eb0ad39e08ee03c210ad")
        (revision "2"))
    (package
      (name "gst-kaldi-nnet2-online")
      (version (git-version "0" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/alumae/gst-kaldi-nnet2-online")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "1i6ffwiavxx07ri0lxix6s8q0r31x7i4xxvhys5jxkixf5q34w8g"))))
      (build-system gnu-build-system)
      (arguments
       `(#:tests? #f ; there are none
         #:make-flags
         (list (string-append "SHELL="
                              (assoc-ref %build-inputs "bash") "/bin/bash")
               (string-append "KALDI_ROOT="
                              (assoc-ref %build-inputs "kaldi-src"))
               (string-append "KALDILIBDIR="
                              (assoc-ref %build-inputs "kaldi") "/lib")
               "KALDI_FLAVOR=dynamic")
         #:phases
         (modify-phases %standard-phases
           (add-after 'unpack 'chdir
             (lambda _ (chdir "src") #t))
           (replace 'configure
             (lambda* (#:key inputs #:allow-other-keys)
               ;; Point the compiler at GLib and GStreamer headers.
               (let ((glib (assoc-ref inputs "glib")))
                 (setenv "CXXFLAGS" "-fPIC")
                 (setenv "CPLUS_INCLUDE_PATH"
                         (string-append glib "/include/glib-2.0:"
                                        glib "/lib/glib-2.0/include:"
                                        (assoc-ref inputs "gstreamer")
                                        "/include/gstreamer-1.0")))
               ;; Our KALDI_ROOT is a bare source checkout, so the generated
               ;; kaldi.mk is absent; neutralize the include and the
               ;; resulting error.
               (substitute* "Makefile"
                 (("include \\$\\(KALDI_ROOT\\)/src/kaldi.mk") "")
                 (("\\$\\(error Cannot find") "#"))
               #t))
           (add-before 'build 'build-depend
             (lambda* (#:key make-flags #:allow-other-keys)
               (apply invoke "make" "depend" make-flags)))
           (replace 'install
             (lambda* (#:key outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (lib (string-append out "/lib/gstreamer-1.0")))
                 ;; The single plugin library is the whole installation.
                 (install-file "libgstkaldinnet2onlinedecoder.so" lib)
                 #t))))))
      (inputs
       `(("glib" ,glib)
         ("gstreamer" ,gstreamer)
         ("jansson" ,jansson)
         ("openfst" ,openfst)
         ("kaldi" ,kaldi)))
      (native-inputs
       `(("bash" ,bash)
         ("glib:bin" ,glib "bin") ; glib-genmarshal
         ("kaldi-src" ,(package-source kaldi))
         ("pkg-config" ,pkg-config)))
      (home-page "https://kaldi-asr.org/")
      (synopsis "Gstreamer plugin for decoding speech")
      (description "This package provides a GStreamer plugin that wraps
Kaldi's @code{SingleUtteranceNnet2Decoder}. It requires iVector-adapted DNN
acoustic models. The iVectors are adapted to the current audio stream
automatically.")
      (license license:asl2.0))))
1251
(define-public kaldi-gstreamer-server
  ;; This is the tip of the py3 branch
  (let ((commit "f68cab490be7eb0da2af1475fbc16655f50a60cb")
        (revision "2"))
    (package
      (name "kaldi-gstreamer-server")
      (version (git-version "0" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/alumae/kaldi-gstreamer-server")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "17lh1368vkg8ngrcbn2phvigzlmalrqg6djx2gg61qq1a0nj87dm"))))
      (build-system gnu-build-system)
      (arguments
       `(#:tests? #f ; there are no tests that can be run automatically
         #:modules ((guix build utils)
                    (guix build gnu-build-system)
                    (srfi srfi-26))
         #:phases
         (modify-phases %standard-phases
           ;; Pure Python sources: nothing to configure, "build" just
           ;; byte-compiles them.
           (delete 'configure)
           (replace 'build
             (lambda* (#:key outputs #:allow-other-keys)
               ;; Disable hash randomization to ensure the generated .pycs
               ;; are reproducible.
               (setenv "PYTHONHASHSEED" "0")
               (with-directory-excursion "kaldigstserver"
                 ;; See https://github.com/alumae/kaldi-gstreamer-server/issues/232
                 (substitute* "master_server.py"
                   (("\\.replace\\('\\\\.*") ")"))

                 ;; This is a Python 2 file
                 (delete-file "decoder_test.py")
                 (delete-file "test-buffer.py")

                 (for-each (lambda (file)
                             (apply invoke
                                    `("python"
                                      "-m" "compileall"
                                      "-f" ; force rebuild
                                      ,file)))
                           (find-files "." "\\.py$")))
               #t))
           (replace 'install
             (lambda* (#:key inputs outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (bin (string-append out "/bin"))
                      (share (string-append out "/share/kaldi-gstreamer-server/")))
                 ;; Install Python files
                 (with-directory-excursion "kaldigstserver"
                   (for-each (cut install-file <> share)
                             (find-files "." ".*")))

                 ;; Install sample configuration files
                 (for-each (cut install-file <> share)
                           (find-files "." "\\.yaml"))

                 ;; Generate shell wrappers for the three entry points; each
                 ;; sets PYTHONPATH and GST_PLUGIN_PATH before exec'ing the
                 ;; corresponding script from the share directory.
                 (mkdir-p bin)
                 (let* ((server (string-append bin "/kaldi-gst-server"))
                        (client (string-append bin "/kaldi-gst-client"))
                        (worker (string-append bin "/kaldi-gst-worker"))
                        (PYTHONPATH (getenv "PYTHONPATH"))
                        (GST_PLUGIN_PATH (string-append
                                          (assoc-ref inputs "gst-kaldi-nnet2-online")
                                          "/lib/gstreamer-1.0:${GST_PLUGIN_PATH}"))
                        (wrap (lambda (wrapper what)
                                (with-output-to-file wrapper
                                  (lambda _
                                    (format #t
                                            "#!~a
export PYTHONPATH=~a
export GST_PLUGIN_PATH=~a
exec ~a ~a/~a \"$@\"~%"
                                            (which "bash") PYTHONPATH GST_PLUGIN_PATH
                                            (which "python") share what)))
                                (chmod wrapper #o555))))
                   (for-each wrap
                             (list server client worker)
                             (list "master_server.py"
                                   "client.py"
                                   "worker.py")))
                 #t))))))
      (inputs
       `(("gst-kaldi-nnet2-online" ,gst-kaldi-nnet2-online)
         ("python" ,python-wrapper)
         ("python-pygobject" ,python-pygobject)
         ("python-pyyaml" ,python-pyyaml)
         ("python-tornado" ,python-tornado-6)))
      (home-page "https://github.com/alumae/kaldi-gstreamer-server")
      (synopsis "Real-time full-duplex speech recognition server")
      (description "This is a real-time full-duplex speech recognition server,
based on the Kaldi toolkit and the GStreamer framework and implemented in
Python.")
      (license license:bsd-2))))
1351
;; Note that Tensorflow includes a "third_party" directory, which seems to not
;; only contain modified subsets of upstream library source code, but also
;; adapter headers provided by Google (such as the fft.h header, which is not
;; part of the upstream project code).  The Tensorflow code includes headers
;; from the "third_party" directory.  It does not look like we can replace
;; these headers with unmodified upstream files, so we keep them.
;; TensorFlow 1.9, built via the (now-removed upstream) CMake build instead
;; of Bazel, with all bundled third-party downloads replaced by Guix inputs
;; or pre-fetched sources.
;;
;; Fixes relative to the previous revision:
;; - propagated-inputs label "python-termcolo" corrected to
;;   "python-termcolor" to match the variable and the file's conventions;
;; - doubled slash removed from the generated libcares.so link path.
(define-public tensorflow
  (package
    (name "tensorflow")
    (version "1.9.0")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/tensorflow/tensorflow")
             (commit (string-append "v" version))))
       (file-name (string-append "tensorflow-" version "-checkout"))
       (sha256
        (base32
         "0a9kwha395g3wgxfwln5j8vn9nkspmd75xldrlqdq540w996g8xa"))))
    (build-system cmake-build-system)
    (arguments
     `(#:tests? #f ; no "check" target
       #:build-type "Release"
       #:configure-flags
       (let ((protobuf (assoc-ref %build-inputs "protobuf"))
             (protobuf:native (assoc-ref %build-inputs "protobuf:native"))
             (jsoncpp (assoc-ref %build-inputs "jsoncpp"))
             (snappy (assoc-ref %build-inputs "snappy"))
             (sqlite (assoc-ref %build-inputs "sqlite")))
         (list
          ;; Use protobuf from Guix
          (string-append "-Dprotobuf_STATIC_LIBRARIES="
                         protobuf "/lib/libprotobuf.so")
          (string-append "-DPROTOBUF_PROTOC_EXECUTABLE="
                         protobuf:native "/bin/protoc")

          ;; Use snappy from Guix
          (string-append "-Dsnappy_STATIC_LIBRARIES="
                         snappy "/lib/libsnappy.so")
          ;; Yes, this is not actually the include directory but a prefix...
          (string-append "-Dsnappy_INCLUDE_DIR=" snappy)

          ;; Use jsoncpp from Guix
          (string-append "-Djsoncpp_STATIC_LIBRARIES="
                         jsoncpp "/lib/libjsoncpp.so")
          ;; Yes, this is not actually the include directory but a prefix...
          (string-append "-Djsoncpp_INCLUDE_DIR=" jsoncpp)

          ;; Use sqlite from Guix
          (string-append "-Dsqlite_STATIC_LIBRARIES="
                         sqlite "/lib/libsqlite.a")

          ;; Use system libraries wherever possible. Currently, this
          ;; only affects zlib.
          "-Dsystemlib_ALL=ON"
          "-Dtensorflow_ENABLE_POSITION_INDEPENDENT_CODE=ON"
          "-Dtensorflow_BUILD_SHARED_LIB=ON"
          "-Dtensorflow_OPTIMIZE_FOR_NATIVE_ARCH=OFF"
          "-Dtensorflow_ENABLE_SSL_SUPPORT=OFF"
          "-Dtensorflow_BUILD_CONTRIB_KERNELS=OFF"))
       #:make-flags
       (list "CC=gcc")
       #:modules ((ice-9 ftw)
                  (guix build utils)
                  (guix build cmake-build-system)
                  ((guix build python-build-system)
                   #:select (python-version)))
       #:imported-modules (,@%cmake-build-system-modules
                           (guix build python-build-system))
       #:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'set-source-file-times-to-1980
           ;; At the end of the tf_python_build_pip_package target, a ZIP
           ;; archive should be generated via bdist_wheel, but it fails with
           ;; "ZIP does not support timestamps before 1980". Luckily,
           ;; SOURCE_DATE_EPOCH is respected, which we set to some time in
           ;; 1980.
           (lambda _ (setenv "SOURCE_DATE_EPOCH" "315532800") #t))
         ;; See https://github.com/tensorflow/tensorflow/issues/20517#issuecomment-406373913
         (add-after 'unpack 'python3.7-compatibility
           (lambda _
             (substitute* '("tensorflow/python/eager/pywrap_tfe_src.cc"
                            "tensorflow/python/lib/core/ndarray_tensor.cc"
                            "tensorflow/python/lib/core/py_func.cc")
               (("PyUnicode_AsUTF8") "(char *)PyUnicode_AsUTF8"))
             ;; "async" became a keyword in Python 3.7.
             (substitute* "tensorflow/c/eager/c_api.h"
               (("unsigned char async")
                "unsigned char is_async"))

             ;; Remove dependency on tensorboard, a complicated but probably
             ;; optional package.
             (substitute* "tensorflow/tools/pip_package/setup.py"
               ((".*'tensorboard >.*") ""))

             ;; Fix the build with python-3.8, taken from rejected upstream patch:
             ;; https://github.com/tensorflow/tensorflow/issues/34197
             (substitute* (find-files "tensorflow/python" ".*\\.cc$")
               (("(nullptr,)(\\ +/. tp_print)" _ _ tp_print)
                (string-append "NULL, " tp_print)))
             #t))
         (add-after 'python3.7-compatibility 'chdir
           (lambda _ (chdir "tensorflow/contrib/cmake") #t))
         (add-after 'chdir 'disable-downloads
           (lambda* (#:key inputs #:allow-other-keys)
             ;; Neutralize every ExternalProject download; sources are
             ;; provided as inputs instead.
             (substitute* (find-files "external" "\\.cmake$")
               (("GIT_REPOSITORY.*") "")
               (("GIT_TAG.*") "")
               (("PREFIX ")
                "DOWNLOAD_COMMAND \"\"\nPREFIX "))

             ;; Use packages from Guix
             (let ((grpc (assoc-ref inputs "grpc")))
               (substitute* "CMakeLists.txt"
                 ;; Sqlite
                 (("include\\(sqlite\\)") "")
                 (("\\$\\{sqlite_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "sqlite")
                                 "/lib/libsqlite3.so"))
                 (("sqlite_copy_headers_to_destination") "")

                 ;; PNG
                 (("include\\(png\\)") "")
                 (("\\$\\{png_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "libpng")
                                 "/lib/libpng16.so"))
                 (("png_copy_headers_to_destination") "")

                 ;; JPEG
                 (("include\\(jpeg\\)") "")
                 (("\\$\\{jpeg_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "libjpeg")
                                 "/lib/libjpeg.so"))
                 (("jpeg_copy_headers_to_destination") "")

                 ;; GIF
                 (("include\\(gif\\)") "")
                 (("\\$\\{gif_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "giflib")
                                 "/lib/libgif.so"))
                 (("gif_copy_headers_to_destination") "")

                 ;; lmdb
                 (("include\\(lmdb\\)") "")
                 (("\\$\\{lmdb_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "lmdb")
                                 "/lib/liblmdb.so"))
                 (("lmdb_copy_headers_to_destination") "")

                 ;; Protobuf
                 (("include\\(protobuf\\)") "")
                 (("protobuf_copy_headers_to_destination") "")
                 (("^ +protobuf") "")

                 ;; gRPC
                 (("include\\(grpc\\)")
                  "find_package(grpc REQUIRED NAMES gRPC)")
                 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES grpc\\)") "")

                 ;; Eigen
                 (("include\\(eigen\\)")
                  (string-append "find_package(eigen REQUIRED NAMES Eigen3)
set(eigen_INCLUDE_DIRS ${CMAKE_CURRENT_BINARY_DIR}/external/eigen_archive "
                                 (assoc-ref inputs "eigen") "/include/eigen3)"))
                 (("^ +eigen") "")

                 ;; snappy
                 (("include\\(snappy\\)")
                  "add_definitions(-DTF_USE_SNAPPY)")
                 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES snappy\\)") "")

                 ;; jsoncpp
                 (("include\\(jsoncpp\\)") "")
                 (("^ +jsoncpp") ""))

               (substitute* "tf_core_framework.cmake"
                 ((" grpc") "")
                 (("\\$\\{GRPC_BUILD\\}/grpc_cpp_plugin")
                  (which "grpc_cpp_plugin"))
                 ;; Link with gRPC libraries
                 (("add_library\\(tf_protos_cc.*" m)
                  (string-append m
                                 (format #f "\ntarget_link_libraries(tf_protos_cc PRIVATE \
~a/lib/libgrpc++_unsecure.a \
~a/lib/libgrpc_unsecure.a \
~a/lib/libaddress_sorting.a \
~a/lib/libgpr.a \
~a/lib/libcares.so
)\n"
                                         grpc grpc grpc grpc
                                         (assoc-ref inputs "c-ares"))))))
             (substitute* "tf_tools.cmake"
               (("add_dependencies\\(\\$\\{proto_text.*") ""))
             ;; Remove dependency on bundled grpc
             (substitute* "tf_core_distributed_runtime.cmake"
               (("tf_core_cpu grpc") "tf_core_cpu"))

             ;; This directory is a dependency of many targets.
             (mkdir-p "protobuf")
             #t))
         (add-after 'configure 'unpack-third-party-sources
           (lambda* (#:key inputs #:allow-other-keys)
             ;; This is needed to configure bundled packages properly.
             (setenv "CONFIG_SHELL" (which "bash"))
             (for-each
              (lambda (name)
                (let* ((what  (assoc-ref inputs (string-append name "-src")))
                       ;; CMake directory names use "_" where the input
                       ;; labels use "-".
                       (name* (string-map (lambda (c)
                                            (if (char=? c #\-)
                                                #\_ c)) name))
                       (where (string-append "../build/" name* "/src/" name*)))
                  (cond
                   ((string-suffix? ".zip" what)
                    (mkdir-p where)
                    (with-directory-excursion where
                      (invoke "unzip" what)))
                   ((string-suffix? ".tar.gz" what)
                    (mkdir-p where)
                    (invoke "tar" "xf" what
                            "-C" where "--strip-components=1"))
                   (else
                    ;; A plain checkout: copy it and make it writable.
                    (let ((parent (dirname where)))
                      (mkdir-p parent)
                      (with-directory-excursion parent
                        (when (file-exists? name*)
                          (delete-file-recursively name*))
                        (copy-recursively what name*)
                        (map make-file-writable
                             (find-files name* ".*"))))))))
              (list "boringssl"
                    "cub"
                    "double-conversion"
                    "farmhash"
                    "fft2d"
                    "highwayhash"
                    "nsync"
                    "re2"))

             (rename-file "../build/cub/src/cub/cub-1.8.0/"
                          "../build/cub/src/cub/cub/")
             #t))
         (add-after 'unpack 'fix-python-build
           (lambda* (#:key inputs outputs #:allow-other-keys)
             (mkdir-p "protobuf-src")
             (invoke "tar" "xf" (assoc-ref inputs "protobuf:src")
                     "-C" "protobuf-src" "--strip-components=1")
             (mkdir-p "eigen-src")
             (invoke "tar" "xf" (assoc-ref inputs "eigen:src")
                     "-C" "eigen-src" "--strip-components=1")

             (substitute* "tensorflow/contrib/cmake/tf_python.cmake"
               ;; Ensure that all Python dependencies can be found at build time.
               (("PYTHONPATH=\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/tf_python" m)
                (string-append m ":" (getenv "PYTHONPATH")))
               ;; Take protobuf source files from our source package.
               (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/protobuf/src/protobuf/src/google")
                (string-append (getcwd) "/protobuf-src/src/google")))

             (substitute* '("tensorflow/contrib/cmake/tf_shared_lib.cmake"
                            "tensorflow/contrib/cmake/tf_python.cmake")
               ;; Take Eigen source files from our source package.
               (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/eigen/src/eigen/")
                (string-append (getcwd) "/eigen-src/"))
               ;; Take Eigen headers from our own package.
               (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/external/eigen_archive")
                (string-append (assoc-ref inputs "eigen") "/include/eigen3")))

             ;; Correct the RUNPATH of ops libraries generated for Python.
             ;; TODO: this doesn't work :(
             ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
             ;; warning: RUNPATH contains bogus entries: ("/tmp/guix-build-tensorflow-1.9.0.drv-0/source/tensorflow/contrib/build")
             ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
             ;; error: depends on 'libpywrap_tensorflow_internal.so', which
             ;; cannot be found in RUNPATH ...
             (substitute* "tensorflow/contrib/cmake/tf_cc_ops.cmake"
               (("set_target_properties.*")
                (string-append "set_target_properties(${_AT_TARGET} PROPERTIES \
COMPILE_FLAGS ${target_compile_flags} \
INSTALL_RPATH_USE_LINK_PATH TRUE \
INSTALL_RPATH " (assoc-ref outputs "out") "/lib)\n")))
             #t))
         (add-after 'build 'build-pip-package
           (lambda* (#:key outputs #:allow-other-keys)
             (setenv "LDFLAGS"
                     (string-append "-Wl,-rpath="
                                    (assoc-ref outputs "out") "/lib"))
             (invoke "make" "tf_python_build_pip_package")
             #t))
         (add-after 'build-pip-package 'install-python
           (lambda* (#:key inputs outputs #:allow-other-keys)
             (let ((out (assoc-ref outputs "out"))
                   (wheel (car (find-files "../build/tf_python/dist/" "\\.whl$")))
                   (python-version (python-version
                                    (assoc-ref inputs "python"))))
               (invoke "python" "-m" "pip" "install" wheel
                       (string-append "--prefix=" out))

               ;; XXX: broken RUNPATH, see fix-python-build phase.
               (delete-file
                (string-append
                 out "/lib/python" python-version
                 "/site-packages/tensorflow/contrib/"
                 "seq2seq/python/ops/lib_beam_search_ops.so"))
               #t))))))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("protobuf:native" ,protobuf-3.6) ; protoc
       ("protobuf:src" ,(package-source protobuf-3.6))
       ("eigen:src" ,(package-source eigen-for-tensorflow))
       ;; install_pip_packages.sh wants setuptools 39.1.0 specifically.
       ("python-setuptools" ,python-setuptools-for-tensorflow)

       ;; The commit hashes and URLs for third-party source code are taken
       ;; from "tensorflow/workspace.bzl".
       ("boringssl-src"
        ,(let ((commit "ee7aa02")
               (revision "1"))
           (origin
             (method git-fetch)
             (uri (git-reference
                   (url "https://boringssl.googlesource.com/boringssl")
                   (commit commit)))
             (file-name (string-append "boringssl-0-" revision
                                       (string-take commit 7)
                                       "-checkout"))
             (sha256
              (base32
               "1jf693q0nw0adsic6cgmbdx6g7wr4rj4vxa8j1hpn792fqhd8wgw")))))
       ("cub-src"
        ,(let ((version "1.8.0"))
           (origin
             (method url-fetch)
             (uri (string-append "https://mirror.bazel.build/github.com/NVlabs/"
                                 "cub/archive/" version ".zip"))
             (file-name (string-append "cub-" version ".zip"))
             (sha256
              (base32
               "1hsqikqridb90dkxkjr2918dcry6pfh46ccnwrzawl56aamhdykb")))))
       ("double-conversion-src"
        ,(let ((commit "5664746")
               (revision "1"))
           (origin
             (method git-fetch)
             (uri (git-reference
                   (url "https://github.com/google/double-conversion")
                   (commit commit)))
             (file-name
              (git-file-name "double-conversion"
                             (string-append "0-" revision "."
                                            (string-take commit 7))))
             (sha256
              (base32
               "1h5lppqqxcvdg5jq42i5msgwx20ryij3apvmndflngrgdpc04gn1")))))
       ("farmhash-src"
        ,(let ((commit "816a4ae622e964763ca0862d9dbd19324a1eaf45"))
           (origin
             (method url-fetch)
             (uri (string-append
                   "https://mirror.bazel.build/github.com/google/farmhash/archive/"
                   commit ".tar.gz"))
             (file-name (string-append "farmhash-0-" (string-take commit 7)
                                       ".tar.gz"))
             (sha256
              (base32
               "185b2xdxl4d4cnsnv6abg8s22gxvx8673jq2yaq85bz4cdy58q35")))))
       ;; The license notice on the home page at
       ;; http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html says:
       ;; Copyright Takuya OOURA, 1996-2001
       ;;
       ;; You may use, copy, modify and distribute this code for any purpose
       ;; (include commercial use) and without fee. Please refer to this
       ;; package when you modify this code.
       ;;
       ;; We take the identical tarball from the Bazel mirror, because the URL
       ;; at the home page is not versioned and might change.
       ("fft2d-src"
        ,(origin
           (method url-fetch)
           (uri "https://mirror.bazel.build/www.kurims.kyoto-u.ac.jp/~ooura/fft.tgz")
           (file-name "fft2d.tar.gz")
           (sha256
            (base32
             "15jjkfvhqvl2c0753d2di8hz0pyzn598g74wqy79awdrf1y67fsj"))))
       ("highwayhash-src"
        ,(let ((commit "be5edafc2e1a455768e260ccd68ae7317b6690ee")
               (revision "1"))
           (origin
             (method git-fetch)
             (uri (git-reference
                   (url "https://github.com/google/highwayhash")
                   (commit commit)))
             (file-name (string-append "highwayhash-0-" revision
                                       (string-take commit 7)
                                       "-checkout"))
             (sha256
              (base32
               "154jwf98cyy54hldr94pgjn85zynly3abpnc1avmb8a18lzwjyb6")))))
       ("nsync-src"
        ,(let ((version "0559ce013feac8db639ee1bf776aca0325d28777")
               (revision "1"))
           (origin
             (method url-fetch)
             (uri (string-append "https://mirror.bazel.build/"
                                 "github.com/google/nsync/archive/"
                                 version ".tar.gz"))
             (file-name (string-append "nsync-0." revision
                                       "-" (string-take version 7)
                                       ".tar.gz"))
             (sha256
              (base32
               "0qdkyqym34x739mmzv97ah5r7ph462v5xkxqxvidmcfqbi64b132")))))
       ("re2-src"
        ,(let ((commit "e7efc48")
               (revision "1"))
           (origin
             (method git-fetch)
             (uri (git-reference
                   (url "https://github.com/google/re2")
                   (commit commit)))
             (file-name (string-append "re2-0-" revision
                                       (string-take commit 7)
                                       "-checkout"))
             (sha256
              (base32
               "161g9841rjfsy5pn52fcis0s9hdr7rxvb06pad38j5rppfihvign")))))
       ("googletest" ,googletest)
       ("swig" ,swig)
       ("unzip" ,unzip)))
    (propagated-inputs
     `(("python-absl-py" ,python-absl-py)
       ("python-astor" ,python-astor)
       ("python-gast" ,python-gast)
       ("python-grpcio" ,python-grpcio)
       ("python-numpy" ,python-numpy)
       ("python-protobuf" ,python-protobuf-3.6)
       ("python-six" ,python-six)
       ("python-termcolor" ,python-termcolor)
       ("python-wheel" ,python-wheel)))
    (inputs
     `(("c-ares" ,c-ares)
       ("eigen" ,eigen-for-tensorflow)
       ("gemmlowp" ,gemmlowp-for-tensorflow)
       ("lmdb" ,lmdb)
       ("libjpeg" ,libjpeg-turbo)
       ("libpng" ,libpng)
       ("giflib" ,giflib)
       ("grpc" ,grpc-1.16.1 "static")
       ("grpc:bin" ,grpc-1.16.1)
       ("jsoncpp" ,jsoncpp-for-tensorflow)
       ("snappy" ,snappy)
       ("sqlite" ,sqlite)
       ("protobuf" ,protobuf-3.6)
       ("python" ,python-wrapper)
       ("zlib" ,zlib)))
    (home-page "https://tensorflow.org")
    (synopsis "Machine learning framework")
    (description
     "TensorFlow is a flexible platform for building and training machine
learning models. It provides a library for high performance numerical
computation and includes high level Python APIs, including both a sequential
API for beginners that allows users to build models quickly by plugging
together building blocks and a subclassing API with an imperative style for
advanced research.")
    (license license:asl2.0)))
1816
;; Interpretable ML (iML): shared IO/interface code used by model-explanation
;; tools such as SHAP.
(define-public python-iml
  (package
    (name "python-iml")
    (version "0.6.2")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "iml" version))
       (sha256
        (base32
         "1k8szlpm19rcwcxdny9qdm3gmaqq8akb4xlvrzyz8c2d679aak6l"))))
    (build-system python-build-system)
    ;; Run-time Python dependencies.  Labels follow the file-wide convention
    ;; of matching the full package name (e.g. "python-numpy"), for
    ;; consistency with the other packages in this module.
    (propagated-inputs
     `(("python-ipython" ,python-ipython)
       ("python-numpy" ,python-numpy)
       ("python-pandas" ,python-pandas)
       ("python-scipy" ,python-scipy)))
    ;; Nose is only needed to run the test suite.
    (native-inputs
     `(("python-nose" ,python-nose)))
    (home-page "https://github.com/interpretable-ml/iml")
    (synopsis "Interpretable Machine Learning (iML) package")
    (description "Interpretable ML (iML) is a set of data type objects,
visualizations, and interfaces that can be used by any method designed to
explain the predictions of machine learning models (or really the output of
any function).  It currently contains the interface and IO code from the Shap
project, and it will potentially also do the same for the Lime project.")
    (license license:expat)))
1844
;; Reference implementations of popular deep-learning model architectures,
;; maintained separately from Keras itself.
(define-public python-keras-applications
  (package
    (name "python-keras-applications")
    (version "1.0.8")
    (source
     (origin
       (method url-fetch)
       ;; Note: the PyPI project name uses an underscore.
       (uri (pypi-uri "Keras_Applications" version))
       (sha256
        (base32
         "1rcz31ca4axa6kzhjx4lwqxbg4wvlljkj8qj9a7p9sfd5fhzjyam"))))
    (build-system python-build-system)
    ;; The tests require Keras, but this package is needed to build Keras.
    ;; Disabling tests breaks that dependency cycle.
    (arguments '(#:tests? #f))
    ;; Run-time Python dependencies.
    (propagated-inputs
     `(("python-h5py" ,python-h5py)
       ("python-numpy" ,python-numpy)))
    ;; Test-only tooling; unused while #:tests? is #f, but kept so tests can
    ;; be re-enabled without hunting for the inputs again.
    (native-inputs
     `(("python-pytest" ,python-pytest)
       ("python-pytest-cov" ,python-pytest-cov)
       ("python-pytest-pep8" ,python-pytest-pep8)
       ("python-pytest-xdist" ,python-pytest-xdist)))
    (home-page "https://github.com/keras-team/keras-applications")
    (synopsis "Reference implementations of popular deep learning models")
    (description
     "This package provides reference implementations of popular deep learning
models for use with the Keras deep learning framework.")
    (license license:expat)))
1873
;; Data preprocessing/augmentation module of Keras, packaged separately.
(define-public python-keras-preprocessing
  (package
    (name "python-keras-preprocessing")
    (version "1.1.0")
    (source
     (origin
       (method url-fetch)
       ;; Note: the PyPI project name uses an underscore.
       (uri (pypi-uri "Keras_Preprocessing" version))
       (sha256
        (base32
         "1r98nm4k1svsqjyaqkfk23i31bl1kcfcyp7094yyj3c43phfp3as"))))
    (build-system python-build-system)
    ;; Run-time Python dependencies.
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-six" ,python-six)))
    ;; Test-only dependencies; tensorflow is needed here because the test
    ;; suite exercises the TensorFlow backend.
    (native-inputs
     `(("python-pandas" ,python-pandas)
       ("python-pillow" ,python-pillow)
       ("python-pytest" ,python-pytest)
       ("python-pytest-cov" ,python-pytest-cov)
       ("python-pytest-xdist" ,python-pytest-xdist)
       ("tensorflow" ,tensorflow)))
    (home-page "https://github.com/keras-team/keras-preprocessing/")
    (synopsis "Data preprocessing and augmentation for deep learning models")
    (description
     "Keras Preprocessing is the data preprocessing and data augmentation
module of the Keras deep learning library.  It provides utilities for working
with image data, text data, and sequence data.")
    (license license:expat)))
1903
;; The Keras high-level neural-network API, built here against the
;; TensorFlow backend only (Theano and CNTK are not packaged in Guix).
(define-public python-keras
  (package
    (name "python-keras")
    (version "2.2.4")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "Keras" version))
       ;; Local patch adjusting the integration tests; see the patch file
       ;; for details.
       (patches (search-patches "python-keras-integration-test.patch"))
       (sha256
        (base32
         "1j8bsqzh49vjdxy6l1k4iwax5vpjzniynyd041xjavdzvfii1dlh"))))
    (build-system python-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         ;; Drop the Theano and CNTK backends (not available in Guix) and
         ;; the tests that would exercise them, before anything is built.
         (add-after 'unpack 'remove-tests-for-unavailable-features
           (lambda _
             (delete-file "keras/backend/theano_backend.py")
             (delete-file "keras/backend/cntk_backend.py")
             (delete-file "tests/keras/backend/backend_test.py")

             ;; FIXME: This doesn't work because Tensorflow is missing the
             ;; coder ops library.
             (delete-file "tests/keras/test_callbacks.py")
             #t))
         (replace 'check
           (lambda _
             ;; These tests attempt to download data files from the internet.
             (delete-file "tests/integration_tests/test_datasets.py")
             (delete-file "tests/integration_tests/imagenet_utils_test.py")

             ;; Prepend the build output so the tests import the freshly
             ;; built modules rather than the unpacked sources.
             (setenv "PYTHONPATH"
                     (string-append (getcwd) "/build/lib:"
                                    (getenv "PYTHONPATH")))
             ;; "no:cacheprovider" avoids pytest writing a cache directory;
             ;; keras/utils is skipped entirely.
             (invoke "py.test" "-v"
                     "-p" "no:cacheprovider"
                     "--ignore" "keras/utils"))))))
    ;; Run-time dependencies.  graphviz is propagated presumably because
    ;; python-pydot invokes the `dot' binary at run time -- NOTE(review):
    ;; confirm; otherwise it could be a plain input.
    (propagated-inputs
     `(("python-h5py" ,python-h5py)
       ("python-keras-applications" ,python-keras-applications)
       ("python-keras-preprocessing" ,python-keras-preprocessing)
       ("python-numpy" ,python-numpy)
       ("python-pydot" ,python-pydot)
       ("python-pyyaml" ,python-pyyaml)
       ("python-scipy" ,python-scipy)
       ("python-six" ,python-six)
       ("tensorflow" ,tensorflow)
       ("graphviz" ,graphviz)))
    ;; Test-only dependencies.
    (native-inputs
     `(("python-pandas" ,python-pandas)
       ("python-pytest" ,python-pytest)
       ("python-pytest-cov" ,python-pytest-cov)
       ("python-pytest-pep8" ,python-pytest-pep8)
       ("python-pytest-timeout" ,python-pytest-timeout)
       ("python-pytest-xdist" ,python-pytest-xdist)
       ("python-sphinx" ,python-sphinx)
       ("python-requests" ,python-requests)))
    (home-page "https://github.com/keras-team/keras")
    (synopsis "High-level deep learning framework")
    (description "Keras is a high-level neural networks API, written in Python
and capable of running on top of TensorFlow. It was developed with a focus on
enabling fast experimentation. Use Keras if you need a deep learning library
that:

@itemize
@item Allows for easy and fast prototyping (through user friendliness,
modularity, and extensibility).
@item Supports both convolutional networks and recurrent networks, as well as
combinations of the two.
@item Runs seamlessly on CPU and GPU.
@end itemize\n")
    (license license:expat)))
1977
;; Facebook's collective-communications library, packaged from an untagged
;; git snapshot.
(define-public gloo
  (let ((version "0.0.0") ; no proper version tag
        (commit "ca528e32fea9ca8f2b16053cff17160290fc84ce")
        (revision "0"))
    (package
      (name "gloo")
      (version (git-version version revision commit))
      (source
       (origin
         (method git-fetch)
         (uri (git-reference
               (url "https://github.com/facebookincubator/gloo")
               (commit commit)))
         (file-name (git-file-name name version))
         (sha256
          (base32
           "1q9f80zy75f6njrzrqkmhc0g3qxs4gskr7ns2jdqanxa2ww7a99w"))))
      (build-system cmake-build-system)
      ;; googletest is only needed to build the test target enabled below.
      (native-inputs
       `(("googletest" ,googletest)))
      (arguments
       `(#:configure-flags '("-DBUILD_TEST=1")
         #:phases
         (modify-phases %standard-phases
           (replace 'check
             (lambda _
               ;; NOTE(review): this invokes the "gloo_test" make target,
               ;; which appears to only *build* the test binary; nothing here
               ;; seems to execute it -- confirm whether the tests actually
               ;; run.
               (invoke "make" "gloo_test")
               #t)))))
      (synopsis "Collective communications library")
      (description
       "Gloo is a collective communications library.  It comes with a
number of collective algorithms useful for machine learning applications.
These include a barrier, broadcast, and allreduce.")
      (home-page "https://github.com/facebookincubator/gloo")
      (license license:bsd-3))))
2013
;; UMAP dimension-reduction library.
(define-public python-umap-learn
  (package
    (name "python-umap-learn")
    (version "0.3.10")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "umap-learn" version))
       (sha256
        (base32
         "02ada2yy6km6zgk2836kg1c97yrcpalvan34p8c57446finnpki1"))))
    (build-system python-build-system)
    ;; Test-only dependencies.
    (native-inputs
     `(("python-joblib" ,python-joblib)
       ("python-nose" ,python-nose)))
    ;; Run-time Python dependencies; numba provides the JIT compilation UMAP
    ;; relies on.
    (propagated-inputs
     `(("python-numba" ,python-numba)
       ("python-numpy" ,python-numpy)
       ("python-scikit-learn" ,python-scikit-learn)
       ("python-scipy" ,python-scipy)))
    (home-page "https://github.com/lmcinnes/umap")
    (synopsis
     "Uniform Manifold Approximation and Projection")
    (description
     "Uniform Manifold Approximation and Projection is a dimension reduction
technique that can be used for visualisation similarly to t-SNE, but also for
general non-linear dimension reduction.")
    (license license:bsd-3)))