gnu: Add python-onnx.
1 ;;; GNU Guix --- Functional package management for GNU
2 ;;; Copyright © 2015, 2016, 2017, 2018, 2019, 2020 Ricardo Wurmus <rekado@elephly.net>
3 ;;; Copyright © 2016, 2020 Efraim Flashner <efraim@flashner.co.il>
4 ;;; Copyright © 2016, 2017, 2020 Marius Bakke <mbakke@fastmail.com>
5 ;;; Copyright © 2016 Hartmut Goebel <h.goebel@crazy-compilers.com>
6 ;;; Copyright © 2018, 2019, 2020 Tobias Geerinckx-Rice <me@tobias.gr>
7 ;;; Copyright © 2018 Kei Kebreau <kkebreau@posteo.net>
8 ;;; Copyright © 2018 Mark Meyer <mark@ofosos.org>
9 ;;; Copyright © 2018 Ben Woodcroft <donttrustben@gmail.com>
10 ;;; Copyright © 2018 Fis Trivial <ybbs.daans@hotmail.com>
11 ;;; Copyright © 2018 Julien Lepiller <julien@lepiller.eu>
12 ;;; Copyright © 2018 Björn Höfling <bjoern.hoefling@bjoernhoefling.de>
13 ;;; Copyright © 2019 Nicolas Goaziou <mail@nicolasgoaziou.fr>
14 ;;; Copyright © 2019, 2020 Guillaume Le Vaillant <glv@posteo.net>
15 ;;; Copyright © 2019 Brett Gilio <brettg@gnu.org>
16 ;;; Copyright © 2020 Konrad Hinsen <konrad.hinsen@fastmail.net>
17 ;;; Copyright © 2020 Edouard Klein <edk@beaver-labs.com>
18 ;;; Copyright © 2020 Vinicius Monego <monego@posteo.net>
19 ;;;
20 ;;; This file is part of GNU Guix.
21 ;;;
22 ;;; GNU Guix is free software; you can redistribute it and/or modify it
23 ;;; under the terms of the GNU General Public License as published by
24 ;;; the Free Software Foundation; either version 3 of the License, or (at
25 ;;; your option) any later version.
26 ;;;
27 ;;; GNU Guix is distributed in the hope that it will be useful, but
28 ;;; WITHOUT ANY WARRANTY; without even the implied warranty of
29 ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
30 ;;; GNU General Public License for more details.
31 ;;;
32 ;;; You should have received a copy of the GNU General Public License
33 ;;; along with GNU Guix. If not, see <http://www.gnu.org/licenses/>.
34
35 (define-module (gnu packages machine-learning)
36 #:use-module ((guix licenses) #:prefix license:)
37 #:use-module (guix packages)
38 #:use-module (guix utils)
39 #:use-module (guix download)
40 #:use-module (guix svn-download)
41 #:use-module (guix build-system cmake)
42 #:use-module (guix build-system gnu)
43 #:use-module (guix build-system ocaml)
44 #:use-module (guix build-system python)
45 #:use-module (guix build-system r)
46 #:use-module (guix git-download)
47 #:use-module (gnu packages)
48 #:use-module (gnu packages adns)
49 #:use-module (gnu packages algebra)
50 #:use-module (gnu packages audio)
51 #:use-module (gnu packages autotools)
52 #:use-module (gnu packages base)
53 #:use-module (gnu packages bash)
54 #:use-module (gnu packages boost)
55 #:use-module (gnu packages check)
56 #:use-module (gnu packages compression)
57 #:use-module (gnu packages cmake)
58 #:use-module (gnu packages cran)
59 #:use-module (gnu packages databases)
60 #:use-module (gnu packages dejagnu)
61 #:use-module (gnu packages gcc)
62 #:use-module (gnu packages glib)
63 #:use-module (gnu packages graphviz)
64 #:use-module (gnu packages gstreamer)
65 #:use-module (gnu packages image)
66 #:use-module (gnu packages linux)
67 #:use-module (gnu packages maths)
68 #:use-module (gnu packages mpi)
69 #:use-module (gnu packages ocaml)
70 #:use-module (gnu packages onc-rpc)
71 #:use-module (gnu packages perl)
72 #:use-module (gnu packages pkg-config)
73 #:use-module (gnu packages protobuf)
74 #:use-module (gnu packages python)
75 #:use-module (gnu packages python-check)
76 #:use-module (gnu packages python-science)
77 #:use-module (gnu packages python-web)
78 #:use-module (gnu packages python-xyz)
79 #:use-module (gnu packages rpc)
80 #:use-module (gnu packages serialization)
81 #:use-module (gnu packages sphinx)
82 #:use-module (gnu packages statistics)
83 #:use-module (gnu packages sqlite)
84 #:use-module (gnu packages swig)
85 #:use-module (gnu packages web)
86 #:use-module (gnu packages xml)
87 #:use-module (gnu packages xorg)
88 #:use-module (ice-9 match))
89
90 (define-public fann
91 ;; The last release is >100 commits behind, so we package from git.
92 (let ((commit "d71d54788bee56ba4cf7522801270152da5209d7"))
93 (package
94 (name "fann")
95 (version (string-append "2.2.0-1." (string-take commit 8)))
96 (source (origin
97 (method git-fetch)
98 (uri (git-reference
99 (url "https://github.com/libfann/fann")
100 (commit commit)))
101 (file-name (string-append name "-" version "-checkout"))
102 (sha256
103 (base32
104 "0ibwpfrjs6q2lijs8slxjgzb2llcl6rk3v2ski4r6215g5jjhg3x"))))
105 (build-system cmake-build-system)
106 (arguments
107 `(#:phases
108 (modify-phases %standard-phases
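;; Run the compiled test binary directly from the "tests" directory
;; of the build tree.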
109 (replace 'check
110 (lambda* (#:key outputs #:allow-other-keys)
111 (let* ((out (assoc-ref outputs "out")))
112 (with-directory-excursion (string-append (getcwd) "/tests")
113 (invoke "./fann_tests"))))))))
114 (home-page "http://leenissen.dk/fann/wp/")
115 (synopsis "Fast Artificial Neural Network")
116 (description
117 "FANN is a neural network library, which implements multilayer
118 artificial neural networks in C with support for both fully connected and
119 sparsely connected networks.")
120 (license license:lgpl2.1))))
121
122 (define-public libsvm
123 (package
124 (name "libsvm")
125 (version "3.23")
126 (source
127 (origin
128 (method url-fetch)
129 (uri (string-append "https://www.csie.ntu.edu.tw/~cjlin/libsvm/"
130 name "-" version ".tar.gz"))
131 (sha256
132 (base32 "0jpaq0rr92x38p4nk3gjan79ip67m6p80anb28z1d8601miysyi5"))))
133 (build-system gnu-build-system)
134 (arguments
135 `(#:tests? #f ; no "check" target
136 #:phases (modify-phases %standard-phases
137 (delete 'configure)
138 (replace
139 'install ; no ‘install’ target
140 (lambda* (#:key outputs #:allow-other-keys)
141 (let* ((out (assoc-ref outputs "out"))
142 (bin (string-append out "/bin/")))
143 (mkdir-p bin)
144 (for-each (lambda (file)
145 (copy-file file (string-append bin file)))
146 '("svm-train"
147 "svm-predict"
148 "svm-scale")))
149 #t)))))
150 (home-page "https://www.csie.ntu.edu.tw/~cjlin/libsvm/")
151 (synopsis "Library for Support Vector Machines")
152 (description
153 "LIBSVM is a machine learning library for support vector
154 classification (C-SVC, nu-SVC), regression (epsilon-SVR, nu-SVR) and
155 distribution estimation (one-class SVM). It supports multi-class
156 classification.")
157 (license license:bsd-3)))
158
159 (define-public python-libsvm
160 (package (inherit libsvm)
161 (name "python-libsvm")
162 (build-system gnu-build-system)
163 (arguments
164 `(#:tests? #f ; no "check" target
165 #:make-flags '("-C" "python")
166 #:phases
167 (modify-phases %standard-phases
168 (delete 'configure)
169 (replace
170 'install ; no ‘install’ target
171 (lambda* (#:key inputs outputs #:allow-other-keys)
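;; Compute the "major.minor" Python version from the python input's
;; store path: its last five characters are the full version
;; (e.g. "3.8.2"), of which the first three ("3.8") are kept.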
172 (let ((site (string-append (assoc-ref outputs "out")
173 "/lib/python"
174 (string-take
175 (string-take-right
176 (assoc-ref inputs "python") 5) 3)
177 "/site-packages/")))
178 (substitute* "python/svm.py"
179 (("../libsvm.so.2") "libsvm.so.2"))
180 (mkdir-p site)
181 (for-each (lambda (file)
182 (copy-file file (string-append site (basename file))))
183 (find-files "python" "\\.py"))
184 (copy-file "libsvm.so.2"
185 (string-append site "libsvm.so.2")))
186 #t)))))
187 (inputs
188 `(("python" ,python)))
189 (synopsis "Python bindings of libSVM")))
190
191 (define-public ghmm
192 ;; The latest release candidate is several years old, and a couple of fixes
193 ;; have been published since. This is why we download the sources from the SVN
194 ;; repository.
195 (let ((svn-revision 2341))
196 (package
197 (name "ghmm")
198 (version (string-append "0.9-rc3-0." (number->string svn-revision)))
199 (source (origin
200 (method svn-fetch)
201 (uri (svn-reference
202 (url "http://svn.code.sf.net/p/ghmm/code/trunk")
203 (revision svn-revision)))
204 (file-name (string-append name "-" version "-checkout"))
205 (sha256
206 (base32
207 "0qbq1rqp94l530f043qzp8aw5lj7dng9wq0miffd7spd1ff638wq"))))
208 (build-system gnu-build-system)
209 (arguments
210 `(#:imported-modules (,@%gnu-build-system-modules
211 (guix build python-build-system))
212 #:modules ((guix build python-build-system)
213 ,@%gnu-build-system-modules)
214 #:phases
215 (modify-phases %standard-phases
216 (add-after 'unpack 'enter-dir
217 (lambda _ (chdir "ghmm") #t))
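;; Run the test suite only after installation, so that the compiled
;; Python modules can be imported from the output's site-packages
;; directory (see 'fix-PYTHONPATH below).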
218 (delete 'check)
219 (add-after 'install 'check
220 (assoc-ref %standard-phases 'check))
221 (add-before 'check 'fix-PYTHONPATH
222 (lambda* (#:key inputs outputs #:allow-other-keys)
223 (let ((python-version (python-version
224 (assoc-ref inputs "python"))))
225 (setenv "PYTHONPATH"
226 (string-append (getenv "PYTHONPATH")
227 ":" (assoc-ref outputs "out")
228 "/lib/python" python-version
229 "/site-packages")))
230 #t))
231 (add-after 'enter-dir 'fix-runpath
232 (lambda* (#:key outputs #:allow-other-keys)
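;; Add an explicit -rpath to the link flags so that the extension
;; modules built by setup.py can find the GHMM shared library in
;; this package's lib directory.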
233 (substitute* "ghmmwrapper/setup.py"
234 (("^(.*)extra_compile_args = \\[" line indent)
235 (string-append indent
236 "extra_link_args = [\"-Wl,-rpath="
237 (assoc-ref outputs "out") "/lib\"],\n"
238 line
239 "\"-Wl,-rpath="
240 (assoc-ref outputs "out")
241 "/lib\", ")))
242 #t))
243 (add-after 'enter-dir 'disable-broken-tests
244 (lambda _
245 (substitute* "tests/Makefile.am"
246 ;; GHMM_SILENT_TESTS is assumed to be a command.
247 (("TESTS_ENVIRONMENT.*") "")
248 ;; Do not build broken tests.
249 (("chmm .*") "")
250 (("read_fa .*") "")
251 (("mcmc .*") "")
252 (("label_higher_order_test.*$")
253 "label_higher_order_test\n"))
254
255 ;; These Python unittests are broken as there is no gato.
256 ;; See https://sourceforge.net/p/ghmm/support-requests/3/
257 (substitute* "ghmmwrapper/ghmmunittests.py"
258 (("^(.*)def (testNewXML|testMultipleTransitionClasses|testNewXML)"
259 line indent)
260 (string-append indent
261 "@unittest.skip(\"Disabled by Guix\")\n"
262 line)))
263 #t)))))
264 (inputs
265 `(("python" ,python-2) ; only Python 2 is supported
266 ("libxml2" ,libxml2)))
267 (native-inputs
268 `(("pkg-config" ,pkg-config)
269 ("dejagnu" ,dejagnu)
270 ("swig" ,swig)
271 ("autoconf" ,autoconf)
272 ("automake" ,automake)
273 ("libtool" ,libtool)))
274 (home-page "http://ghmm.org")
275 (synopsis "Hidden Markov Model library")
276 (description
277 "The General Hidden Markov Model library (GHMM) is a C library with
278 additional Python bindings implementing a wide range of types of @dfn{Hidden
279 Markov Models} (HMM) and algorithms: discrete, continuous emissions, basic
280 training, HMM clustering, HMM mixtures.")
281 (license license:lgpl2.0+))))
282
283 (define-public mcl
284 (package
285 (name "mcl")
286 (version "14.137")
287 (source (origin
288 (method url-fetch)
289 (uri (string-append
290 "http://micans.org/mcl/src/mcl-"
291 (string-replace-substring version "." "-")
292 ".tar.gz"))
293 (sha256
294 (base32
295 "15xlax3z31lsn62vlg94hkm75nm40q4679amnfg13jm8m2bnhy5m"))))
296 (build-system gnu-build-system)
297 (arguments
298 `(#:configure-flags (list "--enable-blast")))
299 (inputs
300 `(("perl" ,perl)))
301 (home-page "http://micans.org/mcl/")
302 (synopsis "Clustering algorithm for graphs")
303 (description
304 "The MCL algorithm is short for the @dfn{Markov Cluster Algorithm}, a
305 fast and scalable unsupervised cluster algorithm for graphs (also known as
306 networks) based on simulation of (stochastic) flow in graphs.")
307 ;; In the LICENCE file and web page it says "The software is licensed
308 ;; under the GNU General Public License, version 3.", but in several of
309 ;; the source code files it suggests GPL3 or later.
310 ;; http://listserver.ebi.ac.uk/pipermail/mcl-users/2016/000376.html
311 (license license:gpl3)))
312
313 (define-public ocaml-mcl
314 (package
315 (name "ocaml-mcl")
316 (version "12-068oasis4")
317 (source
318 (origin
319 (method git-fetch)
320 (uri (git-reference
321 (url "https://github.com/fhcrc/mcl")
322 (commit version)))
323 (file-name (git-file-name name version))
324 (sha256
325 (base32
326 "0009dc3h2jp3qg5val452wngpqnbfyhbcxylghq0mrjqxx0jdq5p"))))
327 (build-system ocaml-build-system)
328 (arguments
329 `(#:phases
330 (modify-phases %standard-phases
331 (add-before 'configure 'patch-paths
332 (lambda _
333 (substitute* "configure"
334 (("/bin/sh") (which "sh")))
335 (substitute* "setup.ml"
336 (("LDFLAGS=-fPIC")
337 (string-append "LDFLAGS=-fPIC\"; \"SHELL=" (which "sh")))
338 (("-std=c89") "-std=gnu99")
339
340 ;; This is a mutable string, which is no longer supported. Use
341 ;; a byte buffer instead.
342 (("String.make \\(String.length s\\)")
343 "Bytes.make (String.length s)")
344
345 ;; These two belong together.
346 (("OASISString.replace_chars")
347 "Bytes.to_string (OASISString.replace_chars")
348 ((" s;")
349 " s);"))
350 (substitute* "myocamlbuild.ml"
351 (("std=c89") "std=gnu99"))
352 ;; Since we build with a more recent OCaml, we have to use C99 or
353 ;; later. This causes problems with the old C code.
354 (substitute* "src/impala/matrix.c"
355 (("restrict") "restrict_"))
356 #t)))))
357 (native-inputs
358 `(("ocamlbuild" ,ocamlbuild)))
359 (home-page "https://github.com/fhcrc/mcl")
360 (synopsis "OCaml wrappers around MCL")
361 (description
362 "This package provides OCaml bindings for the MCL graph clustering
363 algorithm.")
364 (license license:gpl3)))
365
366 (define-public randomjungle
367 (package
368 (name "randomjungle")
369 (version "2.1.0")
370 (source
371 (origin
372 (method url-fetch)
373 (uri (string-append
374 "https://www.imbs.uni-luebeck.de/fileadmin/files/Software"
375 "/randomjungle/randomjungle-" version ".tar_.gz"))
376 (patches (search-patches "randomjungle-disable-static-build.patch"))
377 (sha256
378 (base32
379 "12c8rf30cla71swx2mf4ww9mfd8jbdw5lnxd7dxhyw1ygrvg6y4w"))))
380 (build-system gnu-build-system)
381 (arguments
382 `(#:configure-flags
383 (list "--disable-static"
384 (string-append "--with-boost="
385 (assoc-ref %build-inputs "boost")))
386 #:phases
387 (modify-phases %standard-phases
388 (add-before
389 'configure 'set-CXXFLAGS
390 (lambda _
391 (setenv "CXXFLAGS" "-fpermissive ")
392 #t)))))
393 (inputs
394 `(("boost" ,boost)
395 ("gsl" ,gsl)
396 ("libxml2" ,libxml2)
397 ("zlib" ,zlib)))
398 (native-inputs
399 `(("gfortran" ,gfortran)
400 ("gfortran:lib" ,gfortran "lib")))
401 ;; Non-portable assembly instructions are used, so building fails on
402 ;; platforms other than x86_64 or i686.
403 (supported-systems '("x86_64-linux" "i686-linux"))
404 (home-page "https://www.imbs.uni-luebeck.de/forschung/software/details.html#c224")
405 (synopsis "Implementation of the Random Forests machine learning method")
406 (description
407 "Random Jungle is an implementation of Random Forests. It is supposed to
408 analyse high dimensional data. In genetics, it can be used for analysing big
409 Genome Wide Association (GWA) data. Random Forests is a powerful machine
410 learning method. Most interesting features are variable selection, missing
411 value imputation, classifier creation, generalization error estimation and
412 sample proximities between pairs of cases.")
413 (license license:gpl3+)))
414
415 (define-public openfst
416 (package
417 (name "openfst")
418 (version "1.7.2")
419 (source (origin
420 (method url-fetch)
421 (uri (string-append "http://www.openfst.org/twiki/pub/FST/"
422 "FstDownload/openfst-" version ".tar.gz"))
423 (sha256
424 (base32
425 "0fqgk8195kz21is09gwzwnrg7fr9526bi9mh4apyskapz27pbhr1"))))
426 (build-system gnu-build-system)
427 (home-page "http://www.openfst.org")
428 (synopsis "Library for weighted finite-state transducers")
429 (description "OpenFst is a library for constructing, combining,
430 optimizing, and searching weighted finite-state transducers (FSTs).")
431 (license license:asl2.0)))
432
433 (define-public shogun
434 (package
435 (name "shogun")
436 (version "6.1.3")
437 (source
438 (origin
439 (method url-fetch)
440 (uri (string-append
441 "ftp://shogun-toolbox.org/shogun/releases/"
442 (version-major+minor version)
443 "/sources/shogun-" version ".tar.bz2"))
444 (sha256
445 (base32
446 "1rn9skm3nw6hr7mr3lgp2gfqhi7ii0lyxck7qmqnf8avq349s5jp"))
447 (modules '((guix build utils)
448 (ice-9 rdelim)))
449 (snippet
450 '(begin
451 ;; Remove non-free sources and files referencing them
452 (for-each delete-file
453 (find-files "src/shogun/classifier/svm/"
454 "SVMLight\\.(cpp|h)"))
455 (for-each delete-file
456 (find-files "examples/undocumented/libshogun/"
457 (string-append
458 "(classifier_.*svmlight.*|"
459 "evaluation_cross_validation_locked_comparison).cpp")))
460 ;; Remove non-free functions.
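;; delete-ifdefs drops everything between an "#ifdef USE_SVMLIGHT"
;; line and its matching "#endif //USE_SVMLIGHT", keeping the guard
;; lines themselves.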
461 (define (delete-ifdefs file)
462 (with-atomic-file-replacement file
463 (lambda (in out)
464 (let loop ((line (read-line in 'concat))
465 (skipping? #f))
466 (if (eof-object? line)
467 #t
468 (let ((skip-next?
469 (or (and skipping?
470 (not (string-prefix?
471 "#endif //USE_SVMLIGHT" line)))
472 (string-prefix?
473 "#ifdef USE_SVMLIGHT" line))))
474 (when (or (not skipping?)
475 (and skipping? (not skip-next?)))
476 (display line out))
477 (loop (read-line in 'concat) skip-next?)))))))
478 (for-each delete-ifdefs
479 (append
480 (find-files "src/shogun/classifier/mkl"
481 "^MKLClassification\\.cpp")
482 (find-files "src/shogun/classifier/svm"
483 "^SVMLightOneClass\\.(cpp|h)")
484 (find-files "src/shogun/multiclass"
485 "^ScatterSVM\\.(cpp|h)")
486 (find-files "src/shogun/kernel/"
487 "^(Kernel|CombinedKernel|ProductKernel)\\.(cpp|h)")
488 (find-files "src/shogun/regression/svr"
489 "^(MKLRegression|SVRLight)\\.(cpp|h)")
490 (find-files "src/shogun/transfer/domain_adaptation"
491 "^DomainAdaptationSVM\\.(cpp|h)")))
492 #t))))
493 (build-system cmake-build-system)
494 (arguments
495 '(#:tests? #f ;no check target
496 #:phases
497 (modify-phases %standard-phases
498 (add-after 'unpack 'delete-broken-symlinks
499 (lambda _
500 (for-each delete-file '("applications/arts/data"
501 "applications/asp/data"
502 "applications/easysvm/data"
503 "applications/msplicer/data"
504 "applications/ocr/data"
505 "examples/meta/data"
506 "examples/undocumented/data"))
507 #t))
508 (add-after 'unpack 'change-R-target-path
509 (lambda* (#:key outputs #:allow-other-keys)
510 (substitute* '("src/interfaces/r/CMakeLists.txt"
511 "examples/meta/r/CMakeLists.txt")
512 (("\\$\\{R_COMPONENT_LIB_PATH\\}")
513 (string-append (assoc-ref outputs "out")
514 "/lib/R/library/")))
515 #t))
516 (add-after 'unpack 'fix-octave-modules
517 (lambda* (#:key outputs #:allow-other-keys)
518 (substitute* "src/interfaces/octave/CMakeLists.txt"
519 (("^include_directories\\(\\$\\{OCTAVE_INCLUDE_DIRS\\}")
520 "include_directories(${OCTAVE_INCLUDE_DIRS} ${OCTAVE_INCLUDE_DIRS}/octave")
521 ;; change target directory
522 (("\\$\\{OCTAVE_OCT_LOCAL_API_FILE_DIR\\}")
523 (string-append (assoc-ref outputs "out")
524 "/share/octave/packages")))
525 (substitute* '("src/interfaces/octave/swig_typemaps.i"
526 "src/interfaces/octave/sg_print_functions.cpp")
527 ;; "octave/config.h" and "octave/oct-obj.h" deprecated in Octave.
528 (("octave/config\\.h") "octave/octave-config.h")
529 (("octave/oct-obj.h") "octave/ovl.h"))
530 #t))
531 (add-after 'unpack 'move-rxcpp
532 (lambda* (#:key inputs #:allow-other-keys)
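;; Copy the RxCpp sources into the directory where Shogun's build
;; system appears to look for its bundled copy.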
533 (let ((rxcpp-dir "shogun/third-party/rxcpp"))
534 (mkdir-p rxcpp-dir)
535 (install-file (assoc-ref inputs "rxcpp") rxcpp-dir)
536 #t)))
537 (add-before 'build 'set-HOME
538 ;; $HOME needs to be set at some point during the build phase
539 (lambda _ (setenv "HOME" "/tmp") #t)))
540 #:configure-flags
541 (list "-DCMAKE_BUILD_WITH_INSTALL_RPATH=TRUE"
542 "-DUSE_SVMLIGHT=OFF" ;disable proprietary SVMLIGHT
543 "-DBUILD_META_EXAMPLES=OFF" ;requires unpackaged ctags
544 ;;"-DINTERFACE_JAVA=ON" ;requires unpackaged jblas
545 ;;"-DINTERFACE_RUBY=ON" ;requires unpackaged ruby-narray
546 ;;"-DINTERFACE_PERL=ON" ;"FindPerlLibs" does not exist
547 ;;"-DINTERFACE_LUA=ON" ;fails because lua doesn't build pkgconfig file
548 "-DINTERFACE_OCTAVE=ON"
549 "-DINTERFACE_PYTHON=ON"
550 "-DINTERFACE_R=ON")))
551 (inputs
552 `(("python" ,python)
553 ("numpy" ,python-numpy)
554 ("r-minimal" ,r-minimal)
555 ("octave" ,octave-cli)
556 ("swig" ,swig)
557 ("eigen" ,eigen)
558 ("hdf5" ,hdf5)
559 ("atlas" ,atlas)
560 ("arpack" ,arpack-ng)
561 ("lapack" ,lapack)
562 ("glpk" ,glpk)
563 ("libxml2" ,libxml2)
564 ("lzo" ,lzo)
565 ("zlib" ,zlib)))
566 (native-inputs
567 `(("pkg-config" ,pkg-config)
568 ("rxcpp" ,rxcpp)))
569 ;; Non-portable SSE instructions are used, so building fails on platforms
570 ;; other than x86_64.
571 (supported-systems '("x86_64-linux"))
572 (home-page "https://shogun-toolbox.org/")
573 (synopsis "Machine learning toolbox")
574 (description
575 "The Shogun Machine learning toolbox provides a wide range of unified and
576 efficient Machine Learning (ML) methods. The toolbox seamlessly
577 combines multiple data representations, algorithm classes, and general purpose
578 tools. This enables both rapid prototyping of data pipelines and extensibility
579 in terms of new algorithms.")
580 (license license:gpl3+)))
581
582 (define-public python-onnx
583 (package
584 (name "python-onnx")
585 (version "1.7.0")
586 (source
587 (origin
588 (method url-fetch)
589 (uri (pypi-uri "onnx" version))
590 ;; ONNX will build googletest from a git checkout. Patch CMake
591 ;; to use googletest from Guix and enable tests by default.
592 (patches (search-patches "python-onnx-use-system-googletest.patch"))
593 (sha256
594 (base32 "0j6rgfbhsw3a8id8pyg18y93k68lbjbj1kq6qia36h69f6pvlyjy"))))
595 (build-system python-build-system)
596 (native-inputs
597 `(("cmake" ,cmake)
598 ("googletest" ,googletest)
599 ("pybind11" ,pybind11)
600 ("python-coverage" ,python-coverage)
601 ("python-nbval" ,python-nbval)
602 ("python-pytest" ,python-pytest)
603 ("python-pytest-runner" ,python-pytest-runner)))
604 (inputs
605 `(("protobuf" ,protobuf)))
606 (propagated-inputs
607 `(("python-numpy" ,python-numpy)
608 ("python-protobuf" ,python-protobuf)
609 ("python-six" ,python-six)
610 ("python-tabulate" ,python-tabulate)
611 ("python-typing-extensions"
612 ,python-typing-extensions)))
613 (home-page "https://onnx.ai/")
614 (synopsis "Open Neural Network Exchange")
615 (description
616 "Open Neural Network Exchange (ONNX) provides an open source format for
617 AI models, both deep learning and traditional ML. It defines an extensible
618 computation graph model, as well as definitions of built-in operators and
619 standard data types.")
620 (license license:expat)))
621
622 (define-public rxcpp
623 (package
624 (name "rxcpp")
625 (version "4.1.0")
626 (source
627 (origin
628 (method git-fetch)
629 (uri (git-reference
630 (url "https://github.com/ReactiveX/RxCpp")
631 (commit (string-append "v" version))))
632 (sha256
633 (base32 "1rdpa3jlc181jd08nk437aar085h28i45s6nzrv65apb3xyyz0ij"))
634 (file-name (git-file-name name version))))
635 (build-system cmake-build-system)
636 (arguments
637 `(#:phases
638 (modify-phases %standard-phases
639 (add-after 'unpack 'remove-werror
640 (lambda _
641 (substitute* (find-files ".")
642 (("-Werror") ""))
643 #t))
644 (replace 'check
645 (lambda _
646 (invoke "ctest"))))))
647 (native-inputs
648 `(("catch" ,catch-framework)))
649 (home-page "http://reactivex.io/")
650 (synopsis "Reactive Extensions for C++")
651 (description
652 "The Reactive Extensions for C++ (RxCpp) is a library of algorithms for
653 values-distributed-in-time. ReactiveX is a library for composing asynchronous
654 and event-based programs by using observable sequences.
655
656 It extends the observer pattern to support sequences of data and/or events and
657 adds operators that allow you to compose sequences together declaratively while
658 abstracting away concerns about things like low-level threading,
659 synchronization, thread-safety, concurrent data structures, and non-blocking
660 I/O.")
661 (license license:asl2.0)))
662
663 (define-public r-adaptivesparsity
664 (package
665 (name "r-adaptivesparsity")
666 (version "1.6")
667 (source (origin
668 (method url-fetch)
669 (uri (cran-uri "AdaptiveSparsity" version))
670 (sha256
671 (base32
672 "0imr5m8mll9j6n4icsv6z9rl5kbnwsp9wvzrg7n90nnmcxq2cz91"))))
673 (properties
674 `((upstream-name . "AdaptiveSparsity")))
675 (build-system r-build-system)
676 (arguments
677 `(#:phases
678 (modify-phases %standard-phases
679 (add-after 'unpack 'link-against-armadillo
680 (lambda _
681 (substitute* "src/Makevars"
682 (("PKG_LIBS=" prefix)
683 (string-append prefix "-larmadillo"))))))))
684 (propagated-inputs
685 `(("r-mass" ,r-mass)
686 ("r-matrix" ,r-matrix)
687 ("r-rcpp" ,r-rcpp)
688 ("r-rcpparmadillo" ,r-rcpparmadillo)))
689 (inputs
690 `(("armadillo" ,armadillo)))
691 (home-page "https://cran.r-project.org/web/packages/AdaptiveSparsity")
692 (synopsis "Adaptive sparsity models")
693 (description
694 "This package implements the Figueiredo machine learning algorithm for
695 adaptive sparsity and the Wong algorithm for adaptively sparse gaussian
696 geometric models.")
697 (license license:lgpl3+)))
698
699 (define-public gemmlowp-for-tensorflow
700 ;; The commit hash is taken from "tensorflow/workspace.bzl".
701 (let ((commit "38ebac7b059e84692f53e5938f97a9943c120d98")
702 (revision "2"))
703 (package
704 (name "gemmlowp")
705 (version (git-version "0" revision commit))
706 (source (origin
707 (method url-fetch)
708 (uri (string-append "https://mirror.bazel.build/"
709 "github.com/google/gemmlowp/archive/"
710 commit ".zip"))
711 (file-name (string-append "gemmlowp-" version ".zip"))
712 (sha256
713 (base32
714 "0n56s2g8hrssm4w8qj1v58gfm56a04n9v992ixkmvk6zjiralzxq"))))
715 (build-system cmake-build-system)
716 (arguments
717 `(#:configure-flags
718 (list ,@(match (%current-system)
719 ((or "x86_64-linux" "i686-linux")
720 '("-DCMAKE_CXX_FLAGS=-msse2"))
721 (_ '())))
722 #:phases
723 (modify-phases %standard-phases
724 ;; This directory contains the CMakeLists.txt.
725 (add-after 'unpack 'chdir
726 (lambda _ (chdir "contrib") #t))
727 ;; There is no install target
728 (replace 'install
729 (lambda* (#:key outputs #:allow-other-keys)
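;; Install the shared library and the public headers by hand,
;; preserving the per-directory layout under include/.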
730 (let* ((out (assoc-ref outputs "out"))
731 (lib (string-append out "/lib/"))
732 (inc (string-append out "/include/")))
733 (install-file "../build/libeight_bit_int_gemm.so" lib)
734 (for-each (lambda (dir)
735 (let ((target (string-append inc "/" dir)))
736 (mkdir-p target)
737 (for-each (lambda (h)
738 (install-file h target))
739 (find-files (string-append "../" dir)
740 "\\.h$"))))
741 '("meta" "profiling" "public" "fixedpoint"
742 "eight_bit_int_gemm" "internal"))
743 #t))))))
744 (native-inputs
745 `(("unzip" ,unzip)))
746 (home-page "https://github.com/google/gemmlowp")
747 (synopsis "Small self-contained low-precision GEMM library")
748 (description
749 "This is a small self-contained low-precision @dfn{general matrix
750 multiplication} (GEMM) library. It is not a full linear algebra library.
751 Low-precision means that the input and output matrix entries are integers on
752 at most 8 bits. To avoid overflow, results are internally accumulated on more
753 than 8 bits, and at the end only some significant 8 bits are kept.")
754 (license license:asl2.0))))
755
756 (define-public dlib
757 (package
758 (name "dlib")
759 (version "19.20")
760 (source (origin
761 (method url-fetch)
762 (uri (string-append
763 "http://dlib.net/files/dlib-" version ".tar.bz2"))
764 (sha256
765 (base32
766 "139jyi19qz37wwmmy48gil9d1kkh2r3w3bwdzabha6ayxmba96nz"))
767 (modules '((guix build utils)))
768 (snippet
769 '(begin
770 ;; Delete ~13MB of bundled dependencies.
771 (delete-file-recursively "dlib/external")
772 (delete-file-recursively "docs/dlib/external")
773 #t))))
774 (build-system cmake-build-system)
775 (arguments
776 `(#:phases
777 (modify-phases %standard-phases
778 (add-after 'unpack 'disable-asserts
779 (lambda _
780 ;; config.h recommends explicitly enabling or disabling asserts
781 ;; when building as a shared library. By default neither is set.
782 (substitute* "dlib/config.h"
783 (("^//#define DLIB_DISABLE_ASSERTS") "#define DLIB_DISABLE_ASSERTS"))
784 #t))
785 (add-after 'disable-asserts 'disable-failing-tests
786 (lambda _
787 ;; One test times out on MIPS, so we need to disable it.
788 ;; Others are flaky on some platforms.
789 (let* ((system ,(or (%current-target-system)
790 (%current-system)))
791 (disabled-tests (cond
792 ((string-prefix? "mips64" system)
793 '("object_detector" ; timeout
794 "data_io"))
795 ((string-prefix? "armhf" system)
796 '("learning_to_track"))
797 ((string-prefix? "i686" system)
798 '("optimization"))
799 (else '()))))
800 (for-each
801 (lambda (test)
802 (substitute* "dlib/test/makefile"
803 (((string-append "SRC \\+= " test "\\.cpp")) "")))
804 disabled-tests)
805 #t)))
806 (replace 'check
807 (lambda _
808 ;; No test target, so we build and run the unit tests here.
809 (let ((test-dir (string-append "../dlib-" ,version "/dlib/test")))
810 (with-directory-excursion test-dir
811 (invoke "make" "-j" (number->string (parallel-job-count)))
812 (invoke "./dtest" "--runall"))
813 #t)))
814 (add-after 'install 'delete-static-library
815 (lambda* (#:key outputs #:allow-other-keys)
816 (delete-file (string-append (assoc-ref outputs "out")
817 "/lib/libdlib.a"))
818 #t)))))
819 (native-inputs
820 `(("pkg-config" ,pkg-config)
821 ;; For tests.
822 ("libnsl" ,libnsl)))
823 (inputs
824 `(("giflib" ,giflib)
825 ("lapack" ,lapack)
826 ("libjpeg" ,libjpeg-turbo)
827 ("libpng" ,libpng)
828 ("libx11" ,libx11)
829 ("openblas" ,openblas)
830 ("zlib" ,zlib)))
831 (synopsis
832 "Toolkit for making machine learning and data analysis applications in C++")
833 (description
834 "Dlib is a modern C++ toolkit containing machine learning algorithms and
835 tools. It is used in both industry and academia in a wide range of domains
836 including robotics, embedded devices, mobile phones, and large high performance
837 computing environments.")
838 (home-page "http://dlib.net")
839 (license license:boost1.0)))
840
841 (define-public python-scikit-learn
842 (package
843 (name "python-scikit-learn")
844 (version "0.22.1")
845 (source
846 (origin
847 (method git-fetch)
848 (uri (git-reference
849 (url "https://github.com/scikit-learn/scikit-learn")
850 (commit version)))
851 (file-name (git-file-name name version))
852 (sha256
853 (base32
854 "1xqxv210gsmjw094vc5ghq2y9lmm74qkk22pq6flcjzj51b86jxf"))))
855 (build-system python-build-system)
856 (arguments
857 `(#:phases
858 (modify-phases %standard-phases
859 (add-after 'build 'build-ext
860 (lambda _ (invoke "python" "setup.py" "build_ext" "--inplace") #t))
861 (replace 'check
862 (lambda _
863 ;; Restrict OpenBLAS threads to prevent segfaults while testing!
864 (setenv "OPENBLAS_NUM_THREADS" "1")
865
866 ;; Some tests require write access to $HOME.
867 (setenv "HOME" "/tmp")
868
869 (invoke "pytest" "sklearn" "-m" "not network")))
870 (add-before 'reset-gzip-timestamps 'make-files-writable
871 (lambda* (#:key outputs #:allow-other-keys)
872 ;; Make sure .gz files are writable so that the
873 ;; 'reset-gzip-timestamps' phase can do its work.
874 (let ((out (assoc-ref outputs "out")))
875 (for-each make-file-writable
876 (find-files out "\\.gz$"))
877 #t))))))
878 (inputs
879 `(("openblas" ,openblas)))
880 (native-inputs
881 `(("python-pytest" ,python-pytest)
882 ("python-pandas" ,python-pandas) ;for tests
883 ("python-cython" ,python-cython)))
884 (propagated-inputs
885 `(("python-numpy" ,python-numpy)
886 ("python-scipy" ,python-scipy)
887 ("python-joblib" ,python-joblib)))
888 (home-page "https://scikit-learn.org/")
889 (synopsis "Machine Learning in Python")
890 (description
891 "Scikit-learn provides simple and efficient tools for data mining and
892 data analysis.")
893 (properties `((python2-variant . ,(delay python2-scikit-learn))))
894 (license license:bsd-3)))
895
896 ;; scikit-learn 0.22 and later only supports Python 3, so we stick with
897 ;; an older version here.
898 (define-public python2-scikit-learn
899 (let ((base (package-with-python2 (strip-python2-variant python-scikit-learn))))
900 (package
901 (inherit base)
902 (version "0.20.4")
903 (source (origin
904 (method git-fetch)
905 (uri (git-reference
906 (url "https://github.com/scikit-learn/scikit-learn")
907 (commit version)))
908 (file-name (git-file-name "python-scikit-learn" version))
909 (sha256
910 (base32
911 "08zbzi8yx5wdlxfx9jap61vg1malc9ajf576w7a0liv6jvvrxlpj")))))))
912
913 (define-public python-scikit-rebate
914 (package
915 (name "python-scikit-rebate")
916 (version "0.6")
917 (source (origin
918 (method url-fetch)
919 (uri (pypi-uri "skrebate" version))
920 (sha256
921 (base32
922 "1h7qs9gjxpzqabzhb8rmpv3jpmi5iq41kqdibg48299h94iikiw7"))))
923 (build-system python-build-system)
924 ;; Pandas is only needed to run the tests.
925 (native-inputs
926 `(("python-pandas" ,python-pandas)))
927 (propagated-inputs
928 `(("python-numpy" ,python-numpy)
929 ("python-scipy" ,python-scipy)
930 ("python-scikit-learn" ,python-scikit-learn)
931 ("python-joblib" ,python-joblib)))
932 (home-page "https://epistasislab.github.io/scikit-rebate/")
933 (synopsis "Relief-based feature selection algorithms for Python")
934 (description "Scikit-rebate is a scikit-learn-compatible Python
935 implementation of ReBATE, a suite of Relief-based feature selection algorithms
936 for Machine Learning. These algorithms excel at identifying features that are
937 predictive of the outcome in supervised learning problems, and are especially
938 good at identifying feature interactions that are normally overlooked by
939 standard feature selection algorithms.")
940 (license license:expat)))
941
942 (define-public python-autograd
943 (let* ((commit "442205dfefe407beffb33550846434baa90c4de7")
944 (revision "0")
945 (version (git-version "0.0.0" revision commit)))
946 (package
947 (name "python-autograd")
948 (home-page "https://github.com/HIPS/autograd")
949 (source (origin
950 (method git-fetch)
951 (uri (git-reference
952 (url home-page)
953 (commit commit)))
954 (sha256
955 (base32
956 "189sv2xb0mwnjawa9z7mrgdglc1miaq93pnck26r28fi1jdwg0z4"))
957 (file-name (git-file-name name version))))
958 (version version)
959 (build-system python-build-system)
960 (native-inputs
961 `(("python-nose" ,python-nose)
962 ("python-pytest" ,python-pytest)))
963 (propagated-inputs
964 `(("python-future" ,python-future)
965 ("python-numpy" ,python-numpy)))
966 (arguments
967 `(#:phases (modify-phases %standard-phases
968 (replace 'check
969 (lambda _
970 (invoke "py.test" "-v"))))))
971 (synopsis "Efficiently computes derivatives of NumPy code")
972 (description "Autograd can automatically differentiate native Python and
973 NumPy code. It can handle a large subset of Python's features, including loops,
974 ifs, recursion and closures, and it can even take derivatives of derivatives
975 of derivatives. It supports reverse-mode differentiation
976 (a.k.a. backpropagation), which means it can efficiently take gradients of
977 scalar-valued functions with respect to array-valued arguments, as well as
978 forward-mode differentiation, and the two can be composed arbitrarily. The
979 main intended application of Autograd is gradient-based optimization.")
980 (license license:expat))))
981
982 (define-public python2-autograd
983 (package-with-python2 python-autograd))
984
985 (define-public lightgbm
986 (package
987 (name "lightgbm")
988 (version "2.0.12")
989 (source (origin
990 (method git-fetch)
991 (uri (git-reference
992 (url "https://github.com/Microsoft/LightGBM")
993 (commit (string-append "v" version))))
994 (sha256
995 (base32
996 "0jlvyn7k81dzrh9ij3zw576wbgiwmmr26rzpdxjn1dbpc3njpvzi"))
997 (file-name (git-file-name name version))))
998 (native-inputs
999 `(("python-pytest" ,python-pytest)
1000 ("python-nose" ,python-nose)))
1001 (inputs
1002 `(("openmpi" ,openmpi)))
1003 (propagated-inputs
1004 `(("python-numpy" ,python-numpy)
1005 ("python-scipy" ,python-scipy)))
1006 (arguments
1007 `(#:configure-flags
1008 '("-DUSE_MPI=ON")
1009 #:phases
1010 (modify-phases %standard-phases
1011 (replace 'check
1012 (lambda _
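;; The C API tests are run from the unpacked source tree rather
;; than from the separate CMake build directory.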
1013 (with-directory-excursion "../source"
1014 (invoke "pytest" "tests/c_api_test/test_.py")))))))
1015 (build-system cmake-build-system)
1016 (home-page "https://github.com/Microsoft/LightGBM")
1017 (synopsis "Gradient boosting framework based on decision tree algorithms")
1018 (description "LightGBM is a gradient boosting framework that uses tree
1019 based learning algorithms. It is designed to be distributed and efficient with
1020 the following advantages:
1021
1022 @itemize
1023 @item Faster training speed and higher efficiency
1024 @item Lower memory usage
1025 @item Better accuracy
1026 @item Parallel and GPU learning supported (not enabled in this package)
1027 @item Capable of handling large-scale data
1028 @end itemize\n")
1029 (license license:expat)))
1030
1031 (define-public vowpal-wabbit
1032 ;; Language bindings not included.
1033 (package
1034 (name "vowpal-wabbit")
1035 (version "8.5.0")
1036 (source (origin
1037 (method git-fetch)
1038 (uri (git-reference
1039 (url "https://github.com/JohnLangford/vowpal_wabbit")
1040 (commit version)))
1041 (sha256
1042 (base32
1043 "04bwzk6ifgnz3fmzid8b7avxf9n5pnx9xcjm61nkjng1vv0bpj8x"))
1044 (file-name (git-file-name name version))))
1045 (inputs
1046 `(("boost" ,boost)
1047 ("zlib" ,zlib)))
1048 (arguments
1049 `(#:configure-flags
1050 (list (string-append "--with-boost="
1051 (assoc-ref %build-inputs "boost")))
1052 #:phases
1053 (modify-phases %standard-phases
1054 (add-after 'unpack 'make-files-writable
1055 (lambda _
1056 (for-each make-file-writable (find-files "." ".*")) #t)))))
1057 (build-system gnu-build-system)
1058 (home-page "https://github.com/JohnLangford/vowpal_wabbit")
1059 (synopsis "Fast machine learning library for online learning")
1060 (description "Vowpal Wabbit is a machine learning system with techniques
1061 such as online, hashing, allreduce, reductions, learning2search, active, and
1062 interactive learning.")
1063 (license license:bsd-3)))
1064
1065 (define-public python2-fastlmm
1066 (package
1067 (name "python2-fastlmm")
1068 (version "0.2.21")
1069 (source
1070 (origin
1071 (method url-fetch)
1072 (uri (pypi-uri "fastlmm" version ".zip"))
1073 (sha256
1074 (base32
1075 "1q8c34rpmwkfy3r4d5172pzdkpfryj561897z9r3x22gq7813x1m"))))
1076 (build-system python-build-system)
1077 (arguments
1078 `(#:tests? #f ; some test files are missing
1079 #:python ,python-2)) ; only Python 2.7 is supported
1080 (propagated-inputs
1081 `(("python2-numpy" ,python2-numpy)
1082 ("python2-scipy" ,python2-scipy)
1083 ("python2-matplotlib" ,python2-matplotlib)
1084 ("python2-pandas" ,python2-pandas)
1085 ("python2-scikit-learn" ,python2-scikit-learn)
1086 ("python2-pysnptools" ,python2-pysnptools)))
1087 (native-inputs
1088 `(("unzip" ,unzip)
1089 ("python2-cython" ,python2-cython)
1090 ("python2-mock" ,python2-mock)
1091 ("python2-nose" ,python2-nose)))
1092 (home-page "http://research.microsoft.com/en-us/um/redmond/projects/mscompbio/fastlmm/")
1093 (synopsis "Perform genome-wide association studies on large data sets")
1094 (description
1095 "FaST-LMM, which stands for Factored Spectrally Transformed Linear Mixed
1096 Models, is a program for performing both single-SNP and SNP-set genome-wide
1097 association studies (GWAS) on extremely large data sets.")
1098 (license license:asl2.0)))
1099
1100 ;; There have been no proper releases yet.
1101 (define-public kaldi
1102 (let ((commit "d4791c0f3fc1a09c042dac365e120899ee2ad21e")
1103 (revision "2"))
1104 (package
1105 (name "kaldi")
1106 (version (git-version "0" revision commit))
1107 (source (origin
1108 (method git-fetch)
1109 (uri (git-reference
1110 (url "https://github.com/kaldi-asr/kaldi")
1111 (commit commit)))
1112 (file-name (git-file-name name version))
1113 (sha256
1114 (base32
1115 "07k80my6f19mhrkwbzhjsnpf9871wmrwkl0ym468i830w67qyjrz"))))
1116 (build-system gnu-build-system)
1117 (arguments
1118 `(#:test-target "test"
1119 #:phases
1120 (modify-phases %standard-phases
1121 (add-after 'unpack 'chdir
1122 (lambda _ (chdir "src") #t))
1123 (replace 'configure
1124 (lambda* (#:key build system inputs outputs #:allow-other-keys)
1125 (when (not (or (string-prefix? "x86_64" system)
1126 (string-prefix? "i686" system)))
1127 (substitute* "makefiles/linux_openblas.mk"
1128 (("-msse -msse2") "")))
1129 (substitute* "makefiles/default_rules.mk"
1130 (("/bin/bash") (which "bash")))
1131 (substitute* "Makefile"
1132 (("ext_depend: check_portaudio")
1133 "ext_depend:"))
1134 (substitute* '("online/Makefile"
1135 "onlinebin/Makefile"
1136 "gst-plugin/Makefile")
1137 (("../../tools/portaudio/install")
1138 (assoc-ref inputs "portaudio")))
1139
1140 ;; This `configure' script doesn't support variables passed as
1141 ;; arguments, nor does it support "prefix".
1142 (let ((out (assoc-ref outputs "out"))
1143 (openblas (assoc-ref inputs "openblas"))
1144 (openfst (assoc-ref inputs "openfst")))
1145 (substitute* "configure"
1146 (("check_for_slow_expf;") "")
1147 ;; This affects the RPATH and also serves as the installation
1148 ;; directory.
1149 (("KALDILIBDIR=`pwd`/lib")
1150 (string-append "KALDILIBDIR=" out "/lib")))
1151 (mkdir-p out) ; must exist
1152 (setenv "CONFIG_SHELL" (which "bash"))
1153 (setenv "OPENFST_VER" ,(package-version openfst))
1154 (invoke "./configure"
1155 "--use-cuda=no"
1156 "--shared"
1157 (string-append "--openblas-root=" openblas)
1158 (string-append "--fst-root=" openfst)))))
1159 (add-after 'build 'build-ext-and-gstreamer-plugin
1160 (lambda _
1161 (invoke "make" "-C" "online" "depend")
1162 (invoke "make" "-C" "online")
1163 (invoke "make" "-C" "onlinebin" "depend")
1164 (invoke "make" "-C" "onlinebin")
1165 (invoke "make" "-C" "gst-plugin" "depend")
1166 (invoke "make" "-C" "gst-plugin")
1167 #t))
1168 ;; TODO: also install the executables.
1169 (replace 'install
1170 (lambda* (#:key outputs #:allow-other-keys)
1171 (let* ((out (assoc-ref outputs "out"))
1172 (inc (string-append out "/include"))
1173 (lib (string-append out "/lib")))
1174 (mkdir-p lib)
1175 ;; The build phase installed symlinks to the actual
1176 ;; libraries. Install the actual targets.
1177 (for-each (lambda (file)
1178 (let ((target (readlink file)))
1179 (delete-file file)
1180 (install-file target lib)))
1181 (find-files lib "\\.so"))
1182 ;; Install headers
1183 (for-each (lambda (file)
1184 (let ((target-dir (string-append inc "/" (dirname file))))
1185 (install-file file target-dir)))
1186 (find-files "." "\\.h"))
1187 (install-file "gst-plugin/libgstonlinegmmdecodefaster.so"
1188 (string-append lib "/gstreamer-1.0"))
1189 #t))))))
1190 (inputs
1191 `(("alsa-lib" ,alsa-lib)
1192 ("gfortran" ,gfortran "lib")
1193 ("glib" ,glib)
1194 ("gstreamer" ,gstreamer)
1195 ("jack" ,jack-1)
1196 ("openblas" ,openblas)
1197 ("openfst" ,openfst)
1198 ("portaudio" ,portaudio)
1199 ("python" ,python)))
1200 (native-inputs
1201 `(("glib" ,glib "bin") ; glib-genmarshal
1202 ("grep" ,grep)
1203 ("sed" ,sed)
1204 ("pkg-config" ,pkg-config)
1205 ("which" ,which)))
1206 (home-page "https://kaldi-asr.org/")
1207 (synopsis "Speech recognition toolkit")
1208 (description "Kaldi is an extensible toolkit for speech recognition
1209 written in C++.")
1210 (license license:asl2.0))))
1211
1212 (define-public gst-kaldi-nnet2-online
1213 (let ((commit "cb227ef43b66a9835c14eb0ad39e08ee03c210ad")
1214 (revision "2"))
1215 (package
1216 (name "gst-kaldi-nnet2-online")
1217 (version (git-version "0" revision commit))
1218 (source (origin
1219 (method git-fetch)
1220 (uri (git-reference
1221 (url "https://github.com/alumae/gst-kaldi-nnet2-online")
1222 (commit commit)))
1223 (file-name (git-file-name name version))
1224 (sha256
1225 (base32
1226 "1i6ffwiavxx07ri0lxix6s8q0r31x7i4xxvhys5jxkixf5q34w8g"))))
1227 (build-system gnu-build-system)
1228 (arguments
1229 `(#:tests? #f ; there are none
1230 #:make-flags
1231 (list (string-append "SHELL="
1232 (assoc-ref %build-inputs "bash") "/bin/bash")
1233 (string-append "KALDI_ROOT="
1234 (assoc-ref %build-inputs "kaldi-src"))
1235 (string-append "KALDILIBDIR="
1236 (assoc-ref %build-inputs "kaldi") "/lib")
1237 "KALDI_FLAVOR=dynamic")
1238 #:phases
1239 (modify-phases %standard-phases
1240 (add-after 'unpack 'chdir
1241 (lambda _ (chdir "src") #t))
1242 (replace 'configure
1243 (lambda* (#:key inputs #:allow-other-keys)
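;; Point the compiler at the GLib and GStreamer headers, and relax
;; the Makefile so that it does not insist on including Kaldi's
;; generated kaldi.mk.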
1244 (let ((glib (assoc-ref inputs "glib")))
1245 (setenv "CXXFLAGS" "-fPIC")
1246 (setenv "CPLUS_INCLUDE_PATH"
1247 (string-append glib "/include/glib-2.0:"
1248 glib "/lib/glib-2.0/include:"
1249 (assoc-ref inputs "gstreamer")
1250 "/include/gstreamer-1.0")))
1251 (substitute* "Makefile"
1252 (("include \\$\\(KALDI_ROOT\\)/src/kaldi.mk") "")
1253 (("\\$\\(error Cannot find") "#"))
1254 #t))
1255 (add-before 'build 'build-depend
1256 (lambda* (#:key make-flags #:allow-other-keys)
1257 (apply invoke "make" "depend" make-flags)))
1258 (replace 'install
1259 (lambda* (#:key outputs #:allow-other-keys)
1260 (let* ((out (assoc-ref outputs "out"))
1261 (lib (string-append out "/lib/gstreamer-1.0")))
1262 (install-file "libgstkaldinnet2onlinedecoder.so" lib)
1263 #t))))))
1264 (inputs
1265 `(("glib" ,glib)
1266 ("gstreamer" ,gstreamer)
1267 ("jansson" ,jansson)
1268 ("openfst" ,openfst)
1269 ("kaldi" ,kaldi)))
1270 (native-inputs
1271 `(("bash" ,bash)
1272 ("glib:bin" ,glib "bin") ; glib-genmarshal
1273 ("kaldi-src" ,(package-source kaldi))
1274 ("pkg-config" ,pkg-config)))
1275 (home-page "https://kaldi-asr.org/")
1276 (synopsis "Gstreamer plugin for decoding speech")
1277 (description "This package provides a GStreamer plugin that wraps
1278 Kaldi's @code{SingleUtteranceNnet2Decoder}. It requires iVector-adapted DNN
1279 acoustic models. The iVectors are adapted to the current audio stream
1280 automatically.")
1281 (license license:asl2.0))))
1282
1283 (define-public kaldi-gstreamer-server
1284 ;; This is the tip of the py3 branch
1285 (let ((commit "f68cab490be7eb0da2af1475fbc16655f50a60cb")
1286 (revision "2"))
1287 (package
1288 (name "kaldi-gstreamer-server")
1289 (version (git-version "0" revision commit))
1290 (source (origin
1291 (method git-fetch)
1292 (uri (git-reference
1293 (url "https://github.com/alumae/kaldi-gstreamer-server")
1294 (commit commit)))
1295 (file-name (git-file-name name version))
1296 (sha256
1297 (base32
1298 "17lh1368vkg8ngrcbn2phvigzlmalrqg6djx2gg61qq1a0nj87dm"))))
1299 (build-system gnu-build-system)
1300 (arguments
1301 `(#:tests? #f ; there are no tests that can be run automatically
1302 #:modules ((guix build utils)
1303 (guix build gnu-build-system)
1304 (srfi srfi-26))
1305 #:phases
1306 (modify-phases %standard-phases
1307 (delete 'configure)
1308 (replace 'build
1309 (lambda* (#:key outputs #:allow-other-keys)
1310 ;; Disable hash randomization to ensure the generated .pycs
1311 ;; are reproducible.
1312 (setenv "PYTHONHASHSEED" "0")
1313 (with-directory-excursion "kaldigstserver"
1314 ;; See https://github.com/alumae/kaldi-gstreamer-server/issues/232
1315 (substitute* "master_server.py"
1316 (("\\.replace\\('\\\\.*") ")"))
1317
1318 ;; These are Python 2 files.
1319 (delete-file "decoder_test.py")
1320 (delete-file "test-buffer.py")
1321
1322 (for-each (lambda (file)
1323 (apply invoke
1324 `("python"
1325 "-m" "compileall"
1326 "-f" ; force rebuild
1327 ,file)))
1328 (find-files "." "\\.py$")))
1329 #t))
1330 (replace 'install
1331 (lambda* (#:key inputs outputs #:allow-other-keys)
1332 (let* ((out (assoc-ref outputs "out"))
1333 (bin (string-append out "/bin"))
1334 (share (string-append out "/share/kaldi-gstreamer-server/")))
1335 ;; Install Python files
1336 (with-directory-excursion "kaldigstserver"
1337 (for-each (cut install-file <> share)
1338 (find-files "." ".*")))
1339
1340 ;; Install sample configuration files
1341 (for-each (cut install-file <> share)
1342 (find-files "." "\\.yaml"))
1343
1344 ;; Install executables
1345 (mkdir-p bin)
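;; Each command is a small shell wrapper that exports PYTHONPATH and
;; GST_PLUGIN_PATH and then runs the corresponding script from the
;; share directory with the packaged Python.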
1346 (let* ((server (string-append bin "/kaldi-gst-server"))
1347 (client (string-append bin "/kaldi-gst-client"))
1348 (worker (string-append bin "/kaldi-gst-worker"))
1349 (PYTHONPATH (getenv "PYTHONPATH"))
1350 (GST_PLUGIN_PATH (string-append
1351 (assoc-ref inputs "gst-kaldi-nnet2-online")
1352 "/lib/gstreamer-1.0:${GST_PLUGIN_PATH}"))
1353 (wrap (lambda (wrapper what)
1354 (with-output-to-file wrapper
1355 (lambda _
1356 (format #t
1357 "#!~a
1358 export PYTHONPATH=~a
1359 export GST_PLUGIN_PATH=~a
1360 exec ~a ~a/~a \"$@\"~%"
1361 (which "bash") PYTHONPATH GST_PLUGIN_PATH
1362 (which "python") share what)))
1363 (chmod wrapper #o555))))
1364 (for-each wrap
1365 (list server client worker)
1366 (list "master_server.py"
1367 "client.py"
1368 "worker.py")))
1369 #t))))))
1370 (inputs
1371 `(("gst-kaldi-nnet2-online" ,gst-kaldi-nnet2-online)
1372 ("python" ,python-wrapper)
1373 ("python-pygobject" ,python-pygobject)
1374 ("python-pyyaml" ,python-pyyaml)
1375 ("python-tornado" ,python-tornado-6)))
1376 (home-page "https://github.com/alumae/kaldi-gstreamer-server")
1377 (synopsis "Real-time full-duplex speech recognition server")
1378 (description "This is a real-time full-duplex speech recognition server,
1379 based on the Kaldi toolkit and the GStreamer framework and implemented in
1380 Python.")
1381 (license license:bsd-2))))
1382
1383 ;; Note that Tensorflow includes a "third_party" directory, which seems to not
1384 ;; only contain modified subsets of upstream library source code, but also
1385 ;; adapter headers provided by Google (such as the fft.h header, which is not
1386 ;; part of the upstream project code). The Tensorflow code includes headers
1387 ;; from the "third_party" directory. It does not look like we can replace
1388 ;; these headers with unmodified upstream files, so we keep them.
1389 (define-public tensorflow
1390 (package
1391 (name "tensorflow")
1392 (version "1.9.0")
1393 (source
1394 (origin
1395 (method git-fetch)
1396 (uri (git-reference
1397 (url "https://github.com/tensorflow/tensorflow")
1398 (commit (string-append "v" version))))
1399 (file-name (string-append "tensorflow-" version "-checkout"))
1400 (sha256
1401 (base32
1402 "0a9kwha395g3wgxfwln5j8vn9nkspmd75xldrlqdq540w996g8xa"))))
1403 (build-system cmake-build-system)
1404 (arguments
1405 `(#:tests? #f ; no "check" target
1406 #:build-type "Release"
1407 #:configure-flags
1408 (let ((protobuf (assoc-ref %build-inputs "protobuf"))
1409 (protobuf:native (assoc-ref %build-inputs "protobuf:native"))
1410 (jsoncpp (assoc-ref %build-inputs "jsoncpp"))
1411 (snappy (assoc-ref %build-inputs "snappy"))
1412 (sqlite (assoc-ref %build-inputs "sqlite")))
1413 (list
1414 ;; Use protobuf from Guix
1415 (string-append "-Dprotobuf_STATIC_LIBRARIES="
1416 protobuf "/lib/libprotobuf.so")
1417 (string-append "-DPROTOBUF_PROTOC_EXECUTABLE="
1418 protobuf:native "/bin/protoc")
1419
1420 ;; Use snappy from Guix
1421 (string-append "-Dsnappy_STATIC_LIBRARIES="
1422 snappy "/lib/libsnappy.so")
1423 ;; Yes, this is not actually the include directory but a prefix...
1424 (string-append "-Dsnappy_INCLUDE_DIR=" snappy)
1425
1426 ;; Use jsoncpp from Guix
1427 (string-append "-Djsoncpp_STATIC_LIBRARIES="
1428 jsoncpp "/lib/libjsoncpp.so")
1429 ;; Yes, this is not actually the include directory but a prefix...
1430 (string-append "-Djsoncpp_INCLUDE_DIR=" jsoncpp)
1431
1432 ;; Use sqlite from Guix
1433 (string-append "-Dsqlite_STATIC_LIBRARIES="
1434 sqlite "/lib/libsqlite.a")
1435
1436 ;; Use system libraries wherever possible. Currently, this
1437 ;; only affects zlib.
1438 "-Dsystemlib_ALL=ON"
1439 "-Dtensorflow_ENABLE_POSITION_INDEPENDENT_CODE=ON"
1440 "-Dtensorflow_BUILD_SHARED_LIB=ON"
1441 "-Dtensorflow_OPTIMIZE_FOR_NATIVE_ARCH=OFF"
1442 "-Dtensorflow_ENABLE_SSL_SUPPORT=OFF"
1443 "-Dtensorflow_BUILD_CONTRIB_KERNELS=OFF"))
1444 #:make-flags
1445 (list "CC=gcc")
1446 #:modules ((ice-9 ftw)
1447 (guix build utils)
1448 (guix build cmake-build-system)
1449 ((guix build python-build-system)
1450 #:select (python-version)))
1451 #:imported-modules (,@%cmake-build-system-modules
1452 (guix build python-build-system))
1453 #:phases
1454 (modify-phases %standard-phases
1455 (add-after 'unpack 'set-source-file-times-to-1980
1456 ;; At the end of the tf_python_build_pip_package target, a ZIP
1457 ;; archive should be generated via bdist_wheel, but it fails with
1458 ;; "ZIP does not support timestamps before 1980". Luckily,
1459 ;; SOURCE_DATE_EPOCH is respected, which we set to some time in
1460 ;; 1980.
1461 (lambda _ (setenv "SOURCE_DATE_EPOCH" "315532800") #t))
1462 ;; See https://github.com/tensorflow/tensorflow/issues/20517#issuecomment-406373913
1463 (add-after 'unpack 'python3.7-compatibility
1464 (lambda _
1465 (substitute* '("tensorflow/python/eager/pywrap_tfe_src.cc"
1466 "tensorflow/python/lib/core/ndarray_tensor.cc"
1467 "tensorflow/python/lib/core/py_func.cc")
1468 (("PyUnicode_AsUTF8") "(char *)PyUnicode_AsUTF8"))
1469 (substitute* "tensorflow/c/eager/c_api.h"
1470 (("unsigned char async")
1471 "unsigned char is_async"))
1472
1473 ;; Remove dependency on tensorboard, a complicated but probably
1474 ;; optional package.
1475 (substitute* "tensorflow/tools/pip_package/setup.py"
1476 ((".*'tensorboard >.*") ""))
1477
1478 ;; Fix the build with python-3.8, taken from rejected upstream patch:
1479 ;; https://github.com/tensorflow/tensorflow/issues/34197
1480 (substitute* (find-files "tensorflow/python" ".*\\.cc$")
1481 (("(nullptr,)(\\ +/. tp_print)" _ _ tp_print)
1482 (string-append "NULL, " tp_print)))
1483 #t))
1484 (add-after 'python3.7-compatibility 'chdir
1485 (lambda _ (chdir "tensorflow/contrib/cmake") #t))
1486 (add-after 'chdir 'disable-downloads
1487 (lambda* (#:key inputs #:allow-other-keys)
1488 (substitute* (find-files "external" "\\.cmake$")
1489 (("GIT_REPOSITORY.*") "")
1490 (("GIT_TAG.*") "")
1491 (("PREFIX ")
1492 "DOWNLOAD_COMMAND \"\"\nPREFIX "))
1493
1494 ;; Use packages from Guix
1495 (let ((grpc (assoc-ref inputs "grpc")))
1496 (substitute* "CMakeLists.txt"
1497 ;; Sqlite
1498 (("include\\(sqlite\\)") "")
1499 (("\\$\\{sqlite_STATIC_LIBRARIES\\}")
1500 (string-append (assoc-ref inputs "sqlite")
1501 "/lib/libsqlite3.so"))
1502 (("sqlite_copy_headers_to_destination") "")
1503
1504 ;; PNG
1505 (("include\\(png\\)") "")
1506 (("\\$\\{png_STATIC_LIBRARIES\\}")
1507 (string-append (assoc-ref inputs "libpng")
1508 "/lib/libpng16.so"))
1509 (("png_copy_headers_to_destination") "")
1510
1511 ;; JPEG
1512 (("include\\(jpeg\\)") "")
1513 (("\\$\\{jpeg_STATIC_LIBRARIES\\}")
1514 (string-append (assoc-ref inputs "libjpeg")
1515 "/lib/libjpeg.so"))
1516 (("jpeg_copy_headers_to_destination") "")
1517
1518 ;; GIF
1519 (("include\\(gif\\)") "")
1520 (("\\$\\{gif_STATIC_LIBRARIES\\}")
1521 (string-append (assoc-ref inputs "giflib")
1522 "/lib/libgif.so"))
1523 (("gif_copy_headers_to_destination") "")
1524
1525 ;; lmdb
1526 (("include\\(lmdb\\)") "")
1527 (("\\$\\{lmdb_STATIC_LIBRARIES\\}")
1528 (string-append (assoc-ref inputs "lmdb")
1529 "/lib/liblmdb.so"))
1530 (("lmdb_copy_headers_to_destination") "")
1531
1532 ;; Protobuf
1533 (("include\\(protobuf\\)") "")
1534 (("protobuf_copy_headers_to_destination") "")
1535 (("^ +protobuf") "")
1536
1537 ;; gRPC
1538 (("include\\(grpc\\)")
1539 "find_package(grpc REQUIRED NAMES gRPC)")
1540 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES grpc\\)") "")
1541
1542 ;; Eigen
1543 (("include\\(eigen\\)")
1544 (string-append "find_package(eigen REQUIRED NAMES Eigen3)
1545 set(eigen_INCLUDE_DIRS ${CMAKE_CURRENT_BINARY_DIR}/external/eigen_archive "
1546 (assoc-ref inputs "eigen") "/include/eigen3)"))
1547 (("^ +eigen") "")
1548
1549 ;; snappy
1550 (("include\\(snappy\\)")
1551 "add_definitions(-DTF_USE_SNAPPY)")
1552 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES snappy\\)") "")
1553
1554 ;; jsoncpp
1555 (("include\\(jsoncpp\\)") "")
1556 (("^ +jsoncpp") ""))
1557
1558 (substitute* "tf_core_framework.cmake"
1559 ((" grpc") "")
1560 (("\\$\\{GRPC_BUILD\\}/grpc_cpp_plugin")
1561 (which "grpc_cpp_plugin"))
1562 ;; Link with gRPC libraries
1563 (("add_library\\(tf_protos_cc.*" m)
1564 (string-append m
1565 (format #f "\ntarget_link_libraries(tf_protos_cc PRIVATE \
1566 ~a/lib/libgrpc++_unsecure.a \
1567 ~a/lib/libgrpc_unsecure.a \
1568 ~a/lib/libaddress_sorting.a \
1569 ~a/lib/libgpr.a \
1570 ~a/lib/libcares.so
1571 )\n"
1572 grpc grpc grpc grpc
1573 (assoc-ref inputs "c-ares"))))))
1574 (substitute* "tf_tools.cmake"
1575 (("add_dependencies\\(\\$\\{proto_text.*") ""))
1576 ;; Remove dependency on bundled grpc
1577 (substitute* "tf_core_distributed_runtime.cmake"
1578 (("tf_core_cpu grpc") "tf_core_cpu"))
1579
1580 ;; This directory is a dependency of many targets.
1581 (mkdir-p "protobuf")
1582 #t))
1583 (add-after 'configure 'unpack-third-party-sources
1584 (lambda* (#:key inputs #:allow-other-keys)
1585 ;; This is needed to configure bundled packages properly.
1586 (setenv "CONFIG_SHELL" (which "bash"))
1587 (for-each
1588 (lambda (name)
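;; Each bundled dependency must end up in
;; "../build/<name>/src/<name>", with dashes in the name replaced
;; by underscores.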
1589 (let* ((what (assoc-ref inputs (string-append name "-src")))
1590 (name* (string-map (lambda (c)
1591 (if (char=? c #\-)
1592 #\_ c)) name))
1593 (where (string-append "../build/" name* "/src/" name*)))
1594 (cond
1595 ((string-suffix? ".zip" what)
1596 (mkdir-p where)
1597 (with-directory-excursion where
1598 (invoke "unzip" what)))
1599 ((string-suffix? ".tar.gz" what)
1600 (mkdir-p where)
1601 (invoke "tar" "xf" what
1602 "-C" where "--strip-components=1"))
1603 (else
1604 (let ((parent (dirname where)))
1605 (mkdir-p parent)
1606 (with-directory-excursion parent
1607 (when (file-exists? name*)
1608 (delete-file-recursively name*))
1609 (copy-recursively what name*)
1610 (for-each make-file-writable
1611 (find-files name* ".*"))))))))
1612 (list "boringssl"
1613 "cub"
1614 "double-conversion"
1615 "farmhash"
1616 "fft2d"
1617 "highwayhash"
1618 "nsync"
1619 "re2"))
1620
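;; The cub archive unpacks into a versioned directory, but the build
;; system expects to find it under plain "cub".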
1621 (rename-file "../build/cub/src/cub/cub-1.8.0/"
1622 "../build/cub/src/cub/cub/")
1623 #t))
1624 (add-after 'unpack 'fix-python-build
1625 (lambda* (#:key inputs outputs #:allow-other-keys)
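;; The CMake rules adjusted below refer to unpacked protobuf and Eigen
;; source trees, so provide them from the corresponding source inputs.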
1626 (mkdir-p "protobuf-src")
1627 (invoke "tar" "xf" (assoc-ref inputs "protobuf:src")
1628 "-C" "protobuf-src" "--strip-components=1")
1629 (mkdir-p "eigen-src")
1630 (invoke "tar" "xf" (assoc-ref inputs "eigen:src")
1631 "-C" "eigen-src" "--strip-components=1")
1632
1633 (substitute* "tensorflow/contrib/cmake/tf_python.cmake"
1634 ;; Ensure that all Python dependencies can be found at build time.
1635 (("PYTHONPATH=\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/tf_python" m)
1636 (string-append m ":" (getenv "PYTHONPATH")))
1637 ;; Take protobuf source files from our source package.
1638 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/protobuf/src/protobuf/src/google")
1639 (string-append (getcwd) "/protobuf-src/src/google")))
1640
1641 (substitute* '("tensorflow/contrib/cmake/tf_shared_lib.cmake"
1642 "tensorflow/contrib/cmake/tf_python.cmake")
1643 ;; Take Eigen source files from our source package.
1644 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/eigen/src/eigen/")
1645 (string-append (getcwd) "/eigen-src/"))
1646 ;; Take Eigen headers from our own package.
1647 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/external/eigen_archive")
1648 (string-append (assoc-ref inputs "eigen") "/include/eigen3")))
1649
1650 ;; Correct the RUNPATH of ops libraries generated for Python.
1651 ;; TODO: this doesn't work :(
1652 ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
1653 ;; warning: RUNPATH contains bogus entries: ("/tmp/guix-build-tensorflow-1.9.0.drv-0/source/tensorflow/contrib/build")
1654 ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
1655 ;; error: depends on 'libpywrap_tensorflow_internal.so', which
1656 ;; cannot be found in RUNPATH ...
1657 (substitute* "tensorflow/contrib/cmake/tf_cc_ops.cmake"
1658 (("set_target_properties.*")
1659 (string-append "set_target_properties(${_AT_TARGET} PROPERTIES \
1660 COMPILE_FLAGS ${target_compile_flags} \
1661 INSTALL_RPATH_USE_LINK_PATH TRUE \
1662 INSTALL_RPATH " (assoc-ref outputs "out") "/lib)\n")))
1663 #t))
1664 (add-after 'build 'build-pip-package
1665 (lambda* (#:key outputs #:allow-other-keys)
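;; Record the output's library directory in the RUNPATH of the
;; libraries linked while building the pip package.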
1666 (setenv "LDFLAGS"
1667 (string-append "-Wl,-rpath="
1668 (assoc-ref outputs "out") "/lib"))
1669 (invoke "make" "tf_python_build_pip_package")
1670 #t))
1671 (add-after 'build-pip-package 'install-python
1672 (lambda* (#:key inputs outputs #:allow-other-keys)
1673 (let ((out (assoc-ref outputs "out"))
1674 (wheel (car (find-files "../build/tf_python/dist/" "\\.whl$")))
1675 (python-version (python-version
1676 (assoc-ref inputs "python"))))
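;; Install the wheel produced by the 'build-pip-package phase.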
1677 (invoke "python" "-m" "pip" "install" wheel
1678 (string-append "--prefix=" out))
1679
1680 ;; XXX: broken RUNPATH, see fix-python-build phase.
1681 (delete-file
1682 (string-append
1683 out "/lib/python" python-version
1684 "/site-packages/tensorflow/contrib/"
1685 "seq2seq/python/ops/lib_beam_search_ops.so"))
1686 #t))))))
1687 (native-inputs
1688 `(("pkg-config" ,pkg-config)
1689 ("protobuf:native" ,protobuf-3.6) ; protoc
1690 ("protobuf:src" ,(package-source protobuf-3.6))
1691 ("eigen:src" ,(package-source eigen-for-tensorflow))
1692 ;; install_pip_packages.sh wants setuptools 39.1.0 specifically.
1693 ("python-setuptools" ,python-setuptools-for-tensorflow)
1694
1695 ;; The commit hashes and URLs for third-party source code are taken
1696 ;; from "tensorflow/workspace.bzl".
1697 ("boringssl-src"
1698 ,(let ((commit "ee7aa02")
1699 (revision "1"))
1700 (origin
1701 (method git-fetch)
1702 (uri (git-reference
1703 (url "https://boringssl.googlesource.com/boringssl")
1704 (commit commit)))
1705 (file-name (string-append "boringssl-0-" revision
1706 (string-take commit 7)
1707 "-checkout"))
1708 (sha256
1709 (base32
1710 "1jf693q0nw0adsic6cgmbdx6g7wr4rj4vxa8j1hpn792fqhd8wgw")))))
1711 ("cub-src"
1712 ,(let ((version "1.8.0"))
1713 (origin
1714 (method url-fetch)
1715 (uri (string-append "https://mirror.bazel.build/github.com/NVlabs/"
1716 "cub/archive/" version ".zip"))
1717 (file-name (string-append "cub-" version ".zip"))
1718 (sha256
1719 (base32
1720 "1hsqikqridb90dkxkjr2918dcry6pfh46ccnwrzawl56aamhdykb")))))
1721 ("double-conversion-src"
1722 ,(let ((commit "5664746")
1723 (revision "1"))
1724 (origin
1725 (method git-fetch)
1726 (uri (git-reference
1727 (url "https://github.com/google/double-conversion")
1728 (commit commit)))
1729 (file-name
1730 (git-file-name "double-conversion"
1731 (string-append "0-" revision "."
1732 (string-take commit 7))))
1733 (sha256
1734 (base32
1735 "1h5lppqqxcvdg5jq42i5msgwx20ryij3apvmndflngrgdpc04gn1")))))
1736 ("farmhash-src"
1737 ,(let ((commit "816a4ae622e964763ca0862d9dbd19324a1eaf45"))
1738 (origin
1739 (method url-fetch)
1740 (uri (string-append
1741 "https://mirror.bazel.build/github.com/google/farmhash/archive/"
1742 commit ".tar.gz"))
1743 (file-name (string-append "farmhash-0-" (string-take commit 7)
1744 ".tar.gz"))
1745 (sha256
1746 (base32
1747 "185b2xdxl4d4cnsnv6abg8s22gxvx8673jq2yaq85bz4cdy58q35")))))
1748 ;; The license notice on the home page at
1749 ;; http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html says:
1750 ;; Copyright Takuya OOURA, 1996-2001
1751 ;;
1752 ;; You may use, copy, modify and distribute this code for any purpose
1753 ;; (include commercial use) and without fee. Please refer to this
1754 ;; package when you modify this code.
1755 ;;
1756 ;; We take the identical tarball from the Bazel mirror, because the URL
1757 ;; at the home page is not versioned and might change.
1758 ("fft2d-src"
1759 ,(origin
1760 (method url-fetch)
1761 (uri "https://mirror.bazel.build/www.kurims.kyoto-u.ac.jp/~ooura/fft.tgz")
1762 (file-name "fft2d.tar.gz")
1763 (sha256
1764 (base32
1765 "15jjkfvhqvl2c0753d2di8hz0pyzn598g74wqy79awdrf1y67fsj"))))
1766 ("highwayhash-src"
1767 ,(let ((commit "be5edafc2e1a455768e260ccd68ae7317b6690ee")
1768 (revision "1"))
1769 (origin
1770 (method git-fetch)
1771 (uri (git-reference
1772 (url "https://github.com/google/highwayhash")
1773 (commit commit)))
1774 (file-name (string-append "highwayhash-0-" revision
1775 (string-take commit 7)
1776 "-checkout"))
1777 (sha256
1778 (base32
1779 "154jwf98cyy54hldr94pgjn85zynly3abpnc1avmb8a18lzwjyb6")))))
1780 ("nsync-src"
1781 ,(let ((version "0559ce013feac8db639ee1bf776aca0325d28777")
1782 (revision "1"))
1783 (origin
1784 (method url-fetch)
1785 (uri (string-append "https://mirror.bazel.build/"
1786 "github.com/google/nsync/archive/"
1787 version ".tar.gz"))
1788 (file-name (string-append "nsync-0." revision
1789 "-" (string-take version 7)
1790 ".tar.gz"))
1791 (sha256
1792 (base32
1793 "0qdkyqym34x739mmzv97ah5r7ph462v5xkxqxvidmcfqbi64b132")))))
1794 ("re2-src"
1795 ,(let ((commit "e7efc48")
1796 (revision "1"))
1797 (origin
1798 (method git-fetch)
1799 (uri (git-reference
1800 (url "https://github.com/google/re2")
1801 (commit commit)))
1802 (file-name (string-append "re2-0-" revision
1803 (string-take commit 7)
1804 "-checkout"))
1805 (sha256
1806 (base32
1807 "161g9841rjfsy5pn52fcis0s9hdr7rxvb06pad38j5rppfihvign")))))
1808 ("googletest" ,googletest)
1809 ("swig" ,swig)
1810 ("unzip" ,unzip)))
1811 (propagated-inputs
1812 `(("python-absl-py" ,python-absl-py)
1813 ("python-astor" ,python-astor)
1814 ("python-gast" ,python-gast)
1815 ("python-grpcio" ,python-grpcio)
1816 ("python-numpy" ,python-numpy)
1817 ("python-protobuf" ,python-protobuf-3.6)
1818 ("python-six" ,python-six)
1819 ("python-termcolo" ,python-termcolor)
1820 ("python-wheel" ,python-wheel)))
1821 (inputs
1822 `(("c-ares" ,c-ares)
1823 ("eigen" ,eigen-for-tensorflow)
1824 ("gemmlowp" ,gemmlowp-for-tensorflow)
1825 ("lmdb" ,lmdb)
1826 ("libjpeg" ,libjpeg-turbo)
1827 ("libpng" ,libpng)
1828 ("giflib" ,giflib)
1829 ("grpc" ,grpc-1.16.1 "static")
1830 ("grpc:bin" ,grpc-1.16.1)
1831 ("jsoncpp" ,jsoncpp-for-tensorflow)
1832 ("snappy" ,snappy)
1833 ("sqlite" ,sqlite)
1834 ("protobuf" ,protobuf-3.6)
1835 ("python" ,python-wrapper)
1836 ("zlib" ,zlib)))
1837 (home-page "https://tensorflow.org")
1838 (synopsis "Machine learning framework")
1839 (description
1840 "TensorFlow is a flexible platform for building and training machine
1841 learning models. It provides a library for high performance numerical
1842 computation and includes high level Python APIs, including both a sequential
1843 API for beginners that allows users to build models quickly by plugging
1844 together building blocks and a subclassing API with an imperative style for
1845 advanced research.")
1846 (license license:asl2.0)))
1847
1848 (define-public python-iml
1849 (package
1850 (name "python-iml")
1851 (version "0.6.2")
1852 (source
1853 (origin
1854 (method url-fetch)
1855 (uri (pypi-uri "iml" version))
1856 (sha256
1857 (base32
1858 "1k8szlpm19rcwcxdny9qdm3gmaqq8akb4xlvrzyz8c2d679aak6l"))))
1859 (build-system python-build-system)
1860 (propagated-inputs
1861 `(("ipython" ,python-ipython)
1862 ("numpy" ,python-numpy)
1863 ("pandas" ,python-pandas)
1864 ("scipy" ,python-scipy)))
1865 (native-inputs
1866 `(("nose" ,python-nose)))
1867 (home-page "https://github.com/interpretable-ml/iml")
1868 (synopsis "Interpretable Machine Learning (iML) package")
1869 (description "Interpretable ML (iML) is a set of data type objects,
1870 visualizations, and interfaces that can be used by any method designed to
1871 explain the predictions of machine learning models (or really the output of
1872 any function). It currently contains the interface and IO code from the Shap
1873 project, and it will potentially also do the same for the Lime project.")
1874 (license license:expat)))
1875
1876 (define-public python-keras-applications
1877 (package
1878 (name "python-keras-applications")
1879 (version "1.0.8")
1880 (source
1881 (origin
1882 (method url-fetch)
1883 (uri (pypi-uri "Keras_Applications" version))
1884 (sha256
1885 (base32
1886 "1rcz31ca4axa6kzhjx4lwqxbg4wvlljkj8qj9a7p9sfd5fhzjyam"))))
1887 (build-system python-build-system)
1888 ;; The tests require Keras, but this package is needed to build Keras.
1889 (arguments '(#:tests? #f))
1890 (propagated-inputs
1891 `(("python-h5py" ,python-h5py)
1892 ("python-numpy" ,python-numpy)))
1893 (native-inputs
1894 `(("python-pytest" ,python-pytest)
1895 ("python-pytest-cov" ,python-pytest-cov)
1896 ("python-pytest-pep8" ,python-pytest-pep8)
1897 ("python-pytest-xdist" ,python-pytest-xdist)))
1898 (home-page "https://github.com/keras-team/keras-applications")
1899 (synopsis "Reference implementations of popular deep learning models")
1900 (description
1901 "This package provides reference implementations of popular deep learning
1902 models for use with the Keras deep learning framework.")
1903 (license license:expat)))
1904
1905 (define-public python-keras-preprocessing
1906 (package
1907 (name "python-keras-preprocessing")
1908 (version "1.1.0")
1909 (source
1910 (origin
1911 (method url-fetch)
1912 (uri (pypi-uri "Keras_Preprocessing" version))
1913 (sha256
1914 (base32
1915 "1r98nm4k1svsqjyaqkfk23i31bl1kcfcyp7094yyj3c43phfp3as"))))
1916 (build-system python-build-system)
1917 (propagated-inputs
1918 `(("python-numpy" ,python-numpy)
1919 ("python-six" ,python-six)))
1920 (native-inputs
1921 `(("python-pandas" ,python-pandas)
1922 ("python-pillow" ,python-pillow)
1923 ("python-pytest" ,python-pytest)
1924 ("python-pytest-cov" ,python-pytest-cov)
1925 ("python-pytest-xdist" ,python-pytest-xdist)
1926 ("tensorflow" ,tensorflow)))
1927 (home-page "https://github.com/keras-team/keras-preprocessing/")
1928 (synopsis "Data preprocessing and augmentation for deep learning models")
1929 (description
1930 "Keras Preprocessing is the data preprocessing and data augmentation
1931 module of the Keras deep learning library. It provides utilities for working
1932 with image data, text data, and sequence data.")
1933 (license license:expat)))
1934
1935 (define-public python-keras
1936 (package
1937 (name "python-keras")
1938 (version "2.2.4")
1939 (source
1940 (origin
1941 (method url-fetch)
1942 (uri (pypi-uri "Keras" version))
1943 (patches (search-patches "python-keras-integration-test.patch"))
1944 (sha256
1945 (base32
1946 "1j8bsqzh49vjdxy6l1k4iwax5vpjzniynyd041xjavdzvfii1dlh"))))
1947 (build-system python-build-system)
1948 (arguments
1949 `(#:phases
1950 (modify-phases %standard-phases
1951 (add-after 'unpack 'remove-tests-for-unavailable-features
1952 (lambda _
1953 (delete-file "keras/backend/theano_backend.py")
1954 (delete-file "keras/backend/cntk_backend.py")
1955 (delete-file "tests/keras/backend/backend_test.py")
1956
1957 ;; FIXME: This doesn't work because Tensorflow is missing the
1958 ;; coder ops library.
1959 (delete-file "tests/keras/test_callbacks.py")
1960 #t))
1961 (replace 'check
1962 (lambda _
1963 ;; These tests attempt to download data files from the internet.
1964 (delete-file "tests/integration_tests/test_datasets.py")
1965 (delete-file "tests/integration_tests/imagenet_utils_test.py")
1966
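;; Make the freshly built Keras modules visible to the test suite.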
1967 (setenv "PYTHONPATH"
1968 (string-append (getcwd) "/build/lib:"
1969 (getenv "PYTHONPATH")))
1970 (invoke "py.test" "-v"
1971 "-p" "no:cacheprovider"
1972 "--ignore" "keras/utils"))))))
1973 (propagated-inputs
1974 `(("python-h5py" ,python-h5py)
1975 ("python-keras-applications" ,python-keras-applications)
1976 ("python-keras-preprocessing" ,python-keras-preprocessing)
1977 ("python-numpy" ,python-numpy)
1978 ("python-pydot" ,python-pydot)
1979 ("python-pyyaml" ,python-pyyaml)
1980 ("python-scipy" ,python-scipy)
1981 ("python-six" ,python-six)
1982 ("tensorflow" ,tensorflow)
1983 ("graphviz" ,graphviz)))
1984 (native-inputs
1985 `(("python-pandas" ,python-pandas)
1986 ("python-pytest" ,python-pytest)
1987 ("python-pytest-cov" ,python-pytest-cov)
1988 ("python-pytest-pep8" ,python-pytest-pep8)
1989 ("python-pytest-timeout" ,python-pytest-timeout)
1990 ("python-pytest-xdist" ,python-pytest-xdist)
1991 ("python-sphinx" ,python-sphinx)
1992 ("python-requests" ,python-requests)))
1993 (home-page "https://github.com/keras-team/keras")
1994 (synopsis "High-level deep learning framework")
1995 (description "Keras is a high-level neural networks API, written in Python
1996 and capable of running on top of TensorFlow. It was developed with a focus on
1997 enabling fast experimentation. Use Keras if you need a deep learning library
1998 that:
1999
2000 @itemize
2001 @item Allows for easy and fast prototyping (through user-friendliness,
2002 modularity, and extensibility).
2003 @item Supports both convolutional networks and recurrent networks, as well as
2004 combinations of the two.
2005 @item Runs seamlessly on CPU and GPU.
2006 @end itemize\n")
2007 (license license:expat)))
2008
2009 (define-public gloo
2010 (let ((version "0.0.0") ; no proper version tag
2011 (commit "ca528e32fea9ca8f2b16053cff17160290fc84ce")
2012 (revision "0"))
2013 (package
2014 (name "gloo")
2015 (version (git-version version revision commit))
2016 (source
2017 (origin
2018 (method git-fetch)
2019 (uri (git-reference
2020 (url "https://github.com/facebookincubator/gloo")
2021 (commit commit)))
2022 (file-name (git-file-name name version))
2023 (sha256
2024 (base32
2025 "1q9f80zy75f6njrzrqkmhc0g3qxs4gskr7ns2jdqanxa2ww7a99w"))))
2026 (build-system cmake-build-system)
2027 (native-inputs
2028 `(("googletest" ,googletest)))
2029 (arguments
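;; Also build the test suite (see the 'check phase below).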
2030 `(#:configure-flags '("-DBUILD_TEST=1")
2031 #:phases
2032 (modify-phases %standard-phases
2033 (replace 'check
2034 (lambda _
2035 (invoke "make" "gloo_test")
2036 #t)))))
2037 (synopsis "Collective communications library")
2038 (description
2039 "Gloo is a collective communications library. It comes with a
2040 number of collective algorithms useful for machine learning applications.
2041 These include a barrier, broadcast, and allreduce.")
2042 (home-page "https://github.com/facebookincubator/gloo")
2043 (license license:bsd-3))))
2044
2045 (define-public python-umap-learn
2046 (package
2047 (name "python-umap-learn")
2048 (version "0.3.10")
2049 (source
2050 (origin
2051 (method url-fetch)
2052 (uri (pypi-uri "umap-learn" version))
2053 (sha256
2054 (base32
2055 "02ada2yy6km6zgk2836kg1c97yrcpalvan34p8c57446finnpki1"))))
2056 (build-system python-build-system)
2057 (native-inputs
2058 `(("python-joblib" ,python-joblib)
2059 ("python-nose" ,python-nose)))
2060 (propagated-inputs
2061 `(("python-numba" ,python-numba)
2062 ("python-numpy" ,python-numpy)
2063 ("python-scikit-learn" ,python-scikit-learn)
2064 ("python-scipy" ,python-scipy)))
2065 (home-page "https://github.com/lmcinnes/umap")
2066 (synopsis
2067 "Uniform Manifold Approximation and Projection")
2068 (description
2069 "Uniform Manifold Approximation and Projection is a dimension reduction
2070 technique that can be used for visualization, similar to t-SNE, but also for
2071 general non-linear dimension reduction.")
2072 (license license:bsd-3)))