Merge branch 'master' into staging
[jackhill/guix/guix.git] / gnu / packages / machine-learning.scm
1 ;;; GNU Guix --- Functional package management for GNU
2 ;;; Copyright © 2015, 2016, 2017, 2018, 2019 Ricardo Wurmus <rekado@elephly.net>
3 ;;; Copyright © 2016 Efraim Flashner <efraim@flashner.co.il>
4 ;;; Copyright © 2016, 2017 Marius Bakke <mbakke@fastmail.com>
5 ;;; Copyright © 2016 Hartmut Goebel <h.goebel@crazy-compilers.com>
6 ;;; Copyright © 2018, 2019 Tobias Geerinckx-Rice <me@tobias.gr>
7 ;;; Copyright © 2018 Kei Kebreau <kkebreau@posteo.net>
8 ;;; Copyright © 2018 Mark Meyer <mark@ofosos.org>
9 ;;; Copyright © 2018 Ben Woodcroft <donttrustben@gmail.com>
10 ;;; Copyright © 2018 Fis Trivial <ybbs.daans@hotmail.com>
11 ;;; Copyright © 2018 Julien Lepiller <julien@lepiller.eu>
12 ;;; Copyright © 2018 Björn Höfling <bjoern.hoefling@bjoernhoefling.de>
13 ;;; Copyright © 2019 Nicolas Goaziou <mail@nicolasgoaziou.fr>
14 ;;;
15 ;;; This file is part of GNU Guix.
16 ;;;
17 ;;; GNU Guix is free software; you can redistribute it and/or modify it
18 ;;; under the terms of the GNU General Public License as published by
19 ;;; the Free Software Foundation; either version 3 of the License, or (at
20 ;;; your option) any later version.
21 ;;;
22 ;;; GNU Guix is distributed in the hope that it will be useful, but
23 ;;; WITHOUT ANY WARRANTY; without even the implied warranty of
24 ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
25 ;;; GNU General Public License for more details.
26 ;;;
27 ;;; You should have received a copy of the GNU General Public License
28 ;;; along with GNU Guix. If not, see <http://www.gnu.org/licenses/>.
29
30 (define-module (gnu packages machine-learning)
31 #:use-module ((guix licenses) #:prefix license:)
32 #:use-module (guix packages)
33 #:use-module (guix utils)
34 #:use-module (guix download)
35 #:use-module (guix svn-download)
36 #:use-module (guix build-system cmake)
37 #:use-module (guix build-system gnu)
38 #:use-module (guix build-system ocaml)
39 #:use-module (guix build-system python)
40 #:use-module (guix build-system r)
41 #:use-module (guix git-download)
42 #:use-module (gnu packages)
43 #:use-module (gnu packages adns)
44 #:use-module (gnu packages algebra)
45 #:use-module (gnu packages audio)
46 #:use-module (gnu packages autotools)
47 #:use-module (gnu packages base)
48 #:use-module (gnu packages bash)
49 #:use-module (gnu packages boost)
50 #:use-module (gnu packages check)
51 #:use-module (gnu packages compression)
52 #:use-module (gnu packages cran)
53 #:use-module (gnu packages databases)
54 #:use-module (gnu packages dejagnu)
55 #:use-module (gnu packages gcc)
56 #:use-module (gnu packages glib)
57 #:use-module (gnu packages graphviz)
58 #:use-module (gnu packages gstreamer)
59 #:use-module (gnu packages image)
60 #:use-module (gnu packages linux)
61 #:use-module (gnu packages maths)
62 #:use-module (gnu packages mpi)
63 #:use-module (gnu packages ocaml)
64 #:use-module (gnu packages onc-rpc)
65 #:use-module (gnu packages perl)
66 #:use-module (gnu packages pkg-config)
67 #:use-module (gnu packages protobuf)
68 #:use-module (gnu packages python)
69 #:use-module (gnu packages python-web)
70 #:use-module (gnu packages python-xyz)
71 #:use-module (gnu packages serialization)
72 #:use-module (gnu packages sphinx)
73 #:use-module (gnu packages statistics)
74 #:use-module (gnu packages sqlite)
75 #:use-module (gnu packages swig)
76 #:use-module (gnu packages tls)
77 #:use-module (gnu packages web)
78 #:use-module (gnu packages xml)
79 #:use-module (gnu packages xorg)
80 #:use-module (ice-9 match))
81
82 (define-public fann
83 ;; The last release is >100 commits behind, so we package from git.
84 (let ((commit "d71d54788bee56ba4cf7522801270152da5209d7"))
85 (package
86 (name "fann")
87 (version (string-append "2.2.0-1." (string-take commit 8)))
88 (source (origin
89 (method git-fetch)
90 (uri (git-reference
91 (url "https://github.com/libfann/fann.git")
92 (commit commit)))
93 (file-name (string-append name "-" version "-checkout"))
94 (sha256
95 (base32
96 "0ibwpfrjs6q2lijs8slxjgzb2llcl6rk3v2ski4r6215g5jjhg3x"))))
97 (build-system cmake-build-system)
98 (arguments
99 `(#:phases
100 (modify-phases %standard-phases
101 (replace 'check
102 (lambda* (#:key outputs #:allow-other-keys)
103 (let* ((out (assoc-ref outputs "out")))
104 (with-directory-excursion (string-append (getcwd) "/tests")
105 (invoke "./fann_tests"))))))))
106 (home-page "http://leenissen.dk/fann/wp/")
107 (synopsis "Fast Artificial Neural Network")
108 (description
109 "FANN is a free open source neural network library, which implements
110 multilayer artificial neural networks in C with support for both fully
111 connected and sparsely connected networks.")
112 (license license:lgpl2.1))))
113
114 (define-public libsvm
115 (package
116 (name "libsvm")
117 (version "3.23")
118 (source
119 (origin
120 (method url-fetch)
121 (uri (string-append "https://www.csie.ntu.edu.tw/~cjlin/libsvm/"
122 name "-" version ".tar.gz"))
123 (sha256
124 (base32 "0jpaq0rr92x38p4nk3gjan79ip67m6p80anb28z1d8601miysyi5"))))
125 (build-system gnu-build-system)
126 (arguments
127 `(#:tests? #f ; no "check" target
128 #:phases (modify-phases %standard-phases
129 (delete 'configure)
130 (replace
131 'install ; no ‘install’ target
132 (lambda* (#:key outputs #:allow-other-keys)
133 (let* ((out (assoc-ref outputs "out"))
134 (bin (string-append out "/bin/")))
135 (mkdir-p bin)
136 (for-each (lambda (file)
137 (copy-file file (string-append bin file)))
138 '("svm-train"
139 "svm-predict"
140 "svm-scale")))
141 #t)))))
142 (home-page "http://www.csie.ntu.edu.tw/~cjlin/libsvm/")
143 (synopsis "Library for Support Vector Machines")
144 (description
145 "LIBSVM is a machine learning library for support vector
146 classification, (C-SVC, nu-SVC), regression (epsilon-SVR, nu-SVR) and
147 distribution estimation (one-class SVM). It supports multi-class
148 classification.")
149 (license license:bsd-3)))
150
151 (define-public python-libsvm
152 (package (inherit libsvm)
153 (name "python-libsvm")
154 (build-system gnu-build-system)
155 (arguments
156 `(#:tests? #f ; no "check" target
157 #:make-flags '("-C" "python")
158 #:phases
159 (modify-phases %standard-phases
160 (delete 'configure)
161 (replace
162 'install ; no ‘install’ target
163 (lambda* (#:key inputs outputs #:allow-other-keys)
164 (let ((site (string-append (assoc-ref outputs "out")
165 "/lib/python"
166 (string-take
167 (string-take-right
168 (assoc-ref inputs "python") 5) 3)
169 "/site-packages/")))
170 (substitute* "python/svm.py"
171 (("../libsvm.so.2") "libsvm.so.2"))
172 (mkdir-p site)
173 (for-each (lambda (file)
174 (copy-file file (string-append site (basename file))))
175 (find-files "python" "\\.py"))
176 (copy-file "libsvm.so.2"
177 (string-append site "libsvm.so.2")))
178 #t)))))
179 (inputs
180 `(("python" ,python)))
181 (synopsis "Python bindings of libSVM")))
182
183 (define-public ghmm
184 ;; The latest release candidate is several years and a couple of fixes have
185 ;; been published since. This is why we download the sources from the SVN
186 ;; repository.
187 (let ((svn-revision 2341))
188 (package
189 (name "ghmm")
190 (version (string-append "0.9-rc3-0." (number->string svn-revision)))
191 (source (origin
192 (method svn-fetch)
193 (uri (svn-reference
194 (url "http://svn.code.sf.net/p/ghmm/code/trunk")
195 (revision svn-revision)))
196 (file-name (string-append name "-" version))
197 (sha256
198 (base32
199 "0qbq1rqp94l530f043qzp8aw5lj7dng9wq0miffd7spd1ff638wq"))))
200 (build-system gnu-build-system)
201 (arguments
202 `(#:imported-modules (,@%gnu-build-system-modules
203 (guix build python-build-system))
204 #:phases
205 (modify-phases %standard-phases
206 (add-after 'unpack 'enter-dir
207 (lambda _ (chdir "ghmm") #t))
208 (delete 'check)
209 (add-after 'install 'check
210 (assoc-ref %standard-phases 'check))
211 (add-before 'check 'fix-PYTHONPATH
212 (lambda* (#:key inputs outputs #:allow-other-keys)
213 (let ((python-version ((@@ (guix build python-build-system)
214 get-python-version)
215 (assoc-ref inputs "python"))))
216 (setenv "PYTHONPATH"
217 (string-append (getenv "PYTHONPATH")
218 ":" (assoc-ref outputs "out")
219 "/lib/python" python-version
220 "/site-packages")))
221 #t))
222 (add-after 'enter-dir 'fix-runpath
223 (lambda* (#:key outputs #:allow-other-keys)
224 (substitute* "ghmmwrapper/setup.py"
225 (("^(.*)extra_compile_args = \\[" line indent)
226 (string-append indent
227 "extra_link_args = [\"-Wl,-rpath="
228 (assoc-ref outputs "out") "/lib\"],\n"
229 line
230 "\"-Wl,-rpath="
231 (assoc-ref outputs "out")
232 "/lib\", ")))
233 #t))
234 (add-after 'enter-dir 'disable-broken-tests
235 (lambda _
236 (substitute* "tests/Makefile.am"
237 ;; GHMM_SILENT_TESTS is assumed to be a command.
238 (("TESTS_ENVIRONMENT.*") "")
239 ;; Do not build broken tests.
240 (("chmm .*") "")
241 (("read_fa .*") "")
242 (("mcmc .*") "")
243 (("label_higher_order_test.*$")
244 "label_higher_order_test\n"))
245
246 ;; These Python unittests are broken as there is no gato.
247 ;; See https://sourceforge.net/p/ghmm/support-requests/3/
248 (substitute* "ghmmwrapper/ghmmunittests.py"
249 (("^(.*)def (testNewXML|testMultipleTransitionClasses|testNewXML)"
250 line indent)
251 (string-append indent
252 "@unittest.skip(\"Disabled by Guix\")\n"
253 line)))
254 #t))
255 (add-after 'disable-broken-tests 'autogen
256 (lambda _
257 (invoke "bash" "autogen.sh"))))))
258 (inputs
259 `(("python" ,python-2) ; only Python 2 is supported
260 ("libxml2" ,libxml2)))
261 (native-inputs
262 `(("pkg-config" ,pkg-config)
263 ("dejagnu" ,dejagnu)
264 ("swig" ,swig)
265 ("autoconf" ,autoconf)
266 ("automake" ,automake)
267 ("libtool" ,libtool)))
268 (home-page "http://ghmm.org")
269 (synopsis "Hidden Markov Model library")
270 (description
271 "The General Hidden Markov Model library (GHMM) is a C library with
272 additional Python bindings implementing a wide range of types of @dfn{Hidden
273 Markov Models} (HMM) and algorithms: discrete, continuous emissions, basic
274 training, HMM clustering, HMM mixtures.")
275 (license license:lgpl2.0+))))
276
277 (define-public mcl
278 (package
279 (name "mcl")
280 (version "14.137")
281 (source (origin
282 (method url-fetch)
283 (uri (string-append
284 "http://micans.org/mcl/src/mcl-"
285 (string-replace-substring version "." "-")
286 ".tar.gz"))
287 (sha256
288 (base32
289 "15xlax3z31lsn62vlg94hkm75nm40q4679amnfg13jm8m2bnhy5m"))))
290 (build-system gnu-build-system)
291 (arguments
292 `(#:configure-flags (list "--enable-blast")))
293 (inputs
294 `(("perl" ,perl)))
295 (home-page "http://micans.org/mcl/")
296 (synopsis "Clustering algorithm for graphs")
297 (description
298 "The MCL algorithm is short for the @dfn{Markov Cluster Algorithm}, a
299 fast and scalable unsupervised cluster algorithm for graphs (also known as
300 networks) based on simulation of (stochastic) flow in graphs.")
301 ;; In the LICENCE file and web page it says "The software is licensed
302 ;; under the GNU General Public License, version 3.", but in several of
303 ;; the source code files it suggests GPL3 or later.
304 ;; http://listserver.ebi.ac.uk/pipermail/mcl-users/2016/000376.html
305 (license license:gpl3)))
306
307 (define-public ocaml-mcl
308 (package
309 (name "ocaml-mcl")
310 (version "12-068oasis4")
311 (source
312 (origin
313 (method git-fetch)
314 (uri (git-reference
315 (url "https://github.com/fhcrc/mcl.git")
316 (commit version)))
317 (file-name (git-file-name name version))
318 (sha256
319 (base32
320 "0009dc3h2jp3qg5val452wngpqnbfyhbcxylghq0mrjqxx0jdq5p"))))
321 (build-system ocaml-build-system)
322 (arguments
323 `(#:phases
324 (modify-phases %standard-phases
325 (add-before 'configure 'patch-paths
326 (lambda _
327 (substitute* "configure"
328 (("/bin/sh") (which "sh")))
329 (substitute* "setup.ml"
330 (("LDFLAGS=-fPIC")
331 (string-append "LDFLAGS=-fPIC\"; \"SHELL=" (which "sh")))
332 (("-std=c89") "-std=gnu99")
333
334 ;; This is a mutable string, which is no longer supported. Use
335 ;; a byte buffer instead.
336 (("String.make \\(String.length s\\)")
337 "Bytes.make (String.length s)")
338
339 ;; These two belong together.
340 (("OASISString.replace_chars")
341 "Bytes.to_string (OASISString.replace_chars")
342 ((" s;")
343 " s);"))
344 (substitute* "myocamlbuild.ml"
345 (("std=c89") "std=gnu99"))
346 ;; Since we build with a more recent OCaml, we have to use C99 or
347 ;; later. This causes problems with the old C code.
348 (substitute* "src/impala/matrix.c"
349 (("restrict") "restrict_"))
350 #t)))))
351 (native-inputs
352 `(("ocamlbuild" ,ocamlbuild)))
353 (home-page "https://github.com/fhcrc/mcl")
354 (synopsis "OCaml wrappers around MCL")
355 (description
356 "This package provides OCaml bindings for the MCL graph clustering
357 algorithm.")
358 (license license:gpl3)))
359
360 (define-public randomjungle
361 (package
362 (name "randomjungle")
363 (version "2.1.0")
364 (source
365 (origin
366 (method url-fetch)
367 (uri (string-append
368 "https://www.imbs.uni-luebeck.de/fileadmin/files/Software"
369 "/randomjungle/randomjungle-" version ".tar_.gz"))
370 (patches (search-patches "randomjungle-disable-static-build.patch"))
371 (sha256
372 (base32
373 "12c8rf30cla71swx2mf4ww9mfd8jbdw5lnxd7dxhyw1ygrvg6y4w"))))
374 (build-system gnu-build-system)
375 (arguments
376 `(#:configure-flags
377 (list "--disable-static"
378 (string-append "--with-boost="
379 (assoc-ref %build-inputs "boost")))
380 #:phases
381 (modify-phases %standard-phases
382 (add-before
383 'configure 'set-CXXFLAGS
384 (lambda _
385 (setenv "CXXFLAGS" "-fpermissive ")
386 #t)))))
387 (inputs
388 `(("boost" ,boost)
389 ("gsl" ,gsl)
390 ("libxml2" ,libxml2)
391 ("zlib" ,zlib)))
392 (native-inputs
393 `(("gfortran" ,gfortran)
394 ("gfortran:lib" ,gfortran "lib")))
395 ;; Non-portable assembly instructions are used so building fails on
396 ;; platforms other than x86_64 or i686.
397 (supported-systems '("x86_64-linux" "i686-linux"))
398 (home-page "https://www.imbs.uni-luebeck.de/forschung/software/details.html#c224")
399 (synopsis "Implementation of the Random Forests machine learning method")
400 (description
401 "Random Jungle is an implementation of Random Forests. It is supposed to
402 analyse high dimensional data. In genetics, it can be used for analysing big
403 Genome Wide Association (GWA) data. Random Forests is a powerful machine
404 learning method. Most interesting features are variable selection, missing
405 value imputation, classifier creation, generalization error estimation and
406 sample proximities between pairs of cases.")
407 (license license:gpl3+)))
408
409 (define-public openfst
410 (package
411 (name "openfst")
412 (version "1.7.2")
413 (source (origin
414 (method url-fetch)
415 (uri (string-append "http://www.openfst.org/twiki/pub/FST/"
416 "FstDownload/openfst-" version ".tar.gz"))
417 (sha256
418 (base32
419 "0fqgk8195kz21is09gwzwnrg7fr9526bi9mh4apyskapz27pbhr1"))))
420 (build-system gnu-build-system)
421 (home-page "http://www.openfst.org")
422 (synopsis "Library for weighted finite-state transducers")
423 (description "OpenFst is a library for constructing, combining,
424 optimizing, and searching weighted finite-state transducers (FSTs).")
425 (license license:asl2.0)))
426
427 (define-public shogun
428 (package
429 (name "shogun")
430 (version "6.1.3")
431 (source
432 (origin
433 (method url-fetch)
434 (uri (string-append
435 "ftp://shogun-toolbox.org/shogun/releases/"
436 (version-major+minor version)
437 "/sources/shogun-" version ".tar.bz2"))
438 (sha256
439 (base32
440 "1rn9skm3nw6hr7mr3lgp2gfqhi7ii0lyxck7qmqnf8avq349s5jp"))
441 (modules '((guix build utils)
442 (ice-9 rdelim)))
443 (snippet
444 '(begin
445 ;; Remove non-free sources and files referencing them
446 (for-each delete-file
447 (find-files "src/shogun/classifier/svm/"
448 "SVMLight\\.(cpp|h)"))
449 (for-each delete-file
450 (find-files "examples/undocumented/libshogun/"
451 (string-append
452 "(classifier_.*svmlight.*|"
453 "evaluation_cross_validation_locked_comparison).cpp")))
454 ;; Remove non-free functions.
455 (define (delete-ifdefs file)
456 (with-atomic-file-replacement file
457 (lambda (in out)
458 (let loop ((line (read-line in 'concat))
459 (skipping? #f))
460 (if (eof-object? line)
461 #t
462 (let ((skip-next?
463 (or (and skipping?
464 (not (string-prefix?
465 "#endif //USE_SVMLIGHT" line)))
466 (string-prefix?
467 "#ifdef USE_SVMLIGHT" line))))
468 (when (or (not skipping?)
469 (and skipping? (not skip-next?)))
470 (display line out))
471 (loop (read-line in 'concat) skip-next?)))))))
472 (for-each delete-ifdefs
473 (append
474 (find-files "src/shogun/classifier/mkl"
475 "^MKLClassification\\.cpp")
476 (find-files "src/shogun/classifier/svm"
477 "^SVMLightOneClass\\.(cpp|h)")
478 (find-files "src/shogun/multiclass"
479 "^ScatterSVM\\.(cpp|h)")
480 (find-files "src/shogun/kernel/"
481 "^(Kernel|CombinedKernel|ProductKernel)\\.(cpp|h)")
482 (find-files "src/shogun/regression/svr"
483 "^(MKLRegression|SVRLight)\\.(cpp|h)")
484 (find-files "src/shogun/transfer/domain_adaptation"
485 "^DomainAdaptationSVM\\.(cpp|h)")))
486 #t))))
487 (build-system cmake-build-system)
488 (arguments
489 '(#:tests? #f ;no check target
490 #:phases
491 (modify-phases %standard-phases
492 (add-after 'unpack 'delete-broken-symlinks
493 (lambda _
494 (for-each delete-file '("applications/arts/data"
495 "applications/asp/data"
496 "applications/easysvm/data"
497 "applications/msplicer/data"
498 "applications/ocr/data"
499 "examples/meta/data"
500 "examples/undocumented/data"))
501 #t))
502 (add-after 'unpack 'change-R-target-path
503 (lambda* (#:key outputs #:allow-other-keys)
504 (substitute* '("src/interfaces/r/CMakeLists.txt"
505 "examples/meta/r/CMakeLists.txt")
506 (("\\$\\{R_COMPONENT_LIB_PATH\\}")
507 (string-append (assoc-ref outputs "out")
508 "/lib/R/library/")))
509 #t))
510 (add-after 'unpack 'fix-octave-modules
511 (lambda* (#:key outputs #:allow-other-keys)
512 (substitute* "src/interfaces/octave/CMakeLists.txt"
513 (("^include_directories\\(\\$\\{OCTAVE_INCLUDE_DIRS\\}")
514 "include_directories(${OCTAVE_INCLUDE_DIRS} ${OCTAVE_INCLUDE_DIRS}/octave")
515 ;; change target directory
516 (("\\$\\{OCTAVE_OCT_LOCAL_API_FILE_DIR\\}")
517 (string-append (assoc-ref outputs "out")
518 "/share/octave/packages")))
519 (substitute* '("src/interfaces/octave/swig_typemaps.i"
520 "src/interfaces/octave/sg_print_functions.cpp")
521 ;; "octave/config.h" and "octave/oct-obj.h" deprecated in Octave.
522 (("octave/config\\.h") "octave/octave-config.h")
523 (("octave/oct-obj.h") "octave/ovl.h"))
524 #t))
525 (add-after 'unpack 'move-rxcpp
526 (lambda* (#:key inputs #:allow-other-keys)
527 (let ((rxcpp-dir "shogun/third-party/rxcpp"))
528 (mkdir-p rxcpp-dir)
529 (install-file (assoc-ref inputs "rxcpp") rxcpp-dir)
530 #t)))
531 (add-before 'build 'set-HOME
532 ;; $HOME needs to be set at some point during the build phase
533 (lambda _ (setenv "HOME" "/tmp") #t)))
534 #:configure-flags
535 (list "-DCMAKE_BUILD_WITH_INSTALL_RPATH=TRUE"
536 "-DUSE_SVMLIGHT=OFF" ;disable proprietary SVMLIGHT
537 "-DBUILD_META_EXAMPLES=OFF" ;requires unpackaged ctags
538 ;;"-DINTERFACE_JAVA=ON" ;requires unpackaged jblas
539 ;;"-DINTERFACE_RUBY=ON" ;requires unpackaged ruby-narray
540 ;;"-DINTERFACE_PERL=ON" ;"FindPerlLibs" does not exist
541 ;;"-DINTERFACE_LUA=ON" ;fails because lua doesn't build pkgconfig file
542 "-DINTERFACE_OCTAVE=ON"
543 "-DINTERFACE_PYTHON=ON"
544 "-DINTERFACE_R=ON")))
545 (inputs
546 `(("python" ,python)
547 ("numpy" ,python-numpy)
548 ("r-minimal" ,r-minimal)
549 ("octave" ,octave-cli)
550 ("swig" ,swig)
551 ("eigen" ,eigen)
552 ("hdf5" ,hdf5)
553 ("atlas" ,atlas)
554 ("arpack" ,arpack-ng)
555 ("lapack" ,lapack)
556 ("glpk" ,glpk)
557 ("libxml2" ,libxml2)
558 ("lzo" ,lzo)
559 ("zlib" ,zlib)))
560 (native-inputs
561 `(("pkg-config" ,pkg-config)
562 ("rxcpp" ,rxcpp)))
563 ;; Non-portable SSE instructions are used so building fails on platforms
564 ;; other than x86_64.
565 (supported-systems '("x86_64-linux"))
566 (home-page "http://shogun-toolbox.org/")
567 (synopsis "Machine learning toolbox")
568 (description
569 "The Shogun Machine learning toolbox provides a wide range of unified and
570 efficient Machine Learning (ML) methods. The toolbox seamlessly allows to
571 combine multiple data representations, algorithm classes, and general purpose
572 tools. This enables both rapid prototyping of data pipelines and extensibility
573 in terms of new algorithms.")
574 (license license:gpl3+)))
575
576 (define-public rxcpp
577 (package
578 (name "rxcpp")
579 (version "4.1.0")
580 (source
581 (origin
582 (method git-fetch)
583 (uri (git-reference
584 (url "https://github.com/ReactiveX/RxCpp.git")
585 (commit (string-append "v" version))))
586 (sha256
587 (base32 "1rdpa3jlc181jd08nk437aar085h28i45s6nzrv65apb3xyyz0ij"))
588 (file-name (git-file-name name version))))
589 (build-system cmake-build-system)
590 (arguments
591 `(#:phases
592 (modify-phases %standard-phases
593 (add-after 'unpack 'remove-werror
594 (lambda _
595 (substitute* (find-files ".")
596 (("-Werror") ""))
597 #t))
598 (replace 'check
599 (lambda _
600 (invoke "ctest"))))))
601 (native-inputs
602 `(("catch" ,catch-framework)))
603 (home-page "http://reactivex.io/")
604 (synopsis "Reactive Extensions for C++")
605 (description
606 "The Reactive Extensions for C++ (RxCpp) is a library of algorithms for
607 values-distributed-in-time. ReactiveX is a library for composing asynchronous
608 and event-based programs by using observable sequences.
609
610 It extends the observer pattern to support sequences of data and/or events and
611 adds operators that allow you to compose sequences together declaratively while
612 abstracting away concerns about things like low-level threading,
613 synchronization, thread-safety, concurrent data structures, and non-blocking
614 I/O.")
615 (license license:asl2.0)))
616
617 (define-public r-adaptivesparsity
618 (package
619 (name "r-adaptivesparsity")
620 (version "1.6")
621 (source (origin
622 (method url-fetch)
623 (uri (cran-uri "AdaptiveSparsity" version))
624 (sha256
625 (base32
626 "0imr5m8mll9j6n4icsv6z9rl5kbnwsp9wvzrg7n90nnmcxq2cz91"))))
627 (properties
628 `((upstream-name . "AdaptiveSparsity")))
629 (build-system r-build-system)
630 (arguments
631 `(#:phases
632 (modify-phases %standard-phases
633 (add-after 'unpack 'link-against-armadillo
634 (lambda _
635 (substitute* "src/Makevars"
636 (("PKG_LIBS=" prefix)
637 (string-append prefix "-larmadillo"))))))))
638 (propagated-inputs
639 `(("r-mass" ,r-mass)
640 ("r-matrix" ,r-matrix)
641 ("r-rcpp" ,r-rcpp)
642 ("r-rcpparmadillo" ,r-rcpparmadillo)))
643 (inputs
644 `(("armadillo" ,armadillo)))
645 (home-page "https://cran.r-project.org/web/packages/AdaptiveSparsity")
646 (synopsis "Adaptive sparsity models")
647 (description
648 "This package implements the Figueiredo machine learning algorithm for
649 adaptive sparsity and the Wong algorithm for adaptively sparse gaussian
650 geometric models.")
651 (license license:lgpl3+)))
652
653 (define-public gemmlowp-for-tensorflow
654 ;; The commit hash is taken from "tensorflow/workspace.bzl".
655 (let ((commit "38ebac7b059e84692f53e5938f97a9943c120d98")
656 (revision "2"))
657 (package
658 (name "gemmlowp")
659 (version (git-version "0" revision commit))
660 (source (origin
661 (method url-fetch)
662 (uri (string-append "https://mirror.bazel.build/"
663 "github.com/google/gemmlowp/archive/"
664 commit ".zip"))
665 (file-name (string-append "gemmlowp-" version ".zip"))
666 (sha256
667 (base32
668 "0n56s2g8hrssm4w8qj1v58gfm56a04n9v992ixkmvk6zjiralzxq"))))
669 (build-system cmake-build-system)
670 (arguments
671 `(#:configure-flags
672 (list ,@(match (%current-system)
673 ((or "x86_64-linux" "i686-linux")
674 '("-DCMAKE_CXX_FLAGS=-msse2"))
675 (_ '())))
676 #:phases
677 (modify-phases %standard-phases
678 ;; This directory contains the CMakeLists.txt.
679 (add-after 'unpack 'chdir
680 (lambda _ (chdir "contrib") #t))
681 ;; There is no install target
682 (replace 'install
683 (lambda* (#:key outputs #:allow-other-keys)
684 (let* ((out (assoc-ref outputs "out"))
685 (lib (string-append out "/lib/"))
686 (inc (string-append out "/include/")))
687 (install-file "../build/libeight_bit_int_gemm.so" lib)
688 (for-each (lambda (dir)
689 (let ((target (string-append inc "/" dir)))
690 (mkdir-p target)
691 (for-each (lambda (h)
692 (install-file h target))
693 (find-files (string-append "../" dir)
694 "\\.h$"))))
695 '("meta" "profiling" "public" "fixedpoint"
696 "eight_bit_int_gemm" "internal"))
697 #t))))))
698 (native-inputs
699 `(("unzip" ,unzip)))
700 (home-page "https://github.com/google/gemmlowp")
701 (synopsis "Small self-contained low-precision GEMM library")
702 (description
703 "This is a small self-contained low-precision @dfn{general matrix
704 multiplication} (GEMM) library. It is not a full linear algebra library.
705 Low-precision means that the input and output matrix entries are integers on
706 at most 8 bits. To avoid overflow, results are internally accumulated on more
707 than 8 bits, and at the end only some significant 8 bits are kept.")
708 (license license:asl2.0))))
709
710 (define-public dlib
711 (package
712 (name "dlib")
713 (version "19.7")
714 (source (origin
715 (method url-fetch)
716 (uri (string-append
717 "http://dlib.net/files/dlib-" version ".tar.bz2"))
718 (sha256
719 (base32
720 "1mljz02kwkrbggyncxv5fpnyjdybw2qihaacb3js8yfkw12vwpc2"))
721 (modules '((guix build utils)))
722 (snippet
723 '(begin
724 ;; Delete ~13MB of bundled dependencies.
725 (delete-file-recursively "dlib/external")
726 (delete-file-recursively "docs/dlib/external")
727 #t))))
728 (build-system cmake-build-system)
729 (arguments
730 `(#:phases
731 (modify-phases %standard-phases
732 (add-after 'unpack 'disable-asserts
733 (lambda _
734 ;; config.h recommends explicitly enabling or disabling asserts
735 ;; when building as a shared library. By default neither is set.
736 (substitute* "dlib/config.h"
737 (("^//#define DLIB_DISABLE_ASSERTS") "#define DLIB_DISABLE_ASSERTS"))
738 #t))
739 (add-after 'disable-asserts 'disable-failing-tests
740 (lambda _
741 ;; One test times out on MIPS, so we need to disable it.
742 ;; Others are flaky on some platforms.
743 (let* ((system ,(or (%current-target-system)
744 (%current-system)))
745 (disabled-tests (cond
746 ((string-prefix? "mips64" system)
747 '("object_detector" ; timeout
748 "data_io"))
749 ((string-prefix? "armhf" system)
750 '("learning_to_track"))
751 ((string-prefix? "i686" system)
752 '("optimization"))
753 (else '()))))
754 (for-each
755 (lambda (test)
756 (substitute* "dlib/test/makefile"
757 (((string-append "SRC \\+= " test "\\.cpp")) "")))
758 disabled-tests)
759 #t)))
760 (replace 'check
761 (lambda _
762 ;; No test target, so we build and run the unit tests here.
763 (let ((test-dir (string-append "../dlib-" ,version "/dlib/test")))
764 (with-directory-excursion test-dir
765 (invoke "make" "-j" (number->string (parallel-job-count)))
766 (invoke "./dtest" "--runall"))
767 #t)))
768 (add-after 'install 'delete-static-library
769 (lambda* (#:key outputs #:allow-other-keys)
770 (delete-file (string-append (assoc-ref outputs "out")
771 "/lib/libdlib.a"))
772 #t)))))
773 (native-inputs
774 `(("pkg-config" ,pkg-config)
775 ;; For tests.
776 ("libnsl" ,libnsl)))
777 (inputs
778 `(("giflib" ,giflib)
779 ("lapack" ,lapack)
780 ("libjpeg" ,libjpeg)
781 ("libpng" ,libpng)
782 ("libx11" ,libx11)
783 ("openblas" ,openblas)
784 ("zlib" ,zlib)))
785 (synopsis
786 "Toolkit for making machine learning and data analysis applications in C++")
787 (description
788 "Dlib is a modern C++ toolkit containing machine learning algorithms and
789 tools. It is used in both industry and academia in a wide range of domains
790 including robotics, embedded devices, mobile phones, and large high performance
791 computing environments.")
792 (home-page "http://dlib.net")
793 (license license:boost1.0)))
794
795 (define-public python-scikit-learn
796 (package
797 (name "python-scikit-learn")
798 (version "0.20.3")
799 (source
800 (origin
801 (method git-fetch)
802 (uri (git-reference
803 (url "https://github.com/scikit-learn/scikit-learn.git")
804 (commit version)))
805 (file-name (git-file-name name version))
806 (sha256
807 (base32
808 "08aaby5zphfxy83mggg35bwyka7wk91l2qijh8kk0bl08dikq8dl"))))
809 (build-system python-build-system)
810 (arguments
811 `(#:phases
812 (modify-phases %standard-phases
813 (add-after 'build 'build-ext
814 (lambda _ (invoke "python" "setup.py" "build_ext" "--inplace") #t))
815 (replace 'check
816 (lambda _
817 ;; Restrict OpenBLAS threads to prevent segfaults while testing!
818 (setenv "OPENBLAS_NUM_THREADS" "1")
819
820 ;; Some tests require write access to $HOME.
821 (setenv "HOME" "/tmp")
822
823 (invoke "pytest" "sklearn" "-m" "not network")))
824 ;; FIXME: This fails with permission denied
825 (delete 'reset-gzip-timestamps))))
826 (inputs
827 `(("openblas" ,openblas)))
828 (native-inputs
829 `(("python-pytest" ,python-pytest)
830 ("python-pandas" ,python-pandas) ;for tests
831 ("python-cython" ,python-cython)))
832 (propagated-inputs
833 `(("python-numpy" ,python-numpy)
834 ("python-scipy" ,python-scipy)))
835 (home-page "http://scikit-learn.org/")
836 (synopsis "Machine Learning in Python")
837 (description
838 "Scikit-learn provides simple and efficient tools for data mining and
839 data analysis.")
840 (license license:bsd-3)))
841
842 (define-public python2-scikit-learn
843 (package-with-python2 python-scikit-learn))
844
845 (define-public python-autograd
846 (let* ((commit "442205dfefe407beffb33550846434baa90c4de7")
847 (revision "0")
848 (version (git-version "0.0.0" revision commit)))
849 (package
850 (name "python-autograd")
851 (home-page "https://github.com/HIPS/autograd")
852 (source (origin
853 (method git-fetch)
854 (uri (git-reference
855 (url home-page)
856 (commit commit)))
857 (sha256
858 (base32
859 "189sv2xb0mwnjawa9z7mrgdglc1miaq93pnck26r28fi1jdwg0z4"))
860 (file-name (git-file-name name version))))
861 (version version)
862 (build-system python-build-system)
863 (native-inputs
864 `(("python-nose" ,python-nose)
865 ("python-pytest" ,python-pytest)))
866 (propagated-inputs
867 `(("python-future" ,python-future)
868 ("python-numpy" ,python-numpy)))
869 (arguments
870 `(#:phases (modify-phases %standard-phases
871 (replace 'check
872 (lambda _
873 (invoke "py.test" "-v"))))))
874 (synopsis "Efficiently computes derivatives of NumPy code")
875 (description "Autograd can automatically differentiate native Python and
876 NumPy code. It can handle a large subset of Python's features, including loops,
877 ifs, recursion and closures, and it can even take derivatives of derivatives
878 of derivatives. It supports reverse-mode differentiation
879 (a.k.a. backpropagation), which means it can efficiently take gradients of
880 scalar-valued functions with respect to array-valued arguments, as well as
881 forward-mode differentiation, and the two can be composed arbitrarily. The
882 main intended application of Autograd is gradient-based optimization.")
883 (license license:expat))))
884
885 (define-public python2-autograd
886 (package-with-python2 python-autograd))
887
888 (define-public lightgbm
889 (package
890 (name "lightgbm")
891 (version "2.0.12")
892 (source (origin
893 (method url-fetch)
894 (uri (string-append
895 "https://github.com/Microsoft/LightGBM/archive/v"
896 version ".tar.gz"))
897 (sha256
898 (base32
899 "132zf0yk0545mg72hyzxm102g3hpb6ixx9hnf8zd2k55gas6cjj1"))
900 (file-name (string-append name "-" version ".tar.gz"))))
901 (native-inputs
902 `(("python-pytest" ,python-pytest)
903 ("python-nose" ,python-nose)))
904 (inputs
905 `(("openmpi" ,openmpi)))
906 (propagated-inputs
907 `(("python-numpy" ,python-numpy)
908 ("python-scipy" ,python-scipy)))
909 (arguments
910 `(#:configure-flags
911 '("-DUSE_MPI=ON")
912 #:phases
913 (modify-phases %standard-phases
914 (replace 'check
915 (lambda* (#:key outputs #:allow-other-keys)
916 (with-directory-excursion ,(string-append "../LightGBM-" version)
917 (invoke "pytest" "tests/c_api_test/test_.py")))))))
918 (build-system cmake-build-system)
919 (home-page "https://github.com/Microsoft/LightGBM")
920 (synopsis "Gradient boosting framework based on decision tree algorithms")
921 (description "LightGBM is a gradient boosting framework that uses tree
922 based learning algorithms. It is designed to be distributed and efficient with
923 the following advantages:
924
925 @itemize
926 @item Faster training speed and higher efficiency
927 @item Lower memory usage
928 @item Better accuracy
929 @item Parallel and GPU learning supported (not enabled in this package)
930 @item Capable of handling large-scale data
931 @end itemize\n")
932 (license license:expat)))
933
934 (define-public vowpal-wabbit
935 ;; Language bindings not included.
936 (package
937 (name "vowpal-wabbit")
938 (version "8.5.0")
939 (source (origin
940 (method url-fetch)
941 (uri (string-append
942 "https://github.com/JohnLangford/vowpal_wabbit/archive/"
943 version ".tar.gz"))
944 (sha256
945 (base32
946 "0clp2kb7rk5sckhllxjr5a651awf4s8dgzg4659yh4hf5cqnf0gr"))
947 (file-name (string-append name "-" version ".tar.gz"))))
948 (inputs
949 `(("boost" ,boost)
950 ("zlib" ,zlib)))
951 (arguments
952 `(#:configure-flags
953 (list (string-append "--with-boost="
954 (assoc-ref %build-inputs "boost")))))
955 (build-system gnu-build-system)
956 (home-page "https://github.com/JohnLangford/vowpal_wabbit")
957 (synopsis "Fast machine learning library for online learning")
958 (description "Vowpal Wabbit is a machine learning system with techniques
959 such as online, hashing, allreduce, reductions, learning2search, active, and
960 interactive learning.")
961 (license license:bsd-3)))
962
963 (define-public python2-fastlmm
964 (package
965 (name "python2-fastlmm")
966 (version "0.2.21")
967 (source
968 (origin
969 (method url-fetch)
970 (uri (pypi-uri "fastlmm" version ".zip"))
971 (sha256
972 (base32
973 "1q8c34rpmwkfy3r4d5172pzdkpfryj561897z9r3x22gq7813x1m"))))
974 (build-system python-build-system)
975 (arguments
976 `(#:tests? #f ; some test files are missing
977 #:python ,python-2)) ; only Python 2.7 is supported
978 (propagated-inputs
979 `(("python2-numpy" ,python2-numpy)
980 ("python2-scipy" ,python2-scipy)
981 ("python2-matplotlib" ,python2-matplotlib)
982 ("python2-pandas" ,python2-pandas)
983 ("python2-scikit-learn" ,python2-scikit-learn)
984 ("python2-pysnptools" ,python2-pysnptools)))
985 (native-inputs
986 `(("unzip" ,unzip)
987 ("python2-cython" ,python2-cython)
988 ("python2-mock" ,python2-mock)
989 ("python2-nose" ,python2-nose)))
990 (home-page "http://research.microsoft.com/en-us/um/redmond/projects/mscompbio/fastlmm/")
991 (synopsis "Perform genome-wide association studies on large data sets")
992 (description
993 "FaST-LMM, which stands for Factored Spectrally Transformed Linear Mixed
994 Models, is a program for performing both single-SNP and SNP-set genome-wide
995 association studies (GWAS) on extremely large data sets.")
996 (license license:asl2.0)))
997
998 ;; There have been no proper releases yet.
999 (define-public kaldi
1000 (let ((commit "2f95609f0bb085bd3a1dc5eb0a39f3edea59e606")
1001 (revision "1"))
1002 (package
1003 (name "kaldi")
1004 (version (git-version "0" revision commit))
1005 (source (origin
1006 (method git-fetch)
1007 (uri (git-reference
1008 (url "https://github.com/kaldi-asr/kaldi.git")
1009 (commit commit)))
1010 (file-name (git-file-name name version))
1011 (sha256
1012 (base32
1013 "082qh3pfi7hvncylp4xsmkfahbd7gb0whdfa4rwrx7fxk9rdh3kz"))))
1014 (build-system gnu-build-system)
1015 (arguments
1016 `(#:test-target "test"
1017 #:phases
1018 (modify-phases %standard-phases
1019 (add-after 'unpack 'chdir
1020 (lambda _ (chdir "src") #t))
1021 (replace 'configure
1022 (lambda* (#:key build system inputs outputs #:allow-other-keys)
1023 (when (not (or (string-prefix? "x86_64" system)
1024 (string-prefix? "i686" system)))
1025 (substitute* "makefiles/linux_openblas.mk"
1026 (("-msse -msse2") "")))
1027 (substitute* "makefiles/default_rules.mk"
1028 (("/bin/bash") (which "bash")))
1029 (substitute* "Makefile"
1030 (("ext_depend: check_portaudio")
1031 "ext_depend:"))
1032 (substitute* '("online/Makefile"
1033 "onlinebin/Makefile"
1034 "gst-plugin/Makefile")
1035 (("../../tools/portaudio/install")
1036 (assoc-ref inputs "portaudio")))
1037
1038 ;; This `configure' script doesn't support variables passed as
1039 ;; arguments, nor does it support "prefix".
1040 (let ((out (assoc-ref outputs "out"))
1041 (openblas (assoc-ref inputs "openblas"))
1042 (openfst (assoc-ref inputs "openfst")))
1043 (substitute* "configure"
1044 (("check_for_slow_expf;") "")
1045 ;; This affects the RPATH and also serves as the installation
1046 ;; directory.
1047 (("KALDILIBDIR=`pwd`/lib")
1048 (string-append "KALDILIBDIR=" out "/lib")))
1049 (mkdir-p out) ; must exist
1050 (setenv "CONFIG_SHELL" (which "bash"))
1051 (setenv "OPENFST_VER" ,(package-version openfst))
1052 (invoke "./configure"
1053 "--use-cuda=no"
1054 "--shared"
1055 (string-append "--openblas-root=" openblas)
1056 (string-append "--fst-root=" openfst)))))
1057 (add-after 'build 'build-ext-and-gstreamer-plugin
1058 (lambda _
1059 (invoke "make" "-C" "online" "depend")
1060 (invoke "make" "-C" "online")
1061 (invoke "make" "-C" "onlinebin" "depend")
1062 (invoke "make" "-C" "onlinebin")
1063 (invoke "make" "-C" "gst-plugin" "depend")
1064 (invoke "make" "-C" "gst-plugin")
1065 #t))
1066 ;; TODO: also install the executables.
1067 (replace 'install
1068 (lambda* (#:key outputs #:allow-other-keys)
1069 (let* ((out (assoc-ref outputs "out"))
1070 (inc (string-append out "/include"))
1071 (lib (string-append out "/lib")))
1072 (mkdir-p lib)
1073 ;; The build phase installed symlinks to the actual
1074 ;; libraries. Install the actual targets.
1075 (for-each (lambda (file)
1076 (let ((target (readlink file)))
1077 (delete-file file)
1078 (install-file target lib)))
1079 (find-files lib "\\.so"))
1080 ;; Install headers
1081 (for-each (lambda (file)
1082 (let ((target-dir (string-append inc "/" (dirname file))))
1083 (install-file file target-dir)))
1084 (find-files "." "\\.h"))
1085 (install-file "gst-plugin/libgstonlinegmmdecodefaster.so"
1086 (string-append lib "/gstreamer-1.0"))
1087 #t))))))
1088 (inputs
1089 `(("alsa-lib" ,alsa-lib)
1090 ("gfortran" ,gfortran "lib")
1091 ("glib" ,glib)
1092 ("gstreamer" ,gstreamer)
1093 ("jack" ,jack-1)
1094 ("openblas" ,openblas)
1095 ("openfst" ,openfst)
1096 ("portaudio" ,portaudio)
1097 ("python" ,python)))
1098 (native-inputs
1099 `(("glib" ,glib "bin") ; glib-genmarshal
1100 ("grep" ,grep)
1101 ("sed" ,sed)
1102 ("pkg-config" ,pkg-config)
1103 ("which" ,which)))
1104 (home-page "https://kaldi-asr.org/")
1105 (synopsis "Speech recognition toolkit")
1106 (description "Kaldi is an extensible toolkit for speech recognition
1107 written in C++.")
1108 (license license:asl2.0))))
1109
1110 (define-public gst-kaldi-nnet2-online
1111 (let ((commit "617e43e73c7cc45eb9119028c02bd4178f738c4a")
1112 (revision "1"))
1113 (package
1114 (name "gst-kaldi-nnet2-online")
1115 (version (git-version "0" revision commit))
1116 (source (origin
1117 (method git-fetch)
1118 (uri (git-reference
1119 (url "https://github.com/alumae/gst-kaldi-nnet2-online.git")
1120 (commit commit)))
1121 (file-name (git-file-name name version))
1122 (sha256
1123 (base32
1124 "0xh3w67b69818s6ib02ara4lw7wamjdmh4jznvkpzrs4skbs9jx9"))))
1125 (build-system gnu-build-system)
1126 (arguments
1127 `(#:tests? #f ; there are none
1128 #:make-flags
1129 (list (string-append "SHELL="
1130 (assoc-ref %build-inputs "bash") "/bin/bash")
1131 (string-append "KALDI_ROOT="
1132 (assoc-ref %build-inputs "kaldi-src"))
1133 (string-append "KALDILIBDIR="
1134 (assoc-ref %build-inputs "kaldi") "/lib")
1135 "KALDI_FLAVOR=dynamic")
1136 #:phases
1137 (modify-phases %standard-phases
1138 (add-after 'unpack 'chdir
1139 (lambda _ (chdir "src") #t))
1140 (replace 'configure
1141 (lambda* (#:key inputs #:allow-other-keys)
1142 (let ((glib (assoc-ref inputs "glib")))
1143 (setenv "CXXFLAGS" "-std=c++11 -fPIC")
1144 (setenv "CPLUS_INCLUDE_PATH"
1145 (string-append glib "/include/glib-2.0:"
1146 glib "/lib/glib-2.0/include:"
1147 (assoc-ref inputs "gstreamer")
1148 "/include/gstreamer-1.0:"
1149 (getenv "CPLUS_INCLUDE_PATH"))))
1150 (substitute* "Makefile"
1151 (("include \\$\\(KALDI_ROOT\\)/src/kaldi.mk") "")
1152 (("\\$\\(error Cannot find") "#"))))
1153 (add-before 'build 'build-depend
1154 (lambda* (#:key make-flags #:allow-other-keys)
1155 (apply invoke "make" "depend" make-flags)))
1156 (replace 'install
1157 (lambda* (#:key outputs #:allow-other-keys)
1158 (let* ((out (assoc-ref outputs "out"))
1159 (lib (string-append out "/lib/gstreamer-1.0")))
1160 (install-file "libgstkaldinnet2onlinedecoder.so" lib)
1161 #t))))))
1162 (inputs
1163 `(("glib" ,glib)
1164 ("gstreamer" ,gstreamer)
1165 ("jansson" ,jansson)
1166 ("openfst" ,openfst)
1167 ("kaldi" ,kaldi)))
1168 (native-inputs
1169 `(("bash" ,bash)
1170 ("glib:bin" ,glib "bin") ; glib-genmarshal
1171 ("kaldi-src" ,(package-source kaldi))
1172 ("pkg-config" ,pkg-config)))
1173 (home-page "https://kaldi-asr.org/")
1174 (synopsis "Gstreamer plugin for decoding speech")
1175 (description "This package provides a GStreamer plugin that wraps
1176 Kaldi's @code{SingleUtteranceNnet2Decoder}. It requires iVector-adapted DNN
1177 acoustic models. The iVectors are adapted to the current audio stream
1178 automatically.")
1179 (license license:asl2.0))))
1180
1181 (define-public kaldi-gstreamer-server
1182 (let ((commit "1735ba49c5dc0ebfc184e45105fc600cd9f1f508")
1183 (revision "1"))
1184 (package
1185 (name "kaldi-gstreamer-server")
1186 (version (git-version "0" revision commit))
1187 (source (origin
1188 (method git-fetch)
1189 (uri (git-reference
1190 (url "https://github.com/alumae/kaldi-gstreamer-server.git")
1191 (commit commit)))
1192 (file-name (git-file-name name version))
1193 (sha256
1194 (base32
1195 "0j701m7lbwmzqxsfanj882v7881hrbmpqybbczbxqpcbg8q34w0k"))))
1196 (build-system gnu-build-system)
1197 (arguments
1198 `(#:tests? #f ; there are no tests that can be run automatically
1199 #:modules ((guix build utils)
1200 (guix build gnu-build-system)
1201 (srfi srfi-26))
1202 #:phases
1203 (modify-phases %standard-phases
1204 (delete 'configure)
1205 (replace 'build
1206 (lambda* (#:key outputs #:allow-other-keys)
1207 ;; Disable hash randomization to ensure the generated .pycs
1208 ;; are reproducible.
1209 (setenv "PYTHONHASHSEED" "0")
1210 (with-directory-excursion "kaldigstserver"
1211 (for-each (lambda (file)
1212 (apply invoke
1213 `("python"
1214 "-m" "compileall"
1215 "-f" ; force rebuild
1216 ,file)))
1217 (find-files "." "\\.py$")))
1218 #t))
1219 (replace 'install
1220 (lambda* (#:key inputs outputs #:allow-other-keys)
1221 (let* ((out (assoc-ref outputs "out"))
1222 (bin (string-append out "/bin"))
1223 (share (string-append out "/share/kaldi-gstreamer-server/")))
1224 ;; Install Python files
1225 (with-directory-excursion "kaldigstserver"
1226 (for-each (cut install-file <> share)
1227 (find-files "." ".*")))
1228
1229 ;; Install sample configuration files
1230 (for-each (cut install-file <> share)
1231 (find-files "." "\\.yaml"))
1232
1233 ;; Install executables
1234 (mkdir-p bin)
1235 (let* ((server (string-append bin "/kaldi-gst-server"))
1236 (client (string-append bin "/kaldi-gst-client"))
1237 (worker (string-append bin "/kaldi-gst-worker"))
1238 (PYTHONPATH (getenv "PYTHONPATH"))
1239 (GST_PLUGIN_PATH (string-append
1240 (assoc-ref inputs "gst-kaldi-nnet2-online")
1241 "/lib/gstreamer-1.0:${GST_PLUGIN_PATH}"))
1242 (wrap (lambda (wrapper what)
1243 (with-output-to-file wrapper
1244 (lambda _
1245 (format #t
1246 "#!~a
1247 export PYTHONPATH=~a
1248 export GST_PLUGIN_PATH=~a
1249 exec ~a ~a/~a \"$@\"~%"
1250 (which "bash") PYTHONPATH GST_PLUGIN_PATH
1251 (which "python") share what)))
1252 (chmod wrapper #o555))))
1253 (for-each wrap
1254 (list server client worker)
1255 (list "master_server.py"
1256 "client.py"
1257 "worker.py")))
1258 #t))))))
1259 (inputs
1260 `(("gst-kaldi-nnet2-online" ,gst-kaldi-nnet2-online)
1261 ("python2" ,python-2)
1262 ("python2-futures" ,python2-futures)
1263 ("python2-pygobject" ,python2-pygobject)
1264 ("python2-pyyaml" ,python2-pyyaml)
1265 ("python2-tornado" ,python2-tornado)
1266 ("python2-ws4py" ,python2-ws4py-for-kaldi-gstreamer-server)))
1267 (home-page "https://github.com/alumae/kaldi-gstreamer-server")
1268 (synopsis "Real-time full-duplex speech recognition server")
1269 (description "This is a real-time full-duplex speech recognition server,
1270 based on the Kaldi toolkit and the GStreamer framework and implemented in
1271 Python.")
1272 (license license:bsd-2))))
1273
1274 (define-public grpc
1275 (package
1276 (name "grpc")
1277 (version "1.16.1")
1278 (source (origin
1279 (method git-fetch)
1280 (uri (git-reference
1281 (url "https://github.com/grpc/grpc.git")
1282 (commit (string-append "v" version))))
1283 (file-name (git-file-name name version))
1284 (sha256
1285 (base32
1286 "1jimqz3115f9pli5w6ik9wi7mjc7ix6y7yrq4a1ab9fc3dalj7p2"))))
1287 (build-system cmake-build-system)
1288 (arguments
1289 `(#:tests? #f ; no test target
1290 #:configure-flags
1291 (list "-DgRPC_ZLIB_PROVIDER=package"
1292 "-DgRPC_CARES_PROVIDER=package"
1293 "-DgRPC_SSL_PROVIDER=package"
1294 "-DgRPC_PROTOBUF_PROVIDER=package")))
1295 (inputs
1296 `(("c-ares" ,c-ares-next)
1297 ("openssl" ,openssl)
1298 ("zlib" ,zlib)))
1299 (native-inputs
1300 `(("protobuf" ,protobuf-next)
1301 ("python" ,python-wrapper)))
1302 (home-page "https://grpc.io")
1303 (synopsis "High performance universal RPC framework")
1304 (description "gRPC is a modern open source high performance @dfn{Remote
1305 Procedure Call} (RPC) framework that can run in any environment. It can
1306 efficiently connect services in and across data centers with pluggable support
1307 for load balancing, tracing, health checking and authentication. It is also
1308 applicable in last mile of distributed computing to connect devices, mobile
1309 applications and browsers to backend services.")
1310 (license license:asl2.0)))
1311
1312 ;; Note that Tensorflow includes a "third_party" directory, which seems to not
1313 ;; only contain modified subsets of upstream library source code, but also
1314 ;; adapter headers provided by Google (such as the fft.h header, which is not
1315 ;; part of the upstream project code). The Tensorflow code includes headers
1316 ;; from the "third_party" directory. It does not look like we can replace
1317 ;; these headers with unmodified upstream files, so we keep them.
1318 (define-public tensorflow
1319 (package
1320 (name "tensorflow")
1321 (version "1.9.0")
1322 (source
1323 (origin
1324 (method git-fetch)
1325 (uri (git-reference
1326 (url "https://github.com/tensorflow/tensorflow.git")
1327 (commit (string-append "v" version))))
1328 (file-name (string-append "tensorflow-" version "-checkout"))
1329 (sha256
1330 (base32
1331 "0a9kwha395g3wgxfwln5j8vn9nkspmd75xldrlqdq540w996g8xa"))))
1332 (build-system cmake-build-system)
1333 (arguments
1334 `(#:tests? #f ; no "check" target
1335 #:build-type "Release"
1336 #:configure-flags
1337 (let ((protobuf (assoc-ref %build-inputs "protobuf"))
1338 (protobuf:native (assoc-ref %build-inputs "protobuf:native"))
1339 (jsoncpp (assoc-ref %build-inputs "jsoncpp"))
1340 (snappy (assoc-ref %build-inputs "snappy"))
1341 (sqlite (assoc-ref %build-inputs "sqlite")))
1342 (list
1343 ;; Use protobuf from Guix
1344 (string-append "-Dprotobuf_STATIC_LIBRARIES="
1345 protobuf "/lib/libprotobuf.so")
1346 (string-append "-DPROTOBUF_PROTOC_EXECUTABLE="
1347 protobuf:native "/bin/protoc")
1348
1349 ;; Use snappy from Guix
1350 (string-append "-Dsnappy_STATIC_LIBRARIES="
1351 snappy "/lib/libsnappy.so")
1352 ;; Yes, this is not actually the include directory but a prefix...
1353 (string-append "-Dsnappy_INCLUDE_DIR=" snappy)
1354
1355 ;; Use jsoncpp from Guix
1356 (string-append "-Djsoncpp_STATIC_LIBRARIES="
1357 jsoncpp "/lib/libjsoncpp.so")
1358 ;; Yes, this is not actually the include directory but a prefix...
1359 (string-append "-Djsoncpp_INCLUDE_DIR=" jsoncpp)
1360
1361 ;; Use sqlite from Guix
1362 (string-append "-Dsqlite_STATIC_LIBRARIES="
1363 sqlite "/lib/libsqlite.a")
1364
1365 ;; Use system libraries wherever possible. Currently, this
1366 ;; only affects zlib.
1367 "-Dsystemlib_ALL=ON"
1368 "-Dtensorflow_ENABLE_POSITION_INDEPENDENT_CODE=ON"
1369 "-Dtensorflow_BUILD_SHARED_LIB=ON"
1370 "-Dtensorflow_OPTIMIZE_FOR_NATIVE_ARCH=OFF"
1371 "-Dtensorflow_ENABLE_SSL_SUPPORT=OFF"
1372 "-Dtensorflow_BUILD_CONTRIB_KERNELS=OFF"))
1373 #:make-flags
1374 (list "CC=gcc")
1375 #:modules ((ice-9 ftw)
1376 (guix build utils)
1377 (guix build cmake-build-system))
1378 #:phases
1379 (modify-phases %standard-phases
1380 (add-after 'unpack 'set-source-file-times-to-1980
1381 ;; At the end of the tf_python_build_pip_package target, a ZIP
1382 ;; archive should be generated via bdist_wheel, but it fails with
1383 ;; "ZIP does not support timestamps before 1980". Luckily,
1384 ;; SOURCE_DATE_EPOCH is respected, which we set to some time in
1385 ;; 1980.
1386 (lambda _ (setenv "SOURCE_DATE_EPOCH" "315532800") #t))
1387 ;; See https://github.com/tensorflow/tensorflow/issues/20517#issuecomment-406373913
1388 (add-after 'unpack 'python3.7-compatibility
1389 (lambda _
1390 (substitute* '("tensorflow/python/eager/pywrap_tfe_src.cc"
1391 "tensorflow/python/lib/core/ndarray_tensor.cc"
1392 "tensorflow/python/lib/core/py_func.cc")
1393 (("PyUnicode_AsUTF8") "(char *)PyUnicode_AsUTF8"))
1394 (substitute* "tensorflow/c/eager/c_api.h"
1395 (("unsigned char async")
1396 "unsigned char is_async"))
1397
1398 ;; Remove dependency on tensorboard, a complicated but probably
1399 ;; optional package.
1400 (substitute* "tensorflow/tools/pip_package/setup.py"
1401 ((".*'tensorboard >.*") ""))
1402 #t))
1403 (add-after 'python3.7-compatibility 'chdir
1404 (lambda _ (chdir "tensorflow/contrib/cmake") #t))
1405 (add-after 'chdir 'disable-downloads
1406 (lambda* (#:key inputs #:allow-other-keys)
1407 (substitute* (find-files "external" "\\.cmake$")
1408 (("GIT_REPOSITORY.*") "")
1409 (("GIT_TAG.*") "")
1410 (("PREFIX ")
1411 "DOWNLOAD_COMMAND \"\"\nPREFIX "))
1412
1413 ;; Use packages from Guix
1414 (let ((grpc (assoc-ref inputs "grpc")))
1415 (substitute* "CMakeLists.txt"
1416 ;; Sqlite
1417 (("include\\(sqlite\\)") "")
1418 (("\\$\\{sqlite_STATIC_LIBRARIES\\}")
1419 (string-append (assoc-ref inputs "sqlite")
1420 "/lib/libsqlite3.so"))
1421 (("sqlite_copy_headers_to_destination") "")
1422
1423 ;; PNG
1424 (("include\\(png\\)") "")
1425 (("\\$\\{png_STATIC_LIBRARIES\\}")
1426 (string-append (assoc-ref inputs "libpng")
1427 "/lib/libpng16.so"))
1428 (("png_copy_headers_to_destination") "")
1429
1430 ;; JPEG
1431 (("include\\(jpeg\\)") "")
1432 (("\\$\\{jpeg_STATIC_LIBRARIES\\}")
1433 (string-append (assoc-ref inputs "libjpeg")
1434 "/lib/libjpeg.so"))
1435 (("jpeg_copy_headers_to_destination") "")
1436
1437 ;; GIF
1438 (("include\\(gif\\)") "")
1439 (("\\$\\{gif_STATIC_LIBRARIES\\}")
1440 (string-append (assoc-ref inputs "giflib")
1441 "/lib/libgif.so"))
1442 (("gif_copy_headers_to_destination") "")
1443
1444 ;; lmdb
1445 (("include\\(lmdb\\)") "")
1446 (("\\$\\{lmdb_STATIC_LIBRARIES\\}")
1447 (string-append (assoc-ref inputs "lmdb")
1448 "/lib/liblmdb.so"))
1449 (("lmdb_copy_headers_to_destination") "")
1450
1451 ;; Protobuf
1452 (("include\\(protobuf\\)") "")
1453 (("protobuf_copy_headers_to_destination") "")
1454 (("^ +protobuf") "")
1455
1456 ;; gRPC
1457 (("include\\(grpc\\)")
1458 "find_package(grpc REQUIRED NAMES gRPC)")
1459 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES grpc\\)") "")
1460
1461 ;; Eigen
1462 (("include\\(eigen\\)")
1463 (string-append "find_package(eigen REQUIRED NAMES Eigen3)
1464 set(eigen_INCLUDE_DIRS ${CMAKE_CURRENT_BINARY_DIR}/external/eigen_archive "
1465 (assoc-ref inputs "eigen") "/include/eigen3)"))
1466 (("^ +eigen") "")
1467
1468 ;; snappy
1469 (("include\\(snappy\\)")
1470 "add_definitions(-DTF_USE_SNAPPY)")
1471 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES snappy\\)") "")
1472
1473 ;; jsoncpp
1474 (("include\\(jsoncpp\\)") "")
1475 (("^ +jsoncpp") ""))
1476
1477 (substitute* "tf_core_framework.cmake"
1478 ((" grpc") "")
1479 (("\\$\\{GRPC_BUILD\\}/grpc_cpp_plugin")
1480 (which "grpc_cpp_plugin"))
1481 ;; Link with gRPC libraries
1482 (("add_library\\(tf_protos_cc.*" m)
1483 (string-append m
1484 (format #f "\ntarget_link_libraries(tf_protos_cc PRIVATE \
1485 ~a/lib/libgrpc++_unsecure.a \
1486 ~a/lib/libgrpc_unsecure.a \
1487 ~a/lib/libaddress_sorting.a \
1488 ~a/lib/libgpr.a \
1489 ~a//lib/libcares.so
1490 )\n"
1491 grpc grpc grpc grpc
1492 (assoc-ref inputs "c-ares"))))))
1493 (substitute* "tf_tools.cmake"
1494 (("add_dependencies\\(\\$\\{proto_text.*") ""))
1495 ;; Remove dependency on bundled grpc
1496 (substitute* "tf_core_distributed_runtime.cmake"
1497 (("tf_core_cpu grpc") "tf_core_cpu"))
1498
1499 ;; This directory is a dependency of many targets.
1500 (mkdir-p "protobuf")
1501 #t))
1502 (add-after 'configure 'unpack-third-party-sources
1503 (lambda* (#:key inputs #:allow-other-keys)
1504 ;; This is needed to configure bundled packages properly.
1505 (setenv "CONFIG_SHELL" (which "bash"))
1506 (for-each
1507 (lambda (name)
1508 (let* ((what (assoc-ref inputs (string-append name "-src")))
1509 (name* (string-map (lambda (c)
1510 (if (char=? c #\-)
1511 #\_ c)) name))
1512 (where (string-append "../build/" name* "/src/" name*)))
1513 (cond
1514 ((string-suffix? ".zip" what)
1515 (mkdir-p where)
1516 (with-directory-excursion where
1517 (invoke "unzip" what)))
1518 ((string-suffix? ".tar.gz" what)
1519 (mkdir-p where)
1520 (invoke "tar" "xf" what
1521 "-C" where "--strip-components=1"))
1522 (else
1523 (let ((parent (dirname where)))
1524 (mkdir-p parent)
1525 (with-directory-excursion parent
1526 (when (file-exists? name*)
1527 (delete-file-recursively name*))
1528 (copy-recursively what name*)
1529 (map make-file-writable
1530 (find-files name* ".*"))))))))
1531 (list "boringssl"
1532 "cub"
1533 "double-conversion"
1534 "farmhash"
1535 "fft2d"
1536 "highwayhash"
1537 "nsync"
1538 "re2"))
1539
1540 (rename-file "../build/cub/src/cub/cub-1.8.0/"
1541 "../build/cub/src/cub/cub/")
1542 #t))
1543 (add-after 'unpack 'fix-python-build
1544 (lambda* (#:key inputs outputs #:allow-other-keys)
1545 (mkdir-p "protobuf-src")
1546 (invoke "tar" "xf" (assoc-ref inputs "protobuf:src")
1547 "-C" "protobuf-src" "--strip-components=1")
1548 (mkdir-p "eigen-src")
1549 (invoke "tar" "xf" (assoc-ref inputs "eigen:src")
1550 "-C" "eigen-src" "--strip-components=1")
1551
1552 (substitute* "tensorflow/contrib/cmake/tf_python.cmake"
1553 ;; Ensure that all Python dependencies can be found at build time.
1554 (("PYTHONPATH=\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/tf_python" m)
1555 (string-append m ":" (getenv "PYTHONPATH")))
1556 ;; Take protobuf source files from our source package.
1557 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/protobuf/src/protobuf/src/google")
1558 (string-append (getcwd) "/protobuf-src/src/google")))
1559
1560 (substitute* '("tensorflow/contrib/cmake/tf_shared_lib.cmake"
1561 "tensorflow/contrib/cmake/tf_python.cmake")
1562 ;; Take Eigen source files from our source package.
1563 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/eigen/src/eigen/")
1564 (string-append (getcwd) "/eigen-src/"))
1565 ;; Take Eigen headers from our own package.
1566 (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/external/eigen_archive")
1567 (string-append (assoc-ref inputs "eigen") "/include/eigen3")))
1568
1569 ;; Correct the RUNPATH of ops libraries generated for Python.
1570 ;; TODO: this doesn't work :(
1571 ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
1572 ;; warning: RUNPATH contains bogus entries: ("/tmp/guix-build-tensorflow-1.9.0.drv-0/source/tensorflow/contrib/build")
1573 ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
1574 ;; error: depends on 'libpywrap_tensorflow_internal.so', which
1575 ;; cannot be found in RUNPATH ...
1576 (substitute* "tensorflow/contrib/cmake/tf_cc_ops.cmake"
1577 (("set_target_properties.*")
1578 (string-append "set_target_properties(${_AT_TARGET} PROPERTIES \
1579 COMPILE_FLAGS ${target_compile_flags} \
1580 INSTALL_RPATH_USE_LINK_PATH TRUE \
1581 INSTALL_RPATH " (assoc-ref outputs "out") "/lib)\n")))
1582 #t))
1583 (add-after 'build 'build-pip-package
1584 (lambda* (#:key outputs #:allow-other-keys)
1585 (setenv "LDFLAGS"
1586 (string-append "-Wl,-rpath="
1587 (assoc-ref outputs "out") "/lib"))
1588 (invoke "make" "tf_python_build_pip_package")
1589 #t))
1590 (add-after 'build-pip-package 'install-python
1591 (lambda* (#:key outputs #:allow-other-keys)
1592 (let ((out (assoc-ref outputs "out"))
1593 (wheel (car (find-files "../build/tf_python/dist/" "\\.whl$"))))
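;; Install the wheel produced by the previous phase into this
;; package's output.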
1594 (invoke "python" "-m" "pip" "install" wheel
1595 (string-append "--prefix=" out))
1596
1597 ;; XXX: broken RUNPATH, see fix-python-build phase.
1598 (delete-file
1599 (string-append
1600 out "/lib/python3.7/site-packages/tensorflow/contrib/"
1601 "seq2seq/python/ops/lib_beam_search_ops.so"))
1602 #t))))))
1603 (native-inputs
1604 `(("pkg-config" ,pkg-config)
1605 ("protobuf:native" ,protobuf-next) ; protoc
1606 ("protobuf:src" ,(package-source protobuf-next))
1607 ("eigen:src" ,(package-source eigen-for-tensorflow))
1608 ;; The commit hashes and URLs for third-party source code are taken
1609 ;; from "tensorflow/workspace.bzl".
1610 ("boringssl-src"
1611 ,(let ((commit "ee7aa02")
1612 (revision "1"))
1613 (origin
1614 (method git-fetch)
1615 (uri (git-reference
1616 (url "https://boringssl.googlesource.com/boringssl")
1617 (commit commit)))
1618 (file-name (string-append "boringssl-0-" revision
1619 (string-take commit 7)
1620 "-checkout"))
1621 (sha256
1622 (base32
1623 "1jf693q0nw0adsic6cgmbdx6g7wr4rj4vxa8j1hpn792fqhd8wgw")))))
1624 ("cub-src"
1625 ,(let ((version "1.8.0"))
1626 (origin
1627 (method url-fetch)
1628 (uri (string-append "https://mirror.bazel.build/github.com/NVlabs/"
1629 "cub/archive/" version ".zip"))
1630 (file-name (string-append "cub-" version ".zip"))
1631 (sha256
1632 (base32
1633 "1hsqikqridb90dkxkjr2918dcry6pfh46ccnwrzawl56aamhdykb")))))
1634 ("double-conversion-src"
1635 ,(let ((commit "5664746")
1636 (revision "1"))
1637 (origin
1638 (method git-fetch)
1639 (uri (git-reference
1640 (url "https://github.com/google/double-conversion.git")
1641 (commit commit)))
1642 (file-name
1643 (git-file-name "double-conversion"
1644 (string-append "0-" revision "."
1645 (string-take commit 7))))
1646 (sha256
1647 (base32
1648 "1h5lppqqxcvdg5jq42i5msgwx20ryij3apvmndflngrgdpc04gn1")))))
1649 ("farmhash-src"
1650 ,(let ((commit "816a4ae622e964763ca0862d9dbd19324a1eaf45"))
1651 (origin
1652 (method url-fetch)
1653 (uri (string-append
1654 "https://mirror.bazel.build/github.com/google/farmhash/archive/"
1655 commit ".tar.gz"))
1656 (file-name (string-append "farmhash-0-" (string-take commit 7)
1657 ".tar.gz"))
1658 (sha256
1659 (base32
1660 "185b2xdxl4d4cnsnv6abg8s22gxvx8673jq2yaq85bz4cdy58q35")))))
1661 ;; The license notice on the home page at
1662 ;; http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html says:
1663 ;; Copyright Takuya OOURA, 1996-2001
1664 ;;
1665 ;; You may use, copy, modify and distribute this code for any purpose
1666 ;; (include commercial use) and without fee. Please refer to this
1667 ;; package when you modify this code.
1668 ;;
1669 ;; We take the identical tarball from the Bazel mirror, because the URL
1670 ;; at the home page is not versioned and might change.
1671 ("fft2d-src"
1672 ,(origin
1673 (method url-fetch)
1674 (uri "https://mirror.bazel.build/www.kurims.kyoto-u.ac.jp/~ooura/fft.tgz")
1675 (file-name "fft2d.tar.gz")
1676 (sha256
1677 (base32
1678 "15jjkfvhqvl2c0753d2di8hz0pyzn598g74wqy79awdrf1y67fsj"))))
1679 ("highwayhash-src"
1680 ,(let ((commit "be5edafc2e1a455768e260ccd68ae7317b6690ee")
1681 (revision "1"))
1682 (origin
1683 (method git-fetch)
1684 (uri (git-reference
1685 (url "https://github.com/google/highwayhash.git")
1686 (commit commit)))
1687 (file-name (string-append "highwayhash-0-" revision "."
1688 (string-take commit 7)
1689 "-checkout"))
1690 (sha256
1691 (base32
1692 "154jwf98cyy54hldr94pgjn85zynly3abpnc1avmb8a18lzwjyb6")))))
1693 ("nsync-src"
1694 ,(let ((version "0559ce013feac8db639ee1bf776aca0325d28777")
1695 (revision "1"))
1696 (origin
1697 (method url-fetch)
1698 (uri (string-append "https://mirror.bazel.build/"
1699 "github.com/google/nsync/archive/"
1700 version ".tar.gz"))
1701 (file-name (string-append "nsync-0." revision
1702 "-" (string-take version 7)
1703 ".tar.gz"))
1704 (sha256
1705 (base32
1706 "0qdkyqym34x739mmzv97ah5r7ph462v5xkxqxvidmcfqbi64b132")))))
1707 ("re2-src"
1708 ,(let ((commit "e7efc48")
1709 (revision "1"))
1710 (origin
1711 (method git-fetch)
1712 (uri (git-reference
1713 (url "https://github.com/google/re2")
1714 (commit commit)))
1715 (file-name (string-append "re2-0-" revision "."
1716 (string-take commit 7)
1717 "-checkout"))
1718 (sha256
1719 (base32
1720 "161g9841rjfsy5pn52fcis0s9hdr7rxvb06pad38j5rppfihvign")))))
1721 ("googletest" ,googletest)
1722 ("swig" ,swig)
1723 ("unzip" ,unzip)))
1724 (propagated-inputs
1725 `(("python-absl-py" ,python-absl-py)
1726 ("python-astor" ,python-astor)
1727 ("python-gast" ,python-gast)
1728 ("python-grpcio" ,python-grpcio)
1729 ("python-numpy" ,python-numpy)
1730 ("python-protobuf" ,python-protobuf-next)
1731 ("python-six" ,python-six)
1732 ("python-termcolo" ,python-termcolor)
1733 ("python-wheel" ,python-wheel)))
1734 (inputs
1735 `(("c-ares" ,c-ares-next)
1736 ("eigen" ,eigen-for-tensorflow)
1737 ("gemmlowp" ,gemmlowp-for-tensorflow)
1738 ("lmdb" ,lmdb)
1739 ("libjpeg" ,libjpeg)
1740 ("libpng" ,libpng)
1741 ("giflib" ,giflib)
1742 ("grpc" ,grpc)
1743 ("jsoncpp" ,jsoncpp-for-tensorflow)
1744 ("snappy" ,snappy)
1745 ("sqlite" ,sqlite)
1746 ("protobuf" ,protobuf-next)
1747 ("python" ,python-wrapper)
1748 ("zlib" ,zlib)))
1749 (home-page "https://tensorflow.org")
1750 (synopsis "Machine learning framework")
1751 (description
1752 "TensorFlow is a flexible platform for building and training machine
1753 learning models. It provides a library for high-performance numerical
1754 computation and high-level Python APIs, including a sequential API for
1755 beginners that allows users to build models quickly by plugging together
1756 building blocks, and a subclassing API with an imperative style for
1757 advanced research.")
1758 (license license:asl2.0)))
1759
1760 (define-public python-iml
1761 (package
1762 (name "python-iml")
1763 (version "0.6.2")
1764 (source
1765 (origin
1766 (method url-fetch)
1767 (uri (pypi-uri "iml" version))
1768 (sha256
1769 (base32
1770 "1k8szlpm19rcwcxdny9qdm3gmaqq8akb4xlvrzyz8c2d679aak6l"))))
1771 (build-system python-build-system)
1772 (propagated-inputs
1773 `(("ipython" ,python-ipython)
1774 ("nose" ,python-nose)
1775 ("numpy" ,python-numpy)
1776 ("pandas" ,python-pandas)
1777 ("scipy" ,python-scipy)))
1778 (home-page "http://github.com/interpretable-ml/iml")
1779 (synopsis "Interpretable Machine Learning (iML) package")
1780 (description "Interpretable ML (iML) is a set of data type objects,
1781 visualizations, and interfaces that can be used by any method designed to
1782 explain the predictions of machine learning models (or, more generally, the
1783 output of any function). It currently contains the interface and I/O code
1784 from the Shap project, and it may eventually do the same for the Lime project.")
1785 (license license:expat)))
1786
1787 (define-public python-keras-applications
1788 (package
1789 (name "python-keras-applications")
1790 (version "1.0.8")
1791 (source
1792 (origin
1793 (method url-fetch)
1794 (uri (pypi-uri "Keras_Applications" version))
1795 (sha256
1796 (base32
1797 "1rcz31ca4axa6kzhjx4lwqxbg4wvlljkj8qj9a7p9sfd5fhzjyam"))))
1798 (build-system python-build-system)
1799 ;; The tests require Keras, but this package is needed to build Keras.
1800 (arguments '(#:tests? #f))
1801 (propagated-inputs
1802 `(("python-h5py" ,python-h5py)
1803 ("python-numpy" ,python-numpy)))
1804 (native-inputs
1805 `(("python-pytest" ,python-pytest)
1806 ("python-pytest-cov" ,python-pytest-cov)
1807 ("python-pytest-pep8" ,python-pytest-pep8)
1808 ("python-pytest-xdist" ,python-pytest-xdist)))
1809 (home-page "https://github.com/keras-team/keras-applications")
1810 (synopsis "Reference implementations of popular deep learning models")
1811 (description
1812 "This package provides reference implementations of popular deep learning
1813 models for use with the Keras deep learning framework.")
1814 (license license:expat)))
1815
1816 (define-public python-keras-preprocessing
1817 (package
1818 (name "python-keras-preprocessing")
1819 (version "1.1.0")
1820 (source
1821 (origin
1822 (method url-fetch)
1823 (uri (pypi-uri "Keras_Preprocessing" version))
1824 (sha256
1825 (base32
1826 "1r98nm4k1svsqjyaqkfk23i31bl1kcfcyp7094yyj3c43phfp3as"))))
1827 (build-system python-build-system)
1828 (propagated-inputs
1829 `(("python-numpy" ,python-numpy)
1830 ("python-six" ,python-six)))
1831 (native-inputs
1832 `(("python-pandas" ,python-pandas)
1833 ("python-pillow" ,python-pillow)
1834 ("python-pytest" ,python-pytest)
1835 ("python-pytest-cov" ,python-pytest-cov)
1836 ("python-pytest-xdist" ,python-pytest-xdist)
1837 ("tensorflow" ,tensorflow)))
1838 (home-page "https://github.com/keras-team/keras-preprocessing/")
1839 (synopsis "Data preprocessing and augmentation for deep learning models")
1840 (description
1841 "Keras Preprocessing is the data preprocessing and data augmentation
1842 module of the Keras deep learning library. It provides utilities for working
1843 with image data, text data, and sequence data.")
1844 (license license:expat)))
1845
1846 (define-public python-keras
1847 (package
1848 (name "python-keras")
1849 (version "2.2.4")
1850 (source
1851 (origin
1852 (method url-fetch)
1853 (uri (pypi-uri "Keras" version))
1854 (sha256
1855 (base32
1856 "1j8bsqzh49vjdxy6l1k4iwax5vpjzniynyd041xjavdzvfii1dlh"))))
1857 (build-system python-build-system)
1858 (arguments
1859 `(#:phases
1860 (modify-phases %standard-phases
1861 (add-after 'unpack 'remove-tests-for-unavailable-features
1862 (lambda _
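;; This Keras is built without the Theano and CNTK backends, so drop
;; them along with the test that exercises every backend.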
1863 (delete-file "keras/backend/theano_backend.py")
1864 (delete-file "keras/backend/cntk_backend.py")
1865 (delete-file "tests/keras/backend/backend_test.py")
1866
1867 ;; FIXME: This doesn't work because Tensorflow is missing the
1868 ;; coder ops library.
1869 (delete-file "tests/keras/test_callbacks.py")
1870 #t))
1871 (replace 'check
1872 (lambda _
1873 ;; These tests attempt to download data files from the internet.
1874 (delete-file "tests/integration_tests/test_datasets.py")
1875 (delete-file "tests/integration_tests/imagenet_utils_test.py")
1876
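;; Make the modules that were just built visible to the test suite.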
1877 (setenv "PYTHONPATH"
1878 (string-append (getcwd) "/build/lib:"
1879 (getenv "PYTHONPATH")))
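;; "-p" "no:cacheprovider" keeps pytest from writing a cache
;; directory into the source tree.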
1880 (invoke "py.test" "-v"
1881 "-p" "no:cacheprovider"
1882 "--ignore" "keras/utils"))))))
1883 (propagated-inputs
1884 `(("python-h5py" ,python-h5py)
1885 ("python-keras-applications" ,python-keras-applications)
1886 ("python-keras-preprocessing" ,python-keras-preprocessing)
1887 ("python-numpy" ,python-numpy)
1888 ("python-pydot" ,python-pydot)
1889 ("python-pyyaml" ,python-pyyaml)
1890 ("python-scipy" ,python-scipy)
1891 ("python-six" ,python-six)
1892 ("tensorflow" ,tensorflow)
1893 ("graphviz" ,graphviz)))
1894 (native-inputs
1895 `(("python-pandas" ,python-pandas)
1896 ("python-pytest" ,python-pytest)
1897 ("python-pytest-cov" ,python-pytest-cov)
1898 ("python-pytest-pep8" ,python-pytest-pep8)
1899 ("python-pytest-timeout" ,python-pytest-timeout)
1900 ("python-pytest-xdist" ,python-pytest-xdist)
1901 ("python-sphinx" ,python-sphinx)
1902 ("python-requests" ,python-requests)))
1903 (home-page "https://github.com/keras-team/keras")
1904 (synopsis "High-level deep learning framework")
1905 (description "Keras is a high-level neural networks API, written in Python
1906 and capable of running on top of TensorFlow. It was developed with a focus on
1907 enabling fast experimentation. Use Keras if you need a deep learning library
1908 that:
1909
1910 @itemize
1911 @item Allows for easy and fast prototyping (through user friendliness,
1912 modularity, and extensibility).
1913 @item Supports both convolutional networks and recurrent networks, as well as
1914 combinations of the two.
1915 @item Runs seamlessly on CPU and GPU.
1916 @end itemize\n")
1917 (license license:expat)))