;;; gnu/packages/machine-learning.scm
;;; GNU Guix --- Functional package management for GNU
;;; Copyright © 2015, 2016, 2017, 2018, 2019, 2020 Ricardo Wurmus <rekado@elephly.net>
;;; Copyright © 2016, 2020 Efraim Flashner <efraim@flashner.co.il>
;;; Copyright © 2016, 2017, 2020 Marius Bakke <mbakke@fastmail.com>
;;; Copyright © 2016 Hartmut Goebel <h.goebel@crazy-compilers.com>
;;; Copyright © 2018, 2019 Tobias Geerinckx-Rice <me@tobias.gr>
;;; Copyright © 2018 Kei Kebreau <kkebreau@posteo.net>
;;; Copyright © 2018 Mark Meyer <mark@ofosos.org>
;;; Copyright © 2018 Ben Woodcroft <donttrustben@gmail.com>
;;; Copyright © 2018 Fis Trivial <ybbs.daans@hotmail.com>
;;; Copyright © 2018 Julien Lepiller <julien@lepiller.eu>
;;; Copyright © 2018 Björn Höfling <bjoern.hoefling@bjoernhoefling.de>
;;; Copyright © 2019 Nicolas Goaziou <mail@nicolasgoaziou.fr>
;;; Copyright © 2019 Guillaume Le Vaillant <glv@posteo.net>
;;; Copyright © 2019 Brett Gilio <brettg@gnu.org>
;;; Copyright © 2020 Konrad Hinsen <konrad.hinsen@fastmail.net>
;;;
;;; This file is part of GNU Guix.
;;;
;;; GNU Guix is free software; you can redistribute it and/or modify it
;;; under the terms of the GNU General Public License as published by
;;; the Free Software Foundation; either version 3 of the License, or (at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with GNU Guix. If not, see <http://www.gnu.org/licenses/>.

33 (define-module (gnu packages machine-learning)
34 #:use-module ((guix licenses) #:prefix license:)
35 #:use-module (guix packages)
36 #:use-module (guix utils)
37 #:use-module (guix download)
38 #:use-module (guix svn-download)
39 #:use-module (guix build-system asdf)
40 #:use-module (guix build-system cmake)
41 #:use-module (guix build-system gnu)
42 #:use-module (guix build-system ocaml)
43 #:use-module (guix build-system python)
44 #:use-module (guix build-system r)
45 #:use-module (guix git-download)
46 #:use-module (gnu packages)
47 #:use-module (gnu packages adns)
48 #:use-module (gnu packages algebra)
49 #:use-module (gnu packages audio)
50 #:use-module (gnu packages autotools)
51 #:use-module (gnu packages base)
52 #:use-module (gnu packages bash)
53 #:use-module (gnu packages boost)
54 #:use-module (gnu packages check)
55 #:use-module (gnu packages compression)
56 #:use-module (gnu packages cran)
57 #:use-module (gnu packages databases)
58 #:use-module (gnu packages dejagnu)
59 #:use-module (gnu packages gcc)
60 #:use-module (gnu packages glib)
61 #:use-module (gnu packages graphviz)
62 #:use-module (gnu packages gstreamer)
63 #:use-module (gnu packages image)
64 #:use-module (gnu packages linux)
65 #:use-module (gnu packages lisp-xyz)
66 #:use-module (gnu packages maths)
67 #:use-module (gnu packages mpi)
68 #:use-module (gnu packages ocaml)
69 #:use-module (gnu packages onc-rpc)
70 #:use-module (gnu packages perl)
71 #:use-module (gnu packages pkg-config)
72 #:use-module (gnu packages protobuf)
73 #:use-module (gnu packages python)
74 #:use-module (gnu packages python-science)
75 #:use-module (gnu packages python-web)
76 #:use-module (gnu packages python-xyz)
77 #:use-module (gnu packages rpc)
78 #:use-module (gnu packages serialization)
79 #:use-module (gnu packages sphinx)
80 #:use-module (gnu packages statistics)
81 #:use-module (gnu packages sqlite)
82 #:use-module (gnu packages swig)
83 #:use-module (gnu packages web)
84 #:use-module (gnu packages xml)
85 #:use-module (gnu packages xorg)
86 #:use-module (ice-9 match))
87
(define-public fann
  ;; The last release is >100 commits behind, so we package from git.
  (let ((commit "d71d54788bee56ba4cf7522801270152da5209d7"))
    (package
      (name "fann")
      (version (string-append "2.2.0-1." (string-take commit 8)))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/libfann/fann.git")
                      (commit commit)))
                (file-name (string-append name "-" version "-checkout"))
                (sha256
                 (base32
                  "0ibwpfrjs6q2lijs8slxjgzb2llcl6rk3v2ski4r6215g5jjhg3x"))))
      (build-system cmake-build-system)
      (arguments
       `(#:phases
         (modify-phases %standard-phases
           ;; There is no "check" target; run the compiled test driver
           ;; directly from the build tree.
           (replace 'check
             (lambda _
               (with-directory-excursion "tests"
                 (invoke "./fann_tests"))
               #t)))))
      (home-page "http://leenissen.dk/fann/wp/")
      (synopsis "Fast Artificial Neural Network")
      (description
       "FANN is a neural network library, which implements multilayer
artificial neural networks in C with support for both fully connected and
sparsely connected networks.")
      (license license:lgpl2.1))))
119
(define-public libsvm
  (package
    (name "libsvm")
    (version "3.23")
    (source
     (origin
       (method url-fetch)
       (uri (string-append "https://www.csie.ntu.edu.tw/~cjlin/libsvm/"
                           name "-" version ".tar.gz"))
       (sha256
        (base32 "0jpaq0rr92x38p4nk3gjan79ip67m6p80anb28z1d8601miysyi5"))))
    (build-system gnu-build-system)
    (arguments
     `(#:tests? #f                      ;no "check" target
       #:phases
       (modify-phases %standard-phases
         (delete 'configure)
         ;; The makefile has no ‘install’ target, so copy the three
         ;; command-line tools into place by hand.
         (replace 'install
           (lambda* (#:key outputs #:allow-other-keys)
             (let ((bin-dir (string-append (assoc-ref outputs "out")
                                           "/bin/")))
               (mkdir-p bin-dir)
               (for-each (lambda (tool)
                           (copy-file tool (string-append bin-dir tool)))
                         '("svm-train"
                           "svm-predict"
                           "svm-scale")))
             #t)))))
    (home-page "https://www.csie.ntu.edu.tw/~cjlin/libsvm/")
    (synopsis "Library for Support Vector Machines")
    (description
     "LIBSVM is a machine learning library for support vector
classification, (C-SVC, nu-SVC), regression (epsilon-SVR, nu-SVR) and
distribution estimation (one-class SVM). It supports multi-class
classification.")
    (license license:bsd-3)))
156
;; Python bindings built from the same libsvm source tree.
(define-public python-libsvm
  (package (inherit libsvm)
    (name "python-libsvm")
    (build-system gnu-build-system)
    (arguments
     `(#:tests? #f ; no "check" target
       #:make-flags '("-C" "python")
       #:phases
       (modify-phases %standard-phases
         (delete 'configure)
         (replace
          'install ; no ‘install’ target
          (lambda* (#:key inputs outputs #:allow-other-keys)
            ;; Derive the "X.Y" Python version from the end of the python
            ;; store path: take its last five characters (assumed to be an
            ;; "X.Y.Z" version suffix — TODO confirm) and keep "X.Y".
            (let ((site (string-append (assoc-ref outputs "out")
                                       "/lib/python"
                                       (string-take
                                        (string-take-right
                                         (assoc-ref inputs "python") 5) 3)
                                       "/site-packages/")))
              ;; Point the wrapper at the shared object installed next to
              ;; the modules instead of a source-tree-relative path.
              (substitute* "python/svm.py"
                (("../libsvm.so.2") "libsvm.so.2"))
              (mkdir-p site)
              ;; Install the pure-Python modules and the shared library.
              (for-each (lambda (file)
                          (copy-file file (string-append site (basename file))))
                        (find-files "python" "\\.py"))
              (copy-file "libsvm.so.2"
                         (string-append site "libsvm.so.2")))
            #t)))))
    (inputs
     `(("python" ,python)))
    (synopsis "Python bindings of libSVM")))
188
(define-public ghmm
  ;; The latest release candidate is several years and a couple of fixes have
  ;; been published since. This is why we download the sources from the SVN
  ;; repository.
  (let ((svn-revision 2341))
    (package
      (name "ghmm")
      (version (string-append "0.9-rc3-0." (number->string svn-revision)))
      (source (origin
                (method svn-fetch)
                (uri (svn-reference
                      (url "http://svn.code.sf.net/p/ghmm/code/trunk")
                      (revision svn-revision)))
                (file-name (string-append name "-" version "-checkout"))
                (sha256
                 (base32
                  "0qbq1rqp94l530f043qzp8aw5lj7dng9wq0miffd7spd1ff638wq"))))
      (build-system gnu-build-system)
      (arguments
       ;; Pull the python-build-system helpers (e.g. 'python-version' used
       ;; below) into the gnu-build-system build environment.
       `(#:imported-modules (,@%gnu-build-system-modules
                             (guix build python-build-system))
         #:modules ((guix build python-build-system)
                    ,@%gnu-build-system-modules)
         #:phases
         (modify-phases %standard-phases
           ;; The actual sources live in the "ghmm" subdirectory of the
           ;; checkout.
           (add-after 'unpack 'enter-dir
             (lambda _ (chdir "ghmm") #t))
           ;; Run the standard check phase only after 'install — presumably
           ;; the tests need the installed Python bindings (see
           ;; 'fix-PYTHONPATH below); TODO confirm.
           (delete 'check)
           (add-after 'install 'check
             (assoc-ref %standard-phases 'check))
           ;; Make the just-installed site-packages visible to the tests.
           (add-before 'check 'fix-PYTHONPATH
             (lambda* (#:key inputs outputs #:allow-other-keys)
               (let ((python-version (python-version
                                      (assoc-ref inputs "python"))))
                 (setenv "PYTHONPATH"
                         (string-append (getenv "PYTHONPATH")
                                        ":" (assoc-ref outputs "out")
                                        "/lib/python" python-version
                                        "/site-packages")))
               #t))
           ;; Inject an RPATH pointing at the output's lib directory into
           ;; the Python extension link flags so the wrapper finds
           ;; libghmm at run time.
           (add-after 'enter-dir 'fix-runpath
             (lambda* (#:key outputs #:allow-other-keys)
               (substitute* "ghmmwrapper/setup.py"
                 (("^(.*)extra_compile_args = \\[" line indent)
                  (string-append indent
                                 "extra_link_args = [\"-Wl,-rpath="
                                 (assoc-ref outputs "out") "/lib\"],\n"
                                 line
                                 "\"-Wl,-rpath="
                                 (assoc-ref outputs "out")
                                 "/lib\", ")))
               #t))
           (add-after 'enter-dir 'disable-broken-tests
             (lambda _
               (substitute* "tests/Makefile.am"
                 ;; GHMM_SILENT_TESTS is assumed to be a command.
                 (("TESTS_ENVIRONMENT.*") "")
                 ;; Do not build broken tests.
                 (("chmm .*") "")
                 (("read_fa .*") "")
                 (("mcmc .*") "")
                 (("label_higher_order_test.*$")
                  "label_higher_order_test\n"))

               ;; These Python unittests are broken as there is no gato.
               ;; See https://sourceforge.net/p/ghmm/support-requests/3/
               (substitute* "ghmmwrapper/ghmmunittests.py"
                 (("^(.*)def (testNewXML|testMultipleTransitionClasses|testNewXML)"
                   line indent)
                  (string-append indent
                                 "@unittest.skip(\"Disabled by Guix\")\n"
                                 line)))
               #t)))))
      (inputs
       `(("python" ,python-2) ; only Python 2 is supported
         ("libxml2" ,libxml2)))
      (native-inputs
       `(("pkg-config" ,pkg-config)
         ("dejagnu" ,dejagnu)
         ("swig" ,swig)
         ("autoconf" ,autoconf)
         ("automake" ,automake)
         ("libtool" ,libtool)))
      (home-page "http://ghmm.org")
      (synopsis "Hidden Markov Model library")
      (description
       "The General Hidden Markov Model library (GHMM) is a C library with
additional Python bindings implementing a wide range of types of @dfn{Hidden
Markov Models} (HMM) and algorithms: discrete, continuous emissions, basic
training, HMM clustering, HMM mixtures.")
      (license license:lgpl2.0+))))
280
(define-public mcl
  (package
    (name "mcl")
    (version "14.137")
    (source
     (origin
       (method url-fetch)
       ;; Upstream tarball names use dashes where the version has dots.
       (uri (string-append "http://micans.org/mcl/src/mcl-"
                           (string-replace-substring version "." "-")
                           ".tar.gz"))
       (sha256
        (base32 "15xlax3z31lsn62vlg94hkm75nm40q4679amnfg13jm8m2bnhy5m"))))
    (build-system gnu-build-system)
    (arguments
     `(#:configure-flags (list "--enable-blast")))
    (inputs
     `(("perl" ,perl)))
    (synopsis "Clustering algorithm for graphs")
    (description
     "The MCL algorithm is short for the @dfn{Markov Cluster Algorithm}, a
fast and scalable unsupervised cluster algorithm for graphs (also known as
networks) based on simulation of (stochastic) flow in graphs.")
    (home-page "http://micans.org/mcl/")
    ;; In the LICENCE file and web page it says "The software is licensed
    ;; under the GNU General Public License, version 3.", but in several of
    ;; the source code files it suggests GPL3 or later.
    ;; http://listserver.ebi.ac.uk/pipermail/mcl-users/2016/000376.html
    (license license:gpl3)))
310
(define-public ocaml-mcl
  (package
    (name "ocaml-mcl")
    (version "12-068oasis4")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/fhcrc/mcl.git")
             (commit version)))
       (file-name (git-file-name name version))
       (sha256
        (base32
         "0009dc3h2jp3qg5val452wngpqnbfyhbcxylghq0mrjqxx0jdq5p"))))
    (build-system ocaml-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         ;; Patch hard-coded shell paths and make the old OASIS-generated
         ;; build scripts work with a modern OCaml and C compiler.
         (add-before 'configure 'patch-paths
           (lambda _
             (substitute* "configure"
               (("/bin/sh") (which "sh")))
             (substitute* "setup.ml"
               (("LDFLAGS=-fPIC")
                (string-append "LDFLAGS=-fPIC\"; \"SHELL=" (which "sh")))
               (("-std=c89") "-std=gnu99")

               ;; This is a mutable string, which is no longer supported. Use
               ;; a byte buffer instead.
               (("String.make \\(String.length s\\)")
                "Bytes.make (String.length s)")

               ;; These two belong together.
               (("OASISString.replace_chars")
                "Bytes.to_string (OASISString.replace_chars")
               ((" s;")
                " s);"))
             (substitute* "myocamlbuild.ml"
               (("std=c89") "std=gnu99"))
             ;; Since we build with a more recent OCaml, we have to use C99 or
             ;; later. This causes problems with the old C code.
             (substitute* "src/impala/matrix.c"
               (("restrict") "restrict_"))
             #t)))))
    (native-inputs
     `(("ocamlbuild" ,ocamlbuild)))
    (home-page "https://github.com/fhcrc/mcl")
    (synopsis "OCaml wrappers around MCL")
    (description
     "This package provides OCaml bindings for the MCL graph clustering
algorithm.")
    (license license:gpl3)))
363
(define-public randomjungle
  (package
    (name "randomjungle")
    (version "2.1.0")
    (source
     (origin
       (method url-fetch)
       (uri (string-append
             "https://www.imbs.uni-luebeck.de/fileadmin/files/Software"
             "/randomjungle/randomjungle-" version ".tar_.gz"))
       (patches (search-patches "randomjungle-disable-static-build.patch"))
       (sha256
        (base32
         "12c8rf30cla71swx2mf4ww9mfd8jbdw5lnxd7dxhyw1ygrvg6y4w"))))
    (build-system gnu-build-system)
    (arguments
     `(#:configure-flags
       (list "--disable-static"
             (string-append "--with-boost="
                            (assoc-ref %build-inputs "boost")))
       #:phases
       (modify-phases %standard-phases
         ;; -fpermissive downgrades some C++ conformance errors to warnings.
         (add-before 'configure 'set-CXXFLAGS
           (lambda _
             (setenv "CXXFLAGS" "-fpermissive ")
             #t)))))
    (inputs
     `(("boost" ,boost)
       ("gsl" ,gsl)
       ("libxml2" ,libxml2)
       ("zlib" ,zlib)))
    (native-inputs
     `(("gfortran" ,gfortran)
       ("gfortran:lib" ,gfortran "lib")))
    ;; Non-portable assembly instructions are used so building fails on
    ;; platforms other than x86_64 or i686.
    (supported-systems '("x86_64-linux" "i686-linux"))
    (home-page "https://www.imbs.uni-luebeck.de/forschung/software/details.html#c224")
    (synopsis "Implementation of the Random Forests machine learning method")
    (description
     "Random Jungle is an implementation of Random Forests. It is supposed to
analyse high dimensional data. In genetics, it can be used for analysing big
Genome Wide Association (GWA) data. Random Forests is a powerful machine
learning method. Most interesting features are variable selection, missing
value imputation, classifier creation, generalization error estimation and
sample proximities between pairs of cases.")
    (license license:gpl3+)))
412
(define-public openfst
  (package
    (name "openfst")
    (version "1.7.2")
    (source
     (origin
       (method url-fetch)
       (uri (string-append "http://www.openfst.org/twiki/pub/FST/"
                           "FstDownload/openfst-" version ".tar.gz"))
       (sha256
        (base32 "0fqgk8195kz21is09gwzwnrg7fr9526bi9mh4apyskapz27pbhr1"))))
    (build-system gnu-build-system)
    (home-page "http://www.openfst.org")
    (synopsis "Library for weighted finite-state transducers")
    (description "OpenFst is a library for constructing, combining,
optimizing, and searching weighted finite-state transducers (FSTs).")
    (license license:asl2.0)))
430
(define-public shogun
  (package
    (name "shogun")
    (version "6.1.3")
    (source
     (origin
       (method url-fetch)
       (uri (string-append
             "ftp://shogun-toolbox.org/shogun/releases/"
             (version-major+minor version)
             "/sources/shogun-" version ".tar.bz2"))
       (sha256
        (base32
         "1rn9skm3nw6hr7mr3lgp2gfqhi7ii0lyxck7qmqnf8avq349s5jp"))
       (modules '((guix build utils)
                  (ice-9 rdelim)))
       (snippet
        '(begin
           ;; Remove non-free sources and files referencing them
           (for-each delete-file
                     (find-files "src/shogun/classifier/svm/"
                                 "SVMLight\\.(cpp|h)"))
           (for-each delete-file
                     (find-files "examples/undocumented/libshogun/"
                                 (string-append
                                  "(classifier_.*svmlight.*|"
                                  "evaluation_cross_validation_locked_comparison).cpp")))
           ;; Remove non-free functions.
           ;; Strip every "#ifdef USE_SVMLIGHT" ... "#endif //USE_SVMLIGHT"
           ;; region from FILE.  The loop carries 'skipping?' — whether the
           ;; current line lies inside such a region — and drops lines while
           ;; it is set, including the delimiting #ifdef/#endif lines.
           (define (delete-ifdefs file)
             (with-atomic-file-replacement file
               (lambda (in out)
                 (let loop ((line (read-line in 'concat))
                            (skipping? #f))
                   (if (eof-object? line)
                       #t
                       ;; skip-next? is true while inside a region: either we
                       ;; were skipping and have not yet hit the closing
                       ;; #endif, or this very line opens a region.
                       (let ((skip-next?
                              (or (and skipping?
                                       (not (string-prefix?
                                             "#endif //USE_SVMLIGHT" line)))
                                  (string-prefix?
                                   "#ifdef USE_SVMLIGHT" line))))
                         (when (or (not skipping?)
                                   (and skipping? (not skip-next?)))
                           (display line out))
                         (loop (read-line in 'concat) skip-next?)))))))
           (for-each delete-ifdefs
                     (append
                      (find-files "src/shogun/classifier/mkl"
                                  "^MKLClassification\\.cpp")
                      (find-files "src/shogun/classifier/svm"
                                  "^SVMLightOneClass\\.(cpp|h)")
                      (find-files "src/shogun/multiclass"
                                  "^ScatterSVM\\.(cpp|h)")
                      (find-files "src/shogun/kernel/"
                                  "^(Kernel|CombinedKernel|ProductKernel)\\.(cpp|h)")
                      (find-files "src/shogun/regression/svr"
                                  "^(MKLRegression|SVRLight)\\.(cpp|h)")
                      (find-files "src/shogun/transfer/domain_adaptation"
                                  "^DomainAdaptationSVM\\.(cpp|h)")))
           #t))))
    (build-system cmake-build-system)
    (arguments
     '(#:tests? #f ;no check target
       #:phases
       (modify-phases %standard-phases
         ;; These data symlinks point at a git submodule that is not part
         ;; of the release tarball — presumably; TODO confirm.
         (add-after 'unpack 'delete-broken-symlinks
           (lambda _
             (for-each delete-file '("applications/arts/data"
                                     "applications/asp/data"
                                     "applications/easysvm/data"
                                     "applications/msplicer/data"
                                     "applications/ocr/data"
                                     "examples/meta/data"
                                     "examples/undocumented/data"))
             #t))
         ;; Install the R interface into this package's own R library path.
         (add-after 'unpack 'change-R-target-path
           (lambda* (#:key outputs #:allow-other-keys)
             (substitute* '("src/interfaces/r/CMakeLists.txt"
                            "examples/meta/r/CMakeLists.txt")
               (("\\$\\{R_COMPONENT_LIB_PATH\\}")
                (string-append (assoc-ref outputs "out")
                               "/lib/R/library/")))
             #t))
         (add-after 'unpack 'fix-octave-modules
           (lambda* (#:key outputs #:allow-other-keys)
             (substitute* "src/interfaces/octave/CMakeLists.txt"
               (("^include_directories\\(\\$\\{OCTAVE_INCLUDE_DIRS\\}")
                "include_directories(${OCTAVE_INCLUDE_DIRS} ${OCTAVE_INCLUDE_DIRS}/octave")
               ;; change target directory
               (("\\$\\{OCTAVE_OCT_LOCAL_API_FILE_DIR\\}")
                (string-append (assoc-ref outputs "out")
                               "/share/octave/packages")))
             (substitute* '("src/interfaces/octave/swig_typemaps.i"
                            "src/interfaces/octave/sg_print_functions.cpp")
               ;; "octave/config.h" and "octave/oct-obj.h" deprecated in Octave.
               (("octave/config\\.h") "octave/octave-config.h")
               (("octave/oct-obj.h") "octave/ovl.h"))
             #t))
         ;; Place the unbundled rxcpp where the build expects a bundled copy.
         (add-after 'unpack 'move-rxcpp
           (lambda* (#:key inputs #:allow-other-keys)
             (let ((rxcpp-dir "shogun/third-party/rxcpp"))
               (mkdir-p rxcpp-dir)
               (install-file (assoc-ref inputs "rxcpp") rxcpp-dir)
               #t)))
         (add-before 'build 'set-HOME
           ;; $HOME needs to be set at some point during the build phase
           (lambda _ (setenv "HOME" "/tmp") #t)))
       #:configure-flags
       (list "-DCMAKE_BUILD_WITH_INSTALL_RPATH=TRUE"
             "-DUSE_SVMLIGHT=OFF" ;disable proprietary SVMLIGHT
             "-DBUILD_META_EXAMPLES=OFF" ;requires unpackaged ctags
             ;;"-DINTERFACE_JAVA=ON" ;requires unpackaged jblas
             ;;"-DINTERFACE_RUBY=ON" ;requires unpackaged ruby-narray
             ;;"-DINTERFACE_PERL=ON" ;"FindPerlLibs" does not exist
             ;;"-DINTERFACE_LUA=ON" ;fails because lua doesn't build pkgconfig file
             "-DINTERFACE_OCTAVE=ON"
             "-DINTERFACE_PYTHON=ON"
             "-DINTERFACE_R=ON")))
    (inputs
     `(("python" ,python)
       ("numpy" ,python-numpy)
       ("r-minimal" ,r-minimal)
       ("octave" ,octave-cli)
       ("swig" ,swig)
       ("eigen" ,eigen)
       ("hdf5" ,hdf5)
       ("atlas" ,atlas)
       ("arpack" ,arpack-ng)
       ("lapack" ,lapack)
       ("glpk" ,glpk)
       ("libxml2" ,libxml2)
       ("lzo" ,lzo)
       ("zlib" ,zlib)))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("rxcpp" ,rxcpp)))
    ;; Non-portable SSE instructions are used so building fails on platforms
    ;; other than x86_64.
    (supported-systems '("x86_64-linux"))
    (home-page "https://shogun-toolbox.org/")
    (synopsis "Machine learning toolbox")
    (description
     "The Shogun Machine learning toolbox provides a wide range of unified and
efficient Machine Learning (ML) methods. The toolbox seamlessly
combines multiple data representations, algorithm classes, and general purpose
tools. This enables both rapid prototyping of data pipelines and extensibility
in terms of new algorithms.")
    (license license:gpl3+)))
579
(define-public rxcpp
  (package
    (name "rxcpp")
    (version "4.1.0")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/ReactiveX/RxCpp.git")
             (commit (string-append "v" version))))
       (file-name (git-file-name name version))
       (sha256
        (base32 "1rdpa3jlc181jd08nk437aar085h28i45s6nzrv65apb3xyyz0ij"))))
    (build-system cmake-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         ;; Drop -Werror everywhere so compiler warnings do not abort the
         ;; build.
         (add-after 'unpack 'remove-werror
           (lambda _
             (substitute* (find-files ".")
               (("-Werror") ""))
             #t))
         ;; Run the test suite through CTest.
         (replace 'check
           (lambda _
             (invoke "ctest"))))))
    (native-inputs
     `(("catch" ,catch-framework)))
    (home-page "http://reactivex.io/")
    (synopsis "Reactive Extensions for C++")
    (description
     "The Reactive Extensions for C++ (RxCpp) is a library of algorithms for
values-distributed-in-time. ReactiveX is a library for composing asynchronous
and event-based programs by using observable sequences.

It extends the observer pattern to support sequences of data and/or events and
adds operators that allow you to compose sequences together declaratively while
abstracting away concerns about things like low-level threading,
synchronization, thread-safety, concurrent data structures, and non-blocking
I/O.")
    (license license:asl2.0)))
620
(define-public r-adaptivesparsity
  (package
    (name "r-adaptivesparsity")
    (version "1.6")
    (source (origin
              (method url-fetch)
              (uri (cran-uri "AdaptiveSparsity" version))
              (sha256
               (base32
                "0imr5m8mll9j6n4icsv6z9rl5kbnwsp9wvzrg7n90nnmcxq2cz91"))))
    (properties
     `((upstream-name . "AdaptiveSparsity")))
    (build-system r-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         ;; The C++ code links against Armadillo, but upstream's Makevars
         ;; does not say so; add the linker flag.
         (add-after 'unpack 'link-against-armadillo
           (lambda _
             (substitute* "src/Makevars"
               (("PKG_LIBS=" prefix)
                (string-append prefix "-larmadillo")))
             ;; Return #t explicitly like the other phases in this file;
             ;; substitute*'s return value is unspecified.
             #t)))))
    (propagated-inputs
     `(("r-mass" ,r-mass)
       ("r-matrix" ,r-matrix)
       ("r-rcpp" ,r-rcpp)
       ("r-rcpparmadillo" ,r-rcpparmadillo)))
    (inputs
     `(("armadillo" ,armadillo)))
    (home-page "https://cran.r-project.org/web/packages/AdaptiveSparsity")
    (synopsis "Adaptive sparsity models")
    (description
     "This package implements the Figueiredo machine learning algorithm for
adaptive sparsity and the Wong algorithm for adaptively sparse gaussian
geometric models.")
    (license license:lgpl3+)))
656
(define-public gemmlowp-for-tensorflow
  ;; The commit hash is taken from "tensorflow/workspace.bzl".
  (let ((commit "38ebac7b059e84692f53e5938f97a9943c120d98")
        (revision "2"))
    (package
      (name "gemmlowp")
      (version (git-version "0" revision commit))
      (source (origin
                (method url-fetch)
                (uri (string-append "https://mirror.bazel.build/"
                                    "github.com/google/gemmlowp/archive/"
                                    commit ".zip"))
                (file-name (string-append "gemmlowp-" version ".zip"))
                (sha256
                 (base32
                  "0n56s2g8hrssm4w8qj1v58gfm56a04n9v992ixkmvk6zjiralzxq"))))
      (build-system cmake-build-system)
      (arguments
       ;; Enable SSE2 only on x86 systems; other architectures get no
       ;; extra flags.
       `(#:configure-flags
         (list ,@(match (%current-system)
                   ((or "x86_64-linux" "i686-linux")
                    '("-DCMAKE_CXX_FLAGS=-msse2"))
                   (_ '())))
         #:phases
         (modify-phases %standard-phases
           ;; This directory contains the CMakeLists.txt.
           (add-after 'unpack 'chdir
             (lambda _ (chdir "contrib") #t))
           ;; There is no install target
           ;; Install the shared library and the public header directories
           ;; by hand.  Paths are relative to the "contrib" build directory
           ;; entered above.
           (replace 'install
             (lambda* (#:key outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (lib (string-append out "/lib/"))
                      (inc (string-append out "/include/")))
                 (install-file "../build/libeight_bit_int_gemm.so" lib)
                 (for-each (lambda (dir)
                             (let ((target (string-append inc "/" dir)))
                               (mkdir-p target)
                               (for-each (lambda (h)
                                           (install-file h target))
                                         (find-files (string-append "../" dir)
                                                     "\\.h$"))))
                           '("meta" "profiling" "public" "fixedpoint"
                             "eight_bit_int_gemm" "internal"))
                 #t))))))
      (native-inputs
       `(("unzip" ,unzip)))
      (home-page "https://github.com/google/gemmlowp")
      (synopsis "Small self-contained low-precision GEMM library")
      (description
       "This is a small self-contained low-precision @dfn{general matrix
multiplication} (GEMM) library. It is not a full linear algebra library.
Low-precision means that the input and output matrix entries are integers on
at most 8 bits. To avoid overflow, results are internally accumulated on more
than 8 bits, and at the end only some significant 8 bits are kept.")
      (license license:asl2.0))))
713
(define-public dlib
  (package
    (name "dlib")
    (version "19.7")
    (source (origin
              (method url-fetch)
              (uri (string-append
                    "http://dlib.net/files/dlib-" version ".tar.bz2"))
              (sha256
               (base32
                "1mljz02kwkrbggyncxv5fpnyjdybw2qihaacb3js8yfkw12vwpc2"))
              (modules '((guix build utils)))
              (snippet
               '(begin
                  ;; Delete ~13MB of bundled dependencies.
                  (delete-file-recursively "dlib/external")
                  (delete-file-recursively "docs/dlib/external")
                  #t))))
    (build-system cmake-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'disable-asserts
           (lambda _
             ;; config.h recommends explicitly enabling or disabling asserts
             ;; when building as a shared library. By default neither is set.
             (substitute* "dlib/config.h"
               (("^//#define DLIB_DISABLE_ASSERTS") "#define DLIB_DISABLE_ASSERTS"))
             #t))
         ;; Remove per-platform problem tests from the test makefile so
         ;; 'check does not run them.
         (add-after 'disable-asserts 'disable-failing-tests
           (lambda _
             ;; One test times out on MIPS, so we need to disable it.
             ;; Others are flaky on some platforms.
             (let* ((system ,(or (%current-target-system)
                                 (%current-system)))
                    (disabled-tests (cond
                                     ((string-prefix? "mips64" system)
                                      '("object_detector" ; timeout
                                        "data_io"))
                                     ((string-prefix? "armhf" system)
                                      '("learning_to_track"))
                                     ((string-prefix? "i686" system)
                                      '("optimization"))
                                     (else '()))))
               (for-each
                (lambda (test)
                  (substitute* "dlib/test/makefile"
                    (((string-append "SRC \\+= " test "\\.cpp")) "")))
                disabled-tests)
               #t)))
         (replace 'check
           (lambda _
             ;; No test target, so we build and run the unit tests here.
             ;; The tests live in the source tree, which is a sibling of
             ;; the out-of-source CMake build directory — hence "../".
             (let ((test-dir (string-append "../dlib-" ,version "/dlib/test")))
               (with-directory-excursion test-dir
                 (invoke "make" "-j" (number->string (parallel-job-count)))
                 (invoke "./dtest" "--runall"))
               #t)))
         ;; Keep only the shared library in the output.
         (add-after 'install 'delete-static-library
           (lambda* (#:key outputs #:allow-other-keys)
             (delete-file (string-append (assoc-ref outputs "out")
                                         "/lib/libdlib.a"))
             #t)))))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ;; For tests.
       ("libnsl" ,libnsl)))
    (inputs
     `(("giflib" ,giflib)
       ("lapack" ,lapack)
       ("libjpeg" ,libjpeg-turbo)
       ("libpng" ,libpng)
       ("libx11" ,libx11)
       ("openblas" ,openblas)
       ("zlib" ,zlib)))
    (synopsis
     "Toolkit for making machine learning and data analysis applications in C++")
    (description
     "Dlib is a modern C++ toolkit containing machine learning algorithms and
tools. It is used in both industry and academia in a wide range of domains
including robotics, embedded devices, mobile phones, and large high performance
computing environments.")
    (home-page "http://dlib.net")
    (license license:boost1.0)))
798
(define-public python-scikit-learn
  (package
    (name "python-scikit-learn")
    (version "0.22.1")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/scikit-learn/scikit-learn.git")
             (commit version)))
       (file-name (git-file-name name version))
       (sha256
        (base32
         "1xqxv210gsmjw094vc5ghq2y9lmm74qkk22pq6flcjzj51b86jxf"))))
    (build-system python-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         ;; Build the Cython extensions in place so the test suite can run
         ;; against the source tree.
         (add-after 'build 'build-ext
           (lambda _ (invoke "python" "setup.py" "build_ext" "--inplace") #t))
         (replace 'check
           (lambda _
             ;; Restrict OpenBLAS threads to prevent segfaults while testing!
             (setenv "OPENBLAS_NUM_THREADS" "1")

             ;; Some tests require write access to $HOME.
             (setenv "HOME" "/tmp")

             ;; Skip tests marked as needing network access.
             (invoke "pytest" "sklearn" "-m" "not network")))
         (add-before 'reset-gzip-timestamps 'make-files-writable
           (lambda* (#:key outputs #:allow-other-keys)
             ;; Make sure .gz files are writable so that the
             ;; 'reset-gzip-timestamps' phase can do its work.
             (let ((out (assoc-ref outputs "out")))
               (for-each make-file-writable
                         (find-files out "\\.gz$"))
               #t))))))
    (inputs
     `(("openblas" ,openblas)))
    (native-inputs
     `(("python-pytest" ,python-pytest)
       ("python-pandas" ,python-pandas) ;for tests
       ("python-cython" ,python-cython)))
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-scipy" ,python-scipy)
       ("python-joblib" ,python-joblib)))
    (home-page "https://scikit-learn.org/")
    (synopsis "Machine Learning in Python")
    (description
     "Scikit-learn provides simple and efficient tools for data mining and
data analysis.")
    ;; The Python 2 variant is a different (older) release; see below.
    (properties `((python2-variant . ,(delay python2-scikit-learn))))
    (license license:bsd-3)))
853
;; scikit-learn 0.22 and later only supports Python 3, so we stick with
;; an older version here.
(define-public python2-scikit-learn
  (let ((parent (package-with-python2
                 (strip-python2-variant python-scikit-learn))))
    (package
      (inherit parent)
      (version "0.20.4")
      (source
       (origin
         (method git-fetch)
         (uri (git-reference
               (url "https://github.com/scikit-learn/scikit-learn.git")
               (commit version)))
         (file-name (git-file-name "python-scikit-learn" version))
         (sha256
          (base32
           "08zbzi8yx5wdlxfx9jap61vg1malc9ajf576w7a0liv6jvvrxlpj")))))))
870
(define-public python-scikit-rebate
  (package
    (name "python-scikit-rebate")
    (version "0.6")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "skrebate" version))
       (sha256
        (base32 "1h7qs9gjxpzqabzhb8rmpv3jpmi5iq41kqdibg48299h94iikiw7"))))
    (build-system python-build-system)
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-scipy" ,python-scipy)
       ("python-scikit-learn" ,python-scikit-learn)
       ("python-joblib" ,python-joblib)))
    ;; Pandas is only needed to run the tests.
    (native-inputs
     `(("python-pandas" ,python-pandas)))
    (home-page "https://epistasislab.github.io/scikit-rebate/")
    (synopsis "Relief-based feature selection algorithms for Python")
    (description "Scikit-rebate is a scikit-learn-compatible Python
implementation of ReBATE, a suite of Relief-based feature selection algorithms
for Machine Learning. These algorithms excel at identifying features that are
predictive of the outcome in supervised learning problems, and are especially
good at identifying feature interactions that are normally overlooked by
standard feature selection algorithms.")
    (license license:expat)))
899
(define-public python-autograd
  (let* ((commit "442205dfefe407beffb33550846434baa90c4de7")
         (revision "0")
         (version (git-version "0.0.0" revision commit)))
    (package
      (name "python-autograd")
      ;; home-page is defined early so its value can be reused as the git
      ;; URL in the origin below.
      (home-page "https://github.com/HIPS/autograd")
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url home-page)
                      (commit commit)))
                (sha256
                 (base32
                  "189sv2xb0mwnjawa9z7mrgdglc1miaq93pnck26r28fi1jdwg0z4"))
                (file-name (git-file-name name version))))
      (version version)
      (build-system python-build-system)
      (native-inputs
       `(("python-nose" ,python-nose)
         ("python-pytest" ,python-pytest)))
      (propagated-inputs
       `(("python-future" ,python-future)
         ("python-numpy" ,python-numpy)))
      (arguments
       ;; There is no "check" target; run the test suite with pytest.
       `(#:phases (modify-phases %standard-phases
                    (replace 'check
                      (lambda _
                        (invoke "py.test" "-v"))))))
      (synopsis "Efficiently computes derivatives of NumPy code")
      (description "Autograd can automatically differentiate native Python and
NumPy code. It can handle a large subset of Python's features, including loops,
ifs, recursion and closures, and it can even take derivatives of derivatives
of derivatives. It supports reverse-mode differentiation
(a.k.a. backpropagation), which means it can efficiently take gradients of
scalar-valued functions with respect to array-valued arguments, as well as
forward-mode differentiation, and the two can be composed arbitrarily. The
main intended application of Autograd is gradient-based optimization.")
      (license license:expat))))
939
;; Python 2 variant of python-autograd, derived automatically from the
;; Python 3 package above.
(define-public python2-autograd
  (package-with-python2 python-autograd))
942
(define-public lightgbm
  (package
    (name "lightgbm")
    (version "2.0.12")
    (source (origin
              (method url-fetch)
              (uri (string-append
                    "https://github.com/Microsoft/LightGBM/archive/v"
                    version ".tar.gz"))
              (sha256
               (base32
                "132zf0yk0545mg72hyzxm102g3hpb6ixx9hnf8zd2k55gas6cjj1"))
              (file-name (string-append name "-" version ".tar.gz"))))
    (native-inputs
     `(("python-pytest" ,python-pytest)
       ("python-nose" ,python-nose)))
    (inputs
     `(("openmpi" ,openmpi)))
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-scipy" ,python-scipy)))
    (arguments
     `(#:configure-flags
       '("-DUSE_MPI=ON")
       #:phases
       (modify-phases %standard-phases
         (replace 'check
           (lambda* (#:key outputs #:allow-other-keys)
             ;; CMake builds out of tree, so the tests live in the unpacked
             ;; source directory next to the build directory.  The directory
             ;; name is computed at package-expansion time from the version.
             (with-directory-excursion ,(string-append "../LightGBM-" version)
               (invoke "pytest" "tests/c_api_test/test_.py")))))))
    (build-system cmake-build-system)
    (home-page "https://github.com/Microsoft/LightGBM")
    (synopsis "Gradient boosting framework based on decision tree algorithms")
    (description "LightGBM is a gradient boosting framework that uses tree
based learning algorithms. It is designed to be distributed and efficient with
the following advantages:

@itemize
@item Faster training speed and higher efficiency
@item Lower memory usage
@item Better accuracy
@item Parallel and GPU learning supported (not enabled in this package)
@item Capable of handling large-scale data
@end itemize\n")
    (license license:expat)))
988
(define-public vowpal-wabbit
  ;; Language bindings not included.
  (package
    (name "vowpal-wabbit")
    (version "8.5.0")
    (source (origin
              (method git-fetch)
              (uri (git-reference
                    (url "https://github.com/JohnLangford/vowpal_wabbit")
                    (commit version)))
              (sha256
               (base32
                "04bwzk6ifgnz3fmzid8b7avxf9n5pnx9xcjm61nkjng1vv0bpj8x"))
              (file-name (git-file-name name version))))
    (inputs
     `(("boost" ,boost)
       ("zlib" ,zlib)))
    (arguments
     `(#:configure-flags
       (list (string-append "--with-boost="
                            (assoc-ref %build-inputs "boost")))
       #:phases
       (modify-phases %standard-phases
         ;; Files coming out of the store checkout are read-only; make them
         ;; writable so the build can modify them in place.
         (add-after 'unpack 'make-files-writable
           (lambda _
             (for-each make-file-writable (find-files "." ".*")) #t)))))
    (build-system gnu-build-system)
    (home-page "https://github.com/JohnLangford/vowpal_wabbit")
    (synopsis "Fast machine learning library for online learning")
    (description "Vowpal Wabbit is a machine learning system with techniques
such as online, hashing, allreduce, reductions, learning2search, active, and
interactive learning.")
    (license license:bsd-3)))
1022
(define-public python2-fastlmm
  ;; Python 2 only; there is no corresponding python- (Python 3) variant.
  (package
    (name "python2-fastlmm")
    (version "0.2.21")
    (source
     (origin
       (method url-fetch)
       ;; The sdist is distributed as a zip archive, hence the unzip
       ;; native input below.
       (uri (pypi-uri "fastlmm" version ".zip"))
       (sha256
        (base32
         "1q8c34rpmwkfy3r4d5172pzdkpfryj561897z9r3x22gq7813x1m"))))
    (build-system python-build-system)
    (arguments
     `(#:tests? #f ; some test files are missing
       #:python ,python-2)) ; only Python 2.7 is supported
    (propagated-inputs
     `(("python2-numpy" ,python2-numpy)
       ("python2-scipy" ,python2-scipy)
       ("python2-matplotlib" ,python2-matplotlib)
       ("python2-pandas" ,python2-pandas)
       ("python2-scikit-learn" ,python2-scikit-learn)
       ("python2-pysnptools" ,python2-pysnptools)))
    (native-inputs
     `(("unzip" ,unzip)
       ("python2-cython" ,python2-cython)
       ("python2-mock" ,python2-mock)
       ("python2-nose" ,python2-nose)))
    (home-page "http://research.microsoft.com/en-us/um/redmond/projects/mscompbio/fastlmm/")
    (synopsis "Perform genome-wide association studies on large data sets")
    (description
     "FaST-LMM, which stands for Factored Spectrally Transformed Linear Mixed
Models, is a program for performing both single-SNP and SNP-set genome-wide
association studies (GWAS) on extremely large data sets.")
    (license license:asl2.0)))
1057
;; There have been no proper releases yet.
(define-public kaldi
  (let ((commit "d4791c0f3fc1a09c042dac365e120899ee2ad21e")
        (revision "2"))
    (package
      (name "kaldi")
      (version (git-version "0" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/kaldi-asr/kaldi.git")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "07k80my6f19mhrkwbzhjsnpf9871wmrwkl0ym468i830w67qyjrz"))))
      (build-system gnu-build-system)
      (arguments
       `(#:test-target "test"
         #:phases
         (modify-phases %standard-phases
           ;; The build system lives in the "src" subdirectory.
           (add-after 'unpack 'chdir
             (lambda _ (chdir "src") #t))
           (replace 'configure
             (lambda* (#:key build system inputs outputs #:allow-other-keys)
               ;; Drop x86-only SSE flags on other architectures.
               (when (not (or (string-prefix? "x86_64" system)
                              (string-prefix? "i686" system)))
                 (substitute* "makefiles/linux_openblas.mk"
                   (("-msse -msse2") "")))
               (substitute* "makefiles/default_rules.mk"
                 (("/bin/bash") (which "bash")))
               ;; Don't require the bundled portaudio checkout; point the
               ;; Makefiles at our portaudio package instead.
               (substitute* "Makefile"
                 (("ext_depend: check_portaudio")
                  "ext_depend:"))
               (substitute* '("online/Makefile"
                              "onlinebin/Makefile"
                              "gst-plugin/Makefile")
                 (("../../tools/portaudio/install")
                  (assoc-ref inputs "portaudio")))

               ;; This `configure' script doesn't support variables passed as
               ;; arguments, nor does it support "prefix".
               (let ((out (assoc-ref outputs "out"))
                     (openblas (assoc-ref inputs "openblas"))
                     (openfst (assoc-ref inputs "openfst")))
                 (substitute* "configure"
                   (("check_for_slow_expf;") "")
                   ;; This affects the RPATH and also serves as the installation
                   ;; directory.
                   (("KALDILIBDIR=`pwd`/lib")
                    (string-append "KALDILIBDIR=" out "/lib")))
                 (mkdir-p out) ; must exist
                 (setenv "CONFIG_SHELL" (which "bash"))
                 (setenv "OPENFST_VER" ,(package-version openfst))
                 (invoke "./configure"
                         "--use-cuda=no"
                         "--shared"
                         (string-append "--openblas-root=" openblas)
                         (string-append "--fst-root=" openfst)))))
           ;; The online extensions and the GStreamer plugin are not covered
           ;; by the default build target; build them explicitly.  Each
           ;; directory needs its "depend" target run first.
           (add-after 'build 'build-ext-and-gstreamer-plugin
             (lambda _
               (invoke "make" "-C" "online" "depend")
               (invoke "make" "-C" "online")
               (invoke "make" "-C" "onlinebin" "depend")
               (invoke "make" "-C" "onlinebin")
               (invoke "make" "-C" "gst-plugin" "depend")
               (invoke "make" "-C" "gst-plugin")
               #t))
           ;; TODO: also install the executables.
           (replace 'install
             (lambda* (#:key outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (inc (string-append out "/include"))
                      (lib (string-append out "/lib")))
                 (mkdir-p lib)
                 ;; The build phase installed symlinks to the actual
                 ;; libraries. Install the actual targets.
                 (for-each (lambda (file)
                             (let ((target (readlink file)))
                               (delete-file file)
                               (install-file target lib)))
                           (find-files lib "\\.so"))
                 ;; Install headers, preserving their directory layout.
                 (for-each (lambda (file)
                             (let ((target-dir (string-append inc "/" (dirname file))))
                               (install-file file target-dir)))
                           (find-files "." "\\.h"))
                 (install-file "gst-plugin/libgstonlinegmmdecodefaster.so"
                               (string-append lib "/gstreamer-1.0"))
                 #t))))))
      (inputs
       `(("alsa-lib" ,alsa-lib)
         ("gfortran" ,gfortran "lib")
         ("glib" ,glib)
         ("gstreamer" ,gstreamer)
         ("jack" ,jack-1)
         ("openblas" ,openblas)
         ("openfst" ,openfst)
         ("portaudio" ,portaudio)
         ("python" ,python)))
      (native-inputs
       `(("glib" ,glib "bin") ; glib-genmarshal
         ("grep" ,grep)
         ("sed" ,sed)
         ("pkg-config" ,pkg-config)
         ("which" ,which)))
      (home-page "https://kaldi-asr.org/")
      (synopsis "Speech recognition toolkit")
      (description "Kaldi is an extensible toolkit for speech recognition
written in C++.")
      (license license:asl2.0))))
1169
(define-public gst-kaldi-nnet2-online
  (let ((commit "cb227ef43b66a9835c14eb0ad39e08ee03c210ad")
        (revision "2"))
    (package
      (name "gst-kaldi-nnet2-online")
      (version (git-version "0" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/alumae/gst-kaldi-nnet2-online.git")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "1i6ffwiavxx07ri0lxix6s8q0r31x7i4xxvhys5jxkixf5q34w8g"))))
      (build-system gnu-build-system)
      (arguments
       `(#:tests? #f ; there are none
         ;; The plugin needs Kaldi's *source* tree (KALDI_ROOT) at build time
         ;; in addition to the built libraries (KALDILIBDIR); the former is
         ;; provided via the "kaldi-src" native input below.
         #:make-flags
         (list (string-append "SHELL="
                              (assoc-ref %build-inputs "bash") "/bin/bash")
               (string-append "KALDI_ROOT="
                              (assoc-ref %build-inputs "kaldi-src"))
               (string-append "KALDILIBDIR="
                              (assoc-ref %build-inputs "kaldi") "/lib")
               "KALDI_FLAVOR=dynamic")
         #:phases
         (modify-phases %standard-phases
           ;; The Makefile lives in the "src" subdirectory.
           (add-after 'unpack 'chdir
             (lambda _ (chdir "src") #t))
           (replace 'configure
             (lambda* (#:key inputs #:allow-other-keys)
               ;; Point the compiler at the GLib and GStreamer headers.
               (let ((glib (assoc-ref inputs "glib")))
                 (setenv "CXXFLAGS" "-fPIC")
                 (setenv "CPLUS_INCLUDE_PATH"
                         (string-append glib "/include/glib-2.0:"
                                        glib "/lib/glib-2.0/include:"
                                        (assoc-ref inputs "gstreamer")
                                        "/include/gstreamer-1.0")))
               ;; Disable the inclusion of kaldi.mk and the error that would
               ;; abort the build when it is not found.
               (substitute* "Makefile"
                 (("include \\$\\(KALDI_ROOT\\)/src/kaldi.mk") "")
                 (("\\$\\(error Cannot find") "#"))
               #t))
           (add-before 'build 'build-depend
             (lambda* (#:key make-flags #:allow-other-keys)
               (apply invoke "make" "depend" make-flags)))
           ;; There is no "install" target; copy the single plugin library
           ;; into the GStreamer plugin directory.
           (replace 'install
             (lambda* (#:key outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (lib (string-append out "/lib/gstreamer-1.0")))
                 (install-file "libgstkaldinnet2onlinedecoder.so" lib)
                 #t))))))
      (inputs
       `(("glib" ,glib)
         ("gstreamer" ,gstreamer)
         ("jansson" ,jansson)
         ("openfst" ,openfst)
         ("kaldi" ,kaldi)))
      (native-inputs
       `(("bash" ,bash)
         ("glib:bin" ,glib "bin") ; glib-genmarshal
         ("kaldi-src" ,(package-source kaldi))
         ("pkg-config" ,pkg-config)))
      (home-page "https://kaldi-asr.org/")
      (synopsis "Gstreamer plugin for decoding speech")
      (description "This package provides a GStreamer plugin that wraps
Kaldi's @code{SingleUtteranceNnet2Decoder}. It requires iVector-adapted DNN
acoustic models. The iVectors are adapted to the current audio stream
automatically.")
      (license license:asl2.0))))
1240
(define-public kaldi-gstreamer-server
  ;; This is the tip of the py3 branch
  (let ((commit "f68cab490be7eb0da2af1475fbc16655f50a60cb")
        (revision "2"))
    (package
      (name "kaldi-gstreamer-server")
      (version (git-version "0" revision commit))
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url "https://github.com/alumae/kaldi-gstreamer-server.git")
                      (commit commit)))
                (file-name (git-file-name name version))
                (sha256
                 (base32
                  "17lh1368vkg8ngrcbn2phvigzlmalrqg6djx2gg61qq1a0nj87dm"))))
      (build-system gnu-build-system)
      (arguments
       `(#:tests? #f ; there are no tests that can be run automatically
         #:modules ((guix build utils)
                    (guix build gnu-build-system)
                    (srfi srfi-26))
         #:phases
         (modify-phases %standard-phases
           (delete 'configure)
           ;; The sources are pure Python; "building" means byte-compiling
           ;; them with compileall.
           (replace 'build
             (lambda* (#:key outputs #:allow-other-keys)
               ;; Disable hash randomization to ensure the generated .pycs
               ;; are reproducible.
               (setenv "PYTHONHASHSEED" "0")
               (with-directory-excursion "kaldigstserver"
                 ;; See https://github.com/alumae/kaldi-gstreamer-server/issues/232
                 (substitute* "master_server.py"
                   (("\\.replace\\('\\\\.*") ")"))

                 ;; These are Python 2 files; delete them so byte-compilation
                 ;; with Python 3 does not fail.
                 (delete-file "decoder_test.py")
                 (delete-file "test-buffer.py")

                 (for-each (lambda (file)
                             (apply invoke
                                    `("python"
                                      "-m" "compileall"
                                      "-f" ; force rebuild
                                      ,file)))
                           (find-files "." "\\.py$")))
               #t))
           (replace 'install
             (lambda* (#:key inputs outputs #:allow-other-keys)
               (let* ((out (assoc-ref outputs "out"))
                      (bin (string-append out "/bin"))
                      (share (string-append out "/share/kaldi-gstreamer-server/")))
                 ;; Install Python files
                 (with-directory-excursion "kaldigstserver"
                   (for-each (cut install-file <> share)
                             (find-files "." ".*")))

                 ;; Install sample configuration files
                 (for-each (cut install-file <> share)
                           (find-files "." "\\.yaml"))

                 ;; Install executables: generate a shell wrapper per script
                 ;; that sets PYTHONPATH and GST_PLUGIN_PATH before exec'ing
                 ;; the installed Python file.
                 (mkdir-p bin)
                 (let* ((server (string-append bin "/kaldi-gst-server"))
                        (client (string-append bin "/kaldi-gst-client"))
                        (worker (string-append bin "/kaldi-gst-worker"))
                        (PYTHONPATH (getenv "PYTHONPATH"))
                        (GST_PLUGIN_PATH (string-append
                                          (assoc-ref inputs "gst-kaldi-nnet2-online")
                                          "/lib/gstreamer-1.0:${GST_PLUGIN_PATH}"))
                        (wrap (lambda (wrapper what)
                                (with-output-to-file wrapper
                                  (lambda _
                                    (format #t
                                            "#!~a
export PYTHONPATH=~a
export GST_PLUGIN_PATH=~a
exec ~a ~a/~a \"$@\"~%"
                                            (which "bash") PYTHONPATH GST_PLUGIN_PATH
                                            (which "python") share what)))
                                (chmod wrapper #o555))))
                   (for-each wrap
                             (list server client worker)
                             (list "master_server.py"
                                   "client.py"
                                   "worker.py")))
                 #t))))))
      (inputs
       `(("gst-kaldi-nnet2-online" ,gst-kaldi-nnet2-online)
         ("python" ,python-wrapper)
         ("python-pygobject" ,python-pygobject)
         ("python-pyyaml" ,python-pyyaml)
         ("python-tornado" ,python-tornado-6)))
      (home-page "https://github.com/alumae/kaldi-gstreamer-server")
      (synopsis "Real-time full-duplex speech recognition server")
      (description "This is a real-time full-duplex speech recognition server,
based on the Kaldi toolkit and the GStreamer framework and implemented in
Python.")
      (license license:bsd-2))))
1340
1341 ;; Note that Tensorflow includes a "third_party" directory, which seems to not
1342 ;; only contain modified subsets of upstream library source code, but also
1343 ;; adapter headers provided by Google (such as the fft.h header, which is not
1344 ;; part of the upstream project code). The Tensorflow code includes headers
1345 ;; from the "third_party" directory. It does not look like we can replace
1346 ;; these headers with unmodified upstream files, so we keep them.
(define-public tensorflow
  (package
    (name "tensorflow")
    (version "1.9.0")
    (source
     (origin
       (method git-fetch)
       (uri (git-reference
             (url "https://github.com/tensorflow/tensorflow.git")
             (commit (string-append "v" version))))
       (file-name (string-append "tensorflow-" version "-checkout"))
       (sha256
        (base32
         "0a9kwha395g3wgxfwln5j8vn9nkspmd75xldrlqdq540w996g8xa"))))
    (build-system cmake-build-system)
    (arguments
     `(#:tests? #f ; no "check" target
       #:build-type "Release"
       #:configure-flags
       (let ((protobuf (assoc-ref %build-inputs "protobuf"))
             (protobuf:native (assoc-ref %build-inputs "protobuf:native"))
             (jsoncpp (assoc-ref %build-inputs "jsoncpp"))
             (snappy (assoc-ref %build-inputs "snappy"))
             (sqlite (assoc-ref %build-inputs "sqlite")))
         (list
          ;; Use protobuf from Guix
          (string-append "-Dprotobuf_STATIC_LIBRARIES="
                         protobuf "/lib/libprotobuf.so")
          (string-append "-DPROTOBUF_PROTOC_EXECUTABLE="
                         protobuf:native "/bin/protoc")

          ;; Use snappy from Guix
          (string-append "-Dsnappy_STATIC_LIBRARIES="
                         snappy "/lib/libsnappy.so")
          ;; Yes, this is not actually the include directory but a prefix...
          (string-append "-Dsnappy_INCLUDE_DIR=" snappy)

          ;; Use jsoncpp from Guix
          (string-append "-Djsoncpp_STATIC_LIBRARIES="
                         jsoncpp "/lib/libjsoncpp.so")
          ;; Yes, this is not actually the include directory but a prefix...
          (string-append "-Djsoncpp_INCLUDE_DIR=" jsoncpp)

          ;; Use sqlite from Guix
          (string-append "-Dsqlite_STATIC_LIBRARIES="
                         sqlite "/lib/libsqlite.a")

          ;; Use system libraries wherever possible. Currently, this
          ;; only affects zlib.
          "-Dsystemlib_ALL=ON"
          "-Dtensorflow_ENABLE_POSITION_INDEPENDENT_CODE=ON"
          "-Dtensorflow_BUILD_SHARED_LIB=ON"
          "-Dtensorflow_OPTIMIZE_FOR_NATIVE_ARCH=OFF"
          "-Dtensorflow_ENABLE_SSL_SUPPORT=OFF"
          "-Dtensorflow_BUILD_CONTRIB_KERNELS=OFF"))
       #:make-flags
       (list "CC=gcc")
       #:modules ((ice-9 ftw)
                  (guix build utils)
                  (guix build cmake-build-system)
                  ((guix build python-build-system)
                   #:select (python-version)))
       #:imported-modules (,@%cmake-build-system-modules
                           (guix build python-build-system))
       #:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'set-source-file-times-to-1980
           ;; At the end of the tf_python_build_pip_package target, a ZIP
           ;; archive should be generated via bdist_wheel, but it fails with
           ;; "ZIP does not support timestamps before 1980". Luckily,
           ;; SOURCE_DATE_EPOCH is respected, which we set to some time in
           ;; 1980.
           (lambda _ (setenv "SOURCE_DATE_EPOCH" "315532800") #t))
         ;; See https://github.com/tensorflow/tensorflow/issues/20517#issuecomment-406373913
         (add-after 'unpack 'python3.7-compatibility
           (lambda _
             (substitute* '("tensorflow/python/eager/pywrap_tfe_src.cc"
                            "tensorflow/python/lib/core/ndarray_tensor.cc"
                            "tensorflow/python/lib/core/py_func.cc")
               (("PyUnicode_AsUTF8") "(char *)PyUnicode_AsUTF8"))
             (substitute* "tensorflow/c/eager/c_api.h"
               (("unsigned char async")
                "unsigned char is_async"))

             ;; Remove dependency on tensorboard, a complicated but probably
             ;; optional package.
             (substitute* "tensorflow/tools/pip_package/setup.py"
               ((".*'tensorboard >.*") ""))

             ;; Fix the build with python-3.8, taken from rejected upstream patch:
             ;; https://github.com/tensorflow/tensorflow/issues/34197
             (substitute* (find-files "tensorflow/python" ".*\\.cc$")
               (("(nullptr,)(\\ +/. tp_print)" _ _ tp_print)
                (string-append "NULL, " tp_print)))
             #t))
         (add-after 'python3.7-compatibility 'chdir
           (lambda _ (chdir "tensorflow/contrib/cmake") #t))
         (add-after 'chdir 'disable-downloads
           (lambda* (#:key inputs #:allow-other-keys)
             (substitute* (find-files "external" "\\.cmake$")
               (("GIT_REPOSITORY.*") "")
               (("GIT_TAG.*") "")
               (("PREFIX ")
                "DOWNLOAD_COMMAND \"\"\nPREFIX "))

             ;; Use packages from Guix
             (let ((grpc (assoc-ref inputs "grpc")))
               (substitute* "CMakeLists.txt"
                 ;; Sqlite
                 (("include\\(sqlite\\)") "")
                 (("\\$\\{sqlite_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "sqlite")
                                 "/lib/libsqlite3.so"))
                 (("sqlite_copy_headers_to_destination") "")

                 ;; PNG
                 (("include\\(png\\)") "")
                 (("\\$\\{png_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "libpng")
                                 "/lib/libpng16.so"))
                 (("png_copy_headers_to_destination") "")

                 ;; JPEG
                 (("include\\(jpeg\\)") "")
                 (("\\$\\{jpeg_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "libjpeg")
                                 "/lib/libjpeg.so"))
                 (("jpeg_copy_headers_to_destination") "")

                 ;; GIF
                 (("include\\(gif\\)") "")
                 (("\\$\\{gif_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "giflib")
                                 "/lib/libgif.so"))
                 (("gif_copy_headers_to_destination") "")

                 ;; lmdb
                 (("include\\(lmdb\\)") "")
                 (("\\$\\{lmdb_STATIC_LIBRARIES\\}")
                  (string-append (assoc-ref inputs "lmdb")
                                 "/lib/liblmdb.so"))
                 (("lmdb_copy_headers_to_destination") "")

                 ;; Protobuf
                 (("include\\(protobuf\\)") "")
                 (("protobuf_copy_headers_to_destination") "")
                 (("^ +protobuf") "")

                 ;; gRPC
                 (("include\\(grpc\\)")
                  "find_package(grpc REQUIRED NAMES gRPC)")
                 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES grpc\\)") "")

                 ;; Eigen
                 (("include\\(eigen\\)")
                  (string-append "find_package(eigen REQUIRED NAMES Eigen3)
set(eigen_INCLUDE_DIRS ${CMAKE_CURRENT_BINARY_DIR}/external/eigen_archive "
                                 (assoc-ref inputs "eigen") "/include/eigen3)"))
                 (("^ +eigen") "")

                 ;; snappy
                 (("include\\(snappy\\)")
                  "add_definitions(-DTF_USE_SNAPPY)")
                 (("list\\(APPEND tensorflow_EXTERNAL_DEPENDENCIES snappy\\)") "")

                 ;; jsoncpp
                 (("include\\(jsoncpp\\)") "")
                 (("^ +jsoncpp") ""))

               (substitute* "tf_core_framework.cmake"
                 ((" grpc") "")
                 (("\\$\\{GRPC_BUILD\\}/grpc_cpp_plugin")
                  (which "grpc_cpp_plugin"))
                 ;; Link with gRPC libraries
                 (("add_library\\(tf_protos_cc.*" m)
                  (string-append m
                                 (format #f "\ntarget_link_libraries(tf_protos_cc PRIVATE \
~a/lib/libgrpc++_unsecure.a \
~a/lib/libgrpc_unsecure.a \
~a/lib/libaddress_sorting.a \
~a/lib/libgpr.a \
~a/lib/libcares.so
)\n"
                                         grpc grpc grpc grpc
                                         (assoc-ref inputs "c-ares"))))))
             (substitute* "tf_tools.cmake"
               (("add_dependencies\\(\\$\\{proto_text.*") ""))
             ;; Remove dependency on bundled grpc
             (substitute* "tf_core_distributed_runtime.cmake"
               (("tf_core_cpu grpc") "tf_core_cpu"))

             ;; This directory is a dependency of many targets.
             (mkdir-p "protobuf")
             #t))
         (add-after 'configure 'unpack-third-party-sources
           (lambda* (#:key inputs #:allow-other-keys)
             ;; This is needed to configure bundled packages properly.
             (setenv "CONFIG_SHELL" (which "bash"))
             (for-each
              (lambda (name)
                (let* ((what (assoc-ref inputs (string-append name "-src")))
                       (name* (string-map (lambda (c)
                                            (if (char=? c #\-)
                                                #\_ c)) name))
                       (where (string-append "../build/" name* "/src/" name*)))
                  (cond
                   ((string-suffix? ".zip" what)
                    (mkdir-p where)
                    (with-directory-excursion where
                      (invoke "unzip" what)))
                   ((string-suffix? ".tar.gz" what)
                    (mkdir-p where)
                    (invoke "tar" "xf" what
                            "-C" where "--strip-components=1"))
                   (else
                    (let ((parent (dirname where)))
                      (mkdir-p parent)
                      (with-directory-excursion parent
                        (when (file-exists? name*)
                          (delete-file-recursively name*))
                        (copy-recursively what name*)
                        (map make-file-writable
                             (find-files name* ".*"))))))))
              (list "boringssl"
                    "cub"
                    "double-conversion"
                    "farmhash"
                    "fft2d"
                    "highwayhash"
                    "nsync"
                    "re2"))

             (rename-file "../build/cub/src/cub/cub-1.8.0/"
                          "../build/cub/src/cub/cub/")
             #t))
         (add-after 'unpack 'fix-python-build
           (lambda* (#:key inputs outputs #:allow-other-keys)
             (mkdir-p "protobuf-src")
             (invoke "tar" "xf" (assoc-ref inputs "protobuf:src")
                     "-C" "protobuf-src" "--strip-components=1")
             (mkdir-p "eigen-src")
             (invoke "tar" "xf" (assoc-ref inputs "eigen:src")
                     "-C" "eigen-src" "--strip-components=1")

             (substitute* "tensorflow/contrib/cmake/tf_python.cmake"
               ;; Ensure that all Python dependencies can be found at build time.
               (("PYTHONPATH=\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/tf_python" m)
                (string-append m ":" (getenv "PYTHONPATH")))
               ;; Take protobuf source files from our source package.
               (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/protobuf/src/protobuf/src/google")
                (string-append (getcwd) "/protobuf-src/src/google")))

             (substitute* '("tensorflow/contrib/cmake/tf_shared_lib.cmake"
                            "tensorflow/contrib/cmake/tf_python.cmake")
               ;; Take Eigen source files from our source package.
               (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/eigen/src/eigen/")
                (string-append (getcwd) "/eigen-src/"))
               ;; Take Eigen headers from our own package.
               (("\\$\\{CMAKE_CURRENT_BINARY_DIR\\}/external/eigen_archive")
                (string-append (assoc-ref inputs "eigen") "/include/eigen3")))

             ;; Correct the RUNPATH of ops libraries generated for Python.
             ;; TODO: this doesn't work :(
             ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
             ;; warning: RUNPATH contains bogus entries: ("/tmp/guix-build-tensorflow-1.9.0.drv-0/source/tensorflow/contrib/build")
             ;; /gnu/store/...-tensorflow-1.9.0/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/lib_beam_search_ops.so:
             ;; error: depends on 'libpywrap_tensorflow_internal.so', which
             ;; cannot be found in RUNPATH ...
             (substitute* "tensorflow/contrib/cmake/tf_cc_ops.cmake"
               (("set_target_properties.*")
                (string-append "set_target_properties(${_AT_TARGET} PROPERTIES \
COMPILE_FLAGS ${target_compile_flags} \
INSTALL_RPATH_USE_LINK_PATH TRUE \
INSTALL_RPATH " (assoc-ref outputs "out") "/lib)\n")))
             #t))
         (add-after 'build 'build-pip-package
           (lambda* (#:key outputs #:allow-other-keys)
             (setenv "LDFLAGS"
                     (string-append "-Wl,-rpath="
                                    (assoc-ref outputs "out") "/lib"))
             (invoke "make" "tf_python_build_pip_package")
             #t))
         (add-after 'build-pip-package 'install-python
           (lambda* (#:key inputs outputs #:allow-other-keys)
             (let ((out (assoc-ref outputs "out"))
                   (wheel (car (find-files "../build/tf_python/dist/" "\\.whl$")))
                   (python-version (python-version
                                    (assoc-ref inputs "python"))))
               (invoke "python" "-m" "pip" "install" wheel
                       (string-append "--prefix=" out))

               ;; XXX: broken RUNPATH, see fix-python-build phase.
               (delete-file
                (string-append
                 out "/lib/python" python-version
                 "/site-packages/tensorflow/contrib/"
                 "seq2seq/python/ops/lib_beam_search_ops.so"))
               #t))))))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("protobuf:native" ,protobuf-3.6) ; protoc
       ("protobuf:src" ,(package-source protobuf-3.6))
       ("eigen:src" ,(package-source eigen-for-tensorflow))
       ;; install_pip_packages.sh wants setuptools 39.1.0 specifically.
       ("python-setuptools" ,python-setuptools-for-tensorflow)

       ;; The commit hashes and URLs for third-party source code are taken
       ;; from "tensorflow/workspace.bzl".
       ("boringssl-src"
        ,(let ((commit "ee7aa02")
               (revision "1"))
           (origin
             (method git-fetch)
             (uri (git-reference
                   (url "https://boringssl.googlesource.com/boringssl")
                   (commit commit)))
             (file-name (string-append "boringssl-0-" revision
                                       (string-take commit 7)
                                       "-checkout"))
             (sha256
              (base32
               "1jf693q0nw0adsic6cgmbdx6g7wr4rj4vxa8j1hpn792fqhd8wgw")))))
       ("cub-src"
        ,(let ((version "1.8.0"))
           (origin
             (method url-fetch)
             (uri (string-append "https://mirror.bazel.build/github.com/NVlabs/"
                                 "cub/archive/" version ".zip"))
             (file-name (string-append "cub-" version ".zip"))
             (sha256
              (base32
               "1hsqikqridb90dkxkjr2918dcry6pfh46ccnwrzawl56aamhdykb")))))
       ("double-conversion-src"
        ,(let ((commit "5664746")
               (revision "1"))
           (origin
             (method git-fetch)
             (uri (git-reference
                   (url "https://github.com/google/double-conversion.git")
                   (commit commit)))
             (file-name
              (git-file-name "double-conversion"
                             (string-append "0-" revision "."
                                            (string-take commit 7))))
             (sha256
              (base32
               "1h5lppqqxcvdg5jq42i5msgwx20ryij3apvmndflngrgdpc04gn1")))))
       ("farmhash-src"
        ,(let ((commit "816a4ae622e964763ca0862d9dbd19324a1eaf45"))
           (origin
             (method url-fetch)
             (uri (string-append
                   "https://mirror.bazel.build/github.com/google/farmhash/archive/"
                   commit ".tar.gz"))
             (file-name (string-append "farmhash-0-" (string-take commit 7)
                                       ".tar.gz"))
             (sha256
              (base32
               "185b2xdxl4d4cnsnv6abg8s22gxvx8673jq2yaq85bz4cdy58q35")))))
       ;; The license notice on the home page at
       ;; http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html says:
       ;; Copyright Takuya OOURA, 1996-2001
       ;;
       ;; You may use, copy, modify and distribute this code for any purpose
       ;; (include commercial use) and without fee. Please refer to this
       ;; package when you modify this code.
       ;;
       ;; We take the identical tarball from the Bazel mirror, because the URL
       ;; at the home page is not versioned and might change.
       ("fft2d-src"
        ,(origin
           (method url-fetch)
           (uri "https://mirror.bazel.build/www.kurims.kyoto-u.ac.jp/~ooura/fft.tgz")
           (file-name "fft2d.tar.gz")
           (sha256
            (base32
             "15jjkfvhqvl2c0753d2di8hz0pyzn598g74wqy79awdrf1y67fsj"))))
       ("highwayhash-src"
        ,(let ((commit "be5edafc2e1a455768e260ccd68ae7317b6690ee")
               (revision "1"))
           (origin
             (method git-fetch)
             (uri (git-reference
                   (url "https://github.com/google/highwayhash.git")
                   (commit commit)))
             (file-name (string-append "highwayhash-0-" revision
                                       (string-take commit 7)
                                       "-checkout"))
             (sha256
              (base32
               "154jwf98cyy54hldr94pgjn85zynly3abpnc1avmb8a18lzwjyb6")))))
       ("nsync-src"
        ,(let ((version "0559ce013feac8db639ee1bf776aca0325d28777")
               (revision "1"))
           (origin
             (method url-fetch)
             (uri (string-append "https://mirror.bazel.build/"
                                 "github.com/google/nsync/archive/"
                                 version ".tar.gz"))
             (file-name (string-append "nsync-0." revision
                                       "-" (string-take version 7)
                                       ".tar.gz"))
             (sha256
              (base32
               "0qdkyqym34x739mmzv97ah5r7ph462v5xkxqxvidmcfqbi64b132")))))
       ("re2-src"
        ,(let ((commit "e7efc48")
               (revision "1"))
           (origin
             (method git-fetch)
             (uri (git-reference
                   (url "https://github.com/google/re2")
                   (commit commit)))
             (file-name (string-append "re2-0-" revision
                                       (string-take commit 7)
                                       "-checkout"))
             (sha256
              (base32
               "161g9841rjfsy5pn52fcis0s9hdr7rxvb06pad38j5rppfihvign")))))
       ("googletest" ,googletest)
       ("swig" ,swig)
       ("unzip" ,unzip)))
    (propagated-inputs
     `(("python-absl-py" ,python-absl-py)
       ("python-astor" ,python-astor)
       ("python-gast" ,python-gast)
       ("python-grpcio" ,python-grpcio)
       ("python-numpy" ,python-numpy)
       ("python-protobuf" ,python-protobuf-3.6)
       ("python-six" ,python-six)
       ("python-termcolor" ,python-termcolor)
       ("python-wheel" ,python-wheel)))
    (inputs
     `(("c-ares" ,c-ares)
       ("eigen" ,eigen-for-tensorflow)
       ("gemmlowp" ,gemmlowp-for-tensorflow)
       ("lmdb" ,lmdb)
       ("libjpeg" ,libjpeg-turbo)
       ("libpng" ,libpng)
       ("giflib" ,giflib)
       ("grpc" ,grpc-1.16.1 "static")
       ("grpc:bin" ,grpc-1.16.1)
       ("jsoncpp" ,jsoncpp-for-tensorflow)
       ("snappy" ,snappy)
       ("sqlite" ,sqlite)
       ("protobuf" ,protobuf-3.6)
       ("python" ,python-wrapper)
       ("zlib" ,zlib)))
    (home-page "https://tensorflow.org")
    (synopsis "Machine learning framework")
    (description
     "TensorFlow is a flexible platform for building and training machine
learning models. It provides a library for high performance numerical
computation and includes high level Python APIs, including both a sequential
API for beginners that allows users to build models quickly by plugging
together building blocks and a subclassing API with an imperative style for
advanced research.")
    (license license:asl2.0)))
1805
;; Interpretability helpers (shared IO/interface code of the SHAP project);
;; fetched from PyPI as the "iml" distribution.
(define-public python-iml
  (package
    (name "python-iml")
    (version "0.6.2")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "iml" version))
       (sha256
        (base32
         "1k8szlpm19rcwcxdny9qdm3gmaqq8akb4xlvrzyz8c2d679aak6l"))))
    (build-system python-build-system)
    ;; These mirror the distribution's install_requires, so they must be
    ;; propagated rather than plain inputs.
    (propagated-inputs
     `(("ipython" ,python-ipython)
       ("nose" ,python-nose)
       ("numpy" ,python-numpy)
       ("pandas" ,python-pandas)
       ("scipy" ,python-scipy)))
    (home-page "https://github.com/interpretable-ml/iml")
    (synopsis "Interpretable Machine Learning (iML) package")
    (description "Interpretable ML (iML) is a set of data type objects,
visualizations, and interfaces that can be used by any method designed to
explain the predictions of machine learning models (or really the output of
any function). It currently contains the interface and IO code from the Shap
project, and it will potentially also do the same for the Lime project.")
    (license license:expat)))
1832
;; Reference deep-learning model implementations; a dependency of python-keras
;; below (note the PyPI distribution name uses an underscore).
(define-public python-keras-applications
  (package
    (name "python-keras-applications")
    (version "1.0.8")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "Keras_Applications" version))
       (sha256
        (base32
         "1rcz31ca4axa6kzhjx4lwqxbg4wvlljkj8qj9a7p9sfd5fhzjyam"))))
    (build-system python-build-system)
    ;; The tests require Keras, but this package is needed to build Keras.
    ;; Running them here would create a dependency cycle.
    (arguments '(#:tests? #f))
    (propagated-inputs
     `(("python-h5py" ,python-h5py)
       ("python-numpy" ,python-numpy)))
    ;; Kept for completeness even though tests are disabled (see above).
    (native-inputs
     `(("python-pytest" ,python-pytest)
       ("python-pytest-cov" ,python-pytest-cov)
       ("python-pytest-pep8" ,python-pytest-pep8)
       ("python-pytest-xdist" ,python-pytest-xdist)))
    (home-page "https://github.com/keras-team/keras-applications")
    (synopsis "Reference implementations of popular deep learning models")
    (description
     "This package provides reference implementations of popular deep learning
models for use with the Keras deep learning framework.")
    (license license:expat)))
1861
;; Data preprocessing/augmentation module of Keras; a dependency of
;; python-keras below.
(define-public python-keras-preprocessing
  (package
    (name "python-keras-preprocessing")
    (version "1.1.0")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "Keras_Preprocessing" version))
       (sha256
        (base32
         "1r98nm4k1svsqjyaqkfk23i31bl1kcfcyp7094yyj3c43phfp3as"))))
    (build-system python-build-system)
    (propagated-inputs
     `(("python-numpy" ,python-numpy)
       ("python-six" ,python-six)))
    ;; Test-only dependencies; tensorflow is needed as the Keras backend when
    ;; running the test suite.
    (native-inputs
     `(("python-pandas" ,python-pandas)
       ("python-pillow" ,python-pillow)
       ("python-pytest" ,python-pytest)
       ("python-pytest-cov" ,python-pytest-cov)
       ("python-pytest-xdist" ,python-pytest-xdist)
       ("tensorflow" ,tensorflow)))
    (home-page "https://github.com/keras-team/keras-preprocessing/")
    (synopsis "Data preprocessing and augmentation for deep learning models")
    (description
     "Keras Preprocessing is the data preprocessing and data augmentation
module of the Keras deep learning library. It provides utilities for working
with image data, text data, and sequence data.")
    (license license:expat)))
1891
;; High-level neural-network API running on top of the tensorflow package
;; defined earlier in this file.
(define-public python-keras
  (package
    (name "python-keras")
    (version "2.2.4")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "Keras" version))
       (patches (search-patches "python-keras-integration-test.patch"))
       (sha256
        (base32
         "1j8bsqzh49vjdxy6l1k4iwax5vpjzniynyd041xjavdzvfii1dlh"))))
    (build-system python-build-system)
    (arguments
     `(#:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'remove-tests-for-unavailable-features
           (lambda _
             ;; Guix only packages the TensorFlow backend, so drop the Theano
             ;; and CNTK backends and the backend-comparison test.
             (delete-file "keras/backend/theano_backend.py")
             (delete-file "keras/backend/cntk_backend.py")
             (delete-file "tests/keras/backend/backend_test.py")

             ;; FIXME: This doesn't work because Tensorflow is missing the
             ;; coder ops library.
             (delete-file "tests/keras/test_callbacks.py")
             #t))
         (replace 'check
           (lambda _
             ;; These tests attempt to download data files from the internet.
             (delete-file "tests/integration_tests/test_datasets.py")
             (delete-file "tests/integration_tests/imagenet_utils_test.py")

             ;; Make the freshly built modules visible to the test suite.
             (setenv "PYTHONPATH"
                     (string-append (getcwd) "/build/lib:"
                                    (getenv "PYTHONPATH")))
             ;; NOTE(review): keras/utils is skipped wholesale; presumably
             ;; those tests fail in this environment — confirm before lifting.
             (invoke "py.test" "-v"
                     "-p" "no:cacheprovider"
                     "--ignore" "keras/utils"))))))
    (propagated-inputs
     `(("python-h5py" ,python-h5py)
       ("python-keras-applications" ,python-keras-applications)
       ("python-keras-preprocessing" ,python-keras-preprocessing)
       ("python-numpy" ,python-numpy)
       ("python-pydot" ,python-pydot)
       ("python-pyyaml" ,python-pyyaml)
       ("python-scipy" ,python-scipy)
       ("python-six" ,python-six)
       ("tensorflow" ,tensorflow)
       ("graphviz" ,graphviz)))
    ;; Test-only dependencies.
    (native-inputs
     `(("python-pandas" ,python-pandas)
       ("python-pytest" ,python-pytest)
       ("python-pytest-cov" ,python-pytest-cov)
       ("python-pytest-pep8" ,python-pytest-pep8)
       ("python-pytest-timeout" ,python-pytest-timeout)
       ("python-pytest-xdist" ,python-pytest-xdist)
       ("python-sphinx" ,python-sphinx)
       ("python-requests" ,python-requests)))
    (home-page "https://github.com/keras-team/keras")
    (synopsis "High-level deep learning framework")
    (description "Keras is a high-level neural networks API, written in Python
and capable of running on top of TensorFlow. It was developed with a focus on
enabling fast experimentation. Use Keras if you need a deep learning library
that:

@itemize
@item Allows for easy and fast prototyping (through user friendliness,
modularity, and extensibility).
@item Supports both convolutional networks and recurrent networks, as well as
combinations of the two.
@item Runs seamlessly on CPU and GPU.
@end itemize\n")
    (license license:expat)))
1965
;; Common Lisp reader for the LibSVM sparse data format, built with SBCL.
(define-public sbcl-cl-libsvm-format
  ;; Upstream has no release tags, so pin a commit and derive a git version.
  (let ((commit "3300f84fd8d9f5beafc114f543f9d83417c742fb")
        (revision "0"))
    (package
      (name "sbcl-cl-libsvm-format")
      (version (git-version "0.1.0" revision commit))
      (source
       (origin
         (method git-fetch)
         (uri (git-reference
               (url "https://github.com/masatoi/cl-libsvm-format.git")
               (commit commit)))
         (file-name (git-file-name name version))
         (sha256
          (base32
           "0284aj84xszhkhlivaigf9qj855fxad3mzmv3zfr0qzb5k0nzwrg"))))
      (build-system asdf-build-system/sbcl)
      ;; Test framework only.
      (native-inputs
       `(("prove" ,sbcl-prove)
         ("prove-asdf" ,sbcl-prove-asdf)))
      (inputs
       `(("alexandria" ,sbcl-alexandria)))
      (synopsis "LibSVM data format reader for Common Lisp")
      (description
       "This Common Lisp library provides a fast reader for data in LibSVM
format.")
      (home-page "https://github.com/masatoi/cl-libsvm-format")
      (license license:expat))))
1994
;; Same library as a Common Lisp source (ASDF) package.
(define-public cl-libsvm-format
  (sbcl-package->cl-source-package sbcl-cl-libsvm-format))
1997
;; Same library built with ECL instead of SBCL.
(define-public ecl-cl-libsvm-format
  (sbcl-package->ecl-package sbcl-cl-libsvm-format))
2000
;; Online linear classifiers for Common Lisp, built with SBCL.
(define-public sbcl-cl-online-learning
  ;; Upstream has no release tags, so pin a commit and derive a git version.
  (let ((commit "fc7a34f4f161cd1c7dd747d2ed8f698947781423")
        (revision "0"))
    (package
      (name "sbcl-cl-online-learning")
      (version (git-version "0.5" revision commit))
      (source
       (origin
         (method git-fetch)
         (uri (git-reference
               (url "https://github.com/masatoi/cl-online-learning.git")
               (commit commit)))
         (file-name (git-file-name name version))
         (sha256
          (base32
           "14x95rlg80ay5hv645ki57pqvy12v28hz4k1w0f6bsfi2rmpxchq"))))
      (build-system asdf-build-system/sbcl)
      ;; Test framework only.
      (native-inputs
       `(("prove" ,sbcl-prove)
         ("prove-asdf" ,sbcl-prove-asdf)))
      (inputs
       `(("cl-libsvm-format" ,sbcl-cl-libsvm-format)
         ("cl-store" ,sbcl-cl-store)))
      (arguments
       `(;; FIXME: Tests pass but then the check phase crashes
         #:tests? #f))
      (synopsis "Online Machine Learning for Common Lisp")
      (description
       "This library contains a collection of machine learning algorithms for
online linear classification written in Common Lisp.")
      (home-page "https://github.com/masatoi/cl-online-learning")
      (license license:expat))))
2033
;; Same library as a Common Lisp source (ASDF) package.
(define-public cl-online-learning
  (sbcl-package->cl-source-package sbcl-cl-online-learning))
2036
;; Same library built with ECL instead of SBCL.
(define-public ecl-cl-online-learning
  (sbcl-package->ecl-package sbcl-cl-online-learning))
2039
;; Random Forest implementation for Common Lisp, built with SBCL.
(define-public sbcl-cl-random-forest
  ;; Upstream has no release tags, so pin a commit and derive a git version.
  (let ((commit "85fbdd4596d40e824f70f1b7cf239cf544e49d51")
        (revision "0"))
    (package
      (name "sbcl-cl-random-forest")
      (version (git-version "0.1" revision commit))
      (source
       (origin
         (method git-fetch)
         (uri (git-reference
               (url "https://github.com/masatoi/cl-random-forest.git")
               (commit commit)))
         (file-name (git-file-name name version))
         (sha256
          (base32
           "097xv60i1ndz68sg9p4pc7c5gvyp9i1xgw966b4wwfq3x6hbz421"))))
      (build-system asdf-build-system/sbcl)
      ;; Test-only dependencies.
      (native-inputs
       `(("prove" ,sbcl-prove)
         ("prove-asdf" ,sbcl-prove-asdf)
         ("trivial-garbage" ,sbcl-trivial-garbage)))
      (inputs
       `(("alexandria" ,sbcl-alexandria)
         ("cl-libsvm-format" ,sbcl-cl-libsvm-format)
         ("cl-online-learning" ,sbcl-cl-online-learning)
         ("lparallel" ,sbcl-lparallel)))
      (arguments
       `(;; The tests download data from the Internet
         #:tests? #f
         #:phases
         (modify-phases %standard-phases
           (add-after 'unpack 'add-sb-cltl2-dependency
             (lambda _
               ;; sb-cltl2 is required by lparallel when using sbcl, but it is
               ;; not loaded automatically.
               (substitute* "cl-random-forest.asd"
                 (("\\(in-package :cl-user\\)")
                  "(in-package :cl-user) #+sbcl (require :sb-cltl2)"))
               #t)))))
      (synopsis "Random Forest and Global Refinement for Common Lisp")
      (description
       "CL-random-forest is an implementation of Random Forest for multiclass
classification and univariate regression written in Common Lisp. It also
includes an implementation of Global Refinement of Random Forest.")
      (home-page "https://github.com/masatoi/cl-random-forest")
      (license license:expat))))
2086
;; Same library as a Common Lisp source (ASDF) package.
(define-public cl-random-forest
  (sbcl-package->cl-source-package sbcl-cl-random-forest))
2089
;; Same library built with ECL instead of SBCL.
(define-public ecl-cl-random-forest
  (sbcl-package->ecl-package sbcl-cl-random-forest))
2092
;; Facebook's collective-communications library (barrier, broadcast,
;; allreduce), used by machine-learning frameworks.
(define-public gloo
  (let ((version "0.0.0") ; no proper version tag
        (commit "ca528e32fea9ca8f2b16053cff17160290fc84ce")
        (revision "0"))
    (package
      (name "gloo")
      (version (git-version version revision commit))
      (source
       (origin
         (method git-fetch)
         (uri (git-reference
               (url "https://github.com/facebookincubator/gloo.git")
               (commit commit)))
         (file-name (git-file-name name version))
         (sha256
          (base32
           "1q9f80zy75f6njrzrqkmhc0g3qxs4gskr7ns2jdqanxa2ww7a99w"))))
      (build-system cmake-build-system)
      ;; Needed to build the test binary enabled by -DBUILD_TEST=1.
      (native-inputs
       `(("googletest" ,googletest)))
      (arguments
       `(#:configure-flags '("-DBUILD_TEST=1")
         #:phases
         (modify-phases %standard-phases
           (replace 'check
             (lambda _
               ;; NOTE(review): "make gloo_test" builds the test target;
               ;; whether it also executes the tests should be confirmed.
               (invoke "make" "gloo_test")
               #t)))))
      (synopsis "Collective communications library")
      (description
       "Gloo is a collective communications library. It comes with a
number of collective algorithms useful for machine learning applications.
These include a barrier, broadcast, and allreduce.")
      (home-page "https://github.com/facebookincubator/gloo")
      (license license:bsd-3))))
2128
;; UMAP dimension-reduction library, fetched from PyPI.
(define-public python-umap-learn
  (package
    (name "python-umap-learn")
    (version "0.3.10")
    (source
     (origin
       (method url-fetch)
       (uri (pypi-uri "umap-learn" version))
       (sha256
        (base32
         "02ada2yy6km6zgk2836kg1c97yrcpalvan34p8c57446finnpki1"))))
    (build-system python-build-system)
    ;; Test-only dependencies.
    (native-inputs
     `(("python-joblib" ,python-joblib)
       ("python-nose" ,python-nose)))
    (propagated-inputs
     `(("python-numba" ,python-numba)
       ("python-numpy" ,python-numpy)
       ("python-scikit-learn" ,python-scikit-learn)
       ("python-scipy" ,python-scipy)))
    (home-page "https://github.com/lmcinnes/umap")
    (synopsis
     "Uniform Manifold Approximation and Projection")
    (description
     "Uniform Manifold Approximation and Projection is a dimension reduction
technique that can be used for visualisation similarly to t-SNE, but also for
general non-linear dimension reduction.")
    (license license:bsd-3)))