Diff of the two buildlogs:

--
--- b1/build.log 2021-11-29 08:34:15.927232597 +0000
+++ b2/build.log 2021-11-29 08:53:40.871298758 +0000
@@ -1,6 +1,6 @@
 I: pbuilder: network access will be disabled during build
-I: Current time: Sun Nov 28 20:16:01 -12 2021
-I: pbuilder-time-stamp: 1638173761
+I: Current time: Mon Jan 2 04:57:22 +14 2023
+I: pbuilder-time-stamp: 1672585042
 I: Building the build Environment
 I: extracting base tarball [/var/cache/pbuilder/buster-reproducible-base.tgz]
 I: copying local configuration
@@ -17,7 +17,7 @@
 I: Extracting source
 gpgv: unknown type of key resource 'trustedkeys.kbx'
 gpgv: keyblock resource '/root/.gnupg/trustedkeys.kbx': General error
-gpgv: Signature made Wed Dec 5 16:26:36 2018 -12
+gpgv: Signature made Thu Dec 6 18:26:36 2018 +14
 gpgv: using RSA key 638BC75EC1E5C589067E35DE62645EB35F686A8A
 gpgv: Can't check signature: No public key
 dpkg-source: warning: failed to verify signature on ./caffe_1.0.0+git20180821.99bd997-2.dsc
@@ -31,136 +31,170 @@
 dpkg-source: info: applying cmake-link-correct-python-lib.patch
 I: using fakeroot in build.
 I: Installing the build-deps
-I: user script /srv/workspace/pbuilder/1832359/tmp/hooks/D02_print_environment starting
+I: user script /srv/workspace/pbuilder/2636591/tmp/hooks/D01_modify_environment starting
+debug: Running on ionos5-amd64.
+I: Changing host+domainname to test build reproducibility
+I: Adding a custom variable just for the fun of it...
+I: Changing /bin/sh to bash
+Removing 'diversion of /bin/sh to /bin/sh.distrib by dash'
+Adding 'diversion of /bin/sh to /bin/sh.distrib by bash'
+Removing 'diversion of /usr/share/man/man1/sh.1.gz to /usr/share/man/man1/sh.distrib.1.gz by dash'
+Adding 'diversion of /usr/share/man/man1/sh.1.gz to /usr/share/man/man1/sh.distrib.1.gz by bash'
+I: Setting pbuilder2's login shell to /bin/bash
+I: Setting pbuilder2's GECOS to second user,second room,second work-phone,second home-phone,second other
+I: user script /srv/workspace/pbuilder/2636591/tmp/hooks/D01_modify_environment finished
+I: user script /srv/workspace/pbuilder/2636591/tmp/hooks/D02_print_environment starting
 I: set
- BUILDDIR='/build'
- BUILDUSERGECOS='first user,first room,first work-phone,first home-phone,first other'
- BUILDUSERNAME='pbuilder1'
- BUILD_ARCH='amd64'
- DEBIAN_FRONTEND='noninteractive'
- DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=15'
- DISTRIBUTION=''
- HOME='/root'
- HOST_ARCH='amd64'
+ BASH=/bin/sh
+ BASHOPTS=checkwinsize:cmdhist:complete_fullquote:extquote:force_fignore:globasciiranges:hostcomplete:interactive_comments:progcomp:promptvars:sourcepath
+ BASH_ALIASES=()
+ BASH_ARGC=()
+ BASH_ARGV=()
+ BASH_CMDS=()
+ BASH_LINENO=([0]="12" [1]="0")
+ BASH_SOURCE=([0]="/tmp/hooks/D02_print_environment" [1]="/tmp/hooks/D02_print_environment")
+ BASH_VERSINFO=([0]="5" [1]="0" [2]="3" [3]="1" [4]="release" [5]="x86_64-pc-linux-gnu")
+ BASH_VERSION='5.0.3(1)-release'
+ BUILDDIR=/build
+ BUILDUSERGECOS='second user,second room,second work-phone,second home-phone,second other'
+ BUILDUSERNAME=pbuilder2
+ BUILD_ARCH=amd64
+ DEBIAN_FRONTEND=noninteractive
+ DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=16'
+ DIRSTACK=()
+ DISTRIBUTION=
+ EUID=0
+ FUNCNAME=([0]="Echo" [1]="main")
+ GROUPS=()
+ HOME=/root
+ HOSTNAME=i-capture-the-hostname
+ HOSTTYPE=x86_64
+ HOST_ARCH=amd64
 IFS=' '
- INVOCATION_ID='00d095b78f1c43489fe73dffeba184d2'
- LANG='C'
- LANGUAGE='en_US:en'
- LC_ALL='C'
- MAIL='/var/mail/root'
- OPTIND='1'
- PATH='/usr/sbin:/usr/bin:/sbin:/bin:/usr/games'
- PBCURRENTCOMMANDLINEOPERATION='build'
- PBUILDER_OPERATION='build'
- PBUILDER_PKGDATADIR='/usr/share/pbuilder'
- PBUILDER_PKGLIBDIR='/usr/lib/pbuilder'
- PBUILDER_SYSCONFDIR='/etc'
- PPID='1832359'
- PS1='# '
- PS2='> '
+ INVOCATION_ID=ca3bf67b73b0483ab1e5726206f5a3d2
+ LANG=C
+ LANGUAGE=et_EE:et
+ LC_ALL=C
+ MACHTYPE=x86_64-pc-linux-gnu
+ MAIL=/var/mail/root
+ OPTERR=1
+ OPTIND=1
+ OSTYPE=linux-gnu
+ PATH=/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path
+ PBCURRENTCOMMANDLINEOPERATION=build
+ PBUILDER_OPERATION=build
+ PBUILDER_PKGDATADIR=/usr/share/pbuilder
+ PBUILDER_PKGLIBDIR=/usr/lib/pbuilder
+ PBUILDER_SYSCONFDIR=/etc
+ PIPESTATUS=([0]="0")
+ POSIXLY_CORRECT=y
+ PPID=2636591
 PS4='+ '
- PWD='/'
- SHELL='/bin/bash'
- SHLVL='2'
- SUDO_COMMAND='/usr/bin/timeout -k 18.1h 18h /usr/bin/ionice -c 3 /usr/bin/nice /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/tmp.yPlLzCDj65/pbuilderrc_sUIW --hookdir /etc/pbuilder/first-build-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/buster-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/tmp.yPlLzCDj65/b1 --logfile b1/build.log caffe_1.0.0+git20180821.99bd997-2.dsc'
- SUDO_GID='110'
- SUDO_UID='105'
- SUDO_USER='jenkins'
- TERM='unknown'
- TZ='/usr/share/zoneinfo/Etc/GMT+12'
- USER='root'
- _='/usr/bin/systemd-run'
- http_proxy='http://78.137.99.97:3128'
+ PWD=/
+ SHELL=/bin/bash
+ SHELLOPTS=braceexpand:errexit:hashall:interactive-comments:posix
+ SHLVL=3
+ SUDO_COMMAND='/usr/bin/timeout -k 24.1h 24h /usr/bin/ionice -c 3 /usr/bin/nice -n 11 /usr/bin/unshare --uts -- /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/tmp.yPlLzCDj65/pbuilderrc_7YiN --hookdir /etc/pbuilder/rebuild-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/buster-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/tmp.yPlLzCDj65/b2 --logfile b2/build.log caffe_1.0.0+git20180821.99bd997-2.dsc'
+ SUDO_GID=110
+ SUDO_UID=105
+ SUDO_USER=jenkins
+ TERM=unknown
+ TZ=/usr/share/zoneinfo/Etc/GMT-14
+ UID=0
+ USER=root
+ _='I: set'
+ http_proxy=http://85.184.249.68:3128
 I: uname -a
- Linux ionos1-amd64 5.10.0-9-amd64 #1 SMP Debian 5.10.70-1 (2021-09-30) x86_64 GNU/Linux
+ Linux i-capture-the-hostname 5.14.0-0.bpo.2-amd64 #1 SMP Debian 5.14.9-2~bpo11+1 (2021-10-10) x86_64 GNU/Linux
 I: ls -l /bin
 total 5116
- -rwxr-xr-x 1 root root 1168776 Apr 17 2019 bash
- -rwxr-xr-x 3 root root 38984 Jul 10 2019 bunzip2
- -rwxr-xr-x 3 root root 38984 Jul 10 2019 bzcat
- lrwxrwxrwx 1 root root 6 Jul 10 2019 bzcmp -> bzdiff
- -rwxr-xr-x 1 root root 2227 Jul 10 2019 bzdiff
- lrwxrwxrwx 1 root root 6 Jul 10 2019 bzegrep -> bzgrep
- -rwxr-xr-x 1 root root 4877 Jun 24 2019 bzexe
- lrwxrwxrwx 1 root root 6 Jul 10 2019 bzfgrep -> bzgrep
- -rwxr-xr-x 1 root root 3641 Jul 10 2019 bzgrep
- -rwxr-xr-x 3 root root 38984 Jul 10 2019 bzip2
- -rwxr-xr-x 1 root root 14328 Jul 10 2019 bzip2recover
- lrwxrwxrwx 1 root root 6 Jul 10 2019 bzless -> bzmore
- -rwxr-xr-x 1 root root 1297 Jul 10 2019 bzmore
- -rwxr-xr-x 1 root root 43744 Feb 28 2019 cat
- -rwxr-xr-x 1 root root 64320 Feb 28 2019 chgrp
- -rwxr-xr-x 1 root root 64288 Feb 28 2019 chmod
- -rwxr-xr-x 1 root root 72512 Feb 28 2019 chown
- -rwxr-xr-x 1 root root 146880 Feb 28 2019 cp
- -rwxr-xr-x 1 root root 121464 Jan 17 2019 dash
- -rwxr-xr-x 1 root root 109408 Feb 28 2019 date
- -rwxr-xr-x 1 root root 76712 Feb 28 2019 dd
- -rwxr-xr-x 1 root root 93744 Feb 28 2019 df
- -rwxr-xr-x 1 root root 138856 Feb 28 2019 dir
- -rwxr-xr-x 1 root 
root 84288 Jan 9 2019 dmesg - lrwxrwxrwx 1 root root 8 Sep 26 2018 dnsdomainname -> hostname - lrwxrwxrwx 1 root root 8 Sep 26 2018 domainname -> hostname - -rwxr-xr-x 1 root root 39520 Feb 28 2019 echo - -rwxr-xr-x 1 root root 28 Jan 7 2019 egrep - -rwxr-xr-x 1 root root 35424 Feb 28 2019 false - -rwxr-xr-x 1 root root 28 Jan 7 2019 fgrep - -rwxr-xr-x 1 root root 68880 Jan 9 2019 findmnt - -rwsr-xr-x 1 root root 34896 Apr 22 2020 fusermount - -rwxr-xr-x 1 root root 198976 Jan 7 2019 grep - -rwxr-xr-x 2 root root 2345 Jan 5 2019 gunzip - -rwxr-xr-x 1 root root 6375 Jan 5 2019 gzexe - -rwxr-xr-x 1 root root 98048 Jan 5 2019 gzip - -rwxr-xr-x 1 root root 26696 Sep 26 2018 hostname - -rwxr-xr-x 1 root root 68552 Feb 28 2019 ln - -rwxr-xr-x 1 root root 56760 Jul 26 2018 login - -rwxr-xr-x 1 root root 138856 Feb 28 2019 ls - -rwxr-xr-x 1 root root 108624 Jan 9 2019 lsblk - -rwxr-xr-x 1 root root 89088 Feb 28 2019 mkdir - -rwxr-xr-x 1 root root 68544 Feb 28 2019 mknod - -rwxr-xr-x 1 root root 43808 Feb 28 2019 mktemp - -rwxr-xr-x 1 root root 43008 Jan 9 2019 more - -rwsr-xr-x 1 root root 51280 Jan 9 2019 mount - -rwxr-xr-x 1 root root 14408 Jan 9 2019 mountpoint - -rwxr-xr-x 1 root root 138728 Feb 28 2019 mv - lrwxrwxrwx 1 root root 8 Sep 26 2018 nisdomainname -> hostname - lrwxrwxrwx 1 root root 14 Feb 14 2019 pidof -> /sbin/killall5 - -rwxr-xr-x 1 root root 39616 Feb 28 2019 pwd - lrwxrwxrwx 1 root root 4 Apr 17 2019 rbash -> bash - -rwxr-xr-x 1 root root 47776 Feb 28 2019 readlink - -rwxr-xr-x 1 root root 68416 Feb 28 2019 rm - -rwxr-xr-x 1 root root 47776 Feb 28 2019 rmdir - -rwxr-xr-x 1 root root 23312 Jan 21 2019 run-parts - -rwxr-xr-x 1 root root 122224 Dec 22 2018 sed - lrwxrwxrwx 1 root root 4 Nov 7 09:58 sh -> dash - -rwxr-xr-x 1 root root 39552 Feb 28 2019 sleep - -rwxr-xr-x 1 root root 80672 Feb 28 2019 stty - -rwsr-xr-x 1 root root 63568 Jan 9 2019 su - -rwxr-xr-x 1 root root 35488 Feb 28 2019 sync - -rwxr-xr-x 1 root root 445560 Apr 23 2019 tar - -rwxr-xr-x 1 root root 14440 Jan 21 2019 tempfile - -rwxr-xr-x 1 root root 97152 Feb 28 2019 touch - -rwxr-xr-x 1 root root 35424 Feb 28 2019 true - -rwxr-xr-x 1 root root 14328 Apr 22 2020 ulockmgr_server - -rwsr-xr-x 1 root root 34888 Jan 9 2019 umount - -rwxr-xr-x 1 root root 39584 Feb 28 2019 uname - -rwxr-xr-x 2 root root 2345 Jan 5 2019 uncompress - -rwxr-xr-x 1 root root 138856 Feb 28 2019 vdir - -rwxr-xr-x 1 root root 34896 Jan 9 2019 wdctl - -rwxr-xr-x 1 root root 946 Jan 21 2019 which - lrwxrwxrwx 1 root root 8 Sep 26 2018 ypdomainname -> hostname - -rwxr-xr-x 1 root root 1983 Jan 5 2019 zcat - -rwxr-xr-x 1 root root 1677 Jan 5 2019 zcmp - -rwxr-xr-x 1 root root 5879 Jan 5 2019 zdiff - -rwxr-xr-x 1 root root 29 Jan 5 2019 zegrep - -rwxr-xr-x 1 root root 29 Jan 5 2019 zfgrep - -rwxr-xr-x 1 root root 2080 Jan 5 2019 zforce - -rwxr-xr-x 1 root root 7584 Jan 5 2019 zgrep - -rwxr-xr-x 1 root root 2205 Jan 5 2019 zless - -rwxr-xr-x 1 root root 1841 Jan 5 2019 zmore - -rwxr-xr-x 1 root root 4552 Jan 5 2019 znew -I: user script /srv/workspace/pbuilder/1832359/tmp/hooks/D02_print_environment finished + -rwxr-xr-x 1 root root 1168776 Apr 18 2019 bash + -rwxr-xr-x 3 root root 38984 Jul 11 2019 bunzip2 + -rwxr-xr-x 3 root root 38984 Jul 11 2019 bzcat + lrwxrwxrwx 1 root root 6 Jul 11 2019 bzcmp -> bzdiff + -rwxr-xr-x 1 root root 2227 Jul 11 2019 bzdiff + lrwxrwxrwx 1 root root 6 Jul 11 2019 bzegrep -> bzgrep + -rwxr-xr-x 1 root root 4877 Jun 25 2019 bzexe + lrwxrwxrwx 1 root root 6 Jul 11 2019 bzfgrep -> bzgrep + -rwxr-xr-x 1 root root 3641 
Jul 11 2019 bzgrep + -rwxr-xr-x 3 root root 38984 Jul 11 2019 bzip2 + -rwxr-xr-x 1 root root 14328 Jul 11 2019 bzip2recover + lrwxrwxrwx 1 root root 6 Jul 11 2019 bzless -> bzmore + -rwxr-xr-x 1 root root 1297 Jul 11 2019 bzmore + -rwxr-xr-x 1 root root 43744 Mar 1 2019 cat + -rwxr-xr-x 1 root root 64320 Mar 1 2019 chgrp + -rwxr-xr-x 1 root root 64288 Mar 1 2019 chmod + -rwxr-xr-x 1 root root 72512 Mar 1 2019 chown + -rwxr-xr-x 1 root root 146880 Mar 1 2019 cp + -rwxr-xr-x 1 root root 121464 Jan 18 2019 dash + -rwxr-xr-x 1 root root 109408 Mar 1 2019 date + -rwxr-xr-x 1 root root 76712 Mar 1 2019 dd + -rwxr-xr-x 1 root root 93744 Mar 1 2019 df + -rwxr-xr-x 1 root root 138856 Mar 1 2019 dir + -rwxr-xr-x 1 root root 84288 Jan 10 2019 dmesg + lrwxrwxrwx 1 root root 8 Sep 27 2018 dnsdomainname -> hostname + lrwxrwxrwx 1 root root 8 Sep 27 2018 domainname -> hostname + -rwxr-xr-x 1 root root 39520 Mar 1 2019 echo + -rwxr-xr-x 1 root root 28 Jan 8 2019 egrep + -rwxr-xr-x 1 root root 35424 Mar 1 2019 false + -rwxr-xr-x 1 root root 28 Jan 8 2019 fgrep + -rwxr-xr-x 1 root root 68880 Jan 10 2019 findmnt + -rwsr-xr-x 1 root root 34896 Apr 23 2020 fusermount + -rwxr-xr-x 1 root root 198976 Jan 8 2019 grep + -rwxr-xr-x 2 root root 2345 Jan 6 2019 gunzip + -rwxr-xr-x 1 root root 6375 Jan 6 2019 gzexe + -rwxr-xr-x 1 root root 98048 Jan 6 2019 gzip + -rwxr-xr-x 1 root root 26696 Sep 27 2018 hostname + -rwxr-xr-x 1 root root 68552 Mar 1 2019 ln + -rwxr-xr-x 1 root root 56760 Jul 27 2018 login + -rwxr-xr-x 1 root root 138856 Mar 1 2019 ls + -rwxr-xr-x 1 root root 108624 Jan 10 2019 lsblk + -rwxr-xr-x 1 root root 89088 Mar 1 2019 mkdir + -rwxr-xr-x 1 root root 68544 Mar 1 2019 mknod + -rwxr-xr-x 1 root root 43808 Mar 1 2019 mktemp + -rwxr-xr-x 1 root root 43008 Jan 10 2019 more + -rwsr-xr-x 1 root root 51280 Jan 10 2019 mount + -rwxr-xr-x 1 root root 14408 Jan 10 2019 mountpoint + -rwxr-xr-x 1 root root 138728 Mar 1 2019 mv + lrwxrwxrwx 1 root root 8 Sep 27 2018 nisdomainname -> hostname + lrwxrwxrwx 1 root root 14 Feb 15 2019 pidof -> /sbin/killall5 + -rwxr-xr-x 1 root root 39616 Mar 1 2019 pwd + lrwxrwxrwx 1 root root 4 Apr 18 2019 rbash -> bash + -rwxr-xr-x 1 root root 47776 Mar 1 2019 readlink + -rwxr-xr-x 1 root root 68416 Mar 1 2019 rm + -rwxr-xr-x 1 root root 47776 Mar 1 2019 rmdir + -rwxr-xr-x 1 root root 23312 Jan 22 2019 run-parts + -rwxr-xr-x 1 root root 122224 Dec 23 2018 sed + lrwxrwxrwx 1 root root 4 Jan 2 04:57 sh -> bash + lrwxrwxrwx 1 root root 4 Dec 11 18:21 sh.distrib -> dash + -rwxr-xr-x 1 root root 39552 Mar 1 2019 sleep + -rwxr-xr-x 1 root root 80672 Mar 1 2019 stty + -rwsr-xr-x 1 root root 63568 Jan 10 2019 su + -rwxr-xr-x 1 root root 35488 Mar 1 2019 sync + -rwxr-xr-x 1 root root 445560 Apr 24 2019 tar + -rwxr-xr-x 1 root root 14440 Jan 22 2019 tempfile + -rwxr-xr-x 1 root root 97152 Mar 1 2019 touch + -rwxr-xr-x 1 root root 35424 Mar 1 2019 true + -rwxr-xr-x 1 root root 14328 Apr 23 2020 ulockmgr_server + -rwsr-xr-x 1 root root 34888 Jan 10 2019 umount + -rwxr-xr-x 1 root root 39584 Mar 1 2019 uname + -rwxr-xr-x 2 root root 2345 Jan 6 2019 uncompress + -rwxr-xr-x 1 root root 138856 Mar 1 2019 vdir + -rwxr-xr-x 1 root root 34896 Jan 10 2019 wdctl + -rwxr-xr-x 1 root root 946 Jan 22 2019 which + lrwxrwxrwx 1 root root 8 Sep 27 2018 ypdomainname -> hostname + -rwxr-xr-x 1 root root 1983 Jan 6 2019 zcat + -rwxr-xr-x 1 root root 1677 Jan 6 2019 zcmp + -rwxr-xr-x 1 root root 5879 Jan 6 2019 zdiff + -rwxr-xr-x 1 root root 29 Jan 6 2019 zegrep + -rwxr-xr-x 1 root root 29 Jan 6 2019 zfgrep + 
-rwxr-xr-x 1 root root 2080 Jan 6 2019 zforce + -rwxr-xr-x 1 root root 7584 Jan 6 2019 zgrep + -rwxr-xr-x 1 root root 2205 Jan 6 2019 zless + -rwxr-xr-x 1 root root 1841 Jan 6 2019 zmore + -rwxr-xr-x 1 root root 4552 Jan 6 2019 znew +I: user script /srv/workspace/pbuilder/2636591/tmp/hooks/D02_print_environment finished -> Attempting to satisfy build-dependencies -> Creating pbuilder-satisfydepends-dummy package Package: pbuilder-satisfydepends-dummy @@ -847,7 +881,7 @@ Get: 560 http://deb.debian.org/debian buster/main amd64 python3-skimage-lib amd64 0.14.2-2 [1726 kB] Get: 561 http://deb.debian.org/debian buster/main amd64 python3-skimage all 0.14.2-2 [19.9 MB] Get: 562 http://deb.debian.org/debian buster/main amd64 python3-yaml amd64 3.13-2 [121 kB] -Fetched 546 MB in 11s (49.9 MB/s) +Fetched 546 MB in 9s (60.6 MB/s) debconf: delaying package configuration, since apt-utils is not installed Selecting previously unselected package libapparmor1:amd64. (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 19195 files and directories currently installed.) @@ -3163,7 +3197,8 @@ fakeroot is already the newest version (1.23-1). 0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded. I: Building the package -I: Running cd /build/caffe-1.0.0+git20180821.99bd997/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-genchanges -S > ../caffe_1.0.0+git20180821.99bd997-2_source.changes +hostname: Name or service not known +I: Running cd /build/caffe-1.0.0+git20180821.99bd997/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-genchanges -S > ../caffe_1.0.0+git20180821.99bd997-2_source.changes dpkg-buildpackage: info: source package caffe dpkg-buildpackage: info: source version 1.0.0+git20180821.99bd997-2 dpkg-buildpackage: info: source distribution unstable @@ -3320,7 +3355,7 @@ make[1]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997' dh_auto_build --builddirectory="caffe_cpu_build" \ -- caffe pycaffe test.testbin all - cd caffe_cpu_build && make -j15 "INSTALL=install --strip-program=true" caffe pycaffe test.testbin all + cd caffe_cpu_build && make -j16 "INSTALL=install --strip-program=true" caffe pycaffe test.testbin all make[2]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' /usr/bin/cmake -S/build/caffe-1.0.0+git20180821.99bd997 -B/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build --check-build-system CMakeFiles/Makefile.cmake 0 make -f CMakeFiles/Makefile2 caffe @@ -3356,36 +3391,36 @@ make[5]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make -f src/caffe/CMakeFiles/caffe.dir/build.make src/caffe/CMakeFiles/caffe.dir/build make[5]: Entering directory 
'/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -[ 1%] Building CXX object src/caffe/CMakeFiles/caffe.dir/internal_thread.cpp.o [ 1%] Building CXX object src/caffe/CMakeFiles/caffe.dir/blob.cpp.o -[ 5%] Building CXX object src/caffe/CMakeFiles/caffe.dir/data_transformer.cpp.o +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/blob.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/blob.cpp +[ 1%] Building CXX object src/caffe/CMakeFiles/caffe.dir/common.cpp.o +[ 3%] Building CXX object src/caffe/CMakeFiles/caffe.dir/data_transformer.cpp.o [ 5%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layer.cpp.o -[ 5%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layer_factory.cpp.o -[ 5%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/base_conv_layer.cpp.o -[ 7%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/absval_layer.cpp.o -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layer.cpp -[ 7%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/accuracy_layer.cpp.o -[ 7%] Building CXX object src/caffe/CMakeFiles/caffe.dir/common.cpp.o +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/common.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/common.cpp cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/data_transformer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/data_transformer.cpp +[ 5%] Building CXX object src/caffe/CMakeFiles/caffe.dir/internal_thread.cpp.o +[ 5%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layer_factory.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/internal_thread.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/internal_thread.cpp -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/blob.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/blob.cpp +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layer.cpp cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layer_factory.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layer_factory.cpp +[ 7%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/absval_layer.cpp.o +[ 7%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/accuracy_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/absval_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/absval_layer.cpp +[ 9%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/argmax_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/accuracy_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/accuracy_layer.cpp -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/common.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/common.cpp +[ 9%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/base_conv_layer.cpp.o +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/argmax_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/argmax_layer.cpp cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/base_conv_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/base_conv_layer.cpp -[ 9%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/base_data_layer.cpp.o +[ 11%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/base_data_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/base_data_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/base_data_layer.cpp -[ 11%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/argmax_layer.cpp.o -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/argmax_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/argmax_layer.cpp [ 11%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/batch_norm_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/batch_norm_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/batch_norm_layer.cpp [ 13%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/batch_reindex_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/batch_reindex_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/batch_reindex_layer.cpp +[ 13%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/bias_layer.cpp.o +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/bias_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/bias_layer.cpp [ 15%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/bnll_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/bnll_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/bnll_layer.cpp -[ 15%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/bias_layer.cpp.o -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/bias_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/bias_layer.cpp [ 15%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/clip_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/clip_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/clip_layer.cpp [ 17%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/concat_layer.cpp.o @@ -3394,14 +3429,6 @@ cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/contrastive_loss_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/contrastive_loss_layer.cpp [ 19%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/conv_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/conv_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/conv_layer.cpp -/build/caffe-1.0.0+git20180821.99bd997/src/caffe/layer_factory.cpp: In instantiation of 'boost::shared_ptr > caffe::GetPythonLayer(const caffe::LayerParameter&) [with Dtype = float]': -/build/caffe-1.0.0+git20180821.99bd997/src/caffe/layer_factory.cpp:304:1: required from here -/build/caffe-1.0.0+git20180821.99bd997/src/caffe/layer_factory.cpp:298:5: warning: catching polymorphic type 'struct boost::python::error_already_set' by value [-Wcatch-value=] - } catch (bp::error_already_set) { - ^~~~~ -/build/caffe-1.0.0+git20180821.99bd997/src/caffe/layer_factory.cpp: In instantiation of 'boost::shared_ptr > caffe::GetPythonLayer(const caffe::LayerParameter&) [with Dtype = double]': -/build/caffe-1.0.0+git20180821.99bd997/src/caffe/layer_factory.cpp:304:1: required from here -/build/caffe-1.0.0+git20180821.99bd997/src/caffe/layer_factory.cpp:298:5: warning: catching polymorphic type 'struct boost::python::error_already_set' by value [-Wcatch-value=] [ 19%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/crop_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/crop_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/crop_layer.cpp [ 21%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/cudnn_conv_layer.cpp.o @@ -3428,10 +3455,18 @@ cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/deconv_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/deconv_layer.cpp [ 30%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/dropout_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/dropout_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/dropout_layer.cpp -[ 30%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/eltwise_layer.cpp.o +/build/caffe-1.0.0+git20180821.99bd997/src/caffe/layer_factory.cpp: In instantiation of 'boost::shared_ptr > caffe::GetPythonLayer(const caffe::LayerParameter&) [with Dtype = float]': +/build/caffe-1.0.0+git20180821.99bd997/src/caffe/layer_factory.cpp:304:1: required from here +/build/caffe-1.0.0+git20180821.99bd997/src/caffe/layer_factory.cpp:298:5: warning: catching polymorphic type 'struct boost::python::error_already_set' by value [-Wcatch-value=] + } catch (bp::error_already_set) { + ^~~~~ +/build/caffe-1.0.0+git20180821.99bd997/src/caffe/layer_factory.cpp: In instantiation of 'boost::shared_ptr > caffe::GetPythonLayer(const caffe::LayerParameter&) [with Dtype = double]': +/build/caffe-1.0.0+git20180821.99bd997/src/caffe/layer_factory.cpp:304:1: required from here +/build/caffe-1.0.0+git20180821.99bd997/src/caffe/layer_factory.cpp:298:5: warning: catching polymorphic type 'struct boost::python::error_already_set' by value [-Wcatch-value=] [ 32%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/dummy_data_layer.cpp.o -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/eltwise_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/eltwise_layer.cpp cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/dummy_data_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/dummy_data_layer.cpp +[ 32%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/eltwise_layer.cpp.o +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/eltwise_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/eltwise_layer.cpp [ 34%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/elu_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/elu_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/elu_layer.cpp [ 34%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/embed_layer.cpp.o @@ -3443,8 +3478,8 @@ [ 38%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/filter_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/filter_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/filter_layer.cpp [ 38%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/flatten_layer.cpp.o -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/flatten_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/flatten_layer.cpp [ 40%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/hdf5_data_layer.cpp.o +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/flatten_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/flatten_layer.cpp cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/hdf5_data_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/hdf5_data_layer.cpp [ 40%] Building CXX object src/caffe/CMakeFiles/caffe.dir/layers/hdf5_output_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/hdf5_output_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/hdf5_output_layer.cpp @@ -3526,10 +3561,10 @@ cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/layers/window_data_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/layers/window_data_layer.cpp [ 76%] Building CXX object src/caffe/CMakeFiles/caffe.dir/net.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/net.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/net.cpp -[ 76%] Building CXX object src/caffe/CMakeFiles/caffe.dir/solver.cpp.o -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/solver.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/solver.cpp [ 78%] Building CXX object src/caffe/CMakeFiles/caffe.dir/parallel.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/parallel.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/parallel.cpp +[ 78%] Building CXX object src/caffe/CMakeFiles/caffe.dir/solver.cpp.o +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/solver.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/solver.cpp [ 80%] Building CXX object src/caffe/CMakeFiles/caffe.dir/solvers/adadelta_solver.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -DWITH_PYTHON_LAYER -Dcaffe_EXPORTS -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/usr/include/python3.7m -I/usr/lib/python3/dist-packages/numpy/core/include -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -fPIC -o CMakeFiles/caffe.dir/solvers/adadelta_solver.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/solvers/adadelta_solver.cpp [ 80%] Building CXX object src/caffe/CMakeFiles/caffe.dir/solvers/adagrad_solver.cpp.o @@ -3648,16 +3683,16 @@ cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/src/caffe /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/CMakeFiles/caffe.dir/DependInfo.cmake --color= make[5]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make -f src/caffe/CMakeFiles/caffe.dir/build.make src/caffe/CMakeFiles/caffe.dir/build -make[5]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make[5]: Nothing to be done for 'src/caffe/CMakeFiles/caffe.dir/build'. 
-make[5]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -[ 59%] Built target caffe Scanning dependencies of target gtest make[5]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make -f src/gtest/CMakeFiles/gtest.dir/build.make src/gtest/CMakeFiles/gtest.dir/build make[5]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -[ 60%] Building CXX object src/gtest/CMakeFiles/gtest.dir/gtest-all.cpp.o +make[5]: Nothing to be done for 'src/caffe/CMakeFiles/caffe.dir/build'. +make[5]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +make[5]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +[ 2%] Building CXX object src/gtest/CMakeFiles/gtest.dir/gtest-all.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/gtest && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -isystem /usr/include/hdf5/serial -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/gtest.dir/gtest-all.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/gtest/gtest-all.cpp +[ 60%] Built target caffe [ 60%] Linking CXX static library ../../lib/libgtest.a cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/gtest && /usr/bin/cmake -P CMakeFiles/gtest.dir/cmake_clean_target.cmake cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/gtest && /usr/bin/cmake -E cmake_link_script CMakeFiles/gtest.dir/link.txt --verbose=1 @@ -3672,38 +3707,38 @@ make[5]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make -f src/caffe/test/CMakeFiles/test.testbin.dir/build.make src/caffe/test/CMakeFiles/test.testbin.dir/build make[5]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -[ 60%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_accuracy_layer.cpp.o +[ 62%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_argmax_layer.cpp.o +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_argmax_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_argmax_layer.cpp +[ 62%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_accuracy_layer.cpp.o +[ 63%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_batch_reindex_layer.cpp.o +[ 63%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_batch_norm_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_accuracy_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_accuracy_layer.cpp -[ 60%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_batch_norm_layer.cpp.o -[ 62%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_batch_reindex_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_batch_norm_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_batch_norm_layer.cpp +[ 63%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_benchmark.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_batch_reindex_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_batch_reindex_layer.cpp -[ 62%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_benchmark.cpp.o -[ 63%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_argmax_layer.cpp.o -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_benchmark.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_benchmark.cpp [ 64%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_bias_layer.cpp.o -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_argmax_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_argmax_layer.cpp +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_benchmark.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_benchmark.cpp [ 64%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_blob.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_bias_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_bias_layer.cpp cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_blob.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_blob.cpp -[ 64%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_caffe_main.cpp.o [ 65%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_common.cpp.o +[ 65%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_caffe_main.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_caffe_main.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_caffe_main.cpp -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_common.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_common.cpp [ 65%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_concat_layer.cpp.o -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_concat_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_concat_layer.cpp +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_common.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_common.cpp [ 66%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_contrastive_loss_layer.cpp.o +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_concat_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_concat_layer.cpp cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_contrastive_loss_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_contrastive_loss_layer.cpp [ 66%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_convolution_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_convolution_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_convolution_layer.cpp [ 67%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_crop_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_crop_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_crop_layer.cpp -[ 67%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_data_layer.cpp.o -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_data_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_data_layer.cpp [ 68%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_data_transformer.cpp.o -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_data_transformer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_data_transformer.cpp [ 68%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_db.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_db.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_db.cpp +[ 68%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_data_layer.cpp.o +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_data_transformer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_data_transformer.cpp +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_data_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_data_layer.cpp [ 70%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_deconvolution_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_deconvolution_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_deconvolution_layer.cpp [ 70%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_dummy_data_layer.cpp.o @@ -3714,10 +3749,10 @@ cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_embed_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_embed_layer.cpp [ 72%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_euclidean_loss_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_euclidean_loss_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_euclidean_loss_layer.cpp -[ 72%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_filler.cpp.o -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_filler.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_filler.cpp [ 73%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_filter_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_filter_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_filter_layer.cpp +[ 73%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_filler.cpp.o +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_filler.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_filler.cpp [ 73%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_flatten_layer.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/test && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DGTEST_USE_OWN_TR1_TUPLE -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/src -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/test.testbin.dir/test_flatten_layer.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/src/caffe/test/test_flatten_layer.cpp [ 74%] Building CXX object src/caffe/test/CMakeFiles/test.testbin.dir/test_gradient_based_solver.cpp.o @@ -3839,123 +3874,118 @@ [ 81%] Built target caffe make -f tools/CMakeFiles/upgrade_solver_proto_text.dir/build.make tools/CMakeFiles/upgrade_solver_proto_text.dir/depend make -f tools/CMakeFiles/compute_image_mean.dir/build.make tools/CMakeFiles/compute_image_mean.dir/depend -make -f tools/CMakeFiles/caffe.bin.dir/build.make tools/CMakeFiles/caffe.bin.dir/depend -make -f tools/CMakeFiles/upgrade_net_proto_binary.dir/build.make tools/CMakeFiles/upgrade_net_proto_binary.dir/depend make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/tools /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools/CMakeFiles/upgrade_solver_proto_text.dir/DependInfo.cmake --color= -make -f tools/CMakeFiles/convert_imageset.dir/build.make tools/CMakeFiles/convert_imageset.dir/depend +make -f tools/CMakeFiles/caffe.bin.dir/build.make tools/CMakeFiles/caffe.bin.dir/depend make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/tools /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools/CMakeFiles/compute_image_mean.dir/DependInfo.cmake --color= +make -f tools/CMakeFiles/upgrade_net_proto_binary.dir/build.make tools/CMakeFiles/upgrade_net_proto_binary.dir/depend +make -f tools/CMakeFiles/convert_imageset.dir/build.make tools/CMakeFiles/convert_imageset.dir/depend make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/tools /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools/CMakeFiles/caffe.bin.dir/DependInfo.cmake --color= make -f tools/CMakeFiles/extract_features.dir/build.make tools/CMakeFiles/extract_features.dir/depend -make -f tools/CMakeFiles/upgrade_net_proto_text.dir/build.make tools/CMakeFiles/upgrade_net_proto_text.dir/depend make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/tools /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools 
/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools/CMakeFiles/upgrade_net_proto_binary.dir/DependInfo.cmake --color= make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/tools /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools/CMakeFiles/convert_imageset.dir/DependInfo.cmake --color= +make -f tools/CMakeFiles/upgrade_net_proto_text.dir/build.make tools/CMakeFiles/upgrade_net_proto_text.dir/depend +make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/tools /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools/CMakeFiles/upgrade_net_proto_text.dir/DependInfo.cmake --color= make -f examples/CMakeFiles/classification.dir/build.make examples/CMakeFiles/classification.dir/depend make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/tools /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools/CMakeFiles/extract_features.dir/DependInfo.cmake --color= make -f examples/CMakeFiles/convert_mnist_data.dir/build.make examples/CMakeFiles/convert_mnist_data.dir/depend -make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/tools /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools/CMakeFiles/upgrade_net_proto_text.dir/DependInfo.cmake --color= make -f examples/CMakeFiles/convert_cifar_data.dir/build.make examples/CMakeFiles/convert_cifar_data.dir/depend -make -f examples/CMakeFiles/convert_mnist_siamese_data.dir/build.make examples/CMakeFiles/convert_mnist_siamese_data.dir/depend make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/CMakeFiles/classification.dir/DependInfo.cmake --color= -make -f python/CMakeFiles/pycaffe.dir/build.make python/CMakeFiles/pycaffe.dir/depend +Scanning dependencies of target upgrade_solver_proto_text +Scanning dependencies of target compute_image_mean +make[4]: Leaving directory 
'/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +make -f tools/CMakeFiles/compute_image_mean.dir/build.make tools/CMakeFiles/compute_image_mean.dir/build make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/CMakeFiles/convert_mnist_data.dir/DependInfo.cmake --color= +make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/CMakeFiles/convert_mnist_siamese_data.dir/DependInfo.cmake --color= cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/CMakeFiles/convert_cifar_data.dir/DependInfo.cmake --color= -Scanning dependencies of target compute_image_mean +make -f tools/CMakeFiles/upgrade_solver_proto_text.dir/build.make tools/CMakeFiles/upgrade_solver_proto_text.dir/build +make -f examples/CMakeFiles/convert_mnist_siamese_data.dir/build.make examples/CMakeFiles/convert_mnist_siamese_data.dir/depend make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/python /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/python /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/python/CMakeFiles/pycaffe.dir/DependInfo.cmake --color= -Scanning dependencies of target upgrade_solver_proto_text -make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make -f tools/CMakeFiles/compute_image_mean.dir/build.make tools/CMakeFiles/compute_image_mean.dir/build +make -f python/CMakeFiles/pycaffe.dir/build.make python/CMakeFiles/pycaffe.dir/depend make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make -f tools/CMakeFiles/upgrade_solver_proto_text.dir/build.make tools/CMakeFiles/upgrade_solver_proto_text.dir/build -Scanning dependencies of target upgrade_net_proto_text -Scanning dependencies of target convert_imageset +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 
/build/caffe-1.0.0+git20180821.99bd997/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/CMakeFiles/convert_mnist_siamese_data.dir/DependInfo.cmake --color= make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/python /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/python /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/python/CMakeFiles/pycaffe.dir/DependInfo.cmake --color= +Scanning dependencies of target convert_imageset make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make -f tools/CMakeFiles/convert_imageset.dir/build.make tools/CMakeFiles/convert_imageset.dir/build -make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -Scanning dependencies of target upgrade_net_proto_binary Scanning dependencies of target caffe.bin make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make -f tools/CMakeFiles/upgrade_net_proto_text.dir/build.make tools/CMakeFiles/upgrade_net_proto_text.dir/build -make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make -f tools/CMakeFiles/caffe.bin.dir/build.make tools/CMakeFiles/caffe.bin.dir/build -Scanning dependencies of target extract_features -make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +[ 82%] Building CXX object tools/CMakeFiles/compute_image_mean.dir/compute_image_mean.cpp.o +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/compute_image_mean.dir/compute_image_mean.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/tools/compute_image_mean.cpp +Scanning dependencies of target convert_mnist_data make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make -f tools/CMakeFiles/upgrade_net_proto_binary.dir/build.make tools/CMakeFiles/upgrade_net_proto_binary.dir/build +make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +make -f examples/CMakeFiles/convert_mnist_data.dir/build.make examples/CMakeFiles/convert_mnist_data.dir/build +Scanning dependencies of target extract_features make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make -f tools/CMakeFiles/extract_features.dir/build.make tools/CMakeFiles/extract_features.dir/build -Scanning dependencies of target classification -Scanning dependencies of target convert_cifar_data -[ 82%] Building CXX object tools/CMakeFiles/compute_image_mean.dir/compute_image_mean.cpp.o +make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/compute_image_mean.dir/compute_image_mean.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/tools/compute_image_mean.cpp -make -f examples/CMakeFiles/convert_cifar_data.dir/build.make examples/CMakeFiles/convert_cifar_data.dir/build +make -f python/CMakeFiles/pycaffe.dir/build.make python/CMakeFiles/pycaffe.dir/build +Scanning dependencies of target upgrade_net_proto_text +Scanning dependencies of target upgrade_net_proto_binary +make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +Scanning dependencies of target classification +make -f tools/CMakeFiles/upgrade_net_proto_text.dir/build.make tools/CMakeFiles/upgrade_net_proto_text.dir/build make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' [ 82%] Building CXX object tools/CMakeFiles/upgrade_solver_proto_text.dir/upgrade_solver_proto_text.cpp.o cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/upgrade_solver_proto_text.dir/upgrade_solver_proto_text.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/tools/upgrade_solver_proto_text.cpp -make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make -f examples/CMakeFiles/classification.dir/build.make examples/CMakeFiles/classification.dir/build -Scanning dependencies of target convert_mnist_siamese_data -[ 82%] Building CXX object tools/CMakeFiles/upgrade_net_proto_text.dir/upgrade_net_proto_text.cpp.o make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -Scanning dependencies of target convert_mnist_data -make -f examples/CMakeFiles/convert_mnist_siamese_data.dir/build.make examples/CMakeFiles/convert_mnist_siamese_data.dir/build -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/upgrade_net_proto_text.dir/upgrade_net_proto_text.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/tools/upgrade_net_proto_text.cpp -make -f python/CMakeFiles/pycaffe.dir/build.make python/CMakeFiles/pycaffe.dir/build -make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' [ 84%] Building CXX object tools/CMakeFiles/convert_imageset.dir/convert_imageset.cpp.o -make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make -f examples/CMakeFiles/convert_mnist_data.dir/build.make examples/CMakeFiles/convert_mnist_data.dir/build -[ 84%] Building CXX object tools/CMakeFiles/caffe.bin.dir/caffe.cpp.o -[ 84%] Building CXX object tools/CMakeFiles/upgrade_net_proto_binary.dir/upgrade_net_proto_binary.cpp.o +make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/convert_imageset.dir/convert_imageset.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/tools/convert_imageset.cpp -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/upgrade_net_proto_binary.dir/upgrade_net_proto_binary.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/tools/upgrade_net_proto_binary.cpp +make -f tools/CMakeFiles/upgrade_net_proto_binary.dir/build.make tools/CMakeFiles/upgrade_net_proto_binary.dir/build make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Nothing to be done for 'python/CMakeFiles/pycaffe.dir/build'. make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/caffe.bin.dir/caffe.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/tools/caffe.cpp +make -f examples/CMakeFiles/classification.dir/build.make examples/CMakeFiles/classification.dir/build [ 85%] Building CXX object tools/CMakeFiles/extract_features.dir/extract_features.cpp.o +[ 85%] Building CXX object tools/CMakeFiles/caffe.bin.dir/caffe.cpp.o +Scanning dependencies of target convert_mnist_siamese_data +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/caffe.bin.dir/caffe.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/tools/caffe.cpp +make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/extract_features.dir/extract_features.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/tools/extract_features.cpp -[ 87%] Building CXX object examples/CMakeFiles/classification.dir/cpp_classification/classification.cpp.o -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/classification.dir/cpp_classification/classification.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/examples/cpp_classification/classification.cpp -[ 89%] Building CXX object examples/CMakeFiles/convert_mnist_data.dir/mnist/convert_mnist_data.cpp.o -[ 90%] Built target pycaffe +Scanning dependencies of target convert_cifar_data +[ 87%] Building CXX object examples/CMakeFiles/convert_mnist_data.dir/mnist/convert_mnist_data.cpp.o +make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/convert_mnist_data.dir/mnist/convert_mnist_data.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/examples/mnist/convert_mnist_data.cpp +make -f examples/CMakeFiles/convert_mnist_siamese_data.dir/build.make examples/CMakeFiles/convert_mnist_siamese_data.dir/build +make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +make -f examples/CMakeFiles/convert_cifar_data.dir/build.make examples/CMakeFiles/convert_cifar_data.dir/build +make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +[ 87%] Building CXX object tools/CMakeFiles/upgrade_net_proto_binary.dir/upgrade_net_proto_binary.cpp.o +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/upgrade_net_proto_binary.dir/upgrade_net_proto_binary.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/tools/upgrade_net_proto_binary.cpp +[ 87%] Building CXX object tools/CMakeFiles/upgrade_net_proto_text.dir/upgrade_net_proto_text.cpp.o +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/upgrade_net_proto_text.dir/upgrade_net_proto_text.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/tools/upgrade_net_proto_text.cpp +[ 89%] Built target pycaffe +[ 90%] Building CXX object examples/CMakeFiles/classification.dir/cpp_classification/classification.cpp.o [ 92%] Building CXX object examples/CMakeFiles/convert_mnist_siamese_data.dir/siamese/convert_mnist_siamese_data.cpp.o +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/classification.dir/cpp_classification/classification.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/examples/cpp_classification/classification.cpp [ 93%] Building CXX object examples/CMakeFiles/convert_cifar_data.dir/cifar10/convert_cifar_data.cpp.o -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/convert_mnist_siamese_data.dir/siamese/convert_mnist_siamese_data.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/examples/siamese/convert_mnist_siamese_data.cpp cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/convert_cifar_data.dir/cifar10/convert_cifar_data.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/examples/cifar10/convert_cifar_data.cpp -[ 93%] Linking CXX executable compute_image_mean -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/cmake -E cmake_link_script CMakeFiles/compute_image_mean.dir/link.txt --verbose=1 -/usr/bin/c++ -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -Wl,-z,relro -Wl,-z,now -Wl,--as-needed -rdynamic CMakeFiles/compute_image_mean.dir/compute_image_mean.cpp.o -o compute_image_mean ../lib/libcaffe.so.1.0.0 ../lib/libcaffeproto.a -lboost_system -lboost_thread -lboost_filesystem -lboost_chrono -lboost_date_time -lboost_atomic -lglog -lgflags -lprotobuf -lpthread /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so -llmdb -lleveldb /usr/lib/x86_64-linux-gnu/libopencv_highgui.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgcodecs.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgproc.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_core.so.3.2.0 -lblas -lboost_python3 +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples && /usr/bin/c++ -DCAFFE_VERSION=1.0.0 -DCPU_ONLY -DUSE_HDF5 -DUSE_LEVELDB -DUSE_LMDB -DUSE_OPENCV -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/include -I/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build -I/build/caffe-1.0.0+git20180821.99bd997/include -isystem /usr/include/hdf5/serial -isystem /usr/include/opencv -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -o CMakeFiles/convert_mnist_siamese_data.dir/siamese/convert_mnist_siamese_data.cpp.o -c /build/caffe-1.0.0+git20180821.99bd997/examples/siamese/convert_mnist_siamese_data.cpp [ 93%] Linking CXX executable siamese/convert_mnist_siamese_data cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples && /usr/bin/cmake -E cmake_link_script CMakeFiles/convert_mnist_siamese_data.dir/link.txt --verbose=1 /usr/bin/c++ -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -Wl,-z,relro -Wl,-z,now -Wl,--as-needed -rdynamic CMakeFiles/convert_mnist_siamese_data.dir/siamese/convert_mnist_siamese_data.cpp.o -o siamese/convert_mnist_siamese_data ../lib/libcaffe.so.1.0.0 ../lib/libcaffeproto.a -lboost_system -lboost_thread -lboost_filesystem -lboost_chrono -lboost_date_time -lboost_atomic -lglog -lgflags -lprotobuf -lpthread /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so -llmdb -lleveldb /usr/lib/x86_64-linux-gnu/libopencv_highgui.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgcodecs.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgproc.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_core.so.3.2.0 -lblas -lboost_python3 cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples && ln -sf /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/siamese/convert_mnist_siamese_data /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/siamese/convert_mnist_siamese_data.bin make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' [ 93%] Built target convert_mnist_siamese_data -make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -[ 93%] Built target compute_image_mean [ 93%] Linking CXX executable mnist/convert_mnist_data cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples && /usr/bin/cmake -E cmake_link_script CMakeFiles/convert_mnist_data.dir/link.txt --verbose=1 /usr/bin/c++ -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -Wl,-z,relro -Wl,-z,now -Wl,--as-needed -rdynamic CMakeFiles/convert_mnist_data.dir/mnist/convert_mnist_data.cpp.o -o mnist/convert_mnist_data ../lib/libcaffe.so.1.0.0 ../lib/libcaffeproto.a -lboost_system -lboost_thread -lboost_filesystem -lboost_chrono -lboost_date_time -lboost_atomic -lglog -lgflags -lprotobuf -lpthread /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so -llmdb -lleveldb /usr/lib/x86_64-linux-gnu/libopencv_highgui.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgcodecs.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgproc.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_core.so.3.2.0 -lblas -lboost_python3 @@ -3965,45 +3995,50 @@ cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples && ln -sf /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/mnist/convert_mnist_data /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/mnist/convert_mnist_data.bin make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' [ 93%] Built target convert_mnist_data +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples && ln -sf /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/cifar10/convert_cifar_data /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/cifar10/convert_cifar_data.bin +make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +[ 93%] Built target convert_cifar_data +[ 93%] Linking CXX executable compute_image_mean +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/cmake -E cmake_link_script CMakeFiles/compute_image_mean.dir/link.txt --verbose=1 +/usr/bin/c++ -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -Wl,-z,relro -Wl,-z,now -Wl,--as-needed -rdynamic CMakeFiles/compute_image_mean.dir/compute_image_mean.cpp.o -o compute_image_mean ../lib/libcaffe.so.1.0.0 ../lib/libcaffeproto.a -lboost_system -lboost_thread -lboost_filesystem -lboost_chrono -lboost_date_time -lboost_atomic -lglog -lgflags -lprotobuf -lpthread /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so -llmdb -lleveldb /usr/lib/x86_64-linux-gnu/libopencv_highgui.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgcodecs.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgproc.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_core.so.3.2.0 -lblas -lboost_python3 [ 93%] Linking CXX executable convert_imageset cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/cmake -E cmake_link_script CMakeFiles/convert_imageset.dir/link.txt --verbose=1 /usr/bin/c++ -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -Wl,-z,relro -Wl,-z,now -Wl,--as-needed -rdynamic CMakeFiles/convert_imageset.dir/convert_imageset.cpp.o -o convert_imageset ../lib/libcaffe.so.1.0.0 ../lib/libcaffeproto.a -lboost_system -lboost_thread -lboost_filesystem -lboost_chrono -lboost_date_time -lboost_atomic -lglog -lgflags -lprotobuf -lpthread /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so -llmdb -lleveldb /usr/lib/x86_64-linux-gnu/libopencv_highgui.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgcodecs.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgproc.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_core.so.3.2.0 -lblas -lboost_python3 -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples && ln -sf /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/cifar10/convert_cifar_data /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/cifar10/convert_cifar_data.bin make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -[ 93%] Built target convert_cifar_data +[ 93%] Built target compute_image_mean +make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +[ 93%] Built target convert_imageset [ 95%] Linking CXX executable upgrade_solver_proto_text cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/cmake -E cmake_link_script CMakeFiles/upgrade_solver_proto_text.dir/link.txt --verbose=1 /usr/bin/c++ -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -Wl,-z,relro -Wl,-z,now -Wl,--as-needed -rdynamic CMakeFiles/upgrade_solver_proto_text.dir/upgrade_solver_proto_text.cpp.o -o upgrade_solver_proto_text ../lib/libcaffe.so.1.0.0 ../lib/libcaffeproto.a -lboost_system -lboost_thread -lboost_filesystem -lboost_chrono -lboost_date_time -lboost_atomic -lglog -lgflags -lprotobuf -lpthread /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so -llmdb -lleveldb /usr/lib/x86_64-linux-gnu/libopencv_highgui.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgcodecs.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgproc.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_core.so.3.2.0 -lblas -lboost_python3 -make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -[ 95%] Built target convert_imageset -make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -[ 95%] Built target upgrade_solver_proto_text [ 96%] Linking CXX executable upgrade_net_proto_text cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/cmake -E cmake_link_script CMakeFiles/upgrade_net_proto_text.dir/link.txt --verbose=1 /usr/bin/c++ -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -Wl,-z,relro -Wl,-z,now -Wl,--as-needed -rdynamic CMakeFiles/upgrade_net_proto_text.dir/upgrade_net_proto_text.cpp.o -o upgrade_net_proto_text ../lib/libcaffe.so.1.0.0 ../lib/libcaffeproto.a -lboost_system -lboost_thread -lboost_filesystem -lboost_chrono -lboost_date_time -lboost_atomic -lglog -lgflags -lprotobuf -lpthread /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so -llmdb -lleveldb /usr/lib/x86_64-linux-gnu/libopencv_highgui.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgcodecs.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgproc.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_core.so.3.2.0 -lblas -lboost_python3 -[ 98%] Linking CXX executable upgrade_net_proto_binary -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/cmake -E cmake_link_script CMakeFiles/upgrade_net_proto_binary.dir/link.txt --verbose=1 -/usr/bin/c++ -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -Wl,-z,relro -Wl,-z,now -Wl,--as-needed -rdynamic CMakeFiles/upgrade_net_proto_binary.dir/upgrade_net_proto_binary.cpp.o -o upgrade_net_proto_binary ../lib/libcaffe.so.1.0.0 ../lib/libcaffeproto.a -lboost_system -lboost_thread -lboost_filesystem -lboost_chrono -lboost_date_time -lboost_atomic -lglog -lgflags -lprotobuf -lpthread /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so -llmdb -lleveldb /usr/lib/x86_64-linux-gnu/libopencv_highgui.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgcodecs.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgproc.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_core.so.3.2.0 -lblas -lboost_python3 make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -[ 98%] Built target upgrade_net_proto_text +[ 96%] Built target upgrade_solver_proto_text make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -[ 98%] Built target upgrade_net_proto_binary +[ 96%] Built target upgrade_net_proto_text +[ 98%] Linking CXX executable upgrade_net_proto_binary +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/cmake -E cmake_link_script CMakeFiles/upgrade_net_proto_binary.dir/link.txt --verbose=1 [ 98%] Linking CXX executable extract_features cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/cmake -E cmake_link_script CMakeFiles/extract_features.dir/link.txt --verbose=1 +/usr/bin/c++ -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -Wl,-z,relro -Wl,-z,now -Wl,--as-needed -rdynamic CMakeFiles/upgrade_net_proto_binary.dir/upgrade_net_proto_binary.cpp.o -o upgrade_net_proto_binary ../lib/libcaffe.so.1.0.0 ../lib/libcaffeproto.a -lboost_system -lboost_thread -lboost_filesystem -lboost_chrono -lboost_date_time -lboost_atomic -lglog -lgflags -lprotobuf -lpthread /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so -llmdb -lleveldb /usr/lib/x86_64-linux-gnu/libopencv_highgui.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgcodecs.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgproc.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_core.so.3.2.0 -lblas -lboost_python3 /usr/bin/c++ -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -Wl,-z,relro -Wl,-z,now -Wl,--as-needed -rdynamic CMakeFiles/extract_features.dir/extract_features.cpp.o -o extract_features ../lib/libcaffe.so.1.0.0 ../lib/libcaffeproto.a -lboost_system -lboost_thread -lboost_filesystem -lboost_chrono -lboost_date_time -lboost_atomic -lglog -lgflags -lprotobuf -lpthread /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so -llmdb -lleveldb /usr/lib/x86_64-linux-gnu/libopencv_highgui.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgcodecs.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgproc.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_core.so.3.2.0 -lblas -lboost_python3 -[ 98%] Linking CXX executable cpp_classification/classification -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples && /usr/bin/cmake -E cmake_link_script CMakeFiles/classification.dir/link.txt --verbose=1 -/usr/bin/c++ -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -Wl,-z,relro -Wl,-z,now -Wl,--as-needed -rdynamic CMakeFiles/classification.dir/cpp_classification/classification.cpp.o -o cpp_classification/classification ../lib/libcaffe.so.1.0.0 ../lib/libcaffeproto.a -lboost_system -lboost_thread -lboost_filesystem -lboost_chrono -lboost_date_time -lboost_atomic -lglog -lgflags -lprotobuf -lpthread /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so -llmdb -lleveldb /usr/lib/x86_64-linux-gnu/libopencv_highgui.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgcodecs.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgproc.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_core.so.3.2.0 -lblas -lboost_python3 +make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +[ 98%] Built target upgrade_net_proto_binary +make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +[ 98%] Built target extract_features [100%] Linking CXX executable caffe cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/tools && /usr/bin/cmake -E cmake_link_script CMakeFiles/caffe.bin.dir/link.txt --verbose=1 /usr/bin/c++ -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -Wl,-z,relro -Wl,-z,now -Wl,--as-needed -rdynamic CMakeFiles/caffe.bin.dir/caffe.cpp.o -o caffe ../lib/libcaffe.so.1.0.0 ../lib/libcaffeproto.a -lboost_system -lboost_thread -lboost_filesystem -lboost_chrono -lboost_date_time -lboost_atomic -lglog -lgflags -lprotobuf -lpthread /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so -llmdb -lleveldb /usr/lib/x86_64-linux-gnu/libopencv_highgui.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgcodecs.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgproc.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_core.so.3.2.0 -lblas -lboost_python3 +[100%] Linking CXX executable cpp_classification/classification +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples && /usr/bin/cmake -E cmake_link_script CMakeFiles/classification.dir/link.txt --verbose=1 +/usr/bin/c++ -g -O2 -ffile-prefix-map=/build/caffe-1.0.0+git20180821.99bd997=. -fstack-protector-strong -Wformat -Werror=format-security -Wall -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -Wall -Wno-sign-compare -Wno-uninitialized -O3 -DNDEBUG -Wl,-z,relro -Wl,-z,now -Wl,--as-needed -rdynamic CMakeFiles/classification.dir/cpp_classification/classification.cpp.o -o cpp_classification/classification ../lib/libcaffe.so.1.0.0 ../lib/libcaffeproto.a -lboost_system -lboost_thread -lboost_filesystem -lboost_chrono -lboost_date_time -lboost_atomic -lglog -lgflags -lprotobuf -lpthread /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5.so -lpthread -lsz -lz -ldl -lm /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl_cpp.so /usr/lib/x86_64-linux-gnu/hdf5/serial/libhdf5_hl.so -llmdb -lleveldb /usr/lib/x86_64-linux-gnu/libopencv_highgui.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgcodecs.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_imgproc.so.3.2.0 /usr/lib/x86_64-linux-gnu/libopencv_core.so.3.2.0 -lblas -lboost_python3 make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -[100%] Built target extract_features +[100%] Built target caffe.bin cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples && ln -sf /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/cpp_classification/classification /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/cpp_classification/classification.bin make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' [100%] Built target classification -make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -[100%] Built target caffe.bin make[3]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' /usr/bin/cmake -E cmake_progress_start /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/CMakeFiles 0 make[2]: 
Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' @@ -11452,7 +11487,7 @@ make[1]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997' dh_auto_test --builddirectory="caffe_cpu_build" \ -- runtest pytest LD_LIBRARY_PATH=/build/caffe-1.0.0+git20180821.99bd997/"caffe_cpu_build"/lib/ - cd caffe_cpu_build && make -j15 test runtest pytest LD_LIBRARY_PATH=/build/caffe-1.0.0\+git20180821.99bd997/caffe_cpu_build/lib/ ARGS\+=-j15 + cd caffe_cpu_build && make -j16 test runtest pytest LD_LIBRARY_PATH=/build/caffe-1.0.0\+git20180821.99bd997/caffe_cpu_build/lib/ ARGS\+=-j16 make[2]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[2]: Nothing to be done for 'test'. /usr/bin/cmake -S/build/caffe-1.0.0+git20180821.99bd997 -B/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build --check-build-system CMakeFiles/Makefile.cmake 0 @@ -11469,18 +11504,18 @@ make[5]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/src/gtest /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/gtest /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/gtest/CMakeFiles/gtest.dir/DependInfo.cmake --color= make[5]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make -f src/caffe/CMakeFiles/caffeproto.dir/build.make src/caffe/CMakeFiles/caffeproto.dir/build +make -f src/gtest/CMakeFiles/gtest.dir/build.make src/gtest/CMakeFiles/gtest.dir/build make[5]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make[5]: Nothing to be done for 'src/caffe/CMakeFiles/caffeproto.dir/build'. +make[5]: Nothing to be done for 'src/gtest/CMakeFiles/gtest.dir/build'. make[5]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +[ 1%] Built target gtest make[5]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make -f src/gtest/CMakeFiles/gtest.dir/build.make src/gtest/CMakeFiles/gtest.dir/build -[ 1%] Built target caffeproto +make -f src/caffe/CMakeFiles/caffeproto.dir/build.make src/caffe/CMakeFiles/caffeproto.dir/build make[5]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make[5]: Nothing to be done for 'src/gtest/CMakeFiles/gtest.dir/build'. +make[5]: Nothing to be done for 'src/caffe/CMakeFiles/caffeproto.dir/build'. 
make[5]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +[ 2%] Built target caffeproto make -f src/caffe/CMakeFiles/caffe.dir/build.make src/caffe/CMakeFiles/caffe.dir/depend -[ 2%] Built target gtest make[5]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/src/caffe /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/src/caffe/CMakeFiles/caffe.dir/DependInfo.cmake --color= make[5]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' @@ -11507,2798 +11542,2798 @@ make[5]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997 && /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/test/test.testbin --gtest_shuffle --gtest_filter="-*GPU*" Note: Google Test filter = -*GPU* -Note: Randomizing tests' orders with a seed of 95158 . +Note: Randomizing tests' orders with a seed of 43389 . [==========] Running 1162 tests from 152 test cases. [----------] Global test environment set-up. -[----------] 1 test from HDF5OutputLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] HDF5OutputLayerTest/1.TestForward -[ OK ] HDF5OutputLayerTest/1.TestForward (244 ms) -[----------] 1 test from HDF5OutputLayerTest/1 (244 ms total) - -[----------] 5 tests from ImageDataLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] ImageDataLayerTest/1.TestReshape -[ OK ] ImageDataLayerTest/1.TestReshape (196 ms) -[ RUN ] ImageDataLayerTest/1.TestShuffle -[ OK ] ImageDataLayerTest/1.TestShuffle (230 ms) -[ RUN ] ImageDataLayerTest/1.TestResize -[ OK ] ImageDataLayerTest/1.TestResize (180 ms) -[ RUN ] ImageDataLayerTest/1.TestRead -[ OK ] ImageDataLayerTest/1.TestRead (219 ms) -[ RUN ] ImageDataLayerTest/1.TestSpace -[ OK ] ImageDataLayerTest/1.TestSpace (90 ms) -[----------] 5 tests from ImageDataLayerTest/1 (915 ms total) - -[----------] 2 tests from EuclideanLossLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] EuclideanLossLayerTest/1.TestGradient -[ OK ] EuclideanLossLayerTest/1.TestGradient (115 ms) -[ RUN ] EuclideanLossLayerTest/1.TestForward -[ OK ] EuclideanLossLayerTest/1.TestForward (1 ms) -[----------] 2 tests from EuclideanLossLayerTest/1 (116 ms total) - -[----------] 20 tests from BiasLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] BiasLayerTest/1.TestForwardEltwise -[ OK ] BiasLayerTest/1.TestForwardEltwise (0 ms) -[ RUN ] BiasLayerTest/1.TestForwardBiasAxis2 -[ OK ] BiasLayerTest/1.TestForwardBiasAxis2 (0 ms) -[ RUN ] BiasLayerTest/1.TestGradientEltwise -[ OK ] BiasLayerTest/1.TestGradientEltwise (5 ms) -[ RUN ] BiasLayerTest/1.TestForwardBroadcastMiddleWithParam -[ OK ] BiasLayerTest/1.TestForwardBroadcastMiddleWithParam (0 ms) -[ RUN ] BiasLayerTest/1.TestForwardBroadcastMiddleInPlace -[ OK ] BiasLayerTest/1.TestForwardBroadcastMiddleInPlace (0 ms) -[ RUN ] BiasLayerTest/1.TestGradientBroadcastBegin -[ OK ] BiasLayerTest/1.TestGradientBroadcastBegin (125 ms) -[ RUN ] BiasLayerTest/1.TestBackwardEltwiseInPlace -[ OK ] BiasLayerTest/1.TestBackwardEltwiseInPlace (0 ms) -[ RUN ] BiasLayerTest/1.TestGradientBiasAxis2 -[ OK ] BiasLayerTest/1.TestGradientBiasAxis2 (109 ms) -[ RUN ] BiasLayerTest/1.TestForwardEltwiseInPlace -[ OK ] 
BiasLayerTest/1.TestForwardEltwiseInPlace (0 ms) -[ RUN ] BiasLayerTest/1.TestForwardBias -[ OK ] BiasLayerTest/1.TestForwardBias (0 ms) -[ RUN ] BiasLayerTest/1.TestGradientBroadcastMiddle -[ OK ] BiasLayerTest/1.TestGradientBroadcastMiddle (128 ms) -[ RUN ] BiasLayerTest/1.TestGradientBroadcastMiddleWithParam -[ OK ] BiasLayerTest/1.TestGradientBroadcastMiddleWithParam (130 ms) -[ RUN ] BiasLayerTest/1.TestGradientBias -[ OK ] BiasLayerTest/1.TestGradientBias (111 ms) -[ RUN ] BiasLayerTest/1.TestBackwardBroadcastMiddleInPlace -[ OK ] BiasLayerTest/1.TestBackwardBroadcastMiddleInPlace (0 ms) -[ RUN ] BiasLayerTest/1.TestForwardEltwiseWithParam -[ OK ] BiasLayerTest/1.TestForwardEltwiseWithParam (1 ms) -[ RUN ] BiasLayerTest/1.TestGradientEltwiseWithParam -[ OK ] BiasLayerTest/1.TestGradientEltwiseWithParam (259 ms) -[ RUN ] BiasLayerTest/1.TestGradientBroadcastEnd -[ OK ] BiasLayerTest/1.TestGradientBroadcastEnd (218 ms) -[ RUN ] BiasLayerTest/1.TestForwardBroadcastEnd -[ OK ] BiasLayerTest/1.TestForwardBroadcastEnd (1 ms) -[ RUN ] BiasLayerTest/1.TestForwardBroadcastBegin -[ OK ] BiasLayerTest/1.TestForwardBroadcastBegin (0 ms) -[ RUN ] BiasLayerTest/1.TestForwardBroadcastMiddle -[ OK ] BiasLayerTest/1.TestForwardBroadcastMiddle (0 ms) -[----------] 20 tests from BiasLayerTest/1 (1087 ms total) - -[----------] 20 tests from BiasLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] BiasLayerTest/0.TestGradientBroadcastBegin -[ OK ] BiasLayerTest/0.TestGradientBroadcastBegin (121 ms) -[ RUN ] BiasLayerTest/0.TestGradientEltwise -[ OK ] BiasLayerTest/0.TestGradientEltwise (5 ms) -[ RUN ] BiasLayerTest/0.TestForwardEltwiseInPlace -[ OK ] BiasLayerTest/0.TestForwardEltwiseInPlace (0 ms) -[ RUN ] BiasLayerTest/0.TestGradientEltwiseWithParam -[ OK ] BiasLayerTest/0.TestGradientEltwiseWithParam (314 ms) -[ RUN ] BiasLayerTest/0.TestForwardBroadcastMiddle -[ OK ] BiasLayerTest/0.TestForwardBroadcastMiddle (1 ms) -[ RUN ] BiasLayerTest/0.TestForwardBroadcastEnd -[ OK ] BiasLayerTest/0.TestForwardBroadcastEnd (0 ms) -[ RUN ] BiasLayerTest/0.TestGradientBroadcastEnd -[ OK ] BiasLayerTest/0.TestGradientBroadcastEnd (187 ms) -[ RUN ] BiasLayerTest/0.TestForwardBiasAxis2 -[ OK ] BiasLayerTest/0.TestForwardBiasAxis2 (0 ms) -[ RUN ] BiasLayerTest/0.TestForwardEltwiseWithParam -[ OK ] BiasLayerTest/0.TestForwardEltwiseWithParam (1 ms) -[ RUN ] BiasLayerTest/0.TestForwardBroadcastBegin -[ OK ] BiasLayerTest/0.TestForwardBroadcastBegin (0 ms) -[ RUN ] BiasLayerTest/0.TestForwardEltwise -[ OK ] BiasLayerTest/0.TestForwardEltwise (0 ms) -[ RUN ] BiasLayerTest/0.TestGradientBroadcastMiddle -[ OK ] BiasLayerTest/0.TestGradientBroadcastMiddle (144 ms) -[ RUN ] BiasLayerTest/0.TestBackwardBroadcastMiddleInPlace -[ OK ] BiasLayerTest/0.TestBackwardBroadcastMiddleInPlace (0 ms) -[ RUN ] BiasLayerTest/0.TestGradientBiasAxis2 -[ OK ] BiasLayerTest/0.TestGradientBiasAxis2 (115 ms) -[ RUN ] BiasLayerTest/0.TestForwardBroadcastMiddleWithParam -[ OK ] BiasLayerTest/0.TestForwardBroadcastMiddleWithParam (0 ms) -[ RUN ] BiasLayerTest/0.TestForwardBroadcastMiddleInPlace -[ OK ] BiasLayerTest/0.TestForwardBroadcastMiddleInPlace (0 ms) -[ RUN ] BiasLayerTest/0.TestForwardBias -[ OK ] BiasLayerTest/0.TestForwardBias (0 ms) -[ RUN ] BiasLayerTest/0.TestGradientBroadcastMiddleWithParam -[ OK ] BiasLayerTest/0.TestGradientBroadcastMiddleWithParam (139 ms) -[ RUN ] BiasLayerTest/0.TestBackwardEltwiseInPlace -[ OK ] BiasLayerTest/0.TestBackwardEltwiseInPlace (0 ms) -[ RUN ] BiasLayerTest/0.TestGradientBias -[ OK ] 
BiasLayerTest/0.TestGradientBias (130 ms) -[----------] 20 tests from BiasLayerTest/0 (1158 ms total) - -[----------] 6 tests from FlattenLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] FlattenLayerTest/1.TestForward -[ OK ] FlattenLayerTest/1.TestForward (0 ms) -[ RUN ] FlattenLayerTest/1.TestSetup -[ OK ] FlattenLayerTest/1.TestSetup (0 ms) -[ RUN ] FlattenLayerTest/1.TestSetupWithStartAndEndAxis -[ OK ] FlattenLayerTest/1.TestSetupWithStartAndEndAxis (0 ms) -[ RUN ] FlattenLayerTest/1.TestGradient -[ OK ] FlattenLayerTest/1.TestGradient (4 ms) -[ RUN ] FlattenLayerTest/1.TestSetupWithAxis -[ OK ] FlattenLayerTest/1.TestSetupWithAxis (0 ms) -[ RUN ] FlattenLayerTest/1.TestSetupWithEndAxis -[ OK ] FlattenLayerTest/1.TestSetupWithEndAxis (0 ms) -[----------] 6 tests from FlattenLayerTest/1 (4 ms total) - -[----------] 8 tests from SliceLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] SliceLayerTest/1.TestGradientAcrossNum -[ OK ] SliceLayerTest/1.TestGradientAcrossNum (51 ms) -[ RUN ] SliceLayerTest/1.TestTrivialSlice -[ OK ] SliceLayerTest/1.TestTrivialSlice (1 ms) -[ RUN ] SliceLayerTest/1.TestSliceAcrossNum -[ OK ] SliceLayerTest/1.TestSliceAcrossNum (0 ms) -[ RUN ] SliceLayerTest/1.TestSliceAcrossChannels -[ OK ] SliceLayerTest/1.TestSliceAcrossChannels (0 ms) -[ RUN ] SliceLayerTest/1.TestGradientAcrossChannels -[ OK ] SliceLayerTest/1.TestGradientAcrossChannels (59 ms) -[ RUN ] SliceLayerTest/1.TestGradientTrivial -[ OK ] SliceLayerTest/1.TestGradientTrivial (18 ms) -[ RUN ] SliceLayerTest/1.TestSetupNum -[ OK ] SliceLayerTest/1.TestSetupNum (0 ms) -[ RUN ] SliceLayerTest/1.TestSetupChannels -[ OK ] SliceLayerTest/1.TestSetupChannels (0 ms) -[----------] 8 tests from SliceLayerTest/1 (129 ms total) - -[----------] 10 tests from PowerLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] PowerLayerTest/0.TestPowerGradientShiftZero -[ OK ] PowerLayerTest/0.TestPowerGradientShiftZero (3 ms) -[ RUN ] PowerLayerTest/0.TestPowerTwo -[ OK ] PowerLayerTest/0.TestPowerTwo (0 ms) -[ RUN ] PowerLayerTest/0.TestPowerGradient -[ OK ] PowerLayerTest/0.TestPowerGradient (4 ms) -[ RUN ] PowerLayerTest/0.TestPowerTwoScaleHalfGradient -[ OK ] PowerLayerTest/0.TestPowerTwoScaleHalfGradient (5 ms) -[ RUN ] PowerLayerTest/0.TestPowerZero -[ OK ] PowerLayerTest/0.TestPowerZero (0 ms) -[ RUN ] PowerLayerTest/0.TestPower -[ OK ] PowerLayerTest/0.TestPower (0 ms) -[ RUN ] PowerLayerTest/0.TestPowerOne -[ OK ] PowerLayerTest/0.TestPowerOne (0 ms) -[ RUN ] PowerLayerTest/0.TestPowerZeroGradient -[ OK ] PowerLayerTest/0.TestPowerZeroGradient (3 ms) -[ RUN ] PowerLayerTest/0.TestPowerTwoGradient -[ OK ] PowerLayerTest/0.TestPowerTwoGradient (4 ms) -[ RUN ] PowerLayerTest/0.TestPowerOneGradient -[ OK ] PowerLayerTest/0.TestPowerOneGradient (3 ms) -[----------] 10 tests from PowerLayerTest/0 (22 ms total) - [----------] 11 tests from AdaDeltaSolverTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithEverything -[ OK ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithEverything (190 ms) -[ RUN ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdate -[ OK ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdate (16 ms) -[ RUN ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithWeightDecay -[ OK ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithWeightDecay (17 ms) -[ RUN ] AdaDeltaSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum -[ OK ] AdaDeltaSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum (3 ms) -[ RUN ] 
AdaDeltaSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare -[ OK ] AdaDeltaSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare (4 ms) [ RUN ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithEverythingShare -[ OK ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithEverythingShare (95 ms) -[ RUN ] AdaDeltaSolverTest/1.TestSnapshotShare -[ OK ] AdaDeltaSolverTest/1.TestSnapshotShare (24 ms) +[ OK ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithEverythingShare (194 ms) [ RUN ] AdaDeltaSolverTest/1.TestSnapshot -[ OK ] AdaDeltaSolverTest/1.TestSnapshot (24 ms) -[ RUN ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithMomentum -[ OK ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithMomentum (33 ms) +[ OK ] AdaDeltaSolverTest/1.TestSnapshot (16 ms) [ RUN ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithHalfMomentum -[ OK ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithHalfMomentum (32 ms) +[ OK ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithHalfMomentum (29 ms) [ RUN ] AdaDeltaSolverTest/1.TestLeastSquaresUpdateWithMomentumMultiIter -[ OK ] AdaDeltaSolverTest/1.TestLeastSquaresUpdateWithMomentumMultiIter (82 ms) -[----------] 11 tests from AdaDeltaSolverTest/1 (521 ms total) - -[----------] 2 tests from CommonTest -[ RUN ] CommonTest.TestRandSeedCPU -[ OK ] CommonTest.TestRandSeedCPU (0 ms) -[ RUN ] CommonTest.TestBrewMode -[ OK ] CommonTest.TestBrewMode (0 ms) -[----------] 2 tests from CommonTest (0 ms total) - -[----------] 2 tests from BatchReindexLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] BatchReindexLayerTest/0.TestGradient -[ OK ] BatchReindexLayerTest/0.TestGradient (161 ms) -[ RUN ] BatchReindexLayerTest/0.TestForward -[ OK ] BatchReindexLayerTest/0.TestForward (0 ms) -[----------] 2 tests from BatchReindexLayerTest/0 (161 ms total) +[ OK ] AdaDeltaSolverTest/1.TestLeastSquaresUpdateWithMomentumMultiIter (73 ms) +[ RUN ] AdaDeltaSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare +[ OK ] AdaDeltaSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare (4 ms) +[ RUN ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithMomentum +[ OK ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithMomentum (29 ms) +[ RUN ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdate +[ OK ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdate (14 ms) +[ RUN ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithWeightDecay +[ OK ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithWeightDecay (15 ms) +[ RUN ] AdaDeltaSolverTest/1.TestSnapshotShare +[ OK ] AdaDeltaSolverTest/1.TestSnapshotShare (21 ms) +[ RUN ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithEverything +[ OK ] AdaDeltaSolverTest/1.TestAdaDeltaLeastSquaresUpdateWithEverything (73 ms) +[ RUN ] AdaDeltaSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum +[ OK ] AdaDeltaSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum (3 ms) +[----------] 11 tests from AdaDeltaSolverTest/1 (471 ms total) -[----------] 12 tests from NesterovSolverTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithEverythingShare -[ OK ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithEverythingShare (86 ms) -[ RUN ] NesterovSolverTest/1.TestLeastSquaresUpdateWithMomentumMultiIter -[ OK ] NesterovSolverTest/1.TestLeastSquaresUpdateWithMomentumMultiIter (81 ms) -[ RUN ] NesterovSolverTest/1.TestSnapshotShare -[ OK ] NesterovSolverTest/1.TestSnapshotShare (21 ms) -[ RUN ] 
NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithWeightDecay -[ OK ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithWeightDecay (17 ms) -[ RUN ] NesterovSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum -[ OK ] NesterovSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum (2 ms) -[ RUN ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithMomentum -[ OK ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithMomentum (33 ms) -[ RUN ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateLROneHundredth -[ OK ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateLROneHundredth (16 ms) -[ RUN ] NesterovSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare -[ OK ] NesterovSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare (4 ms) -[ RUN ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithWeightDecayMultiIter -[ OK ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithWeightDecayMultiIter (81 ms) -[ RUN ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithEverything -[ OK ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithEverything (82 ms) -[ RUN ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdate -[ OK ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdate (16 ms) -[ RUN ] NesterovSolverTest/1.TestSnapshot -[ OK ] NesterovSolverTest/1.TestSnapshot (16 ms) -[----------] 12 tests from NesterovSolverTest/1 (455 ms total) +[----------] 2 tests from SoftmaxLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] SoftmaxLayerTest/1.TestGradient +[ OK ] SoftmaxLayerTest/1.TestGradient (274 ms) +[ RUN ] SoftmaxLayerTest/1.TestForward +[ OK ] SoftmaxLayerTest/1.TestForward (0 ms) +[----------] 2 tests from SoftmaxLayerTest/1 (274 ms total) -[----------] 5 tests from DBTest/1, where TypeParam = caffe::TypeLMDB -[ RUN ] DBTest/1.TestNext -[ OK ] DBTest/1.TestNext (14 ms) -[ RUN ] DBTest/1.TestKeyValue -[ OK ] DBTest/1.TestKeyValue (14 ms) -[ RUN ] DBTest/1.TestWrite -[ OK ] DBTest/1.TestWrite (14 ms) -[ RUN ] DBTest/1.TestSeekToFirst -[ OK ] DBTest/1.TestSeekToFirst (14 ms) -[ RUN ] DBTest/1.TestGetDB -[ OK ] DBTest/1.TestGetDB (13 ms) -[----------] 5 tests from DBTest/1 (69 ms total) +[----------] 4 tests from ConstantFillerTest/0, where TypeParam = float +[ RUN ] ConstantFillerTest/0.TestFill1D +[ OK ] ConstantFillerTest/0.TestFill1D (0 ms) +[ RUN ] ConstantFillerTest/0.TestFill2D +[ OK ] ConstantFillerTest/0.TestFill2D (0 ms) +[ RUN ] ConstantFillerTest/0.TestFill5D +[ OK ] ConstantFillerTest/0.TestFill5D (0 ms) +[ RUN ] ConstantFillerTest/0.TestFill +[ OK ] ConstantFillerTest/0.TestFill (0 ms) +[----------] 4 tests from ConstantFillerTest/0 (0 ms total) -[----------] 14 tests from DataLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] DataLayerTest/1.TestReadCropTrainSequenceUnseededLevelDB -[ OK ] DataLayerTest/1.TestReadCropTrainSequenceUnseededLevelDB (5 ms) -[ RUN ] DataLayerTest/1.TestReadCropTestLevelDB -[ OK ] DataLayerTest/1.TestReadCropTestLevelDB (8 ms) -[ RUN ] DataLayerTest/1.TestReshapeLMDB -[ OK ] DataLayerTest/1.TestReshapeLMDB (2 ms) -[ RUN ] DataLayerTest/1.TestReadCropTrainSequenceSeededLMDB -[ OK ] DataLayerTest/1.TestReadCropTrainSequenceSeededLMDB (16 ms) -[ RUN ] DataLayerTest/1.TestSkipLevelDB -[ OK ] DataLayerTest/1.TestSkipLevelDB (15 ms) -[ RUN ] DataLayerTest/1.TestReadLMDB -[ OK ] DataLayerTest/1.TestReadLMDB (6 ms) -[ RUN ] DataLayerTest/1.TestReadCropTrainLevelDB -[ OK ] DataLayerTest/1.TestReadCropTrainLevelDB (1 ms) -[ RUN ] DataLayerTest/1.TestSkipLMDB -[ OK ] DataLayerTest/1.TestSkipLMDB (14 ms) -[ RUN ] 
DataLayerTest/1.TestReadCropTrainSequenceSeededLevelDB -[ OK ] DataLayerTest/1.TestReadCropTrainSequenceSeededLevelDB (2 ms) -[ RUN ] DataLayerTest/1.TestReadCropTrainLMDB -[ OK ] DataLayerTest/1.TestReadCropTrainLMDB (1 ms) -[ RUN ] DataLayerTest/1.TestReshapeLevelDB -[ OK ] DataLayerTest/1.TestReshapeLevelDB (7 ms) -[ RUN ] DataLayerTest/1.TestReadLevelDB -[ OK ] DataLayerTest/1.TestReadLevelDB (13 ms) -[ RUN ] DataLayerTest/1.TestReadCropTestLMDB -[ OK ] DataLayerTest/1.TestReadCropTestLMDB (9 ms) -[ RUN ] DataLayerTest/1.TestReadCropTrainSequenceUnseededLMDB -[ OK ] DataLayerTest/1.TestReadCropTrainSequenceUnseededLMDB (4 ms) -[----------] 14 tests from DataLayerTest/1 (103 ms total) +[----------] 10 tests from EltwiseLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] EltwiseLayerTest/0.TestSumCoeff +[ OK ] EltwiseLayerTest/0.TestSumCoeff (0 ms) +[ RUN ] EltwiseLayerTest/0.TestSetUp +[ OK ] EltwiseLayerTest/0.TestSetUp (0 ms) +[ RUN ] EltwiseLayerTest/0.TestProd +[ OK ] EltwiseLayerTest/0.TestProd (0 ms) +[ RUN ] EltwiseLayerTest/0.TestMax +[ OK ] EltwiseLayerTest/0.TestMax (0 ms) +[ RUN ] EltwiseLayerTest/0.TestUnstableProdGradient +[ OK ] EltwiseLayerTest/0.TestUnstableProdGradient (5 ms) +[ RUN ] EltwiseLayerTest/0.TestStableProdGradient +[ OK ] EltwiseLayerTest/0.TestStableProdGradient (4 ms) +[ RUN ] EltwiseLayerTest/0.TestMaxGradient +[ OK ] EltwiseLayerTest/0.TestMaxGradient (5 ms) +[ RUN ] EltwiseLayerTest/0.TestSumGradient +[ OK ] EltwiseLayerTest/0.TestSumGradient (5 ms) +[ RUN ] EltwiseLayerTest/0.TestSumCoeffGradient +[ OK ] EltwiseLayerTest/0.TestSumCoeffGradient (5 ms) +[ RUN ] EltwiseLayerTest/0.TestSum +[ OK ] EltwiseLayerTest/0.TestSum (0 ms) +[----------] 10 tests from EltwiseLayerTest/0 (24 ms total) [----------] 12 tests from SGDSolverTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateWithMomentumMultiIter -[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateWithMomentumMultiIter (121 ms) -[ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateWithWeightDecayMultiIter -[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateWithWeightDecayMultiIter (120 ms) -[ RUN ] SGDSolverTest/1.TestSnapshotShare -[ OK ] SGDSolverTest/1.TestSnapshotShare (29 ms) [ RUN ] SGDSolverTest/1.TestLeastSquaresUpdate -[ OK ] SGDSolverTest/1.TestLeastSquaresUpdate (22 ms) -[ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateWithWeightDecay -[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateWithWeightDecay (40 ms) -[ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateWithEverythingShare -[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateWithEverythingShare (108 ms) -[ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateLROneHundredth -[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateLROneHundredth (18 ms) -[ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare -[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare (5 ms) -[ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateWithEverything -[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateWithEverything (98 ms) -[ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateWithMomentum -[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateWithMomentum (40 ms) +[ OK ] SGDSolverTest/1.TestLeastSquaresUpdate (14 ms) [ RUN ] SGDSolverTest/1.TestSnapshot -[ OK ] SGDSolverTest/1.TestSnapshot (24 ms) +[ OK ] SGDSolverTest/1.TestSnapshot (14 ms) [ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum -[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum (5 ms) -[----------] 12 tests from SGDSolverTest/1 (630 ms total) - -[----------] 5 tests from 
EmbedLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] EmbedLayerTest/1.TestForwardWithBias -[ OK ] EmbedLayerTest/1.TestForwardWithBias (0 ms) -[ RUN ] EmbedLayerTest/1.TestGradient -[ OK ] EmbedLayerTest/1.TestGradient (17 ms) -[ RUN ] EmbedLayerTest/1.TestForward -[ OK ] EmbedLayerTest/1.TestForward (0 ms) -[ RUN ] EmbedLayerTest/1.TestSetUp -[ OK ] EmbedLayerTest/1.TestSetUp (0 ms) -[ RUN ] EmbedLayerTest/1.TestGradientWithBias -[ OK ] EmbedLayerTest/1.TestGradientWithBias (27 ms) -[----------] 5 tests from EmbedLayerTest/1 (44 ms total) - -[----------] 9 tests from AdaGradSolverTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] AdaGradSolverTest/1.TestSnapshot -[ OK ] AdaGradSolverTest/1.TestSnapshot (26 ms) -[ RUN ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdateWithWeightDecay -[ OK ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdateWithWeightDecay (21 ms) -[ RUN ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdateWithEverything -[ OK ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdateWithEverything (115 ms) -[ RUN ] AdaGradSolverTest/1.TestSnapshotShare -[ OK ] AdaGradSolverTest/1.TestSnapshotShare (32 ms) -[ RUN ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdateLROneHundredth -[ OK ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdateLROneHundredth (21 ms) -[ RUN ] AdaGradSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare -[ OK ] AdaGradSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare (5 ms) -[ RUN ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdate -[ OK ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdate (22 ms) -[ RUN ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdateWithEverythingShare -[ OK ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdateWithEverythingShare (112 ms) -[ RUN ] AdaGradSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum -[ OK ] AdaGradSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum (4 ms) -[----------] 9 tests from AdaGradSolverTest/1 (358 ms total) - -[----------] 5 tests from DBTest/0, where TypeParam = caffe::TypeLevelDB -[ RUN ] DBTest/0.TestNext -[ OK ] DBTest/0.TestNext (21 ms) -[ RUN ] DBTest/0.TestGetDB -[ OK ] DBTest/0.TestGetDB (20 ms) -[ RUN ] DBTest/0.TestKeyValue -[ OK ] DBTest/0.TestKeyValue (29 ms) -[ RUN ] DBTest/0.TestWrite -[ OK ] DBTest/0.TestWrite (27 ms) -[ RUN ] DBTest/0.TestSeekToFirst -[ OK ] DBTest/0.TestSeekToFirst (27 ms) -[----------] 5 tests from DBTest/0 (124 ms total) - -[----------] 3 tests from BatchNormLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] BatchNormLayerTest/1.TestForward -[ OK ] BatchNormLayerTest/1.TestForward (0 ms) -[ RUN ] BatchNormLayerTest/1.TestForwardInplace -[ OK ] BatchNormLayerTest/1.TestForwardInplace (0 ms) -[ RUN ] BatchNormLayerTest/1.TestGradient -[ OK ] BatchNormLayerTest/1.TestGradient (287 ms) -[----------] 3 tests from BatchNormLayerTest/1 (287 ms total) - -[----------] 6 tests from MVNLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] MVNLayerTest/1.TestGradientAcrossChannels -[ OK ] MVNLayerTest/1.TestGradientAcrossChannels (343 ms) -[ RUN ] MVNLayerTest/1.TestGradient -[ OK ] MVNLayerTest/1.TestGradient (350 ms) -[ RUN ] MVNLayerTest/1.TestGradientMeanOnly -[ OK ] MVNLayerTest/1.TestGradientMeanOnly (151 ms) -[ RUN ] MVNLayerTest/1.TestForward -[ OK ] MVNLayerTest/1.TestForward (0 ms) -[ RUN ] MVNLayerTest/1.TestForwardMeanOnly -[ OK ] MVNLayerTest/1.TestForwardMeanOnly (0 ms) -[ RUN ] MVNLayerTest/1.TestForwardAcrossChannels -[ OK ] MVNLayerTest/1.TestForwardAcrossChannels (0 ms) -[----------] 6 tests from MVNLayerTest/1 (845 ms total) - 
-[----------] 4 tests from PositiveUnitballFillerTest/0, where TypeParam = float -[ RUN ] PositiveUnitballFillerTest/0.TestFill -[ OK ] PositiveUnitballFillerTest/0.TestFill (0 ms) -[ RUN ] PositiveUnitballFillerTest/0.TestFill5D -[ OK ] PositiveUnitballFillerTest/0.TestFill5D (0 ms) -[ RUN ] PositiveUnitballFillerTest/0.TestFill2D -[ OK ] PositiveUnitballFillerTest/0.TestFill2D (0 ms) -[ RUN ] PositiveUnitballFillerTest/0.TestFill1D -[ OK ] PositiveUnitballFillerTest/0.TestFill1D (0 ms) -[----------] 4 tests from PositiveUnitballFillerTest/0 (0 ms total) - -[----------] 4 tests from SoftmaxWithLossLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] SoftmaxWithLossLayerTest/0.TestForwardIgnoreLabel -[ OK ] SoftmaxWithLossLayerTest/0.TestForwardIgnoreLabel (1 ms) -[ RUN ] SoftmaxWithLossLayerTest/0.TestGradient -[ OK ] SoftmaxWithLossLayerTest/0.TestGradient (9 ms) -[ RUN ] SoftmaxWithLossLayerTest/0.TestGradientUnnormalized -[ OK ] SoftmaxWithLossLayerTest/0.TestGradientUnnormalized (9 ms) -[ RUN ] SoftmaxWithLossLayerTest/0.TestGradientIgnoreLabel -[ OK ] SoftmaxWithLossLayerTest/0.TestGradientIgnoreLabel (9 ms) -[----------] 4 tests from SoftmaxWithLossLayerTest/0 (28 ms total) +[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum (2 ms) +[ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateWithMomentum +[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateWithMomentum (28 ms) +[ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateWithEverything +[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateWithEverything (72 ms) +[ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateWithEverythingShare +[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateWithEverythingShare (77 ms) +[ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateWithMomentumMultiIter +[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateWithMomentumMultiIter (71 ms) +[ RUN ] SGDSolverTest/1.TestSnapshotShare +[ OK ] SGDSolverTest/1.TestSnapshotShare (20 ms) +[ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare +[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare (3 ms) +[ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateLROneHundredth +[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateLROneHundredth (15 ms) +[ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateWithWeightDecay +[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateWithWeightDecay (29 ms) +[ RUN ] SGDSolverTest/1.TestLeastSquaresUpdateWithWeightDecayMultiIter +[ OK ] SGDSolverTest/1.TestLeastSquaresUpdateWithWeightDecayMultiIter (72 ms) +[----------] 12 tests from SGDSolverTest/1 (418 ms total) -[----------] 3 tests from SyncedMemoryTest -[ RUN ] SyncedMemoryTest.TestAllocationCPU -[ OK ] SyncedMemoryTest.TestAllocationCPU (0 ms) -[ RUN ] SyncedMemoryTest.TestInitialization -[ OK ] SyncedMemoryTest.TestInitialization (0 ms) -[ RUN ] SyncedMemoryTest.TestCPUWrite -[ OK ] SyncedMemoryTest.TestCPUWrite (0 ms) -[----------] 3 tests from SyncedMemoryTest (0 ms total) +[----------] 11 tests from RandomNumberGeneratorTest/0, where TypeParam = float +[ RUN ] RandomNumberGeneratorTest/0.TestRngGaussian2 +[ OK ] RandomNumberGeneratorTest/0.TestRngGaussian2 (0 ms) +[ RUN ] RandomNumberGeneratorTest/0.TestRngUniform2 +[ OK ] RandomNumberGeneratorTest/0.TestRngUniform2 (1 ms) +[ RUN ] RandomNumberGeneratorTest/0.TestRngGaussian +[ OK ] RandomNumberGeneratorTest/0.TestRngGaussian (0 ms) +[ RUN ] RandomNumberGeneratorTest/0.TestRngBernoulli2 +[ OK ] RandomNumberGeneratorTest/0.TestRngBernoulli2 (0 ms) +[ RUN ] RandomNumberGeneratorTest/0.TestRngUniformTimesUniform +[ OK ] 
RandomNumberGeneratorTest/0.TestRngUniformTimesUniform (0 ms) +[ RUN ] RandomNumberGeneratorTest/0.TestRngBernoulli +[ OK ] RandomNumberGeneratorTest/0.TestRngBernoulli (0 ms) +[ RUN ] RandomNumberGeneratorTest/0.TestRngUniformTimesBernoulli +[ OK ] RandomNumberGeneratorTest/0.TestRngUniformTimesBernoulli (1 ms) +[ RUN ] RandomNumberGeneratorTest/0.TestRngGaussianTimesGaussian +[ OK ] RandomNumberGeneratorTest/0.TestRngGaussianTimesGaussian (0 ms) +[ RUN ] RandomNumberGeneratorTest/0.TestRngBernoulliTimesBernoulli +[ OK ] RandomNumberGeneratorTest/0.TestRngBernoulliTimesBernoulli (0 ms) +[ RUN ] RandomNumberGeneratorTest/0.TestRngUniform +[ OK ] RandomNumberGeneratorTest/0.TestRngUniform (1 ms) +[ RUN ] RandomNumberGeneratorTest/0.TestRngGaussianTimesBernoulli +[ OK ] RandomNumberGeneratorTest/0.TestRngGaussianTimesBernoulli (0 ms) +[----------] 11 tests from RandomNumberGeneratorTest/0 (3 ms total) -[----------] 1 test from SolverTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] SolverTest/1.TestInitTrainTestNets -[ OK ] SolverTest/1.TestInitTrainTestNets (2 ms) -[----------] 1 test from SolverTest/1 (2 ms total) +[----------] 2 tests from EuclideanLossLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] EuclideanLossLayerTest/1.TestForward +[ OK ] EuclideanLossLayerTest/1.TestForward (0 ms) +[ RUN ] EuclideanLossLayerTest/1.TestGradient +[ OK ] EuclideanLossLayerTest/1.TestGradient (1 ms) +[----------] 2 tests from EuclideanLossLayerTest/1 (1 ms total) -[----------] 6 tests from XavierFillerTest/0, where TypeParam = float -[ RUN ] XavierFillerTest/0.TestFillAverage -[ OK ] XavierFillerTest/0.TestFillAverage (46 ms) -[ RUN ] XavierFillerTest/0.TestFill5D -[ OK ] XavierFillerTest/0.TestFill5D (0 ms) -[ RUN ] XavierFillerTest/0.TestFillFanIn -[ OK ] XavierFillerTest/0.TestFillFanIn (45 ms) -[ RUN ] XavierFillerTest/0.TestFill1D -[ OK ] XavierFillerTest/0.TestFill1D (1 ms) -[ RUN ] XavierFillerTest/0.TestFill2D -[ OK ] XavierFillerTest/0.TestFill2D (0 ms) -[ RUN ] XavierFillerTest/0.TestFillFanOut -[ OK ] XavierFillerTest/0.TestFillFanOut (46 ms) -[----------] 6 tests from XavierFillerTest/0 (138 ms total) +[----------] 12 tests from NesterovSolverTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateLROneHundredth +[ OK ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateLROneHundredth (14 ms) +[ RUN ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithEverythingShare +[ OK ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithEverythingShare (75 ms) +[ RUN ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithWeightDecayMultiIter +[ OK ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithWeightDecayMultiIter (71 ms) +[ RUN ] NesterovSolverTest/0.TestLeastSquaresUpdateWithMomentumMultiIter +[ OK ] NesterovSolverTest/0.TestLeastSquaresUpdateWithMomentumMultiIter (71 ms) +[ RUN ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithWeightDecay +[ OK ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithWeightDecay (14 ms) +[ RUN ] NesterovSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare +[ OK ] NesterovSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare (3 ms) +[ RUN ] NesterovSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum +[ OK ] NesterovSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum (2 ms) +[ RUN ] NesterovSolverTest/0.TestSnapshotShare +[ OK ] NesterovSolverTest/0.TestSnapshotShare (19 ms) +[ RUN ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdate +[ OK ] 
NesterovSolverTest/0.TestNesterovLeastSquaresUpdate (14 ms) +[ RUN ] NesterovSolverTest/0.TestSnapshot +[ OK ] NesterovSolverTest/0.TestSnapshot (14 ms) +[ RUN ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithMomentum +[ OK ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithMomentum (28 ms) +[ RUN ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithEverything +[ OK ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithEverything (72 ms) +[----------] 12 tests from NesterovSolverTest/0 (397 ms total) -[----------] 22 tests from ScaleLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] ScaleLayerTest/0.TestGradientScaleAndBias -[ OK ] ScaleLayerTest/0.TestGradientScaleAndBias (131 ms) -[ RUN ] ScaleLayerTest/0.TestForwardBroadcastBegin -[ OK ] ScaleLayerTest/0.TestForwardBroadcastBegin (0 ms) -[ RUN ] ScaleLayerTest/0.TestForwardEltwiseInPlace -[ OK ] ScaleLayerTest/0.TestForwardEltwiseInPlace (0 ms) -[ RUN ] ScaleLayerTest/0.TestGradientEltwiseWithParam -[ OK ] ScaleLayerTest/0.TestGradientEltwiseWithParam (385 ms) -[ RUN ] ScaleLayerTest/0.TestForwardBroadcastMiddle -[ OK ] ScaleLayerTest/0.TestForwardBroadcastMiddle (0 ms) -[ RUN ] ScaleLayerTest/0.TestForwardScaleAxis2 -[ OK ] ScaleLayerTest/0.TestForwardScaleAxis2 (0 ms) -[ RUN ] ScaleLayerTest/0.TestForwardBroadcastMiddleWithParam -[ OK ] ScaleLayerTest/0.TestForwardBroadcastMiddleWithParam (0 ms) -[ RUN ] ScaleLayerTest/0.TestForwardBroadcastMiddleWithParamAndBias -[ OK ] ScaleLayerTest/0.TestForwardBroadcastMiddleWithParamAndBias (1 ms) -[ RUN ] ScaleLayerTest/0.TestGradientBroadcastMiddle -[ OK ] ScaleLayerTest/0.TestGradientBroadcastMiddle (139 ms) -[ RUN ] ScaleLayerTest/0.TestForwardScale -[ OK ] ScaleLayerTest/0.TestForwardScale (0 ms) -[ RUN ] ScaleLayerTest/0.TestGradientBroadcastEnd -[ OK ] ScaleLayerTest/0.TestGradientBroadcastEnd (235 ms) -[ RUN ] ScaleLayerTest/0.TestGradientBroadcastBegin -[ OK ] ScaleLayerTest/0.TestGradientBroadcastBegin (131 ms) -[ RUN ] ScaleLayerTest/0.TestGradientEltwise -[ OK ] ScaleLayerTest/0.TestGradientEltwise (14 ms) -[ RUN ] ScaleLayerTest/0.TestGradientBroadcastMiddleWithParam -[ OK ] ScaleLayerTest/0.TestGradientBroadcastMiddleWithParam (161 ms) -[ RUN ] ScaleLayerTest/0.TestGradientScaleAxis2 -[ OK ] ScaleLayerTest/0.TestGradientScaleAxis2 (103 ms) -[ RUN ] ScaleLayerTest/0.TestGradientScale -[ OK ] ScaleLayerTest/0.TestGradientScale (107 ms) -[ RUN ] ScaleLayerTest/0.TestForwardEltwiseWithParam -[ OK ] ScaleLayerTest/0.TestForwardEltwiseWithParam (0 ms) -[ RUN ] ScaleLayerTest/0.TestForwardBroadcastEnd -[ OK ] ScaleLayerTest/0.TestForwardBroadcastEnd (0 ms) -[ RUN ] ScaleLayerTest/0.TestForwardEltwise -[ OK ] ScaleLayerTest/0.TestForwardEltwise (0 ms) -[ RUN ] ScaleLayerTest/0.TestForwardBroadcastMiddleInPlace -[ OK ] ScaleLayerTest/0.TestForwardBroadcastMiddleInPlace (0 ms) -[ RUN ] ScaleLayerTest/0.TestBackwardBroadcastMiddleInPlace -[ OK ] ScaleLayerTest/0.TestBackwardBroadcastMiddleInPlace (0 ms) -[ RUN ] ScaleLayerTest/0.TestBackwardEltwiseInPlace -[ OK ] ScaleLayerTest/0.TestBackwardEltwiseInPlace (0 ms) -[----------] 22 tests from ScaleLayerTest/0 (1407 ms total) +[----------] 1 test from MultinomialLogisticLossLayerTest/1, where TypeParam = double +[ RUN ] MultinomialLogisticLossLayerTest/1.TestGradientCPU +[ OK ] MultinomialLogisticLossLayerTest/1.TestGradientCPU (0 ms) +[----------] 1 test from MultinomialLogisticLossLayerTest/1 (0 ms total) -[----------] 8 tests from Im2colLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] 
Im2colLayerTest/1.TestRect -[ OK ] Im2colLayerTest/1.TestRect (0 ms) -[ RUN ] Im2colLayerTest/1.TestGradient -[ OK ] Im2colLayerTest/1.TestGradient (402 ms) -[ RUN ] Im2colLayerTest/1.TestDilatedGradient -[ OK ] Im2colLayerTest/1.TestDilatedGradient (1106 ms) -[ RUN ] Im2colLayerTest/1.TestSetup -[ OK ] Im2colLayerTest/1.TestSetup (0 ms) -[ RUN ] Im2colLayerTest/1.TestDilatedGradientForceND -[ OK ] Im2colLayerTest/1.TestDilatedGradientForceND (2045 ms) -[ RUN ] Im2colLayerTest/1.TestRectGradient -[ OK ] Im2colLayerTest/1.TestRectGradient (304 ms) -[ RUN ] Im2colLayerTest/1.TestForward -[ OK ] Im2colLayerTest/1.TestForward (0 ms) -[ RUN ] Im2colLayerTest/1.TestGradientForceND -[ OK ] Im2colLayerTest/1.TestGradientForceND (668 ms) -[----------] 8 tests from Im2colLayerTest/1 (4525 ms total) +[----------] 3 tests from SigmoidCrossEntropyLossLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] SigmoidCrossEntropyLossLayerTest/0.TestGradient +[ OK ] SigmoidCrossEntropyLossLayerTest/0.TestGradient (1 ms) +[ RUN ] SigmoidCrossEntropyLossLayerTest/0.TestIgnoreGradient +[ OK ] SigmoidCrossEntropyLossLayerTest/0.TestIgnoreGradient (0 ms) +[ RUN ] SigmoidCrossEntropyLossLayerTest/0.TestSigmoidCrossEntropyLoss +[ OK ] SigmoidCrossEntropyLossLayerTest/0.TestSigmoidCrossEntropyLoss (2 ms) +[----------] 3 tests from SigmoidCrossEntropyLossLayerTest/0 (3 ms total) [----------] 12 tests from DataTransformTest/1, where TypeParam = double -[ RUN ] DataTransformTest/1.TestCropTest -[ OK ] DataTransformTest/1.TestCropTest (0 ms) +[ RUN ] DataTransformTest/1.TestCropSize +[ OK ] DataTransformTest/1.TestCropSize (0 ms) [ RUN ] DataTransformTest/1.TestCropMirrorTest [ OK ] DataTransformTest/1.TestCropMirrorTest (0 ms) -[ RUN ] DataTransformTest/1.TestEmptyTransform -[ OK ] DataTransformTest/1.TestEmptyTransform (0 ms) -[ RUN ] DataTransformTest/1.TestCropMirrorTrain -[ OK ] DataTransformTest/1.TestCropMirrorTrain (0 ms) +[ RUN ] DataTransformTest/1.TestMirrorTest +[ OK ] DataTransformTest/1.TestMirrorTest (0 ms) +[ RUN ] DataTransformTest/1.TestMeanFile +[ OK ] DataTransformTest/1.TestMeanFile (0 ms) +[ RUN ] DataTransformTest/1.TestCropTest +[ OK ] DataTransformTest/1.TestCropTest (0 ms) [ RUN ] DataTransformTest/1.TestEmptyTransformUniquePixels [ OK ] DataTransformTest/1.TestEmptyTransformUniquePixels (0 ms) -[ RUN ] DataTransformTest/1.TestCropSize -[ OK ] DataTransformTest/1.TestCropSize (0 ms) +[ RUN ] DataTransformTest/1.TestEmptyTransform +[ OK ] DataTransformTest/1.TestEmptyTransform (0 ms) [ RUN ] DataTransformTest/1.TestMirrorTrain [ OK ] DataTransformTest/1.TestMirrorTrain (0 ms) -[ RUN ] DataTransformTest/1.TestMeanFile -[ OK ] DataTransformTest/1.TestMeanFile (0 ms) [ RUN ] DataTransformTest/1.TestCropTrain [ OK ] DataTransformTest/1.TestCropTrain (0 ms) -[ RUN ] DataTransformTest/1.TestMirrorTest -[ OK ] DataTransformTest/1.TestMirrorTest (0 ms) -[ RUN ] DataTransformTest/1.TestMeanValue -[ OK ] DataTransformTest/1.TestMeanValue (0 ms) +[ RUN ] DataTransformTest/1.TestCropMirrorTrain +[ OK ] DataTransformTest/1.TestCropMirrorTrain (0 ms) [ RUN ] DataTransformTest/1.TestMeanValues [ OK ] DataTransformTest/1.TestMeanValues (0 ms) +[ RUN ] DataTransformTest/1.TestMeanValue +[ OK ] DataTransformTest/1.TestMeanValue (0 ms) [----------] 12 tests from DataTransformTest/1 (0 ms total) -[----------] 11 tests from AdaDeltaSolverTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithWeightDecay -[ OK ] 
AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithWeightDecay (17 ms) -[ RUN ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithMomentum -[ OK ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithMomentum (32 ms) -[ RUN ] AdaDeltaSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum -[ OK ] AdaDeltaSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum (3 ms) -[ RUN ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdate -[ OK ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdate (17 ms) -[ RUN ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithEverything -[ OK ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithEverything (81 ms) -[ RUN ] AdaDeltaSolverTest/0.TestSnapshot -[ OK ] AdaDeltaSolverTest/0.TestSnapshot (16 ms) -[ RUN ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithHalfMomentum -[ OK ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithHalfMomentum (32 ms) -[ RUN ] AdaDeltaSolverTest/0.TestLeastSquaresUpdateWithMomentumMultiIter -[ OK ] AdaDeltaSolverTest/0.TestLeastSquaresUpdateWithMomentumMultiIter (81 ms) -[ RUN ] AdaDeltaSolverTest/0.TestSnapshotShare -[ OK ] AdaDeltaSolverTest/0.TestSnapshotShare (21 ms) -[ RUN ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithEverythingShare -[ OK ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithEverythingShare (84 ms) -[ RUN ] AdaDeltaSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare -[ OK ] AdaDeltaSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare (4 ms) -[----------] 11 tests from AdaDeltaSolverTest/0 (389 ms total) +[----------] 3 tests from BlobMathTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] BlobMathTest/1.TestAsum +[ OK ] BlobMathTest/1.TestAsum (0 ms) +[ RUN ] BlobMathTest/1.TestScaleData +[ OK ] BlobMathTest/1.TestScaleData (0 ms) +[ RUN ] BlobMathTest/1.TestSumOfSquares +[ OK ] BlobMathTest/1.TestSumOfSquares (0 ms) +[----------] 3 tests from BlobMathTest/1 (0 ms total) -[----------] 9 tests from LSTMLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] LSTMLayerTest/0.TestLSTMUnitGradientNonZeroCont -[ OK ] LSTMLayerTest/0.TestLSTMUnitGradientNonZeroCont (50 ms) -[ RUN ] LSTMLayerTest/0.TestGradientNonZeroContBufferSize2WithStaticInput -[ OK ] LSTMLayerTest/0.TestGradientNonZeroContBufferSize2WithStaticInput (4163 ms) -[ RUN ] LSTMLayerTest/0.TestSetUp -[ OK ] LSTMLayerTest/0.TestSetUp (2 ms) -[ RUN ] LSTMLayerTest/0.TestGradient -[ OK ] LSTMLayerTest/0.TestGradient (323 ms) -[ RUN ] LSTMLayerTest/0.TestGradientNonZeroCont -[ OK ] LSTMLayerTest/0.TestGradientNonZeroCont (327 ms) -[ RUN ] LSTMLayerTest/0.TestGradientNonZeroContBufferSize2 -[ OK ] LSTMLayerTest/0.TestGradientNonZeroContBufferSize2 (626 ms) -[ RUN ] LSTMLayerTest/0.TestForward -[ OK ] LSTMLayerTest/0.TestForward (5 ms) -[ RUN ] LSTMLayerTest/0.TestLSTMUnitSetUp -[ OK ] LSTMLayerTest/0.TestLSTMUnitSetUp (0 ms) -[ RUN ] LSTMLayerTest/0.TestLSTMUnitGradient -[ OK ] LSTMLayerTest/0.TestLSTMUnitGradient (53 ms) -[----------] 9 tests from LSTMLayerTest/0 (5549 ms total) +[----------] 5 tests from SPPLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] SPPLayerTest/0.TestEqualOutputDims2 +[ OK ] SPPLayerTest/0.TestEqualOutputDims2 (0 ms) +[ RUN ] SPPLayerTest/0.TestEqualOutputDims +[ OK ] SPPLayerTest/0.TestEqualOutputDims (1 ms) +[ RUN ] SPPLayerTest/0.TestSetup +[ OK ] SPPLayerTest/0.TestSetup (0 ms) +[ RUN ] SPPLayerTest/0.TestForwardBackward +[ OK ] SPPLayerTest/0.TestForwardBackward (0 ms) +[ RUN ] SPPLayerTest/0.TestGradient +[ OK ] SPPLayerTest/0.TestGradient (2846 ms) 
+[----------] 5 tests from SPPLayerTest/0 (2848 ms total) + +[----------] 4 tests from ContrastiveLossLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] ContrastiveLossLayerTest/1.TestForward +[ OK ] ContrastiveLossLayerTest/1.TestForward (0 ms) +[ RUN ] ContrastiveLossLayerTest/1.TestGradientLegacy +[ OK ] ContrastiveLossLayerTest/1.TestGradientLegacy (216 ms) +[ RUN ] ContrastiveLossLayerTest/1.TestForwardLegacy +[ OK ] ContrastiveLossLayerTest/1.TestForwardLegacy (0 ms) +[ RUN ] ContrastiveLossLayerTest/1.TestGradient +[ OK ] ContrastiveLossLayerTest/1.TestGradient (242 ms) +[----------] 4 tests from ContrastiveLossLayerTest/1 (458 ms total) + +[----------] 2 tests from HingeLossLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] HingeLossLayerTest/1.TestGradientL1 +[ OK ] HingeLossLayerTest/1.TestGradientL1 (0 ms) +[ RUN ] HingeLossLayerTest/1.TestGradientL2 +[ OK ] HingeLossLayerTest/1.TestGradientL2 (0 ms) +[----------] 2 tests from HingeLossLayerTest/1 (0 ms total) + +[----------] 9 tests from LSTMLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] LSTMLayerTest/1.TestLSTMUnitSetUp +[ OK ] LSTMLayerTest/1.TestLSTMUnitSetUp (1 ms) +[ RUN ] LSTMLayerTest/1.TestGradientNonZeroContBufferSize2WithStaticInput +[ OK ] LSTMLayerTest/1.TestGradientNonZeroContBufferSize2WithStaticInput (4208 ms) +[ RUN ] LSTMLayerTest/1.TestSetUp +[ OK ] LSTMLayerTest/1.TestSetUp (2 ms) +[ RUN ] LSTMLayerTest/1.TestGradientNonZeroContBufferSize2 +[ OK ] LSTMLayerTest/1.TestGradientNonZeroContBufferSize2 (604 ms) +[ RUN ] LSTMLayerTest/1.TestLSTMUnitGradientNonZeroCont +[ OK ] LSTMLayerTest/1.TestLSTMUnitGradientNonZeroCont (54 ms) +[ RUN ] LSTMLayerTest/1.TestGradient +[ OK ] LSTMLayerTest/1.TestGradient (310 ms) +[ RUN ] LSTMLayerTest/1.TestGradientNonZeroCont +[ OK ] LSTMLayerTest/1.TestGradientNonZeroCont (294 ms) +[ RUN ] LSTMLayerTest/1.TestForward +[ OK ] LSTMLayerTest/1.TestForward (4 ms) +[ RUN ] LSTMLayerTest/1.TestLSTMUnitGradient +[ OK ] LSTMLayerTest/1.TestLSTMUnitGradient (53 ms) +[----------] 9 tests from LSTMLayerTest/1 (5530 ms total) + +[----------] 1 test from SolverFactoryTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] SolverFactoryTest/0.TestCreateSolver +[ OK ] SolverFactoryTest/0.TestCreateSolver (1 ms) +[----------] 1 test from SolverFactoryTest/0 (1 ms total) + +[----------] 6 tests from XavierFillerTest/0, where TypeParam = float +[ RUN ] XavierFillerTest/0.TestFillFanOut +[ OK ] XavierFillerTest/0.TestFillFanOut (42 ms) +[ RUN ] XavierFillerTest/0.TestFill1D +[ OK ] XavierFillerTest/0.TestFill1D (0 ms) +[ RUN ] XavierFillerTest/0.TestFillFanIn +[ OK ] XavierFillerTest/0.TestFillFanIn (43 ms) +[ RUN ] XavierFillerTest/0.TestFillAverage +[ OK ] XavierFillerTest/0.TestFillAverage (44 ms) +[ RUN ] XavierFillerTest/0.TestFill5D +[ OK ] XavierFillerTest/0.TestFill5D (0 ms) +[ RUN ] XavierFillerTest/0.TestFill2D +[ OK ] XavierFillerTest/0.TestFill2D (0 ms) +[----------] 6 tests from XavierFillerTest/0 (129 ms total) + +[----------] 5 tests from DeconvolutionLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] DeconvolutionLayerTest/0.TestSetup +[ OK ] DeconvolutionLayerTest/0.TestSetup (0 ms) +[ RUN ] DeconvolutionLayerTest/0.TestGradient3D +[ OK ] DeconvolutionLayerTest/0.TestGradient3D (273 ms) +[ RUN ] DeconvolutionLayerTest/0.TestNDAgainst2D +[ OK ] DeconvolutionLayerTest/0.TestNDAgainst2D (1186 ms) +[ RUN ] DeconvolutionLayerTest/0.TestSimpleDeconvolution +[ OK ] DeconvolutionLayerTest/0.TestSimpleDeconvolution (1 ms) +[ RUN ] 
DeconvolutionLayerTest/0.TestGradient +[ OK ] DeconvolutionLayerTest/0.TestGradient (782 ms) +[----------] 5 tests from DeconvolutionLayerTest/0 (2242 ms total) + +[----------] 1 test from CPUStochasticPoolingLayerTest/0, where TypeParam = float +[ RUN ] CPUStochasticPoolingLayerTest/0.TestSetup +[ OK ] CPUStochasticPoolingLayerTest/0.TestSetup (1 ms) +[----------] 1 test from CPUStochasticPoolingLayerTest/0 (1 ms total) [----------] 10 tests from ConcatLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] ConcatLayerTest/0.TestSetupChannelsNegativeIndexing -[ OK ] ConcatLayerTest/0.TestSetupChannelsNegativeIndexing (0 ms) +[ RUN ] ConcatLayerTest/0.TestSetupChannels +[ OK ] ConcatLayerTest/0.TestSetupChannels (0 ms) +[ RUN ] ConcatLayerTest/0.TestForwardTrivial +[ OK ] ConcatLayerTest/0.TestForwardTrivial (0 ms) +[ RUN ] ConcatLayerTest/0.TestForwardChannels +[ OK ] ConcatLayerTest/0.TestForwardChannels (0 ms) +[ RUN ] ConcatLayerTest/0.TestGradientNum +[ OK ] ConcatLayerTest/0.TestGradientNum (5 ms) +[ RUN ] ConcatLayerTest/0.TestGradientChannelsBottomOneOnly +[ OK ] ConcatLayerTest/0.TestGradientChannelsBottomOneOnly (2 ms) [ RUN ] ConcatLayerTest/0.TestForwardNum [ OK ] ConcatLayerTest/0.TestForwardNum (0 ms) [ RUN ] ConcatLayerTest/0.TestGradientTrivial -[ OK ] ConcatLayerTest/0.TestGradientTrivial (4 ms) -[ RUN ] ConcatLayerTest/0.TestForwardTrivial -[ OK ] ConcatLayerTest/0.TestForwardTrivial (0 ms) +[ OK ] ConcatLayerTest/0.TestGradientTrivial (3 ms) [ RUN ] ConcatLayerTest/0.TestGradientChannels -[ OK ] ConcatLayerTest/0.TestGradientChannels (5 ms) -[ RUN ] ConcatLayerTest/0.TestSetupChannels -[ OK ] ConcatLayerTest/0.TestSetupChannels (0 ms) +[ OK ] ConcatLayerTest/0.TestGradientChannels (3 ms) +[ RUN ] ConcatLayerTest/0.TestSetupChannelsNegativeIndexing +[ OK ] ConcatLayerTest/0.TestSetupChannelsNegativeIndexing (1 ms) [ RUN ] ConcatLayerTest/0.TestSetupNum [ OK ] ConcatLayerTest/0.TestSetupNum (0 ms) -[ RUN ] ConcatLayerTest/0.TestGradientNum -[ OK ] ConcatLayerTest/0.TestGradientNum (6 ms) -[ RUN ] ConcatLayerTest/0.TestGradientChannelsBottomOneOnly -[ OK ] ConcatLayerTest/0.TestGradientChannelsBottomOneOnly (3 ms) -[ RUN ] ConcatLayerTest/0.TestForwardChannels -[ OK ] ConcatLayerTest/0.TestForwardChannels (0 ms) -[----------] 10 tests from ConcatLayerTest/0 (18 ms total) +[----------] 10 tests from ConcatLayerTest/0 (14 ms total) -[----------] 1 test from LayerFactoryTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] LayerFactoryTest/1.TestCreateLayer -[ OK ] LayerFactoryTest/1.TestCreateLayer (2 ms) -[----------] 1 test from LayerFactoryTest/1 (2 ms total) +[----------] 4 tests from SoftmaxWithLossLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] SoftmaxWithLossLayerTest/0.TestGradientIgnoreLabel +[ OK ] SoftmaxWithLossLayerTest/0.TestGradientIgnoreLabel (7 ms) +[ RUN ] SoftmaxWithLossLayerTest/0.TestGradientUnnormalized +[ OK ] SoftmaxWithLossLayerTest/0.TestGradientUnnormalized (8 ms) +[ RUN ] SoftmaxWithLossLayerTest/0.TestGradient +[ OK ] SoftmaxWithLossLayerTest/0.TestGradient (8 ms) +[ RUN ] SoftmaxWithLossLayerTest/0.TestForwardIgnoreLabel +[ OK ] SoftmaxWithLossLayerTest/0.TestForwardIgnoreLabel (0 ms) +[----------] 4 tests from SoftmaxWithLossLayerTest/0 (23 ms total) -[----------] 7 tests from CPUMathFunctionsTest/0, where TypeParam = float -[ RUN ] CPUMathFunctionsTest/0.TestAsum -[ OK ] CPUMathFunctionsTest/0.TestAsum (3 ms) -[ RUN ] CPUMathFunctionsTest/0.TestScale -[ OK ] CPUMathFunctionsTest/0.TestScale (3 ms) -[ RUN ] 
CPUMathFunctionsTest/0.TestSign -[ OK ] CPUMathFunctionsTest/0.TestSign (3 ms) -[ RUN ] CPUMathFunctionsTest/0.TestCopy -[ OK ] CPUMathFunctionsTest/0.TestCopy (3 ms) -[ RUN ] CPUMathFunctionsTest/0.TestSgnbit -[ OK ] CPUMathFunctionsTest/0.TestSgnbit (3 ms) -[ RUN ] CPUMathFunctionsTest/0.TestFabs -[ OK ] CPUMathFunctionsTest/0.TestFabs (4 ms) -[ RUN ] CPUMathFunctionsTest/0.TestNothing -[ OK ] CPUMathFunctionsTest/0.TestNothing (2 ms) -[----------] 7 tests from CPUMathFunctionsTest/0 (21 ms total) +[----------] 3 tests from PaddingLayerUpgradeTest +[ RUN ] PaddingLayerUpgradeTest.TestSimple +[ OK ] PaddingLayerUpgradeTest.TestSimple (1 ms) +[ RUN ] PaddingLayerUpgradeTest.TestImageNet +[ OK ] PaddingLayerUpgradeTest.TestImageNet (1 ms) +[ RUN ] PaddingLayerUpgradeTest.TestTwoTops +[ OK ] PaddingLayerUpgradeTest.TestTwoTops (1 ms) +[----------] 3 tests from PaddingLayerUpgradeTest (3 ms total) + +[----------] 2 tests from BilinearFillerTest/1, where TypeParam = double +[ RUN ] BilinearFillerTest/1.TestFillOdd +[ OK ] BilinearFillerTest/1.TestFillOdd (11 ms) +[ RUN ] BilinearFillerTest/1.TestFillEven +[ OK ] BilinearFillerTest/1.TestFillEven (9 ms) +[----------] 2 tests from BilinearFillerTest/1 (20 ms total) + +[----------] 7 tests from TileLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] TileLayerTest/1.TestTrivialSetup +[ OK ] TileLayerTest/1.TestTrivialSetup (0 ms) +[ RUN ] TileLayerTest/1.TestForwardNum +[ OK ] TileLayerTest/1.TestForwardNum (1 ms) +[ RUN ] TileLayerTest/1.TestGradientChannels +[ OK ] TileLayerTest/1.TestGradientChannels (285 ms) +[ RUN ] TileLayerTest/1.TestTrivialGradient +[ OK ] TileLayerTest/1.TestTrivialGradient (83 ms) +[ RUN ] TileLayerTest/1.TestSetup +[ OK ] TileLayerTest/1.TestSetup (0 ms) +[ RUN ] TileLayerTest/1.TestForwardChannels +[ OK ] TileLayerTest/1.TestForwardChannels (0 ms) +[ RUN ] TileLayerTest/1.TestGradientNum +[ OK ] TileLayerTest/1.TestGradientNum (274 ms) +[----------] 7 tests from TileLayerTest/1 (643 ms total) [----------] 8 tests from AdamSolverTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] AdamSolverTest/1.TestAdamLeastSquaresUpdateWithWeightDecay -[ OK ] AdamSolverTest/1.TestAdamLeastSquaresUpdateWithWeightDecay (18 ms) -[ RUN ] AdamSolverTest/1.TestSnapshotShare -[ OK ] AdamSolverTest/1.TestSnapshotShare (24 ms) -[ RUN ] AdamSolverTest/1.TestAdamLeastSquaresUpdateWithEverythingShare -[ OK ] AdamSolverTest/1.TestAdamLeastSquaresUpdateWithEverythingShare (95 ms) -[ RUN ] AdamSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare -[ OK ] AdamSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare (5 ms) +[ RUN ] AdamSolverTest/1.TestAdamLeastSquaresUpdateWithEverything +[ OK ] AdamSolverTest/1.TestAdamLeastSquaresUpdateWithEverything (74 ms) [ RUN ] AdamSolverTest/1.TestSnapshot -[ OK ] AdamSolverTest/1.TestSnapshot (23 ms) +[ OK ] AdamSolverTest/1.TestSnapshot (15 ms) [ RUN ] AdamSolverTest/1.TestAdamLeastSquaresUpdate -[ OK ] AdamSolverTest/1.TestAdamLeastSquaresUpdate (17 ms) -[ RUN ] AdamSolverTest/1.TestAdamLeastSquaresUpdateWithEverything -[ OK ] AdamSolverTest/1.TestAdamLeastSquaresUpdateWithEverything (90 ms) +[ OK ] AdamSolverTest/1.TestAdamLeastSquaresUpdate (15 ms) +[ RUN ] AdamSolverTest/1.TestSnapshotShare +[ OK ] AdamSolverTest/1.TestSnapshotShare (20 ms) [ RUN ] AdamSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum [ OK ] AdamSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum (3 ms) -[----------] 8 tests from AdamSolverTest/1 (275 ms total) - -[----------] 2 tests from InfogainLossLayerTest/1, 
where TypeParam = caffe::CPUDevice -[ RUN ] InfogainLossLayerTest/1.TestGradient -[ OK ] InfogainLossLayerTest/1.TestGradient (3 ms) -[ RUN ] InfogainLossLayerTest/1.TestInfogainLoss -[ OK ] InfogainLossLayerTest/1.TestInfogainLoss (0 ms) -[----------] 2 tests from InfogainLossLayerTest/1 (3 ms total) - -[----------] 9 tests from InnerProductLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] InnerProductLayerTest/0.TestSetUpTransposeFalse -[ OK ] InnerProductLayerTest/0.TestSetUpTransposeFalse (0 ms) -[ RUN ] InnerProductLayerTest/0.TestSetUp -[ OK ] InnerProductLayerTest/0.TestSetUp (0 ms) -[ RUN ] InnerProductLayerTest/0.TestForward -[ OK ] InnerProductLayerTest/0.TestForward (0 ms) -[ RUN ] InnerProductLayerTest/0.TestForwardNoBatch -[ OK ] InnerProductLayerTest/0.TestForwardNoBatch (0 ms) -[ RUN ] InnerProductLayerTest/0.TestBackwardTranspose -[ OK ] InnerProductLayerTest/0.TestBackwardTranspose (0 ms) -[ RUN ] InnerProductLayerTest/0.TestForwardTranspose -[ OK ] InnerProductLayerTest/0.TestForwardTranspose (0 ms) -[ RUN ] InnerProductLayerTest/0.TestGradient -[ OK ] InnerProductLayerTest/0.TestGradient (176 ms) -[ RUN ] InnerProductLayerTest/0.TestSetUpTransposeTrue -[ OK ] InnerProductLayerTest/0.TestSetUpTransposeTrue (0 ms) -[ RUN ] InnerProductLayerTest/0.TestGradientTranspose -[ OK ] InnerProductLayerTest/0.TestGradientTranspose (195 ms) -[----------] 9 tests from InnerProductLayerTest/0 (371 ms total) - -[----------] 2 tests from SoftmaxLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] SoftmaxLayerTest/1.TestForward -[ OK ] SoftmaxLayerTest/1.TestForward (0 ms) -[ RUN ] SoftmaxLayerTest/1.TestGradient -[ OK ] SoftmaxLayerTest/1.TestGradient (338 ms) -[----------] 2 tests from SoftmaxLayerTest/1 (338 ms total) - -[----------] 11 tests from RandomNumberGeneratorTest/1, where TypeParam = double -[ RUN ] RandomNumberGeneratorTest/1.TestRngGaussian -[ OK ] RandomNumberGeneratorTest/1.TestRngGaussian (1 ms) -[ RUN ] RandomNumberGeneratorTest/1.TestRngUniform -[ OK ] RandomNumberGeneratorTest/1.TestRngUniform (0 ms) -[ RUN ] RandomNumberGeneratorTest/1.TestRngBernoulli -[ OK ] RandomNumberGeneratorTest/1.TestRngBernoulli (0 ms) -[ RUN ] RandomNumberGeneratorTest/1.TestRngUniformTimesUniform -[ OK ] RandomNumberGeneratorTest/1.TestRngUniformTimesUniform (0 ms) -[ RUN ] RandomNumberGeneratorTest/1.TestRngUniformTimesBernoulli -[ OK ] RandomNumberGeneratorTest/1.TestRngUniformTimesBernoulli (0 ms) -[ RUN ] RandomNumberGeneratorTest/1.TestRngGaussianTimesBernoulli -[ OK ] RandomNumberGeneratorTest/1.TestRngGaussianTimesBernoulli (1 ms) -[ RUN ] RandomNumberGeneratorTest/1.TestRngBernoulliTimesBernoulli -[ OK ] RandomNumberGeneratorTest/1.TestRngBernoulliTimesBernoulli (0 ms) -[ RUN ] RandomNumberGeneratorTest/1.TestRngGaussian2 -[ OK ] RandomNumberGeneratorTest/1.TestRngGaussian2 (0 ms) -[ RUN ] RandomNumberGeneratorTest/1.TestRngGaussianTimesGaussian -[ OK ] RandomNumberGeneratorTest/1.TestRngGaussianTimesGaussian (1 ms) -[ RUN ] RandomNumberGeneratorTest/1.TestRngUniform2 -[ OK ] RandomNumberGeneratorTest/1.TestRngUniform2 (0 ms) -[ RUN ] RandomNumberGeneratorTest/1.TestRngBernoulli2 -[ OK ] RandomNumberGeneratorTest/1.TestRngBernoulli2 (0 ms) -[----------] 11 tests from RandomNumberGeneratorTest/1 (3 ms total) +[ RUN ] AdamSolverTest/1.TestAdamLeastSquaresUpdateWithWeightDecay +[ OK ] AdamSolverTest/1.TestAdamLeastSquaresUpdateWithWeightDecay (15 ms) +[ RUN ] AdamSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare +[ OK ] 
AdamSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare (3 ms) +[ RUN ] AdamSolverTest/1.TestAdamLeastSquaresUpdateWithEverythingShare +[ OK ] AdamSolverTest/1.TestAdamLeastSquaresUpdateWithEverythingShare (77 ms) +[----------] 8 tests from AdamSolverTest/1 (222 ms total) -[----------] 11 tests from CropLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] CropLayerTest/1.TestCrop5D -[ OK ] CropLayerTest/1.TestCrop5D (0 ms) -[ RUN ] CropLayerTest/1.TestDimensionsCheck -[ OK ] CropLayerTest/1.TestDimensionsCheck (1 ms) -[ RUN ] CropLayerTest/1.TestCropAllGradient -[ OK ] CropLayerTest/1.TestCropAllGradient (345 ms) -[ RUN ] CropLayerTest/1.TestSetupShapeNegativeIndexing -[ OK ] CropLayerTest/1.TestSetupShapeNegativeIndexing (0 ms) -[ RUN ] CropLayerTest/1.TestCropAll -[ OK ] CropLayerTest/1.TestCropAll (0 ms) -[ RUN ] CropLayerTest/1.TestSetupShapeAll -[ OK ] CropLayerTest/1.TestSetupShapeAll (0 ms) -[ RUN ] CropLayerTest/1.TestCrop5DGradient -[ OK ] CropLayerTest/1.TestCrop5DGradient (1448 ms) -[ RUN ] CropLayerTest/1.TestCropHWGradient -[ OK ] CropLayerTest/1.TestCropHWGradient (374 ms) -[ RUN ] CropLayerTest/1.TestCropAllOffset -[ OK ] CropLayerTest/1.TestCropAllOffset (0 ms) -[ RUN ] CropLayerTest/1.TestCropHW -[ OK ] CropLayerTest/1.TestCropHW (0 ms) -[ RUN ] CropLayerTest/1.TestSetupShapeDefault -[ OK ] CropLayerTest/1.TestSetupShapeDefault (0 ms) -[----------] 11 tests from CropLayerTest/1 (2171 ms total) +[----------] 20 tests from BiasLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] BiasLayerTest/1.TestForwardEltwiseInPlace +[ OK ] BiasLayerTest/1.TestForwardEltwiseInPlace (0 ms) +[ RUN ] BiasLayerTest/1.TestGradientEltwise +[ OK ] BiasLayerTest/1.TestGradientEltwise (5 ms) +[ RUN ] BiasLayerTest/1.TestGradientEltwiseWithParam +[ OK ] BiasLayerTest/1.TestGradientEltwiseWithParam (211 ms) +[ RUN ] BiasLayerTest/1.TestGradientBias +[ OK ] BiasLayerTest/1.TestGradientBias (92 ms) +[ RUN ] BiasLayerTest/1.TestForwardEltwiseWithParam +[ OK ] BiasLayerTest/1.TestForwardEltwiseWithParam (0 ms) +[ RUN ] BiasLayerTest/1.TestGradientBroadcastBegin +[ OK ] BiasLayerTest/1.TestGradientBroadcastBegin (94 ms) +[ RUN ] BiasLayerTest/1.TestForwardBroadcastMiddle +[ OK ] BiasLayerTest/1.TestForwardBroadcastMiddle (1 ms) +[ RUN ] BiasLayerTest/1.TestForwardBiasAxis2 +[ OK ] BiasLayerTest/1.TestForwardBiasAxis2 (0 ms) +[ RUN ] BiasLayerTest/1.TestGradientBroadcastMiddleWithParam +[ OK ] BiasLayerTest/1.TestGradientBroadcastMiddleWithParam (103 ms) +[ RUN ] BiasLayerTest/1.TestGradientBroadcastMiddle +[ OK ] BiasLayerTest/1.TestGradientBroadcastMiddle (103 ms) +[ RUN ] BiasLayerTest/1.TestBackwardEltwiseInPlace +[ OK ] BiasLayerTest/1.TestBackwardEltwiseInPlace (1 ms) +[ RUN ] BiasLayerTest/1.TestGradientBiasAxis2 +[ OK ] BiasLayerTest/1.TestGradientBiasAxis2 (90 ms) +[ RUN ] BiasLayerTest/1.TestForwardBias +[ OK ] BiasLayerTest/1.TestForwardBias (0 ms) +[ RUN ] BiasLayerTest/1.TestGradientBroadcastEnd +[ OK ] BiasLayerTest/1.TestGradientBroadcastEnd (133 ms) +[ RUN ] BiasLayerTest/1.TestBackwardBroadcastMiddleInPlace +[ OK ] BiasLayerTest/1.TestBackwardBroadcastMiddleInPlace (0 ms) +[ RUN ] BiasLayerTest/1.TestForwardEltwise +[ OK ] BiasLayerTest/1.TestForwardEltwise (0 ms) +[ RUN ] BiasLayerTest/1.TestForwardBroadcastEnd +[ OK ] BiasLayerTest/1.TestForwardBroadcastEnd (0 ms) +[ RUN ] BiasLayerTest/1.TestForwardBroadcastBegin +[ OK ] BiasLayerTest/1.TestForwardBroadcastBegin (0 ms) +[ RUN ] BiasLayerTest/1.TestForwardBroadcastMiddleInPlace +[ OK ] 
BiasLayerTest/1.TestForwardBroadcastMiddleInPlace (0 ms) +[ RUN ] BiasLayerTest/1.TestForwardBroadcastMiddleWithParam +[ OK ] BiasLayerTest/1.TestForwardBroadcastMiddleWithParam (0 ms) +[----------] 20 tests from BiasLayerTest/1 (833 ms total) -[----------] 4 tests from NetUpgradeTest -[ RUN ] NetUpgradeTest.TestSimple -[ OK ] NetUpgradeTest.TestSimple (0 ms) -[ RUN ] NetUpgradeTest.TestAllParams -[ OK ] NetUpgradeTest.TestAllParams (1 ms) -[ RUN ] NetUpgradeTest.TestImageNet -[ OK ] NetUpgradeTest.TestImageNet (2 ms) -[ RUN ] NetUpgradeTest.TestUpgradeV1LayerType -[ OK ] NetUpgradeTest.TestUpgradeV1LayerType (1 ms) -[----------] 4 tests from NetUpgradeTest (4 ms total) +[----------] 8 tests from AdamSolverTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] AdamSolverTest/0.TestSnapshot +[ OK ] AdamSolverTest/0.TestSnapshot (15 ms) +[ RUN ] AdamSolverTest/0.TestAdamLeastSquaresUpdateWithEverythingShare +[ OK ] AdamSolverTest/0.TestAdamLeastSquaresUpdateWithEverythingShare (75 ms) +[ RUN ] AdamSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare +[ OK ] AdamSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare (3 ms) +[ RUN ] AdamSolverTest/0.TestSnapshotShare +[ OK ] AdamSolverTest/0.TestSnapshotShare (19 ms) +[ RUN ] AdamSolverTest/0.TestAdamLeastSquaresUpdateWithWeightDecay +[ OK ] AdamSolverTest/0.TestAdamLeastSquaresUpdateWithWeightDecay (14 ms) +[ RUN ] AdamSolverTest/0.TestAdamLeastSquaresUpdateWithEverything +[ OK ] AdamSolverTest/0.TestAdamLeastSquaresUpdateWithEverything (80 ms) +[ RUN ] AdamSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum +[ OK ] AdamSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum (3 ms) +[ RUN ] AdamSolverTest/0.TestAdamLeastSquaresUpdate +[ OK ] AdamSolverTest/0.TestAdamLeastSquaresUpdate (16 ms) +[----------] 8 tests from AdamSolverTest/0 (225 ms total) -[----------] 9 tests from AccuracyLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] AccuracyLayerTest/1.TestSetup -[ OK ] AccuracyLayerTest/1.TestSetup (0 ms) -[ RUN ] AccuracyLayerTest/1.TestForwardIgnoreLabel -[ OK ] AccuracyLayerTest/1.TestForwardIgnoreLabel (1 ms) -[ RUN ] AccuracyLayerTest/1.TestSetupOutputPerClass -[ OK ] AccuracyLayerTest/1.TestSetupOutputPerClass (0 ms) -[ RUN ] AccuracyLayerTest/1.TestForwardTopK -[ OK ] AccuracyLayerTest/1.TestForwardTopK (2 ms) -[ RUN ] AccuracyLayerTest/1.TestForwardPerClassWithIgnoreLabel -[ OK ] AccuracyLayerTest/1.TestForwardPerClassWithIgnoreLabel (1 ms) -[ RUN ] AccuracyLayerTest/1.TestSetupTopK -[ OK ] AccuracyLayerTest/1.TestSetupTopK (0 ms) -[ RUN ] AccuracyLayerTest/1.TestForwardWithSpatialAxes -[ OK ] AccuracyLayerTest/1.TestForwardWithSpatialAxes (0 ms) -[ RUN ] AccuracyLayerTest/1.TestForwardPerClass -[ OK ] AccuracyLayerTest/1.TestForwardPerClass (1 ms) -[ RUN ] AccuracyLayerTest/1.TestForward -[ OK ] AccuracyLayerTest/1.TestForward (0 ms) -[----------] 9 tests from AccuracyLayerTest/1 (5 ms total) +[----------] 5 tests from BenchmarkTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] BenchmarkTest/1.TestTimerSeconds +[ OK ] BenchmarkTest/1.TestTimerSeconds (301 ms) +[ RUN ] BenchmarkTest/1.TestTimerMilliSeconds +[ OK ] BenchmarkTest/1.TestTimerMilliSeconds (300 ms) +[ RUN ] BenchmarkTest/1.TestTimerConstructor +[ OK ] BenchmarkTest/1.TestTimerConstructor (0 ms) +[ RUN ] BenchmarkTest/1.TestTimerStop +[ OK ] BenchmarkTest/1.TestTimerStop (0 ms) +[ RUN ] BenchmarkTest/1.TestTimerStart +[ OK ] BenchmarkTest/1.TestTimerStart (0 ms) +[----------] 5 tests from BenchmarkTest/1 (601 ms total) -[----------] 8 tests from 
RMSPropSolverTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] RMSPropSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare -[ OK ] RMSPropSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare (4 ms) -[ RUN ] RMSPropSolverTest/1.TestRMSPropLeastSquaresUpdateWithWeightDecay -[ OK ] RMSPropSolverTest/1.TestRMSPropLeastSquaresUpdateWithWeightDecay (14 ms) -[ RUN ] RMSPropSolverTest/1.TestRMSPropLeastSquaresUpdateWithEverythingShare -[ OK ] RMSPropSolverTest/1.TestRMSPropLeastSquaresUpdateWithEverythingShare (78 ms) -[ RUN ] RMSPropSolverTest/1.TestRMSPropLeastSquaresUpdateWithEverything -[ OK ] RMSPropSolverTest/1.TestRMSPropLeastSquaresUpdateWithEverything (70 ms) -[ RUN ] RMSPropSolverTest/1.TestRMSPropLeastSquaresUpdateWithRmsDecay -[ OK ] RMSPropSolverTest/1.TestRMSPropLeastSquaresUpdateWithRmsDecay (68 ms) -[ RUN ] RMSPropSolverTest/1.TestSnapshotShare -[ OK ] RMSPropSolverTest/1.TestSnapshotShare (18 ms) -[ RUN ] RMSPropSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum -[ OK ] RMSPropSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum (3 ms) -[ RUN ] RMSPropSolverTest/1.TestSnapshot -[ OK ] RMSPropSolverTest/1.TestSnapshot (14 ms) -[----------] 8 tests from RMSPropSolverTest/1 (269 ms total) +[----------] 4 tests from GaussianFillerTest/0, where TypeParam = float +[ RUN ] GaussianFillerTest/0.TestFill1D +[ OK ] GaussianFillerTest/0.TestFill1D (1 ms) +[ RUN ] GaussianFillerTest/0.TestFill +[ OK ] GaussianFillerTest/0.TestFill (0 ms) +[ RUN ] GaussianFillerTest/0.TestFill5D +[ OK ] GaussianFillerTest/0.TestFill5D (0 ms) +[ RUN ] GaussianFillerTest/0.TestFill2D +[ OK ] GaussianFillerTest/0.TestFill2D (0 ms) +[----------] 4 tests from GaussianFillerTest/0 (1 ms total) -[----------] 15 tests from ConvolutionLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] ConvolutionLayerTest/0.TestSimpleConvolution -[ OK ] ConvolutionLayerTest/0.TestSimpleConvolution (0 ms) -[ RUN ] ConvolutionLayerTest/0.TestDilated3DConvolution -[ OK ] ConvolutionLayerTest/0.TestDilated3DConvolution (6 ms) -[ RUN ] ConvolutionLayerTest/0.Test0DConvolution -[ OK ] ConvolutionLayerTest/0.Test0DConvolution (0 ms) -[ RUN ] ConvolutionLayerTest/0.Test1x1Convolution -[ OK ] ConvolutionLayerTest/0.Test1x1Convolution (0 ms) -[ RUN ] ConvolutionLayerTest/0.TestDilatedConvolution -[ OK ] ConvolutionLayerTest/0.TestDilatedConvolution (1 ms) -[ RUN ] ConvolutionLayerTest/0.TestNDAgainst2D -[ OK ] ConvolutionLayerTest/0.TestNDAgainst2D (462 ms) -[ RUN ] ConvolutionLayerTest/0.TestGradient3D -[ OK ] ConvolutionLayerTest/0.TestGradient3D (781 ms) -[ RUN ] ConvolutionLayerTest/0.Test1x1Gradient -[ OK ] ConvolutionLayerTest/0.Test1x1Gradient (1078 ms) -[ RUN ] ConvolutionLayerTest/0.TestDilatedGradient -[ OK ] ConvolutionLayerTest/0.TestDilatedGradient (26 ms) -[ RUN ] ConvolutionLayerTest/0.TestGradient -[ OK ] ConvolutionLayerTest/0.TestGradient (103 ms) -[ RUN ] ConvolutionLayerTest/0.TestSimple3DConvolution -[ OK ] ConvolutionLayerTest/0.TestSimple3DConvolution (2 ms) -[ RUN ] ConvolutionLayerTest/0.TestSobelConvolution -[ OK ] ConvolutionLayerTest/0.TestSobelConvolution (0 ms) -[ RUN ] ConvolutionLayerTest/0.TestSimpleConvolutionGroup -[ OK ] ConvolutionLayerTest/0.TestSimpleConvolutionGroup (0 ms) -[ RUN ] ConvolutionLayerTest/0.TestGradientGroup -[ OK ] ConvolutionLayerTest/0.TestGradientGroup (27 ms) -[ RUN ] ConvolutionLayerTest/0.TestSetup -[ OK ] ConvolutionLayerTest/0.TestSetup (1 ms) -[----------] 15 tests from ConvolutionLayerTest/0 (2488 ms total) +[----------] 1 test from SolverFactoryTest/1, 
where TypeParam = caffe::CPUDevice +[ RUN ] SolverFactoryTest/1.TestCreateSolver +[ OK ] SolverFactoryTest/1.TestCreateSolver (1 ms) +[----------] 1 test from SolverFactoryTest/1 (1 ms total) -[----------] 6 tests from MVNLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] MVNLayerTest/0.TestForwardAcrossChannels -[ OK ] MVNLayerTest/0.TestForwardAcrossChannels (0 ms) -[ RUN ] MVNLayerTest/0.TestGradientAcrossChannels -[ OK ] MVNLayerTest/0.TestGradientAcrossChannels (305 ms) -[ RUN ] MVNLayerTest/0.TestGradientMeanOnly -[ OK ] MVNLayerTest/0.TestGradientMeanOnly (156 ms) -[ RUN ] MVNLayerTest/0.TestGradient -[ OK ] MVNLayerTest/0.TestGradient (274 ms) -[ RUN ] MVNLayerTest/0.TestForward -[ OK ] MVNLayerTest/0.TestForward (0 ms) -[ RUN ] MVNLayerTest/0.TestForwardMeanOnly -[ OK ] MVNLayerTest/0.TestForwardMeanOnly (1 ms) -[----------] 6 tests from MVNLayerTest/0 (736 ms total) +[----------] 3 tests from FilterLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] FilterLayerTest/1.TestReshape +[ OK ] FilterLayerTest/1.TestReshape (0 ms) +[ RUN ] FilterLayerTest/1.TestForward +[ OK ] FilterLayerTest/1.TestForward (0 ms) +[ RUN ] FilterLayerTest/1.TestGradient +[ OK ] FilterLayerTest/1.TestGradient (278 ms) +[----------] 3 tests from FilterLayerTest/1 (278 ms total) [----------] 3 tests from TanHLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] TanHLayerTest/1.TestTanHOverflow -[ OK ] TanHLayerTest/1.TestTanHOverflow (0 ms) -[ RUN ] TanHLayerTest/1.TestTanHGradient -[ OK ] TanHLayerTest/1.TestTanHGradient (4 ms) [ RUN ] TanHLayerTest/1.TestTanH [ OK ] TanHLayerTest/1.TestTanH (0 ms) -[----------] 3 tests from TanHLayerTest/1 (4 ms total) +[ RUN ] TanHLayerTest/1.TestTanHGradient +[ OK ] TanHLayerTest/1.TestTanHGradient (5 ms) +[ RUN ] TanHLayerTest/1.TestTanHOverflow +[ OK ] TanHLayerTest/1.TestTanHOverflow (0 ms) +[----------] 3 tests from TanHLayerTest/1 (5 ms total) -[----------] 6 tests from MSRAFillerTest/1, where TypeParam = double -[ RUN ] MSRAFillerTest/1.TestFillFanOut -[ OK ] MSRAFillerTest/1.TestFillFanOut (81 ms) -[ RUN ] MSRAFillerTest/1.TestFill2D -[ OK ] MSRAFillerTest/1.TestFill2D (0 ms) -[ RUN ] MSRAFillerTest/1.TestFill1D -[ OK ] MSRAFillerTest/1.TestFill1D (0 ms) -[ RUN ] MSRAFillerTest/1.TestFillAverage -[ OK ] MSRAFillerTest/1.TestFillAverage (81 ms) -[ RUN ] MSRAFillerTest/1.TestFill5D -[ OK ] MSRAFillerTest/1.TestFill5D (0 ms) -[ RUN ] MSRAFillerTest/1.TestFillFanIn -[ OK ] MSRAFillerTest/1.TestFillFanIn (81 ms) -[----------] 6 tests from MSRAFillerTest/1 (243 ms total) +[----------] 5 tests from MemoryDataLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] MemoryDataLayerTest/0.TestSetup +[ OK ] MemoryDataLayerTest/0.TestSetup (1 ms) +[ RUN ] MemoryDataLayerTest/0.AddDatumVectorDefaultTransform +[ OK ] MemoryDataLayerTest/0.AddDatumVectorDefaultTransform (1 ms) +[ RUN ] MemoryDataLayerTest/0.AddMatVectorDefaultTransform +[ OK ] MemoryDataLayerTest/0.AddMatVectorDefaultTransform (6 ms) +[ RUN ] MemoryDataLayerTest/0.TestSetBatchSize +[ OK ] MemoryDataLayerTest/0.TestSetBatchSize (2 ms) +[ RUN ] MemoryDataLayerTest/0.TestForward +[ OK ] MemoryDataLayerTest/0.TestForward (32 ms) +[----------] 5 tests from MemoryDataLayerTest/0 (42 ms total) -[----------] 4 tests from ConstantFillerTest/1, where TypeParam = double -[ RUN ] ConstantFillerTest/1.TestFill1D -[ OK ] ConstantFillerTest/1.TestFill1D (0 ms) -[ RUN ] ConstantFillerTest/1.TestFill5D -[ OK ] ConstantFillerTest/1.TestFill5D (0 ms) -[ RUN ] ConstantFillerTest/1.TestFill2D -[ OK ] 
ConstantFillerTest/1.TestFill2D (0 ms) -[ RUN ] ConstantFillerTest/1.TestFill -[ OK ] ConstantFillerTest/1.TestFill (0 ms) -[----------] 4 tests from ConstantFillerTest/1 (0 ms total) +[----------] 2 tests from BatchReindexLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] BatchReindexLayerTest/1.TestGradient +[ OK ] BatchReindexLayerTest/1.TestGradient (122 ms) +[ RUN ] BatchReindexLayerTest/1.TestForward +[ OK ] BatchReindexLayerTest/1.TestForward (0 ms) +[----------] 2 tests from BatchReindexLayerTest/1 (122 ms total) -[----------] 20 tests from FilterNetTest -[ RUN ] FilterNetTest.TestFilterOutByNotStage -[ OK ] FilterNetTest.TestFilterOutByNotStage (0 ms) -[ RUN ] FilterNetTest.TestFilterOutByMultipleStage -[ OK ] FilterNetTest.TestFilterOutByMultipleStage (0 ms) -[ RUN ] FilterNetTest.TestFilterInByIncludeMultiRule -[ OK ] FilterNetTest.TestFilterInByIncludeMultiRule (0 ms) -[ RUN ] FilterNetTest.TestFilterLeNetTrainTest -[ OK ] FilterNetTest.TestFilterLeNetTrainTest (1 ms) -[ RUN ] FilterNetTest.TestFilterOutByStage -[ OK ] FilterNetTest.TestFilterOutByStage (0 ms) -[ RUN ] FilterNetTest.TestFilterOutByMaxLevel -[ OK ] FilterNetTest.TestFilterOutByMaxLevel (1 ms) -[ RUN ] FilterNetTest.TestFilterInByMultipleStage -[ OK ] FilterNetTest.TestFilterInByMultipleStage (0 ms) -[ RUN ] FilterNetTest.TestFilterInByMultipleStage2 -[ OK ] FilterNetTest.TestFilterInByMultipleStage2 (0 ms) -[ RUN ] FilterNetTest.TestFilterInByMaxLevel -[ OK ] FilterNetTest.TestFilterInByMaxLevel (0 ms) -[ RUN ] FilterNetTest.TestFilterInOutByExcludeMultiRule -[ OK ] FilterNetTest.TestFilterInOutByExcludeMultiRule (0 ms) -[ RUN ] FilterNetTest.TestNoFilter -[ OK ] FilterNetTest.TestNoFilter (0 ms) -[ RUN ] FilterNetTest.TestFilterInByNotStage -[ OK ] FilterNetTest.TestFilterInByNotStage (0 ms) -[ RUN ] FilterNetTest.TestFilterOutByStage2 -[ OK ] FilterNetTest.TestFilterOutByStage2 (1 ms) -[ RUN ] FilterNetTest.TestFilterOutByMinLevel -[ OK ] FilterNetTest.TestFilterOutByMinLevel (0 ms) -[ RUN ] FilterNetTest.TestFilterInByMinLevel2 -[ OK ] FilterNetTest.TestFilterInByMinLevel2 (0 ms) -[ RUN ] FilterNetTest.TestFilterInByMaxLevel2 -[ OK ] FilterNetTest.TestFilterInByMaxLevel2 (0 ms) -[ RUN ] FilterNetTest.TestFilterInByStage2 -[ OK ] FilterNetTest.TestFilterInByStage2 (0 ms) -[ RUN ] FilterNetTest.TestFilterInOutByIncludeMultiRule -[ OK ] FilterNetTest.TestFilterInOutByIncludeMultiRule (0 ms) -[ RUN ] FilterNetTest.TestFilterInByStage -[ OK ] FilterNetTest.TestFilterInByStage (0 ms) -[ RUN ] FilterNetTest.TestFilterInByMinLevel -[ OK ] FilterNetTest.TestFilterInByMinLevel (1 ms) -[----------] 20 tests from FilterNetTest (4 ms total) +[----------] 3 tests from DummyDataLayerTest/1, where TypeParam = double +[ RUN ] DummyDataLayerTest/1.TestThreeTopConstantGaussianConstant +[ OK ] DummyDataLayerTest/1.TestThreeTopConstantGaussianConstant (0 ms) +[ RUN ] DummyDataLayerTest/1.TestTwoTopConstant +[ OK ] DummyDataLayerTest/1.TestTwoTopConstant (1 ms) +[ RUN ] DummyDataLayerTest/1.TestOneTopConstant +[ OK ] DummyDataLayerTest/1.TestOneTopConstant (0 ms) +[----------] 3 tests from DummyDataLayerTest/1 (1 ms total) -[----------] 9 tests from AccuracyLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] AccuracyLayerTest/0.TestForwardWithSpatialAxes -[ OK ] AccuracyLayerTest/0.TestForwardWithSpatialAxes (0 ms) -[ RUN ] AccuracyLayerTest/0.TestSetupTopK -[ OK ] AccuracyLayerTest/0.TestSetupTopK (0 ms) -[ RUN ] AccuracyLayerTest/0.TestSetupOutputPerClass -[ OK ] AccuracyLayerTest/0.TestSetupOutputPerClass (0 ms) -[ 
RUN ] AccuracyLayerTest/0.TestForward -[ OK ] AccuracyLayerTest/0.TestForward (0 ms) -[ RUN ] AccuracyLayerTest/0.TestSetup -[ OK ] AccuracyLayerTest/0.TestSetup (0 ms) -[ RUN ] AccuracyLayerTest/0.TestForwardTopK -[ OK ] AccuracyLayerTest/0.TestForwardTopK (2 ms) -[ RUN ] AccuracyLayerTest/0.TestForwardIgnoreLabel -[ OK ] AccuracyLayerTest/0.TestForwardIgnoreLabel (1 ms) -[ RUN ] AccuracyLayerTest/0.TestForwardPerClassWithIgnoreLabel -[ OK ] AccuracyLayerTest/0.TestForwardPerClassWithIgnoreLabel (0 ms) -[ RUN ] AccuracyLayerTest/0.TestForwardPerClass -[ OK ] AccuracyLayerTest/0.TestForwardPerClass (1 ms) -[----------] 9 tests from AccuracyLayerTest/0 (4 ms total) +[----------] 3 tests from BlobMathTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] BlobMathTest/0.TestAsum +[ OK ] BlobMathTest/0.TestAsum (0 ms) +[ RUN ] BlobMathTest/0.TestSumOfSquares +[ OK ] BlobMathTest/0.TestSumOfSquares (0 ms) +[ RUN ] BlobMathTest/0.TestScaleData +[ OK ] BlobMathTest/0.TestScaleData (0 ms) +[----------] 3 tests from BlobMathTest/0 (0 ms total) -[----------] 9 tests from InnerProductLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] InnerProductLayerTest/1.TestSetUpTransposeFalse -[ OK ] InnerProductLayerTest/1.TestSetUpTransposeFalse (0 ms) -[ RUN ] InnerProductLayerTest/1.TestForwardNoBatch -[ OK ] InnerProductLayerTest/1.TestForwardNoBatch (0 ms) -[ RUN ] InnerProductLayerTest/1.TestBackwardTranspose -[ OK ] InnerProductLayerTest/1.TestBackwardTranspose (0 ms) -[ RUN ] InnerProductLayerTest/1.TestGradient -[ OK ] InnerProductLayerTest/1.TestGradient (140 ms) -[ RUN ] InnerProductLayerTest/1.TestForwardTranspose -[ OK ] InnerProductLayerTest/1.TestForwardTranspose (0 ms) -[ RUN ] InnerProductLayerTest/1.TestSetUp -[ OK ] InnerProductLayerTest/1.TestSetUp (1 ms) -[ RUN ] InnerProductLayerTest/1.TestGradientTranspose -[ OK ] InnerProductLayerTest/1.TestGradientTranspose (140 ms) -[ RUN ] InnerProductLayerTest/1.TestSetUpTransposeTrue -[ OK ] InnerProductLayerTest/1.TestSetUpTransposeTrue (0 ms) -[ RUN ] InnerProductLayerTest/1.TestForward -[ OK ] InnerProductLayerTest/1.TestForward (0 ms) -[----------] 9 tests from InnerProductLayerTest/1 (281 ms total) +[----------] 11 tests from CropLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] CropLayerTest/1.TestSetupShapeAll +[ OK ] CropLayerTest/1.TestSetupShapeAll (0 ms) +[ RUN ] CropLayerTest/1.TestCropHWGradient +[ OK ] CropLayerTest/1.TestCropHWGradient (415 ms) +[ RUN ] CropLayerTest/1.TestCropAllGradient +[ OK ] CropLayerTest/1.TestCropAllGradient (266 ms) +[ RUN ] CropLayerTest/1.TestSetupShapeDefault +[ OK ] CropLayerTest/1.TestSetupShapeDefault (0 ms) +[ RUN ] CropLayerTest/1.TestCropAll +[ OK ] CropLayerTest/1.TestCropAll (0 ms) +[ RUN ] CropLayerTest/1.TestDimensionsCheck +[ OK ] CropLayerTest/1.TestDimensionsCheck (0 ms) +[ RUN ] CropLayerTest/1.TestCropHW +[ OK ] CropLayerTest/1.TestCropHW (0 ms) +[ RUN ] CropLayerTest/1.TestCropAllOffset +[ OK ] CropLayerTest/1.TestCropAllOffset (1 ms) +[ RUN ] CropLayerTest/1.TestSetupShapeNegativeIndexing +[ OK ] CropLayerTest/1.TestSetupShapeNegativeIndexing (0 ms) +[ RUN ] CropLayerTest/1.TestCrop5DGradient +[ OK ] CropLayerTest/1.TestCrop5DGradient (1792 ms) +[ RUN ] CropLayerTest/1.TestCrop5D +[ OK ] CropLayerTest/1.TestCrop5D (0 ms) +[----------] 11 tests from CropLayerTest/1 (2475 ms total) -[----------] 4 tests from BlobSimpleTest/0, where TypeParam = float -[ RUN ] BlobSimpleTest/0.TestLegacyBlobProtoShapeEquals -[ OK ] BlobSimpleTest/0.TestLegacyBlobProtoShapeEquals (0 ms) -[ RUN ] 
BlobSimpleTest/0.TestReshapeZero -[ OK ] BlobSimpleTest/0.TestReshapeZero (0 ms) -[ RUN ] BlobSimpleTest/0.TestReshape -[ OK ] BlobSimpleTest/0.TestReshape (0 ms) -[ RUN ] BlobSimpleTest/0.TestInitialization -[ OK ] BlobSimpleTest/0.TestInitialization (0 ms) -[----------] 4 tests from BlobSimpleTest/0 (0 ms total) +[----------] 4 tests from ContrastiveLossLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] ContrastiveLossLayerTest/0.TestGradientLegacy +[ OK ] ContrastiveLossLayerTest/0.TestGradientLegacy (186 ms) +[ RUN ] ContrastiveLossLayerTest/0.TestForward +[ OK ] ContrastiveLossLayerTest/0.TestForward (0 ms) +[ RUN ] ContrastiveLossLayerTest/0.TestGradient +[ OK ] ContrastiveLossLayerTest/0.TestGradient (203 ms) +[ RUN ] ContrastiveLossLayerTest/0.TestForwardLegacy +[ OK ] ContrastiveLossLayerTest/0.TestForwardLegacy (0 ms) +[----------] 4 tests from ContrastiveLossLayerTest/0 (389 ms total) -[----------] 2 tests from HDF5DataLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] HDF5DataLayerTest/0.TestRead -[ OK ] HDF5DataLayerTest/0.TestRead (23 ms) -[ RUN ] HDF5DataLayerTest/0.TestSkip -[ OK ] HDF5DataLayerTest/0.TestSkip (13 ms) -[----------] 2 tests from HDF5DataLayerTest/0 (36 ms total) +[----------] 6 tests from MSRAFillerTest/0, where TypeParam = float +[ RUN ] MSRAFillerTest/0.TestFill2D +[ OK ] MSRAFillerTest/0.TestFill2D (0 ms) +[ RUN ] MSRAFillerTest/0.TestFillAverage +[ OK ] MSRAFillerTest/0.TestFillAverage (57 ms) +[ RUN ] MSRAFillerTest/0.TestFill1D +[ OK ] MSRAFillerTest/0.TestFill1D (0 ms) +[ RUN ] MSRAFillerTest/0.TestFill5D +[ OK ] MSRAFillerTest/0.TestFill5D (0 ms) +[ RUN ] MSRAFillerTest/0.TestFillFanOut +[ OK ] MSRAFillerTest/0.TestFillFanOut (57 ms) +[ RUN ] MSRAFillerTest/0.TestFillFanIn +[ OK ] MSRAFillerTest/0.TestFillFanIn (59 ms) +[----------] 6 tests from MSRAFillerTest/0 (174 ms total) -[----------] 26 tests from IOTest -[ RUN ] IOTest.TestDecodeDatum -[ OK ] IOTest.TestDecodeDatum (11 ms) -[ RUN ] IOTest.TestReadImageToDatumContent -[ OK ] IOTest.TestReadImageToDatumContent (10 ms) -[ RUN ] IOTest.TestReadImageToDatumResized -[ OK ] IOTest.TestReadImageToDatumResized (4 ms) -[ RUN ] IOTest.TestCVMatToDatum -[ OK ] IOTest.TestCVMatToDatum (5 ms) -[ RUN ] IOTest.TestReadImageToCVMat -[ OK ] IOTest.TestReadImageToCVMat (4 ms) -[ RUN ] IOTest.TestReadImageToDatumReferenceResized -[ OK ] IOTest.TestReadImageToDatumReferenceResized (8 ms) -[ RUN ] IOTest.TestDecodeDatumToCVMatContent -[ OK ] IOTest.TestDecodeDatumToCVMatContent (11 ms) -[ RUN ] IOTest.TestCVMatToDatumContent -[ OK ] IOTest.TestCVMatToDatumContent (11 ms) -[ RUN ] IOTest.TestDecodeDatumToCVMat -[ OK ] IOTest.TestDecodeDatumToCVMat (6 ms) -[ RUN ] IOTest.TestReadFileToDatum -[ OK ] IOTest.TestReadFileToDatum (0 ms) -[ RUN ] IOTest.TestCVMatToDatumReference -[ OK ] IOTest.TestCVMatToDatumReference (11 ms) -[ RUN ] IOTest.TestReadImageToDatumResizedGray -[ OK ] IOTest.TestReadImageToDatumResizedGray (3 ms) -[ RUN ] IOTest.TestReadImageToCVMatResizedGray -[ OK ] IOTest.TestReadImageToCVMatResizedGray (3 ms) -[ RUN ] IOTest.TestReadImageToCVMatResized -[ OK ] IOTest.TestReadImageToCVMatResized (4 ms) -[ RUN ] IOTest.TestDecodeDatumToCVMatNativeGray -[ OK ] IOTest.TestDecodeDatumToCVMatNativeGray (23 ms) -[ RUN ] IOTest.TestDecodeDatumToCVMatNative -[ OK ] IOTest.TestDecodeDatumToCVMatNative (4 ms) -[ RUN ] IOTest.TestReadImageToDatumGray -[ OK ] IOTest.TestReadImageToDatumGray (3 ms) -[ RUN ] IOTest.TestDecodeDatumNative -[ OK ] IOTest.TestDecodeDatumNative (11 ms) -[ RUN ] 
IOTest.TestReadImageToCVMatResizedSquare -[ OK ] IOTest.TestReadImageToCVMatResizedSquare (5 ms) -[ RUN ] IOTest.TestReadImageToDatumContentGray -[ OK ] IOTest.TestReadImageToDatumContentGray (5 ms) -[ RUN ] IOTest.TestReadImageToDatumReference -[ OK ] IOTest.TestReadImageToDatumReference (11 ms) -[ RUN ] IOTest.TestReadImageToDatumResizedSquare -[ OK ] IOTest.TestReadImageToDatumResizedSquare (5 ms) -[ RUN ] IOTest.TestDecodeDatumNativeGray -[ OK ] IOTest.TestDecodeDatumNativeGray (5 ms) -[ RUN ] IOTest.TestReadImageToDatum -[ OK ] IOTest.TestReadImageToDatum (5 ms) -[ RUN ] IOTest.TestDecodeDatumToCVMatContentNative -[ OK ] IOTest.TestDecodeDatumToCVMatContentNative (11 ms) -[ RUN ] IOTest.TestReadImageToCVMatGray -[ OK ] IOTest.TestReadImageToCVMatGray (2 ms) -[----------] 26 tests from IOTest (181 ms total) +[----------] 9 tests from LSTMLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] LSTMLayerTest/0.TestLSTMUnitGradientNonZeroCont +[ OK ] LSTMLayerTest/0.TestLSTMUnitGradientNonZeroCont (48 ms) +[ RUN ] LSTMLayerTest/0.TestGradient +[ OK ] LSTMLayerTest/0.TestGradient (272 ms) +[ RUN ] LSTMLayerTest/0.TestSetUp +[ OK ] LSTMLayerTest/0.TestSetUp (2 ms) +[ RUN ] LSTMLayerTest/0.TestGradientNonZeroContBufferSize2 +[ OK ] LSTMLayerTest/0.TestGradientNonZeroContBufferSize2 (509 ms) +[ RUN ] LSTMLayerTest/0.TestGradientNonZeroContBufferSize2WithStaticInput +[ OK ] LSTMLayerTest/0.TestGradientNonZeroContBufferSize2WithStaticInput (3418 ms) +[ RUN ] LSTMLayerTest/0.TestGradientNonZeroCont +[ OK ] LSTMLayerTest/0.TestGradientNonZeroCont (294 ms) +[ RUN ] LSTMLayerTest/0.TestLSTMUnitGradient +[ OK ] LSTMLayerTest/0.TestLSTMUnitGradient (48 ms) +[ RUN ] LSTMLayerTest/0.TestForward +[ OK ] LSTMLayerTest/0.TestForward (6 ms) +[ RUN ] LSTMLayerTest/0.TestLSTMUnitSetUp +[ OK ] LSTMLayerTest/0.TestLSTMUnitSetUp (0 ms) +[----------] 9 tests from LSTMLayerTest/0 (4597 ms total) -[----------] 7 tests from CPUMathFunctionsTest/1, where TypeParam = double -[ RUN ] CPUMathFunctionsTest/1.TestAsum -[ OK ] CPUMathFunctionsTest/1.TestAsum (4 ms) -[ RUN ] CPUMathFunctionsTest/1.TestScale -[ OK ] CPUMathFunctionsTest/1.TestScale (4 ms) -[ RUN ] CPUMathFunctionsTest/1.TestNothing -[ OK ] CPUMathFunctionsTest/1.TestNothing (3 ms) -[ RUN ] CPUMathFunctionsTest/1.TestSign -[ OK ] CPUMathFunctionsTest/1.TestSign (4 ms) -[ RUN ] CPUMathFunctionsTest/1.TestSgnbit -[ OK ] CPUMathFunctionsTest/1.TestSgnbit (4 ms) -[ RUN ] CPUMathFunctionsTest/1.TestCopy -[ OK ] CPUMathFunctionsTest/1.TestCopy (4 ms) -[ RUN ] CPUMathFunctionsTest/1.TestFabs -[ OK ] CPUMathFunctionsTest/1.TestFabs (4 ms) -[----------] 7 tests from CPUMathFunctionsTest/1 (27 ms total) +[----------] 5 tests from SPPLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] SPPLayerTest/1.TestSetup +[ OK ] SPPLayerTest/1.TestSetup (1 ms) +[ RUN ] SPPLayerTest/1.TestEqualOutputDims +[ OK ] SPPLayerTest/1.TestEqualOutputDims (0 ms) +[ RUN ] SPPLayerTest/1.TestEqualOutputDims2 +[ OK ] SPPLayerTest/1.TestEqualOutputDims2 (0 ms) +[ RUN ] SPPLayerTest/1.TestForwardBackward +[ OK ] SPPLayerTest/1.TestForwardBackward (0 ms) +[ RUN ] SPPLayerTest/1.TestGradient +[ OK ] SPPLayerTest/1.TestGradient (2812 ms) +[----------] 5 tests from SPPLayerTest/1 (2813 ms total) -[----------] 10 tests from EltwiseLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] EltwiseLayerTest/1.TestProd -[ OK ] EltwiseLayerTest/1.TestProd (0 ms) -[ RUN ] EltwiseLayerTest/1.TestUnstableProdGradient -[ OK ] EltwiseLayerTest/1.TestUnstableProdGradient (4 ms) -[ RUN ] 
EltwiseLayerTest/1.TestSum -[ OK ] EltwiseLayerTest/1.TestSum (0 ms) -[ RUN ] EltwiseLayerTest/1.TestSumCoeffGradient -[ OK ] EltwiseLayerTest/1.TestSumCoeffGradient (5 ms) -[ RUN ] EltwiseLayerTest/1.TestSumCoeff -[ OK ] EltwiseLayerTest/1.TestSumCoeff (0 ms) -[ RUN ] EltwiseLayerTest/1.TestSumGradient -[ OK ] EltwiseLayerTest/1.TestSumGradient (4 ms) -[ RUN ] EltwiseLayerTest/1.TestStableProdGradient -[ OK ] EltwiseLayerTest/1.TestStableProdGradient (4 ms) -[ RUN ] EltwiseLayerTest/1.TestMax -[ OK ] EltwiseLayerTest/1.TestMax (0 ms) -[ RUN ] EltwiseLayerTest/1.TestMaxGradient -[ OK ] EltwiseLayerTest/1.TestMaxGradient (5 ms) -[ RUN ] EltwiseLayerTest/1.TestSetUp -[ OK ] EltwiseLayerTest/1.TestSetUp (0 ms) -[----------] 10 tests from EltwiseLayerTest/1 (22 ms total) +[----------] 2 tests from InfogainLossLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] InfogainLossLayerTest/1.TestInfogainLoss +[ OK ] InfogainLossLayerTest/1.TestInfogainLoss (0 ms) +[ RUN ] InfogainLossLayerTest/1.TestGradient +[ OK ] InfogainLossLayerTest/1.TestGradient (3 ms) +[----------] 2 tests from InfogainLossLayerTest/1 (3 ms total) -[----------] 8 tests from LRNLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] LRNLayerTest/0.TestForwardAcrossChannelsLargeRegion -[ OK ] LRNLayerTest/0.TestForwardAcrossChannelsLargeRegion (0 ms) -[ RUN ] LRNLayerTest/0.TestSetupWithinChannel -[ OK ] LRNLayerTest/0.TestSetupWithinChannel (0 ms) -[ RUN ] LRNLayerTest/0.TestSetupAcrossChannels -[ OK ] LRNLayerTest/0.TestSetupAcrossChannels (0 ms) -[ RUN ] LRNLayerTest/0.TestGradientWithinChannel -[ OK ] LRNLayerTest/0.TestGradientWithinChannel (489 ms) -[ RUN ] LRNLayerTest/0.TestGradientAcrossChannelsLargeRegion -[ OK ] LRNLayerTest/0.TestGradientAcrossChannelsLargeRegion (819 ms) -[ RUN ] LRNLayerTest/0.TestForwardWithinChannel -[ OK ] LRNLayerTest/0.TestForwardWithinChannel (0 ms) -[ RUN ] LRNLayerTest/0.TestForwardAcrossChannels -[ OK ] LRNLayerTest/0.TestForwardAcrossChannels (0 ms) -[ RUN ] LRNLayerTest/0.TestGradientAcrossChannels -[ OK ] LRNLayerTest/0.TestGradientAcrossChannels (641 ms) -[----------] 8 tests from LRNLayerTest/0 (1949 ms total) +[----------] 3 tests from FilterLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] FilterLayerTest/0.TestReshape +[ OK ] FilterLayerTest/0.TestReshape (0 ms) +[ RUN ] FilterLayerTest/0.TestForward +[ OK ] FilterLayerTest/0.TestForward (0 ms) +[ RUN ] FilterLayerTest/0.TestGradient +[ OK ] FilterLayerTest/0.TestGradient (261 ms) +[----------] 3 tests from FilterLayerTest/0 (261 ms total) -[----------] 1 test from ProtoTest -[ RUN ] ProtoTest.TestSerialization -Printing in binary format. 
+[----------] 26 tests from NetTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] NetTest/1.TestBottomNeedBackwardEuclideanForce +[ OK ] NetTest/1.TestBottomNeedBackwardEuclideanForce (1 ms) +[ RUN ] NetTest/1.TestSharedWeightsUpdate +[ OK ] NetTest/1.TestSharedWeightsUpdate (1 ms) +[ RUN ] NetTest/1.TestForcePropagateDown +[ OK ] NetTest/1.TestForcePropagateDown (0 ms) +[ RUN ] NetTest/1.TestBottomNeedBackwardTricky +[ OK ] NetTest/1.TestBottomNeedBackwardTricky (1 ms) +[ RUN ] NetTest/1.TestReshape +[ OK ] NetTest/1.TestReshape (2 ms) +[ RUN ] NetTest/1.TestGetBlob +[ OK ] NetTest/1.TestGetBlob (1 ms) +[ RUN ] NetTest/1.TestParamPropagateDown +[ OK ] NetTest/1.TestParamPropagateDown (2 ms) +[ RUN ] NetTest/1.TestLossWeight +[ OK ] NetTest/1.TestLossWeight (4 ms) +[ RUN ] NetTest/1.TestSharedWeightsDiffNet +[ OK ] NetTest/1.TestSharedWeightsDiffNet (0 ms) +[ RUN ] NetTest/1.TestAllInOneNetDeploy +[ OK ] NetTest/1.TestAllInOneNetDeploy (1 ms) +[ RUN ] NetTest/1.TestGetLayerByName +[ OK ] NetTest/1.TestGetLayerByName (1 ms) +[ RUN ] NetTest/1.TestUnsharedWeightsDataNet +[ OK ] NetTest/1.TestUnsharedWeightsDataNet (1 ms) +[ RUN ] NetTest/1.TestLossWeightMidNet +[ OK ] NetTest/1.TestLossWeightMidNet (4 ms) +[ RUN ] NetTest/1.TestSharedWeightsDataNet +[ OK ] NetTest/1.TestSharedWeightsDataNet (0 ms) +[ RUN ] NetTest/1.TestAllInOneNetVal +[ OK ] NetTest/1.TestAllInOneNetVal (1 ms) +[ RUN ] NetTest/1.TestBottomNeedBackwardForce +[ OK ] NetTest/1.TestBottomNeedBackwardForce (1 ms) +[ RUN ] NetTest/1.TestComboLossWeight +[ OK ] NetTest/1.TestComboLossWeight (3 ms) +[ RUN ] NetTest/1.TestBackwardWithAccuracyLayer +[ OK ] NetTest/1.TestBackwardWithAccuracyLayer (2 ms) +[ RUN ] NetTest/1.TestUnsharedWeightsDiffNet +[ OK ] NetTest/1.TestUnsharedWeightsDiffNet (1 ms) +[ RUN ] NetTest/1.TestSharedWeightsResume +[ OK ] NetTest/1.TestSharedWeightsResume (1 ms) +[ RUN ] NetTest/1.TestBottomNeedBackward +[ OK ] NetTest/1.TestBottomNeedBackward (1 ms) +[ RUN ] NetTest/1.TestFromTo +[ OK ] NetTest/1.TestFromTo (3 ms) +[ RUN ] NetTest/1.TestHasBlob +[ OK ] NetTest/1.TestHasBlob (1 ms) +[ RUN ] NetTest/1.TestSkipPropagateDown +[ OK ] NetTest/1.TestSkipPropagateDown (1 ms) +[ RUN ] NetTest/1.TestHasLayer +[ OK ] NetTest/1.TestHasLayer (1 ms) +[ RUN ] NetTest/1.TestAllInOneNetTrain +[ OK ] NetTest/1.TestAllInOneNetTrain (1 ms) +[----------] 26 tests from NetTest/1 (36 ms total) -testTest -Printing in text format. 
-name: "test" -type: "Test" +[----------] 8 tests from RMSPropSolverTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] RMSPropSolverTest/0.TestRMSPropLeastSquaresUpdateWithRmsDecay +[ OK ] RMSPropSolverTest/0.TestRMSPropLeastSquaresUpdateWithRmsDecay (73 ms) +[ RUN ] RMSPropSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare +[ OK ] RMSPropSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare (3 ms) +[ RUN ] RMSPropSolverTest/0.TestSnapshot +[ OK ] RMSPropSolverTest/0.TestSnapshot (15 ms) +[ RUN ] RMSPropSolverTest/0.TestSnapshotShare +[ OK ] RMSPropSolverTest/0.TestSnapshotShare (18 ms) +[ RUN ] RMSPropSolverTest/0.TestRMSPropLeastSquaresUpdateWithWeightDecay +[ OK ] RMSPropSolverTest/0.TestRMSPropLeastSquaresUpdateWithWeightDecay (15 ms) +[ RUN ] RMSPropSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum +[ OK ] RMSPropSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum (2 ms) +[ RUN ] RMSPropSolverTest/0.TestRMSPropLeastSquaresUpdateWithEverythingShare +[ OK ] RMSPropSolverTest/0.TestRMSPropLeastSquaresUpdateWithEverythingShare (78 ms) +[ RUN ] RMSPropSolverTest/0.TestRMSPropLeastSquaresUpdateWithEverything +[ OK ] RMSPropSolverTest/0.TestRMSPropLeastSquaresUpdateWithEverything (73 ms) +[----------] 8 tests from RMSPropSolverTest/0 (277 ms total) -[ OK ] ProtoTest.TestSerialization (0 ms) -[----------] 1 test from ProtoTest (0 ms total) +[----------] 5 tests from ImageDataLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] ImageDataLayerTest/0.TestReshape +[ OK ] ImageDataLayerTest/0.TestReshape (61 ms) +[ RUN ] ImageDataLayerTest/0.TestSpace +[ OK ] ImageDataLayerTest/0.TestSpace (54 ms) +[ RUN ] ImageDataLayerTest/0.TestRead +[ OK ] ImageDataLayerTest/0.TestRead (157 ms) +[ RUN ] ImageDataLayerTest/0.TestShuffle +[ OK ] ImageDataLayerTest/0.TestShuffle (161 ms) +[ RUN ] ImageDataLayerTest/0.TestResize +[ OK ] ImageDataLayerTest/0.TestResize (142 ms) +[----------] 5 tests from ImageDataLayerTest/0 (576 ms total) -[----------] 2 tests from EuclideanLossLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] EuclideanLossLayerTest/0.TestForward -[ OK ] EuclideanLossLayerTest/0.TestForward (0 ms) -[ RUN ] EuclideanLossLayerTest/0.TestGradient -[ OK ] EuclideanLossLayerTest/0.TestGradient (0 ms) -[----------] 2 tests from EuclideanLossLayerTest/0 (0 ms total) +[----------] 12 tests from SGDSolverTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdateWithWeightDecay +[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateWithWeightDecay (32 ms) +[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum +[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum (3 ms) +[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdateLROneHundredth +[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateLROneHundredth (16 ms) +[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare +[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare (4 ms) +[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdateWithMomentumMultiIter +[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateWithMomentumMultiIter (77 ms) +[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdateWithEverythingShare +[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateWithEverythingShare (80 ms) +[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdateWithEverything +[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateWithEverything (77 ms) +[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdateWithMomentum +[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateWithMomentum (30 ms) +[ RUN ] 
SGDSolverTest/0.TestLeastSquaresUpdateWithWeightDecayMultiIter +[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateWithWeightDecayMultiIter (76 ms) +[ RUN ] SGDSolverTest/0.TestSnapshotShare +[ OK ] SGDSolverTest/0.TestSnapshotShare (23 ms) +[ RUN ] SGDSolverTest/0.TestSnapshot +[ OK ] SGDSolverTest/0.TestSnapshot (17 ms) +[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdate +[ OK ] SGDSolverTest/0.TestLeastSquaresUpdate (16 ms) +[----------] 12 tests from SGDSolverTest/0 (453 ms total) -[----------] 11 tests from PoolingLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] PoolingLayerTest/0.TestSetupGlobalPooling -[ OK ] PoolingLayerTest/0.TestSetupGlobalPooling (0 ms) -[ RUN ] PoolingLayerTest/0.TestForwardMaxTopMask -[ OK ] PoolingLayerTest/0.TestForwardMaxTopMask (0 ms) -[ RUN ] PoolingLayerTest/0.TestForwardMaxPadded -[ OK ] PoolingLayerTest/0.TestForwardMaxPadded (0 ms) -[ RUN ] PoolingLayerTest/0.TestGradientAvePadded -[ OK ] PoolingLayerTest/0.TestGradientAvePadded (906 ms) -[ RUN ] PoolingLayerTest/0.TestGradientAve -[ OK ] PoolingLayerTest/0.TestGradientAve (245 ms) -[ RUN ] PoolingLayerTest/0.TestForwardMax -[ OK ] PoolingLayerTest/0.TestForwardMax (0 ms) -[ RUN ] PoolingLayerTest/0.TestSetup -[ OK ] PoolingLayerTest/0.TestSetup (0 ms) -[ RUN ] PoolingLayerTest/0.TestSetupPadded -[ OK ] PoolingLayerTest/0.TestSetupPadded (0 ms) -[ RUN ] PoolingLayerTest/0.TestGradientMaxTopMask -[ OK ] PoolingLayerTest/0.TestGradientMaxTopMask (655 ms) -[ RUN ] PoolingLayerTest/0.TestGradientMax -[ OK ] PoolingLayerTest/0.TestGradientMax (861 ms) -[ RUN ] PoolingLayerTest/0.TestForwardAve -[ OK ] PoolingLayerTest/0.TestForwardAve (0 ms) -[----------] 11 tests from PoolingLayerTest/0 (2667 ms total) +[----------] 9 tests from AccuracyLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] AccuracyLayerTest/1.TestSetupTopK +[ OK ] AccuracyLayerTest/1.TestSetupTopK (0 ms) +[ RUN ] AccuracyLayerTest/1.TestForwardTopK +[ OK ] AccuracyLayerTest/1.TestForwardTopK (2 ms) +[ RUN ] AccuracyLayerTest/1.TestSetup +[ OK ] AccuracyLayerTest/1.TestSetup (0 ms) +[ RUN ] AccuracyLayerTest/1.TestForwardWithSpatialAxes +[ OK ] AccuracyLayerTest/1.TestForwardWithSpatialAxes (1 ms) +[ RUN ] AccuracyLayerTest/1.TestForwardIgnoreLabel +[ OK ] AccuracyLayerTest/1.TestForwardIgnoreLabel (0 ms) +[ RUN ] AccuracyLayerTest/1.TestForwardPerClass +[ OK ] AccuracyLayerTest/1.TestForwardPerClass (1 ms) +[ RUN ] AccuracyLayerTest/1.TestForward +[ OK ] AccuracyLayerTest/1.TestForward (1 ms) +[ RUN ] AccuracyLayerTest/1.TestForwardPerClassWithIgnoreLabel +[ OK ] AccuracyLayerTest/1.TestForwardPerClassWithIgnoreLabel (1 ms) +[ RUN ] AccuracyLayerTest/1.TestSetupOutputPerClass +[ OK ] AccuracyLayerTest/1.TestSetupOutputPerClass (0 ms) +[----------] 9 tests from AccuracyLayerTest/1 (6 ms total) + +[----------] 5 tests from DBTest/1, where TypeParam = caffe::TypeLMDB +[ RUN ] DBTest/1.TestWrite +[ OK ] DBTest/1.TestWrite (15 ms) +[ RUN ] DBTest/1.TestKeyValue +[ OK ] DBTest/1.TestKeyValue (14 ms) +[ RUN ] DBTest/1.TestSeekToFirst +[ OK ] DBTest/1.TestSeekToFirst (14 ms) +[ RUN ] DBTest/1.TestNext +[ OK ] DBTest/1.TestNext (13 ms) +[ RUN ] DBTest/1.TestGetDB +[ OK ] DBTest/1.TestGetDB (13 ms) +[----------] 5 tests from DBTest/1 (69 ms total) [----------] 4 tests from UniformFillerTest/0, where TypeParam = float -[ RUN ] UniformFillerTest/0.TestFill -[ OK ] UniformFillerTest/0.TestFill (0 ms) [ RUN ] UniformFillerTest/0.TestFill1D [ OK ] UniformFillerTest/0.TestFill1D (0 ms) -[ RUN ] UniformFillerTest/0.TestFill2D -[ OK ] 
UniformFillerTest/0.TestFill2D (0 ms) +[ RUN ] UniformFillerTest/0.TestFill +[ OK ] UniformFillerTest/0.TestFill (0 ms) [ RUN ] UniformFillerTest/0.TestFill5D [ OK ] UniformFillerTest/0.TestFill5D (0 ms) +[ RUN ] UniformFillerTest/0.TestFill2D +[ OK ] UniformFillerTest/0.TestFill2D (0 ms) [----------] 4 tests from UniformFillerTest/0 (0 ms total) -[----------] 4 tests from GaussianFillerTest/1, where TypeParam = double -[ RUN ] GaussianFillerTest/1.TestFill1D -[ OK ] GaussianFillerTest/1.TestFill1D (0 ms) -[ RUN ] GaussianFillerTest/1.TestFill5D -[ OK ] GaussianFillerTest/1.TestFill5D (1 ms) -[ RUN ] GaussianFillerTest/1.TestFill -[ OK ] GaussianFillerTest/1.TestFill (0 ms) -[ RUN ] GaussianFillerTest/1.TestFill2D -[ OK ] GaussianFillerTest/1.TestFill2D (0 ms) -[----------] 4 tests from GaussianFillerTest/1 (1 ms total) - -[----------] 9 tests from AdaGradSolverTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] AdaGradSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare -[ OK ] AdaGradSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare (5 ms) -[ RUN ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdateWithEverything -[ OK ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdateWithEverything (79 ms) -[ RUN ] AdaGradSolverTest/0.TestSnapshotShare -[ OK ] AdaGradSolverTest/0.TestSnapshotShare (23 ms) -[ RUN ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdate -[ OK ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdate (15 ms) -[ RUN ] AdaGradSolverTest/0.TestSnapshot -[ OK ] AdaGradSolverTest/0.TestSnapshot (18 ms) -[ RUN ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdateWithWeightDecay -[ OK ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdateWithWeightDecay (16 ms) -[ RUN ] AdaGradSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum -[ OK ] AdaGradSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum (2 ms) -[ RUN ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdateWithEverythingShare -[ OK ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdateWithEverythingShare (84 ms) -[ RUN ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdateLROneHundredth -[ OK ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdateLROneHundredth (16 ms) -[----------] 9 tests from AdaGradSolverTest/0 (258 ms total) - -[----------] 5 tests from DeconvolutionLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] DeconvolutionLayerTest/1.TestNDAgainst2D -[ OK ] DeconvolutionLayerTest/1.TestNDAgainst2D (1581 ms) -[ RUN ] DeconvolutionLayerTest/1.TestSetup -[ OK ] DeconvolutionLayerTest/1.TestSetup (1 ms) -[ RUN ] DeconvolutionLayerTest/1.TestGradient3D -[ OK ] DeconvolutionLayerTest/1.TestGradient3D (327 ms) -[ RUN ] DeconvolutionLayerTest/1.TestGradient -[ OK ] DeconvolutionLayerTest/1.TestGradient (942 ms) -[ RUN ] DeconvolutionLayerTest/1.TestSimpleDeconvolution -[ OK ] DeconvolutionLayerTest/1.TestSimpleDeconvolution (0 ms) -[----------] 5 tests from DeconvolutionLayerTest/1 (2851 ms total) - -[----------] 1 test from SolverTypeUpgradeTest -[ RUN ] SolverTypeUpgradeTest.TestSimple -[ OK ] SolverTypeUpgradeTest.TestSimple (1 ms) -[----------] 1 test from SolverTypeUpgradeTest (1 ms total) +[----------] 6 tests from XavierFillerTest/1, where TypeParam = double +[ RUN ] XavierFillerTest/1.TestFillFanIn +[ OK ] XavierFillerTest/1.TestFillFanIn (43 ms) +[ RUN ] XavierFillerTest/1.TestFill5D +[ OK ] XavierFillerTest/1.TestFill5D (0 ms) +[ RUN ] XavierFillerTest/1.TestFill2D +[ OK ] XavierFillerTest/1.TestFill2D (0 ms) +[ RUN ] XavierFillerTest/1.TestFill1D +[ OK ] XavierFillerTest/1.TestFill1D (0 ms) +[ RUN ] 
XavierFillerTest/1.TestFillAverage +[ OK ] XavierFillerTest/1.TestFillAverage (43 ms) +[ RUN ] XavierFillerTest/1.TestFillFanOut +[ OK ] XavierFillerTest/1.TestFillFanOut (42 ms) +[----------] 6 tests from XavierFillerTest/1 (128 ms total) -[----------] 4 tests from ContrastiveLossLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] ContrastiveLossLayerTest/1.TestGradientLegacy -[ OK ] ContrastiveLossLayerTest/1.TestGradientLegacy (215 ms) -[ RUN ] ContrastiveLossLayerTest/1.TestForward -[ OK ] ContrastiveLossLayerTest/1.TestForward (0 ms) -[ RUN ] ContrastiveLossLayerTest/1.TestGradient -[ OK ] ContrastiveLossLayerTest/1.TestGradient (181 ms) -[ RUN ] ContrastiveLossLayerTest/1.TestForwardLegacy -[ OK ] ContrastiveLossLayerTest/1.TestForwardLegacy (0 ms) -[----------] 4 tests from ContrastiveLossLayerTest/1 (396 ms total) +[----------] 11 tests from CropLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] CropLayerTest/0.TestCrop5DGradient +[ OK ] CropLayerTest/0.TestCrop5DGradient (1788 ms) +[ RUN ] CropLayerTest/0.TestCropAll +[ OK ] CropLayerTest/0.TestCropAll (0 ms) +[ RUN ] CropLayerTest/0.TestSetupShapeAll +[ OK ] CropLayerTest/0.TestSetupShapeAll (0 ms) +[ RUN ] CropLayerTest/0.TestDimensionsCheck +[ OK ] CropLayerTest/0.TestDimensionsCheck (0 ms) +[ RUN ] CropLayerTest/0.TestCropHWGradient +[ OK ] CropLayerTest/0.TestCropHWGradient (441 ms) +[ RUN ] CropLayerTest/0.TestCropAllGradient +[ OK ] CropLayerTest/0.TestCropAllGradient (249 ms) +[ RUN ] CropLayerTest/0.TestSetupShapeNegativeIndexing +[ OK ] CropLayerTest/0.TestSetupShapeNegativeIndexing (0 ms) +[ RUN ] CropLayerTest/0.TestCrop5D +[ OK ] CropLayerTest/0.TestCrop5D (0 ms) +[ RUN ] CropLayerTest/0.TestCropAllOffset +[ OK ] CropLayerTest/0.TestCropAllOffset (0 ms) +[ RUN ] CropLayerTest/0.TestSetupShapeDefault +[ OK ] CropLayerTest/0.TestSetupShapeDefault (0 ms) +[ RUN ] CropLayerTest/0.TestCropHW +[ OK ] CropLayerTest/0.TestCropHW (0 ms) +[----------] 11 tests from CropLayerTest/0 (2478 ms total) -[----------] 2 tests from HingeLossLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] HingeLossLayerTest/1.TestGradientL2 -[ OK ] HingeLossLayerTest/1.TestGradientL2 (0 ms) -[ RUN ] HingeLossLayerTest/1.TestGradientL1 -[ OK ] HingeLossLayerTest/1.TestGradientL1 (0 ms) -[----------] 2 tests from HingeLossLayerTest/1 (0 ms total) +[----------] 6 tests from FlattenLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] FlattenLayerTest/0.TestSetupWithAxis +[ OK ] FlattenLayerTest/0.TestSetupWithAxis (0 ms) +[ RUN ] FlattenLayerTest/0.TestSetupWithStartAndEndAxis +[ OK ] FlattenLayerTest/0.TestSetupWithStartAndEndAxis (0 ms) +[ RUN ] FlattenLayerTest/0.TestGradient +[ OK ] FlattenLayerTest/0.TestGradient (3 ms) +[ RUN ] FlattenLayerTest/0.TestSetup +[ OK ] FlattenLayerTest/0.TestSetup (0 ms) +[ RUN ] FlattenLayerTest/0.TestSetupWithEndAxis +[ OK ] FlattenLayerTest/0.TestSetupWithEndAxis (0 ms) +[ RUN ] FlattenLayerTest/0.TestForward +[ OK ] FlattenLayerTest/0.TestForward (0 ms) +[----------] 6 tests from FlattenLayerTest/0 (3 ms total) -[----------] 3 tests from BlobMathTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] BlobMathTest/0.TestSumOfSquares -[ OK ] BlobMathTest/0.TestSumOfSquares (0 ms) -[ RUN ] BlobMathTest/0.TestScaleData -[ OK ] BlobMathTest/0.TestScaleData (0 ms) -[ RUN ] BlobMathTest/0.TestAsum -[ OK ] BlobMathTest/0.TestAsum (0 ms) -[----------] 3 tests from BlobMathTest/0 (0 ms total) +[----------] 5 tests from ImageDataLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] 
ImageDataLayerTest/1.TestReshape +[ OK ] ImageDataLayerTest/1.TestReshape (41 ms) +[ RUN ] ImageDataLayerTest/1.TestSpace +[ OK ] ImageDataLayerTest/1.TestSpace (45 ms) +[ RUN ] ImageDataLayerTest/1.TestRead +[ OK ] ImageDataLayerTest/1.TestRead (170 ms) +[ RUN ] ImageDataLayerTest/1.TestShuffle +[ OK ] ImageDataLayerTest/1.TestShuffle (171 ms) +[ RUN ] ImageDataLayerTest/1.TestResize +[ OK ] ImageDataLayerTest/1.TestResize (163 ms) +[----------] 5 tests from ImageDataLayerTest/1 (590 ms total) -[----------] 5 tests from ImageDataLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] ImageDataLayerTest/0.TestResize -[ OK ] ImageDataLayerTest/0.TestResize (139 ms) -[ RUN ] ImageDataLayerTest/0.TestSpace -[ OK ] ImageDataLayerTest/0.TestSpace (45 ms) -[ RUN ] ImageDataLayerTest/0.TestShuffle -[ OK ] ImageDataLayerTest/0.TestShuffle (275 ms) -[ RUN ] ImageDataLayerTest/0.TestRead -[ OK ] ImageDataLayerTest/0.TestRead (271 ms) -[ RUN ] ImageDataLayerTest/0.TestReshape -[ OK ] ImageDataLayerTest/0.TestReshape (95 ms) -[----------] 5 tests from ImageDataLayerTest/0 (826 ms total) +[----------] 1 test from HDF5OutputLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] HDF5OutputLayerTest/0.TestForward +[ OK ] HDF5OutputLayerTest/0.TestForward (35 ms) +[----------] 1 test from HDF5OutputLayerTest/0 (35 ms total) -[----------] 1 test from MultinomialLogisticLossLayerTest/0, where TypeParam = float -[ RUN ] MultinomialLogisticLossLayerTest/0.TestGradientCPU -[ OK ] MultinomialLogisticLossLayerTest/0.TestGradientCPU (0 ms) -[----------] 1 test from MultinomialLogisticLossLayerTest/0 (0 ms total) +[----------] 8 tests from Im2colLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] Im2colLayerTest/1.TestSetup +[ OK ] Im2colLayerTest/1.TestSetup (0 ms) +[ RUN ] Im2colLayerTest/1.TestGradientForceND +[ OK ] Im2colLayerTest/1.TestGradientForceND (588 ms) +[ RUN ] Im2colLayerTest/1.TestForward +[ OK ] Im2colLayerTest/1.TestForward (0 ms) +[ RUN ] Im2colLayerTest/1.TestDilatedGradientForceND +[ OK ] Im2colLayerTest/1.TestDilatedGradientForceND (1793 ms) +[ RUN ] Im2colLayerTest/1.TestRect +[ OK ] Im2colLayerTest/1.TestRect (0 ms) +[ RUN ] Im2colLayerTest/1.TestRectGradient +[ OK ] Im2colLayerTest/1.TestRectGradient (263 ms) +[ RUN ] Im2colLayerTest/1.TestGradient +[ OK ] Im2colLayerTest/1.TestGradient (313 ms) +[ RUN ] Im2colLayerTest/1.TestDilatedGradient +[ OK ] Im2colLayerTest/1.TestDilatedGradient (936 ms) +[----------] 8 tests from Im2colLayerTest/1 (3893 ms total) -[----------] 27 tests from ReductionLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] ReductionLayerTest/0.TestMeanCoeffGradientAxis1 -[ OK ] ReductionLayerTest/0.TestMeanCoeffGradientAxis1 (2 ms) -[ RUN ] ReductionLayerTest/0.TestSumCoeff -[ OK ] ReductionLayerTest/0.TestSumCoeff (0 ms) -[ RUN ] ReductionLayerTest/0.TestSetUpWithAxis1 -[ OK ] ReductionLayerTest/0.TestSetUpWithAxis1 (0 ms) -[ RUN ] ReductionLayerTest/0.TestMeanCoeff -[ OK ] ReductionLayerTest/0.TestMeanCoeff (0 ms) -[ RUN ] ReductionLayerTest/0.TestSumGradient -[ OK ] ReductionLayerTest/0.TestSumGradient (1 ms) -[ RUN ] ReductionLayerTest/0.TestAbsSumCoeffAxis1 -[ OK ] ReductionLayerTest/0.TestAbsSumCoeffAxis1 (0 ms) -[ RUN ] ReductionLayerTest/0.TestSetUpWithAxis2 -[ OK ] ReductionLayerTest/0.TestSetUpWithAxis2 (0 ms) -[ RUN ] ReductionLayerTest/0.TestSumOfSquaresCoeffAxis1Gradient -[ OK ] ReductionLayerTest/0.TestSumOfSquaresCoeffAxis1Gradient (2 ms) -[ RUN ] ReductionLayerTest/0.TestSumCoeffAxis1Gradient -[ OK ] 
ReductionLayerTest/0.TestSumCoeffAxis1Gradient (2 ms) -[ RUN ] ReductionLayerTest/0.TestMeanGradient -[ OK ] ReductionLayerTest/0.TestMeanGradient (6 ms) -[ RUN ] ReductionLayerTest/0.TestSumCoeffAxis1 -[ OK ] ReductionLayerTest/0.TestSumCoeffAxis1 (0 ms) -[ RUN ] ReductionLayerTest/0.TestAbsSumCoeffGradient -[ OK ] ReductionLayerTest/0.TestAbsSumCoeffGradient (1 ms) -[ RUN ] ReductionLayerTest/0.TestMeanCoeffAxis1 -[ OK ] ReductionLayerTest/0.TestMeanCoeffAxis1 (0 ms) -[ RUN ] ReductionLayerTest/0.TestMean -[ OK ] ReductionLayerTest/0.TestMean (0 ms) -[ RUN ] ReductionLayerTest/0.TestSetUp -[ OK ] ReductionLayerTest/0.TestSetUp (0 ms) -[ RUN ] ReductionLayerTest/0.TestSumOfSquaresCoeff -[ OK ] ReductionLayerTest/0.TestSumOfSquaresCoeff (0 ms) -[ RUN ] ReductionLayerTest/0.TestAbsSumCoeff -[ OK ] ReductionLayerTest/0.TestAbsSumCoeff (0 ms) -[ RUN ] ReductionLayerTest/0.TestSum -[ OK ] ReductionLayerTest/0.TestSum (0 ms) -[ RUN ] ReductionLayerTest/0.TestSumOfSquaresCoeffAxis1 -[ OK ] ReductionLayerTest/0.TestSumOfSquaresCoeffAxis1 (0 ms) -[ RUN ] ReductionLayerTest/0.TestAbsSum -[ OK ] ReductionLayerTest/0.TestAbsSum (0 ms) -[ RUN ] ReductionLayerTest/0.TestAbsSumCoeffAxis1Gradient -[ OK ] ReductionLayerTest/0.TestAbsSumCoeffAxis1Gradient (7 ms) -[ RUN ] ReductionLayerTest/0.TestSumOfSquaresCoeffGradient -[ OK ] ReductionLayerTest/0.TestSumOfSquaresCoeffGradient (1 ms) -[ RUN ] ReductionLayerTest/0.TestAbsSumGradient -[ OK ] ReductionLayerTest/0.TestAbsSumGradient (0 ms) -[ RUN ] ReductionLayerTest/0.TestSumOfSquaresGradient -[ OK ] ReductionLayerTest/0.TestSumOfSquaresGradient (2 ms) -[ RUN ] ReductionLayerTest/0.TestSumCoeffGradient -[ OK ] ReductionLayerTest/0.TestSumCoeffGradient (1 ms) -[ RUN ] ReductionLayerTest/0.TestSumOfSquares -[ OK ] ReductionLayerTest/0.TestSumOfSquares (0 ms) -[ RUN ] ReductionLayerTest/0.TestMeanCoeffGradient -[ OK ] ReductionLayerTest/0.TestMeanCoeffGradient (1 ms) -[----------] 27 tests from ReductionLayerTest/0 (26 ms total) +[----------] 9 tests from AdaGradSolverTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdate +[ OK ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdate (14 ms) +[ RUN ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdateLROneHundredth +[ OK ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdateLROneHundredth (14 ms) +[ RUN ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdateWithWeightDecay +[ OK ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdateWithWeightDecay (13 ms) +[ RUN ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdateWithEverythingShare +[ OK ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdateWithEverythingShare (70 ms) +[ RUN ] AdaGradSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare +[ OK ] AdaGradSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare (4 ms) +[ RUN ] AdaGradSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum +[ OK ] AdaGradSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum (3 ms) +[ RUN ] AdaGradSolverTest/0.TestSnapshot +[ OK ] AdaGradSolverTest/0.TestSnapshot (16 ms) +[ RUN ] AdaGradSolverTest/0.TestSnapshotShare +[ OK ] AdaGradSolverTest/0.TestSnapshotShare (21 ms) +[ RUN ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdateWithEverything +[ OK ] AdaGradSolverTest/0.TestAdaGradLeastSquaresUpdateWithEverything (75 ms) +[----------] 9 tests from AdaGradSolverTest/0 (230 ms total) -[----------] 1 test from SolverTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] SolverTest/0.TestInitTrainTestNets -[ OK ] SolverTest/0.TestInitTrainTestNets (3 ms) 
-[----------] 1 test from SolverTest/0 (3 ms total) +[----------] 8 tests from SliceLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] SliceLayerTest/0.TestTrivialSlice +[ OK ] SliceLayerTest/0.TestTrivialSlice (0 ms) +[ RUN ] SliceLayerTest/0.TestSliceAcrossNum +[ OK ] SliceLayerTest/0.TestSliceAcrossNum (0 ms) +[ RUN ] SliceLayerTest/0.TestSetupNum +[ OK ] SliceLayerTest/0.TestSetupNum (0 ms) +[ RUN ] SliceLayerTest/0.TestGradientAcrossChannels +[ OK ] SliceLayerTest/0.TestGradientAcrossChannels (46 ms) +[ RUN ] SliceLayerTest/0.TestSliceAcrossChannels +[ OK ] SliceLayerTest/0.TestSliceAcrossChannels (0 ms) +[ RUN ] SliceLayerTest/0.TestGradientAcrossNum +[ OK ] SliceLayerTest/0.TestGradientAcrossNum (40 ms) +[ RUN ] SliceLayerTest/0.TestSetupChannels +[ OK ] SliceLayerTest/0.TestSetupChannels (0 ms) +[ RUN ] SliceLayerTest/0.TestGradientTrivial +[ OK ] SliceLayerTest/0.TestGradientTrivial (13 ms) +[----------] 8 tests from SliceLayerTest/0 (100 ms total) [----------] 12 tests from ReshapeLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] ReshapeLayerTest/1.TestCopyDimensions -[ OK ] ReshapeLayerTest/1.TestCopyDimensions (0 ms) -[ RUN ] ReshapeLayerTest/1.TestInsertSingletonAxesStart -[ OK ] ReshapeLayerTest/1.TestInsertSingletonAxesStart (0 ms) [ RUN ] ReshapeLayerTest/1.TestFlattenMiddle [ OK ] ReshapeLayerTest/1.TestFlattenMiddle (0 ms) +[ RUN ] ReshapeLayerTest/1.TestFlattenOutputSizes +[ OK ] ReshapeLayerTest/1.TestFlattenOutputSizes (1 ms) +[ RUN ] ReshapeLayerTest/1.TestInsertSingletonAxesStart +[ OK ] ReshapeLayerTest/1.TestInsertSingletonAxesStart (0 ms) +[ RUN ] ReshapeLayerTest/1.TestInsertSingletonAxesEnd +[ OK ] ReshapeLayerTest/1.TestInsertSingletonAxesEnd (0 ms) +[ RUN ] ReshapeLayerTest/1.TestInferenceOfUnspecified +[ OK ] ReshapeLayerTest/1.TestInferenceOfUnspecified (0 ms) [ RUN ] ReshapeLayerTest/1.TestForwardAfterReshape [ OK ] ReshapeLayerTest/1.TestForwardAfterReshape (0 ms) -[ RUN ] ReshapeLayerTest/1.TestInferenceOfUnspecifiedWithStartAxis -[ OK ] ReshapeLayerTest/1.TestInferenceOfUnspecifiedWithStartAxis (0 ms) -[ RUN ] ReshapeLayerTest/1.TestInsertSingletonAxesMiddle -[ OK ] ReshapeLayerTest/1.TestInsertSingletonAxesMiddle (0 ms) +[ RUN ] ReshapeLayerTest/1.TestGradient +[ OK ] ReshapeLayerTest/1.TestGradient (3 ms) +[ RUN ] ReshapeLayerTest/1.TestCopyDimensions +[ OK ] ReshapeLayerTest/1.TestCopyDimensions (0 ms) [ RUN ] ReshapeLayerTest/1.TestFlattenValues [ OK ] ReshapeLayerTest/1.TestFlattenValues (0 ms) +[ RUN ] ReshapeLayerTest/1.TestInferenceOfUnspecifiedWithStartAxis +[ OK ] ReshapeLayerTest/1.TestInferenceOfUnspecifiedWithStartAxis (0 ms) [ RUN ] ReshapeLayerTest/1.TestForward [ OK ] ReshapeLayerTest/1.TestForward (0 ms) -[ RUN ] ReshapeLayerTest/1.TestFlattenOutputSizes -[ OK ] ReshapeLayerTest/1.TestFlattenOutputSizes (0 ms) -[ RUN ] ReshapeLayerTest/1.TestGradient -[ OK ] ReshapeLayerTest/1.TestGradient (6 ms) -[ RUN ] ReshapeLayerTest/1.TestInferenceOfUnspecified -[ OK ] ReshapeLayerTest/1.TestInferenceOfUnspecified (0 ms) -[ RUN ] ReshapeLayerTest/1.TestInsertSingletonAxesEnd -[ OK ] ReshapeLayerTest/1.TestInsertSingletonAxesEnd (0 ms) -[----------] 12 tests from ReshapeLayerTest/1 (6 ms total) - -[----------] 1 test from SolverFactoryTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] SolverFactoryTest/1.TestCreateSolver -[ OK ] SolverFactoryTest/1.TestCreateSolver (1 ms) -[----------] 1 test from SolverFactoryTest/1 (1 ms total) - -[----------] 22 tests from ScaleLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] 
ScaleLayerTest/1.TestForwardEltwise -[ OK ] ScaleLayerTest/1.TestForwardEltwise (0 ms) -[ RUN ] ScaleLayerTest/1.TestForwardBroadcastEnd -[ OK ] ScaleLayerTest/1.TestForwardBroadcastEnd (0 ms) -[ RUN ] ScaleLayerTest/1.TestBackwardEltwiseInPlace -[ OK ] ScaleLayerTest/1.TestBackwardEltwiseInPlace (0 ms) -[ RUN ] ScaleLayerTest/1.TestGradientBroadcastMiddle -[ OK ] ScaleLayerTest/1.TestGradientBroadcastMiddle (170 ms) -[ RUN ] ScaleLayerTest/1.TestGradientScaleAxis2 -[ OK ] ScaleLayerTest/1.TestGradientScaleAxis2 (125 ms) -[ RUN ] ScaleLayerTest/1.TestForwardBroadcastMiddleWithParamAndBias -[ OK ] ScaleLayerTest/1.TestForwardBroadcastMiddleWithParamAndBias (0 ms) -[ RUN ] ScaleLayerTest/1.TestForwardBroadcastMiddleInPlace -[ OK ] ScaleLayerTest/1.TestForwardBroadcastMiddleInPlace (0 ms) -[ RUN ] ScaleLayerTest/1.TestGradientBroadcastBegin -[ OK ] ScaleLayerTest/1.TestGradientBroadcastBegin (109 ms) -[ RUN ] ScaleLayerTest/1.TestForwardScaleAxis2 -[ OK ] ScaleLayerTest/1.TestForwardScaleAxis2 (0 ms) -[ RUN ] ScaleLayerTest/1.TestGradientScaleAndBias -[ OK ] ScaleLayerTest/1.TestGradientScaleAndBias (126 ms) -[ RUN ] ScaleLayerTest/1.TestGradientBroadcastEnd -[ OK ] ScaleLayerTest/1.TestGradientBroadcastEnd (381 ms) -[ RUN ] ScaleLayerTest/1.TestForwardScale -[ OK ] ScaleLayerTest/1.TestForwardScale (0 ms) -[ RUN ] ScaleLayerTest/1.TestGradientEltwiseWithParam -[ OK ] ScaleLayerTest/1.TestGradientEltwiseWithParam (403 ms) -[ RUN ] ScaleLayerTest/1.TestGradientBroadcastMiddleWithParam -[ OK ] ScaleLayerTest/1.TestGradientBroadcastMiddleWithParam (156 ms) -[ RUN ] ScaleLayerTest/1.TestGradientEltwise -[ OK ] ScaleLayerTest/1.TestGradientEltwise (9 ms) -[ RUN ] ScaleLayerTest/1.TestForwardEltwiseWithParam -[ OK ] ScaleLayerTest/1.TestForwardEltwiseWithParam (0 ms) -[ RUN ] ScaleLayerTest/1.TestForwardBroadcastBegin -[ OK ] ScaleLayerTest/1.TestForwardBroadcastBegin (0 ms) -[ RUN ] ScaleLayerTest/1.TestForwardBroadcastMiddle -[ OK ] ScaleLayerTest/1.TestForwardBroadcastMiddle (0 ms) -[ RUN ] ScaleLayerTest/1.TestForwardBroadcastMiddleWithParam -[ OK ] ScaleLayerTest/1.TestForwardBroadcastMiddleWithParam (0 ms) -[ RUN ] ScaleLayerTest/1.TestBackwardBroadcastMiddleInPlace -[ OK ] ScaleLayerTest/1.TestBackwardBroadcastMiddleInPlace (1 ms) -[ RUN ] ScaleLayerTest/1.TestForwardEltwiseInPlace -[ OK ] ScaleLayerTest/1.TestForwardEltwiseInPlace (0 ms) -[ RUN ] ScaleLayerTest/1.TestGradientScale -[ OK ] ScaleLayerTest/1.TestGradientScale (102 ms) -[----------] 22 tests from ScaleLayerTest/1 (1583 ms total) - -[----------] 12 tests from SGDSolverTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdate -[ OK ] SGDSolverTest/0.TestLeastSquaresUpdate (17 ms) -[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdateWithMomentum -[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateWithMomentum (34 ms) -[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare -[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare (3 ms) -[ RUN ] SGDSolverTest/0.TestSnapshotShare -[ OK ] SGDSolverTest/0.TestSnapshotShare (25 ms) -[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdateWithWeightDecay -[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateWithWeightDecay (42 ms) -[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdateWithEverythingShare -[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateWithEverythingShare (100 ms) -[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdateWithEverything -[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateWithEverything (97 ms) -[ RUN ] 
SGDSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum -[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum (2 ms) -[ RUN ] SGDSolverTest/0.TestSnapshot -[ OK ] SGDSolverTest/0.TestSnapshot (28 ms) -[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdateWithWeightDecayMultiIter -[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateWithWeightDecayMultiIter (105 ms) -[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdateLROneHundredth -[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateLROneHundredth (17 ms) -[ RUN ] SGDSolverTest/0.TestLeastSquaresUpdateWithMomentumMultiIter -[ OK ] SGDSolverTest/0.TestLeastSquaresUpdateWithMomentumMultiIter (112 ms) -[----------] 12 tests from SGDSolverTest/0 (584 ms total) - -[----------] 6 tests from RNNLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] RNNLayerTest/0.TestForward -[ OK ] RNNLayerTest/0.TestForward (5 ms) -[ RUN ] RNNLayerTest/0.TestGradientNonZeroCont -[ OK ] RNNLayerTest/0.TestGradientNonZeroCont (142 ms) -[ RUN ] RNNLayerTest/0.TestGradient -[ OK ] RNNLayerTest/0.TestGradient (132 ms) -[ RUN ] RNNLayerTest/0.TestGradientNonZeroContBufferSize2WithStaticInput -[ OK ] RNNLayerTest/0.TestGradientNonZeroContBufferSize2WithStaticInput (1174 ms) -[ RUN ] RNNLayerTest/0.TestGradientNonZeroContBufferSize2 -[ OK ] RNNLayerTest/0.TestGradientNonZeroContBufferSize2 (236 ms) -[ RUN ] RNNLayerTest/0.TestSetUp -[ OK ] RNNLayerTest/0.TestSetUp (2 ms) -[----------] 6 tests from RNNLayerTest/0 (1691 ms total) +[ RUN ] ReshapeLayerTest/1.TestInsertSingletonAxesMiddle +[ OK ] ReshapeLayerTest/1.TestInsertSingletonAxesMiddle (0 ms) +[----------] 12 tests from ReshapeLayerTest/1 (4 ms total) [----------] 3 tests from DummyDataLayerTest/0, where TypeParam = float -[ RUN ] DummyDataLayerTest/0.TestTwoTopConstant -[ OK ] DummyDataLayerTest/0.TestTwoTopConstant (0 ms) [ RUN ] DummyDataLayerTest/0.TestOneTopConstant [ OK ] DummyDataLayerTest/0.TestOneTopConstant (0 ms) [ RUN ] DummyDataLayerTest/0.TestThreeTopConstantGaussianConstant [ OK ] DummyDataLayerTest/0.TestThreeTopConstantGaussianConstant (0 ms) -[----------] 3 tests from DummyDataLayerTest/0 (0 ms total) +[ RUN ] DummyDataLayerTest/0.TestTwoTopConstant +[ OK ] DummyDataLayerTest/0.TestTwoTopConstant (1 ms) +[----------] 3 tests from DummyDataLayerTest/0 (1 ms total) -[----------] 6 tests from MSRAFillerTest/0, where TypeParam = float -[ RUN ] MSRAFillerTest/0.TestFillAverage -[ OK ] MSRAFillerTest/0.TestFillAverage (63 ms) -[ RUN ] MSRAFillerTest/0.TestFill1D -[ OK ] MSRAFillerTest/0.TestFill1D (0 ms) -[ RUN ] MSRAFillerTest/0.TestFillFanIn -[ OK ] MSRAFillerTest/0.TestFillFanIn (66 ms) -[ RUN ] MSRAFillerTest/0.TestFill2D -[ OK ] MSRAFillerTest/0.TestFill2D (0 ms) -[ RUN ] MSRAFillerTest/0.TestFill5D -[ OK ] MSRAFillerTest/0.TestFill5D (0 ms) -[ RUN ] MSRAFillerTest/0.TestFillFanOut -[ OK ] MSRAFillerTest/0.TestFillFanOut (61 ms) -[----------] 6 tests from MSRAFillerTest/0 (190 ms total) +[----------] 4 tests from UniformFillerTest/1, where TypeParam = double +[ RUN ] UniformFillerTest/1.TestFill +[ OK ] UniformFillerTest/1.TestFill (0 ms) +[ RUN ] UniformFillerTest/1.TestFill2D +[ OK ] UniformFillerTest/1.TestFill2D (0 ms) +[ RUN ] UniformFillerTest/1.TestFill5D +[ OK ] UniformFillerTest/1.TestFill5D (0 ms) +[ RUN ] UniformFillerTest/1.TestFill1D +[ OK ] UniformFillerTest/1.TestFill1D (0 ms) +[----------] 4 tests from UniformFillerTest/1 (0 ms total) -[----------] 5 tests from EmbedLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] EmbedLayerTest/0.TestForward -[ OK ] EmbedLayerTest/0.TestForward (0 ms) 
-[ RUN ] EmbedLayerTest/0.TestGradient -[ OK ] EmbedLayerTest/0.TestGradient (13 ms) -[ RUN ] EmbedLayerTest/0.TestForwardWithBias -[ OK ] EmbedLayerTest/0.TestForwardWithBias (1 ms) -[ RUN ] EmbedLayerTest/0.TestGradientWithBias -[ OK ] EmbedLayerTest/0.TestGradientWithBias (16 ms) -[ RUN ] EmbedLayerTest/0.TestSetUp -[ OK ] EmbedLayerTest/0.TestSetUp (0 ms) -[----------] 5 tests from EmbedLayerTest/0 (30 ms total) +[----------] 10 tests from EltwiseLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] EltwiseLayerTest/1.TestSetUp +[ OK ] EltwiseLayerTest/1.TestSetUp (0 ms) +[ RUN ] EltwiseLayerTest/1.TestSumGradient +[ OK ] EltwiseLayerTest/1.TestSumGradient (5 ms) +[ RUN ] EltwiseLayerTest/1.TestMax +[ OK ] EltwiseLayerTest/1.TestMax (0 ms) +[ RUN ] EltwiseLayerTest/1.TestStableProdGradient +[ OK ] EltwiseLayerTest/1.TestStableProdGradient (6 ms) +[ RUN ] EltwiseLayerTest/1.TestUnstableProdGradient +[ OK ] EltwiseLayerTest/1.TestUnstableProdGradient (5 ms) +[ RUN ] EltwiseLayerTest/1.TestSumCoeff +[ OK ] EltwiseLayerTest/1.TestSumCoeff (1 ms) +[ RUN ] EltwiseLayerTest/1.TestProd +[ OK ] EltwiseLayerTest/1.TestProd (0 ms) +[ RUN ] EltwiseLayerTest/1.TestMaxGradient +[ OK ] EltwiseLayerTest/1.TestMaxGradient (6 ms) +[ RUN ] EltwiseLayerTest/1.TestSum +[ OK ] EltwiseLayerTest/1.TestSum (0 ms) +[ RUN ] EltwiseLayerTest/1.TestSumCoeffGradient +[ OK ] EltwiseLayerTest/1.TestSumCoeffGradient (6 ms) +[----------] 10 tests from EltwiseLayerTest/1 (29 ms total) -[----------] 12 tests from NesterovSolverTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithWeightDecay -[ OK ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithWeightDecay (17 ms) -[ RUN ] NesterovSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum -[ OK ] NesterovSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum (2 ms) -[ RUN ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdate -[ OK ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdate (16 ms) -[ RUN ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithMomentum -[ OK ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithMomentum (33 ms) -[ RUN ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithEverything -[ OK ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithEverything (112 ms) -[ RUN ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithEverythingShare -[ OK ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithEverythingShare (131 ms) -[ RUN ] NesterovSolverTest/0.TestLeastSquaresUpdateWithMomentumMultiIter -[ OK ] NesterovSolverTest/0.TestLeastSquaresUpdateWithMomentumMultiIter (82 ms) -[ RUN ] NesterovSolverTest/0.TestSnapshotShare -[ OK ] NesterovSolverTest/0.TestSnapshotShare (205 ms) -[ RUN ] NesterovSolverTest/0.TestSnapshot -[ OK ] NesterovSolverTest/0.TestSnapshot (15 ms) -[ RUN ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateLROneHundredth -[ OK ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateLROneHundredth (16 ms) -[ RUN ] NesterovSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare -[ OK ] NesterovSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare (3 ms) -[ RUN ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithWeightDecayMultiIter -[ OK ] NesterovSolverTest/0.TestNesterovLeastSquaresUpdateWithWeightDecayMultiIter (79 ms) -[----------] 12 tests from NesterovSolverTest/0 (712 ms total) +[----------] 8 tests from Im2colLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] Im2colLayerTest/0.TestRect +[ OK ] Im2colLayerTest/0.TestRect (0 ms) 
+[ RUN ] Im2colLayerTest/0.TestGradientForceND +[ OK ] Im2colLayerTest/0.TestGradientForceND (631 ms) +[ RUN ] Im2colLayerTest/0.TestDilatedGradient +[ OK ] Im2colLayerTest/0.TestDilatedGradient (1032 ms) +[ RUN ] Im2colLayerTest/0.TestRectGradient +[ OK ] Im2colLayerTest/0.TestRectGradient (285 ms) +[ RUN ] Im2colLayerTest/0.TestDilatedGradientForceND +[ OK ] Im2colLayerTest/0.TestDilatedGradientForceND (1949 ms) +[ RUN ] Im2colLayerTest/0.TestGradient +[ OK ] Im2colLayerTest/0.TestGradient (362 ms) +[ RUN ] Im2colLayerTest/0.TestForward +[ OK ] Im2colLayerTest/0.TestForward (0 ms) +[ RUN ] Im2colLayerTest/0.TestSetup +[ OK ] Im2colLayerTest/0.TestSetup (0 ms) +[----------] 8 tests from Im2colLayerTest/0 (4259 ms total) -[----------] 8 tests from LRNLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] LRNLayerTest/1.TestForwardAcrossChannels -[ OK ] LRNLayerTest/1.TestForwardAcrossChannels (1 ms) -[ RUN ] LRNLayerTest/1.TestForwardWithinChannel -[ OK ] LRNLayerTest/1.TestForwardWithinChannel (0 ms) -[ RUN ] LRNLayerTest/1.TestSetupWithinChannel -[ OK ] LRNLayerTest/1.TestSetupWithinChannel (0 ms) -[ RUN ] LRNLayerTest/1.TestGradientWithinChannel -[ OK ] LRNLayerTest/1.TestGradientWithinChannel (1144 ms) -[ RUN ] LRNLayerTest/1.TestSetupAcrossChannels -[ OK ] LRNLayerTest/1.TestSetupAcrossChannels (0 ms) -[ RUN ] LRNLayerTest/1.TestGradientAcrossChannelsLargeRegion -[ OK ] LRNLayerTest/1.TestGradientAcrossChannelsLargeRegion (1566 ms) -[ RUN ] LRNLayerTest/1.TestGradientAcrossChannels -[ OK ] LRNLayerTest/1.TestGradientAcrossChannels (1358 ms) -[ RUN ] LRNLayerTest/1.TestForwardAcrossChannelsLargeRegion -[ OK ] LRNLayerTest/1.TestForwardAcrossChannelsLargeRegion (1 ms) -[----------] 8 tests from LRNLayerTest/1 (4070 ms total) +[----------] 9 tests from InnerProductLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] InnerProductLayerTest/1.TestBackwardTranspose +[ OK ] InnerProductLayerTest/1.TestBackwardTranspose (0 ms) +[ RUN ] InnerProductLayerTest/1.TestForwardNoBatch +[ OK ] InnerProductLayerTest/1.TestForwardNoBatch (0 ms) +[ RUN ] InnerProductLayerTest/1.TestGradient +[ OK ] InnerProductLayerTest/1.TestGradient (169 ms) +[ RUN ] InnerProductLayerTest/1.TestGradientTranspose +[ OK ] InnerProductLayerTest/1.TestGradientTranspose (182 ms) +[ RUN ] InnerProductLayerTest/1.TestSetUp +[ OK ] InnerProductLayerTest/1.TestSetUp (0 ms) +[ RUN ] InnerProductLayerTest/1.TestForward +[ OK ] InnerProductLayerTest/1.TestForward (0 ms) +[ RUN ] InnerProductLayerTest/1.TestSetUpTransposeTrue +[ OK ] InnerProductLayerTest/1.TestSetUpTransposeTrue (0 ms) +[ RUN ] InnerProductLayerTest/1.TestSetUpTransposeFalse +[ OK ] InnerProductLayerTest/1.TestSetUpTransposeFalse (0 ms) +[ RUN ] InnerProductLayerTest/1.TestForwardTranspose +[ OK ] InnerProductLayerTest/1.TestForwardTranspose (0 ms) +[----------] 9 tests from InnerProductLayerTest/1 (351 ms total) -[----------] 1 test from SolverFactoryTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] SolverFactoryTest/0.TestCreateSolver -[ OK ] SolverFactoryTest/0.TestCreateSolver (1 ms) -[----------] 1 test from SolverFactoryTest/0 (1 ms total) +[----------] 10 tests from PowerLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] PowerLayerTest/1.TestPowerGradientShiftZero +[ OK ] PowerLayerTest/1.TestPowerGradientShiftZero (8 ms) +[ RUN ] PowerLayerTest/1.TestPowerTwoScaleHalfGradient +[ OK ] PowerLayerTest/1.TestPowerTwoScaleHalfGradient (3 ms) +[ RUN ] PowerLayerTest/1.TestPowerGradient +[ OK ] PowerLayerTest/1.TestPowerGradient (7 ms) +[ RUN ] 
PowerLayerTest/1.TestPowerZeroGradient +[ OK ] PowerLayerTest/1.TestPowerZeroGradient (3 ms) +[ RUN ] PowerLayerTest/1.TestPowerOne +[ OK ] PowerLayerTest/1.TestPowerOne (0 ms) +[ RUN ] PowerLayerTest/1.TestPowerTwo +[ OK ] PowerLayerTest/1.TestPowerTwo (0 ms) +[ RUN ] PowerLayerTest/1.TestPowerZero +[ OK ] PowerLayerTest/1.TestPowerZero (0 ms) +[ RUN ] PowerLayerTest/1.TestPower +[ OK ] PowerLayerTest/1.TestPower (0 ms) +[ RUN ] PowerLayerTest/1.TestPowerOneGradient +[ OK ] PowerLayerTest/1.TestPowerOneGradient (2 ms) +[ RUN ] PowerLayerTest/1.TestPowerTwoGradient +[ OK ] PowerLayerTest/1.TestPowerTwoGradient (3 ms) +[----------] 10 tests from PowerLayerTest/1 (26 ms total) + +[----------] 6 tests from MVNLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] MVNLayerTest/1.TestGradientMeanOnly +[ OK ] MVNLayerTest/1.TestGradientMeanOnly (141 ms) +[ RUN ] MVNLayerTest/1.TestGradientAcrossChannels +[ OK ] MVNLayerTest/1.TestGradientAcrossChannels (264 ms) +[ RUN ] MVNLayerTest/1.TestForwardAcrossChannels +[ OK ] MVNLayerTest/1.TestForwardAcrossChannels (0 ms) +[ RUN ] MVNLayerTest/1.TestGradient +[ OK ] MVNLayerTest/1.TestGradient (244 ms) +[ RUN ] MVNLayerTest/1.TestForward +[ OK ] MVNLayerTest/1.TestForward (0 ms) +[ RUN ] MVNLayerTest/1.TestForwardMeanOnly +[ OK ] MVNLayerTest/1.TestForwardMeanOnly (0 ms) +[----------] 6 tests from MVNLayerTest/1 (649 ms total) + +[----------] 6 tests from MVNLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] MVNLayerTest/0.TestForwardMeanOnly +[ OK ] MVNLayerTest/0.TestForwardMeanOnly (0 ms) +[ RUN ] MVNLayerTest/0.TestGradient +[ OK ] MVNLayerTest/0.TestGradient (276 ms) +[ RUN ] MVNLayerTest/0.TestForward +[ OK ] MVNLayerTest/0.TestForward (0 ms) +[ RUN ] MVNLayerTest/0.TestGradientAcrossChannels +[ OK ] MVNLayerTest/0.TestGradientAcrossChannels (287 ms) +[ RUN ] MVNLayerTest/0.TestGradientMeanOnly +[ OK ] MVNLayerTest/0.TestGradientMeanOnly (144 ms) +[ RUN ] MVNLayerTest/0.TestForwardAcrossChannels +[ OK ] MVNLayerTest/0.TestForwardAcrossChannels (0 ms) +[----------] 6 tests from MVNLayerTest/0 (708 ms total) + +[----------] 2 tests from EuclideanLossLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] EuclideanLossLayerTest/0.TestGradient +[ OK ] EuclideanLossLayerTest/0.TestGradient (1 ms) +[ RUN ] EuclideanLossLayerTest/0.TestForward +[ OK ] EuclideanLossLayerTest/0.TestForward (0 ms) +[----------] 2 tests from EuclideanLossLayerTest/0 (1 ms total) + +[----------] 4 tests from BlobSimpleTest/0, where TypeParam = float +[ RUN ] BlobSimpleTest/0.TestReshapeZero +[ OK ] BlobSimpleTest/0.TestReshapeZero (0 ms) +[ RUN ] BlobSimpleTest/0.TestReshape +[ OK ] BlobSimpleTest/0.TestReshape (0 ms) +[ RUN ] BlobSimpleTest/0.TestLegacyBlobProtoShapeEquals +[ OK ] BlobSimpleTest/0.TestLegacyBlobProtoShapeEquals (0 ms) +[ RUN ] BlobSimpleTest/0.TestInitialization +[ OK ] BlobSimpleTest/0.TestInitialization (0 ms) +[----------] 4 tests from BlobSimpleTest/0 (0 ms total) + +[----------] 2 tests from InternalThreadTest +[ RUN ] InternalThreadTest.TestRandomSeed +[ OK ] InternalThreadTest.TestRandomSeed (0 ms) +[ RUN ] InternalThreadTest.TestStartAndExit +[ OK ] InternalThreadTest.TestStartAndExit (1 ms) +[----------] 2 tests from InternalThreadTest (1 ms total) + +[----------] 2 tests from HingeLossLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] HingeLossLayerTest/0.TestGradientL2 +[ OK ] HingeLossLayerTest/0.TestGradientL2 (0 ms) +[ RUN ] HingeLossLayerTest/0.TestGradientL1 +[ OK ] HingeLossLayerTest/0.TestGradientL1 (0 ms) +[----------] 2 
tests from HingeLossLayerTest/0 (0 ms total) + +[----------] 1 test from SolverTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] SolverTest/1.TestInitTrainTestNets +[ OK ] SolverTest/1.TestInitTrainTestNets (2 ms) +[----------] 1 test from SolverTest/1 (2 ms total) + +[----------] 1 test from HDF5OutputLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] HDF5OutputLayerTest/1.TestForward +[ OK ] HDF5OutputLayerTest/1.TestForward (3 ms) +[----------] 1 test from HDF5OutputLayerTest/1 (3 ms total) + +[----------] 2 tests from InfogainLossLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] InfogainLossLayerTest/0.TestGradient +[ OK ] InfogainLossLayerTest/0.TestGradient (2 ms) +[ RUN ] InfogainLossLayerTest/0.TestInfogainLoss +[ OK ] InfogainLossLayerTest/0.TestInfogainLoss (0 ms) +[----------] 2 tests from InfogainLossLayerTest/0 (2 ms total) + +[----------] 26 tests from NetTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] NetTest/0.TestAllInOneNetTrain +[ OK ] NetTest/0.TestAllInOneNetTrain (1 ms) +[ RUN ] NetTest/0.TestUnsharedWeightsDiffNet +[ OK ] NetTest/0.TestUnsharedWeightsDiffNet (1 ms) +[ RUN ] NetTest/0.TestAllInOneNetDeploy +[ OK ] NetTest/0.TestAllInOneNetDeploy (0 ms) +[ RUN ] NetTest/0.TestHasBlob +[ OK ] NetTest/0.TestHasBlob (1 ms) +[ RUN ] NetTest/0.TestSkipPropagateDown +[ OK ] NetTest/0.TestSkipPropagateDown (2 ms) +[ RUN ] NetTest/0.TestBottomNeedBackwardTricky +[ OK ] NetTest/0.TestBottomNeedBackwardTricky (1 ms) +[ RUN ] NetTest/0.TestHasLayer +[ OK ] NetTest/0.TestHasLayer (1 ms) +[ RUN ] NetTest/0.TestReshape +[ OK ] NetTest/0.TestReshape (1 ms) +[ RUN ] NetTest/0.TestSharedWeightsDataNet +[ OK ] NetTest/0.TestSharedWeightsDataNet (0 ms) +[ RUN ] NetTest/0.TestBottomNeedBackwardEuclideanForce +[ OK ] NetTest/0.TestBottomNeedBackwardEuclideanForce (1 ms) +[ RUN ] NetTest/0.TestLossWeightMidNet +[ OK ] NetTest/0.TestLossWeightMidNet (5 ms) +[ RUN ] NetTest/0.TestLossWeight +[ OK ] NetTest/0.TestLossWeight (5 ms) +[ RUN ] NetTest/0.TestSharedWeightsUpdate +[ OK ] NetTest/0.TestSharedWeightsUpdate (1 ms) +[ RUN ] NetTest/0.TestSharedWeightsDiffNet +[ OK ] NetTest/0.TestSharedWeightsDiffNet (1 ms) +[ RUN ] NetTest/0.TestGetLayerByName +[ OK ] NetTest/0.TestGetLayerByName (1 ms) +[ RUN ] NetTest/0.TestSharedWeightsResume +[ OK ] NetTest/0.TestSharedWeightsResume (1 ms) +[ RUN ] NetTest/0.TestGetBlob +[ OK ] NetTest/0.TestGetBlob (1 ms) +[ RUN ] NetTest/0.TestFromTo +[ OK ] NetTest/0.TestFromTo (3 ms) +[ RUN ] NetTest/0.TestParamPropagateDown +[ OK ] NetTest/0.TestParamPropagateDown (3 ms) +[ RUN ] NetTest/0.TestBottomNeedBackward +[ OK ] NetTest/0.TestBottomNeedBackward (1 ms) +[ RUN ] NetTest/0.TestBackwardWithAccuracyLayer +[ OK ] NetTest/0.TestBackwardWithAccuracyLayer (1 ms) +[ RUN ] NetTest/0.TestComboLossWeight +[ OK ] NetTest/0.TestComboLossWeight (4 ms) +[ RUN ] NetTest/0.TestBottomNeedBackwardForce +[ OK ] NetTest/0.TestBottomNeedBackwardForce (0 ms) +[ RUN ] NetTest/0.TestAllInOneNetVal +[ OK ] NetTest/0.TestAllInOneNetVal (0 ms) +[ RUN ] NetTest/0.TestUnsharedWeightsDataNet +[ OK ] NetTest/0.TestUnsharedWeightsDataNet (1 ms) +[ RUN ] NetTest/0.TestForcePropagateDown +[ OK ] NetTest/0.TestForcePropagateDown (1 ms) +[----------] 26 tests from NetTest/0 (40 ms total) + +[----------] 7 tests from TileLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] TileLayerTest/0.TestTrivialSetup +[ OK ] TileLayerTest/0.TestTrivialSetup (0 ms) +[ RUN ] TileLayerTest/0.TestGradientNum +[ OK ] TileLayerTest/0.TestGradientNum (288 ms) +[ RUN ] 
TileLayerTest/0.TestForwardNum +[ OK ] TileLayerTest/0.TestForwardNum (1 ms) +[ RUN ] TileLayerTest/0.TestGradientChannels +[ OK ] TileLayerTest/0.TestGradientChannels (289 ms) +[ RUN ] TileLayerTest/0.TestTrivialGradient +[ OK ] TileLayerTest/0.TestTrivialGradient (98 ms) +[ RUN ] TileLayerTest/0.TestSetup +[ OK ] TileLayerTest/0.TestSetup (0 ms) +[ RUN ] TileLayerTest/0.TestForwardChannels +[ OK ] TileLayerTest/0.TestForwardChannels (1 ms) +[----------] 7 tests from TileLayerTest/0 (677 ms total) + +[----------] 22 tests from ScaleLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] ScaleLayerTest/0.TestForwardEltwise +[ OK ] ScaleLayerTest/0.TestForwardEltwise (0 ms) +[ RUN ] ScaleLayerTest/0.TestForwardScaleAxis2 +[ OK ] ScaleLayerTest/0.TestForwardScaleAxis2 (0 ms) +[ RUN ] ScaleLayerTest/0.TestGradientBroadcastEnd +[ OK ] ScaleLayerTest/0.TestGradientBroadcastEnd (203 ms) +[ RUN ] ScaleLayerTest/0.TestForwardBroadcastEnd +[ OK ] ScaleLayerTest/0.TestForwardBroadcastEnd (0 ms) +[ RUN ] ScaleLayerTest/0.TestForwardBroadcastMiddleWithParamAndBias +[ OK ] ScaleLayerTest/0.TestForwardBroadcastMiddleWithParamAndBias (0 ms) +[ RUN ] ScaleLayerTest/0.TestForwardBroadcastMiddleInPlace +[ OK ] ScaleLayerTest/0.TestForwardBroadcastMiddleInPlace (0 ms) +[ RUN ] ScaleLayerTest/0.TestBackwardBroadcastMiddleInPlace +[ OK ] ScaleLayerTest/0.TestBackwardBroadcastMiddleInPlace (0 ms) +[ RUN ] ScaleLayerTest/0.TestForwardBroadcastMiddle +[ OK ] ScaleLayerTest/0.TestForwardBroadcastMiddle (0 ms) +[ RUN ] ScaleLayerTest/0.TestForwardBroadcastBegin +[ OK ] ScaleLayerTest/0.TestForwardBroadcastBegin (1 ms) +[ RUN ] ScaleLayerTest/0.TestGradientScaleAxis2 +[ OK ] ScaleLayerTest/0.TestGradientScaleAxis2 (96 ms) +[ RUN ] ScaleLayerTest/0.TestGradientScaleAndBias +[ OK ] ScaleLayerTest/0.TestGradientScaleAndBias (127 ms) +[ RUN ] ScaleLayerTest/0.TestForwardEltwiseInPlace +[ OK ] ScaleLayerTest/0.TestForwardEltwiseInPlace (0 ms) +[ RUN ] ScaleLayerTest/0.TestBackwardEltwiseInPlace +[ OK ] ScaleLayerTest/0.TestBackwardEltwiseInPlace (0 ms) +[ RUN ] ScaleLayerTest/0.TestForwardScale +[ OK ] ScaleLayerTest/0.TestForwardScale (0 ms) +[ RUN ] ScaleLayerTest/0.TestGradientEltwiseWithParam +[ OK ] ScaleLayerTest/0.TestGradientEltwiseWithParam (392 ms) +[ RUN ] ScaleLayerTest/0.TestGradientScale +[ OK ] ScaleLayerTest/0.TestGradientScale (101 ms) +[ RUN ] ScaleLayerTest/0.TestForwardEltwiseWithParam +[ OK ] ScaleLayerTest/0.TestForwardEltwiseWithParam (0 ms) +[ RUN ] ScaleLayerTest/0.TestGradientEltwise +[ OK ] ScaleLayerTest/0.TestGradientEltwise (6 ms) +[ RUN ] ScaleLayerTest/0.TestGradientBroadcastMiddle +[ OK ] ScaleLayerTest/0.TestGradientBroadcastMiddle (132 ms) +[ RUN ] ScaleLayerTest/0.TestGradientBroadcastMiddleWithParam +[ OK ] ScaleLayerTest/0.TestGradientBroadcastMiddleWithParam (132 ms) +[ RUN ] ScaleLayerTest/0.TestForwardBroadcastMiddleWithParam +[ OK ] ScaleLayerTest/0.TestForwardBroadcastMiddleWithParam (0 ms) +[ RUN ] ScaleLayerTest/0.TestGradientBroadcastBegin +[ OK ] ScaleLayerTest/0.TestGradientBroadcastBegin (116 ms) +[----------] 22 tests from ScaleLayerTest/0 (1306 ms total) + +[----------] 9 tests from InnerProductLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] InnerProductLayerTest/0.TestForwardTranspose +[ OK ] InnerProductLayerTest/0.TestForwardTranspose (1 ms) +[ RUN ] InnerProductLayerTest/0.TestBackwardTranspose +[ OK ] InnerProductLayerTest/0.TestBackwardTranspose (0 ms) +[ RUN ] InnerProductLayerTest/0.TestSetUpTransposeTrue +[ OK ] 
InnerProductLayerTest/0.TestSetUpTransposeTrue (0 ms) +[ RUN ] InnerProductLayerTest/0.TestForwardNoBatch +[ OK ] InnerProductLayerTest/0.TestForwardNoBatch (0 ms) +[ RUN ] InnerProductLayerTest/0.TestSetUp +[ OK ] InnerProductLayerTest/0.TestSetUp (0 ms) +[ RUN ] InnerProductLayerTest/0.TestGradientTranspose +[ OK ] InnerProductLayerTest/0.TestGradientTranspose (184 ms) +[ RUN ] InnerProductLayerTest/0.TestForward +[ OK ] InnerProductLayerTest/0.TestForward (0 ms) +[ RUN ] InnerProductLayerTest/0.TestSetUpTransposeFalse +[ OK ] InnerProductLayerTest/0.TestSetUpTransposeFalse (0 ms) +[ RUN ] InnerProductLayerTest/0.TestGradient +[ OK ] InnerProductLayerTest/0.TestGradient (182 ms) +[----------] 9 tests from InnerProductLayerTest/0 (367 ms total) [----------] 3 tests from SplitLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] SplitLayerTest/1.TestGradient -[ OK ] SplitLayerTest/1.TestGradient (7 ms) [ RUN ] SplitLayerTest/1.Test [ OK ] SplitLayerTest/1.Test (0 ms) +[ RUN ] SplitLayerTest/1.TestGradient +[ OK ] SplitLayerTest/1.TestGradient (7 ms) [ RUN ] SplitLayerTest/1.TestSetup [ OK ] SplitLayerTest/1.TestSetup (0 ms) [----------] 3 tests from SplitLayerTest/1 (7 ms total) -[----------] 5 tests from MemoryDataLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] MemoryDataLayerTest/1.AddMatVectorDefaultTransform -[ OK ] MemoryDataLayerTest/1.AddMatVectorDefaultTransform (2 ms) -[ RUN ] MemoryDataLayerTest/1.TestSetup -[ OK ] MemoryDataLayerTest/1.TestSetup (0 ms) -[ RUN ] MemoryDataLayerTest/1.TestForward -[ OK ] MemoryDataLayerTest/1.TestForward (46 ms) -[ RUN ] MemoryDataLayerTest/1.AddDatumVectorDefaultTransform -[ OK ] MemoryDataLayerTest/1.AddDatumVectorDefaultTransform (2 ms) -[ RUN ] MemoryDataLayerTest/1.TestSetBatchSize -[ OK ] MemoryDataLayerTest/1.TestSetBatchSize (2 ms) -[----------] 5 tests from MemoryDataLayerTest/1 (52 ms total) +[----------] 3 tests from SigmoidCrossEntropyLossLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] SigmoidCrossEntropyLossLayerTest/1.TestIgnoreGradient +[ OK ] SigmoidCrossEntropyLossLayerTest/1.TestIgnoreGradient (1 ms) +[ RUN ] SigmoidCrossEntropyLossLayerTest/1.TestSigmoidCrossEntropyLoss +[ OK ] SigmoidCrossEntropyLossLayerTest/1.TestSigmoidCrossEntropyLoss (3 ms) +[ RUN ] SigmoidCrossEntropyLossLayerTest/1.TestGradient +[ OK ] SigmoidCrossEntropyLossLayerTest/1.TestGradient (1 ms) +[----------] 3 tests from SigmoidCrossEntropyLossLayerTest/1 (5 ms total) + +[----------] 1 test from LayerFactoryTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] LayerFactoryTest/1.TestCreateLayer +[ OK ] LayerFactoryTest/1.TestCreateLayer (21 ms) +[----------] 1 test from LayerFactoryTest/1 (21 ms total) + +[----------] 2 tests from BilinearFillerTest/0, where TypeParam = float +[ RUN ] BilinearFillerTest/0.TestFillEven +[ OK ] BilinearFillerTest/0.TestFillEven (10 ms) +[ RUN ] BilinearFillerTest/0.TestFillOdd +[ OK ] BilinearFillerTest/0.TestFillOdd (13 ms) +[----------] 2 tests from BilinearFillerTest/0 (23 ms total) + +[----------] 8 tests from SplitLayerInsertionTest +[ RUN ] SplitLayerInsertionTest.TestInsertionTwoTop +[ OK ] SplitLayerInsertionTest.TestInsertionTwoTop (1 ms) +[ RUN ] SplitLayerInsertionTest.TestLossInsertion +[ OK ] SplitLayerInsertionTest.TestLossInsertion (0 ms) +[ RUN ] SplitLayerInsertionTest.TestInsertion +[ OK ] SplitLayerInsertionTest.TestInsertion (0 ms) +[ RUN ] SplitLayerInsertionTest.TestWithInPlace +[ OK ] SplitLayerInsertionTest.TestWithInPlace (0 ms) +[ RUN ] SplitLayerInsertionTest.TestNoInsertionImageNet 
+[ OK ] SplitLayerInsertionTest.TestNoInsertionImageNet (2 ms) +[ RUN ] SplitLayerInsertionTest.TestNoInsertion2 +[ OK ] SplitLayerInsertionTest.TestNoInsertion2 (0 ms) +[ RUN ] SplitLayerInsertionTest.TestNoInsertion1 +[ OK ] SplitLayerInsertionTest.TestNoInsertion1 (0 ms) +[ RUN ] SplitLayerInsertionTest.TestNoInsertionWithInPlace +[ OK ] SplitLayerInsertionTest.TestNoInsertionWithInPlace (1 ms) +[----------] 8 tests from SplitLayerInsertionTest (5 ms total) + +[----------] 4 tests from PositiveUnitballFillerTest/0, where TypeParam = float +[ RUN ] PositiveUnitballFillerTest/0.TestFill1D +[ OK ] PositiveUnitballFillerTest/0.TestFill1D (0 ms) +[ RUN ] PositiveUnitballFillerTest/0.TestFill5D +[ OK ] PositiveUnitballFillerTest/0.TestFill5D (0 ms) +[ RUN ] PositiveUnitballFillerTest/0.TestFill2D +[ OK ] PositiveUnitballFillerTest/0.TestFill2D (0 ms) +[ RUN ] PositiveUnitballFillerTest/0.TestFill +[ OK ] PositiveUnitballFillerTest/0.TestFill (0 ms) +[----------] 4 tests from PositiveUnitballFillerTest/0 (0 ms total) + +[----------] 8 tests from LRNLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] LRNLayerTest/0.TestForwardAcrossChannels +[ OK ] LRNLayerTest/0.TestForwardAcrossChannels (0 ms) +[ RUN ] LRNLayerTest/0.TestSetupWithinChannel +[ OK ] LRNLayerTest/0.TestSetupWithinChannel (0 ms) +[ RUN ] LRNLayerTest/0.TestGradientAcrossChannelsLargeRegion +[ OK ] LRNLayerTest/0.TestGradientAcrossChannelsLargeRegion (1461 ms) +[ RUN ] LRNLayerTest/0.TestForwardAcrossChannelsLargeRegion +[ OK ] LRNLayerTest/0.TestForwardAcrossChannelsLargeRegion (0 ms) +[ RUN ] LRNLayerTest/0.TestGradientAcrossChannels +[ OK ] LRNLayerTest/0.TestGradientAcrossChannels (1376 ms) +[ RUN ] LRNLayerTest/0.TestGradientWithinChannel +[ OK ] LRNLayerTest/0.TestGradientWithinChannel (770 ms) +[ RUN ] LRNLayerTest/0.TestSetupAcrossChannels +[ OK ] LRNLayerTest/0.TestSetupAcrossChannels (0 ms) +[ RUN ] LRNLayerTest/0.TestForwardWithinChannel +[ OK ] LRNLayerTest/0.TestForwardWithinChannel (1 ms) +[----------] 8 tests from LRNLayerTest/0 (3617 ms total) + +[----------] 14 tests from DataLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] DataLayerTest/0.TestReshapeLMDB +[ OK ] DataLayerTest/0.TestReshapeLMDB (35 ms) +[ RUN ] DataLayerTest/0.TestReadLMDB +[ OK ] DataLayerTest/0.TestReadLMDB (112 ms) +[ RUN ] DataLayerTest/0.TestSkipLevelDB +[ OK ] DataLayerTest/0.TestSkipLevelDB (51 ms) +[ RUN ] DataLayerTest/0.TestReadCropTrainSequenceUnseededLevelDB +[ OK ] DataLayerTest/0.TestReadCropTrainSequenceUnseededLevelDB (3 ms) +[ RUN ] DataLayerTest/0.TestReadCropTrainLevelDB +[ OK ] DataLayerTest/0.TestReadCropTrainLevelDB (2 ms) +[ RUN ] DataLayerTest/0.TestReshapeLevelDB +[ OK ] DataLayerTest/0.TestReshapeLevelDB (1 ms) +[ RUN ] DataLayerTest/0.TestReadLevelDB +[ OK ] DataLayerTest/0.TestReadLevelDB (4 ms) +[ RUN ] DataLayerTest/0.TestReadCropTrainSequenceUnseededLMDB +[ OK ] DataLayerTest/0.TestReadCropTrainSequenceUnseededLMDB (6 ms) +[ RUN ] DataLayerTest/0.TestReadCropTrainSequenceSeededLMDB +[ OK ] DataLayerTest/0.TestReadCropTrainSequenceSeededLMDB (2 ms) +[ RUN ] DataLayerTest/0.TestReadCropTrainSequenceSeededLevelDB +[ OK ] DataLayerTest/0.TestReadCropTrainSequenceSeededLevelDB (2 ms) +[ RUN ] DataLayerTest/0.TestReadCropTestLevelDB +[ OK ] DataLayerTest/0.TestReadCropTestLevelDB (2 ms) +[ RUN ] DataLayerTest/0.TestReadCropTrainLMDB +[ OK ] DataLayerTest/0.TestReadCropTrainLMDB (1 ms) +[ RUN ] DataLayerTest/0.TestReadCropTestLMDB +[ OK ] DataLayerTest/0.TestReadCropTestLMDB (1 ms) +[ RUN ] 
DataLayerTest/0.TestSkipLMDB +[ OK ] DataLayerTest/0.TestSkipLMDB (5 ms) +[----------] 14 tests from DataLayerTest/0 (227 ms total) + +[----------] 1 test from MultinomialLogisticLossLayerTest/0, where TypeParam = float +[ RUN ] MultinomialLogisticLossLayerTest/0.TestGradientCPU +[ OK ] MultinomialLogisticLossLayerTest/0.TestGradientCPU (1 ms) +[----------] 1 test from MultinomialLogisticLossLayerTest/0 (1 ms total) + +[----------] 1 test from LayerFactoryTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] LayerFactoryTest/0.TestCreateLayer +[ OK ] LayerFactoryTest/0.TestCreateLayer (1 ms) +[----------] 1 test from LayerFactoryTest/0 (1 ms total) + +[----------] 2 tests from SoftmaxLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] SoftmaxLayerTest/0.TestForward +[ OK ] SoftmaxLayerTest/0.TestForward (0 ms) +[ RUN ] SoftmaxLayerTest/0.TestGradient +[ OK ] SoftmaxLayerTest/0.TestGradient (196 ms) +[----------] 2 tests from SoftmaxLayerTest/0 (196 ms total) [----------] 11 tests from PoolingLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] PoolingLayerTest/1.TestSetup -[ OK ] PoolingLayerTest/1.TestSetup (0 ms) -[ RUN ] PoolingLayerTest/1.TestForwardAve -[ OK ] PoolingLayerTest/1.TestForwardAve (0 ms) -[ RUN ] PoolingLayerTest/1.TestSetupGlobalPooling -[ OK ] PoolingLayerTest/1.TestSetupGlobalPooling (0 ms) -[ RUN ] PoolingLayerTest/1.TestForwardMaxTopMask -[ OK ] PoolingLayerTest/1.TestForwardMaxTopMask (0 ms) -[ RUN ] PoolingLayerTest/1.TestGradientAvePadded -[ OK ] PoolingLayerTest/1.TestGradientAvePadded (1341 ms) -[ RUN ] PoolingLayerTest/1.TestGradientAve -[ OK ] PoolingLayerTest/1.TestGradientAve (310 ms) -[ RUN ] PoolingLayerTest/1.TestForwardMaxPadded -[ OK ] PoolingLayerTest/1.TestForwardMaxPadded (0 ms) -[ RUN ] PoolingLayerTest/1.TestGradientMaxTopMask -[ OK ] PoolingLayerTest/1.TestGradientMaxTopMask (795 ms) [ RUN ] PoolingLayerTest/1.TestSetupPadded [ OK ] PoolingLayerTest/1.TestSetupPadded (0 ms) [ RUN ] PoolingLayerTest/1.TestForwardMax [ OK ] PoolingLayerTest/1.TestForwardMax (0 ms) +[ RUN ] PoolingLayerTest/1.TestForwardAve +[ OK ] PoolingLayerTest/1.TestForwardAve (0 ms) [ RUN ] PoolingLayerTest/1.TestGradientMax -[ OK ] PoolingLayerTest/1.TestGradientMax (1001 ms) -[----------] 11 tests from PoolingLayerTest/1 (3447 ms total) +[ OK ] PoolingLayerTest/1.TestGradientMax (929 ms) +[ RUN ] PoolingLayerTest/1.TestForwardMaxPadded +[ OK ] PoolingLayerTest/1.TestForwardMaxPadded (0 ms) +[ RUN ] PoolingLayerTest/1.TestGradientAve +[ OK ] PoolingLayerTest/1.TestGradientAve (277 ms) +[ RUN ] PoolingLayerTest/1.TestGradientMaxTopMask +[ OK ] PoolingLayerTest/1.TestGradientMaxTopMask (832 ms) +[ RUN ] PoolingLayerTest/1.TestGradientAvePadded +[ OK ] PoolingLayerTest/1.TestGradientAvePadded (1165 ms) +[ RUN ] PoolingLayerTest/1.TestSetup +[ OK ] PoolingLayerTest/1.TestSetup (0 ms) +[ RUN ] PoolingLayerTest/1.TestForwardMaxTopMask +[ OK ] PoolingLayerTest/1.TestForwardMaxTopMask (0 ms) +[ RUN ] PoolingLayerTest/1.TestSetupGlobalPooling +[ OK ] PoolingLayerTest/1.TestSetupGlobalPooling (0 ms) +[----------] 11 tests from PoolingLayerTest/1 (3204 ms total) [----------] 12 tests from ReshapeLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] ReshapeLayerTest/0.TestInsertSingletonAxesStart -[ OK ] ReshapeLayerTest/0.TestInsertSingletonAxesStart (0 ms) -[ RUN ] ReshapeLayerTest/0.TestInferenceOfUnspecified -[ OK ] ReshapeLayerTest/0.TestInferenceOfUnspecified (0 ms) -[ RUN ] ReshapeLayerTest/0.TestInferenceOfUnspecifiedWithStartAxis -[ OK ] 
ReshapeLayerTest/0.TestInferenceOfUnspecifiedWithStartAxis (0 ms) -[ RUN ] ReshapeLayerTest/0.TestForwardAfterReshape -[ OK ] ReshapeLayerTest/0.TestForwardAfterReshape (0 ms) -[ RUN ] ReshapeLayerTest/0.TestFlattenValues -[ OK ] ReshapeLayerTest/0.TestFlattenValues (0 ms) -[ RUN ] ReshapeLayerTest/0.TestGradient -[ OK ] ReshapeLayerTest/0.TestGradient (3 ms) [ RUN ] ReshapeLayerTest/0.TestCopyDimensions [ OK ] ReshapeLayerTest/0.TestCopyDimensions (0 ms) +[ RUN ] ReshapeLayerTest/0.TestInferenceOfUnspecifiedWithStartAxis +[ OK ] ReshapeLayerTest/0.TestInferenceOfUnspecifiedWithStartAxis (0 ms) +[ RUN ] ReshapeLayerTest/0.TestFlattenOutputSizes +[ OK ] ReshapeLayerTest/0.TestFlattenOutputSizes (0 ms) [ RUN ] ReshapeLayerTest/0.TestForward [ OK ] ReshapeLayerTest/0.TestForward (0 ms) +[ RUN ] ReshapeLayerTest/0.TestFlattenMiddle +[ OK ] ReshapeLayerTest/0.TestFlattenMiddle (0 ms) +[ RUN ] ReshapeLayerTest/0.TestFlattenValues +[ OK ] ReshapeLayerTest/0.TestFlattenValues (1 ms) [ RUN ] ReshapeLayerTest/0.TestInsertSingletonAxesMiddle [ OK ] ReshapeLayerTest/0.TestInsertSingletonAxesMiddle (0 ms) -[ RUN ] ReshapeLayerTest/0.TestFlattenOutputSizes -[ OK ] ReshapeLayerTest/0.TestFlattenOutputSizes (0 ms) +[ RUN ] ReshapeLayerTest/0.TestForwardAfterReshape +[ OK ] ReshapeLayerTest/0.TestForwardAfterReshape (0 ms) +[ RUN ] ReshapeLayerTest/0.TestInsertSingletonAxesStart +[ OK ] ReshapeLayerTest/0.TestInsertSingletonAxesStart (0 ms) +[ RUN ] ReshapeLayerTest/0.TestInferenceOfUnspecified +[ OK ] ReshapeLayerTest/0.TestInferenceOfUnspecified (0 ms) [ RUN ] ReshapeLayerTest/0.TestInsertSingletonAxesEnd [ OK ] ReshapeLayerTest/0.TestInsertSingletonAxesEnd (0 ms) -[ RUN ] ReshapeLayerTest/0.TestFlattenMiddle -[ OK ] ReshapeLayerTest/0.TestFlattenMiddle (0 ms) -[----------] 12 tests from ReshapeLayerTest/0 (3 ms total) +[ RUN ] ReshapeLayerTest/0.TestGradient +[ OK ] ReshapeLayerTest/0.TestGradient (3 ms) +[----------] 12 tests from ReshapeLayerTest/0 (4 ms total) -[----------] 8 tests from SplitLayerInsertionTest -[ RUN ] SplitLayerInsertionTest.TestInsertionTwoTop -[ OK ] SplitLayerInsertionTest.TestInsertionTwoTop (1 ms) -[ RUN ] SplitLayerInsertionTest.TestNoInsertionWithInPlace -[ OK ] SplitLayerInsertionTest.TestNoInsertionWithInPlace (0 ms) -[ RUN ] SplitLayerInsertionTest.TestNoInsertion2 -[ OK ] SplitLayerInsertionTest.TestNoInsertion2 (0 ms) -[ RUN ] SplitLayerInsertionTest.TestWithInPlace -[ OK ] SplitLayerInsertionTest.TestWithInPlace (0 ms) -[ RUN ] SplitLayerInsertionTest.TestInsertion -[ OK ] SplitLayerInsertionTest.TestInsertion (1 ms) -[ RUN ] SplitLayerInsertionTest.TestNoInsertion1 -[ OK ] SplitLayerInsertionTest.TestNoInsertion1 (0 ms) -[ RUN ] SplitLayerInsertionTest.TestNoInsertionImageNet -[ OK ] SplitLayerInsertionTest.TestNoInsertionImageNet (1 ms) -[ RUN ] SplitLayerInsertionTest.TestLossInsertion -[ OK ] SplitLayerInsertionTest.TestLossInsertion (1 ms) -[----------] 8 tests from SplitLayerInsertionTest (4 ms total) +[----------] 6 tests from RNNLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] RNNLayerTest/1.TestGradientNonZeroContBufferSize2 +[ OK ] RNNLayerTest/1.TestGradientNonZeroContBufferSize2 (245 ms) +[ RUN ] RNNLayerTest/1.TestGradientNonZeroCont +[ OK ] RNNLayerTest/1.TestGradientNonZeroCont (123 ms) +[ RUN ] RNNLayerTest/1.TestForward +[ OK ] RNNLayerTest/1.TestForward (6 ms) +[ RUN ] RNNLayerTest/1.TestGradientNonZeroContBufferSize2WithStaticInput +[ OK ] RNNLayerTest/1.TestGradientNonZeroContBufferSize2WithStaticInput (1118 ms) +[ RUN ] 
RNNLayerTest/1.TestGradient +[ OK ] RNNLayerTest/1.TestGradient (112 ms) +[ RUN ] RNNLayerTest/1.TestSetUp +[ OK ] RNNLayerTest/1.TestSetUp (1 ms) +[----------] 6 tests from RNNLayerTest/1 (1605 ms total) -[----------] 5 tests from SPPLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] SPPLayerTest/1.TestGradient -[ OK ] SPPLayerTest/1.TestGradient (3060 ms) -[ RUN ] SPPLayerTest/1.TestForwardBackward -[ OK ] SPPLayerTest/1.TestForwardBackward (0 ms) -[ RUN ] SPPLayerTest/1.TestEqualOutputDims -[ OK ] SPPLayerTest/1.TestEqualOutputDims (0 ms) -[ RUN ] SPPLayerTest/1.TestSetup -[ OK ] SPPLayerTest/1.TestSetup (0 ms) -[ RUN ] SPPLayerTest/1.TestEqualOutputDims2 -[ OK ] SPPLayerTest/1.TestEqualOutputDims2 (0 ms) -[----------] 5 tests from SPPLayerTest/1 (3061 ms total) +[----------] 10 tests from PowerLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] PowerLayerTest/0.TestPowerZeroGradient +[ OK ] PowerLayerTest/0.TestPowerZeroGradient (2 ms) +[ RUN ] PowerLayerTest/0.TestPowerOne +[ OK ] PowerLayerTest/0.TestPowerOne (0 ms) +[ RUN ] PowerLayerTest/0.TestPower +[ OK ] PowerLayerTest/0.TestPower (0 ms) +[ RUN ] PowerLayerTest/0.TestPowerTwoGradient +[ OK ] PowerLayerTest/0.TestPowerTwoGradient (3 ms) +[ RUN ] PowerLayerTest/0.TestPowerGradientShiftZero +[ OK ] PowerLayerTest/0.TestPowerGradientShiftZero (3 ms) +[ RUN ] PowerLayerTest/0.TestPowerGradient +[ OK ] PowerLayerTest/0.TestPowerGradient (3 ms) +[ RUN ] PowerLayerTest/0.TestPowerOneGradient +[ OK ] PowerLayerTest/0.TestPowerOneGradient (2 ms) +[ RUN ] PowerLayerTest/0.TestPowerZero +[ OK ] PowerLayerTest/0.TestPowerZero (0 ms) +[ RUN ] PowerLayerTest/0.TestPowerTwo +[ OK ] PowerLayerTest/0.TestPowerTwo (0 ms) +[ RUN ] PowerLayerTest/0.TestPowerTwoScaleHalfGradient +[ OK ] PowerLayerTest/0.TestPowerTwoScaleHalfGradient (3 ms) +[----------] 10 tests from PowerLayerTest/0 (16 ms total) -[----------] 6 tests from FlattenLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] FlattenLayerTest/0.TestSetup -[ OK ] FlattenLayerTest/0.TestSetup (0 ms) -[ RUN ] FlattenLayerTest/0.TestSetupWithStartAndEndAxis -[ OK ] FlattenLayerTest/0.TestSetupWithStartAndEndAxis (0 ms) -[ RUN ] FlattenLayerTest/0.TestSetupWithEndAxis -[ OK ] FlattenLayerTest/0.TestSetupWithEndAxis (0 ms) -[ RUN ] FlattenLayerTest/0.TestSetupWithAxis -[ OK ] FlattenLayerTest/0.TestSetupWithAxis (0 ms) -[ RUN ] FlattenLayerTest/0.TestForward -[ OK ] FlattenLayerTest/0.TestForward (0 ms) -[ RUN ] FlattenLayerTest/0.TestGradient -[ OK ] FlattenLayerTest/0.TestGradient (3 ms) -[----------] 6 tests from FlattenLayerTest/0 (3 ms total) +[----------] 2 tests from HDF5DataLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] HDF5DataLayerTest/0.TestSkip +[ OK ] HDF5DataLayerTest/0.TestSkip (23 ms) +[ RUN ] HDF5DataLayerTest/0.TestRead +[ OK ] HDF5DataLayerTest/0.TestRead (5 ms) +[----------] 2 tests from HDF5DataLayerTest/0 (28 ms total) -[----------] 3 tests from FilterLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] FilterLayerTest/0.TestForward -[ OK ] FilterLayerTest/0.TestForward (0 ms) -[ RUN ] FilterLayerTest/0.TestGradient -[ OK ] FilterLayerTest/0.TestGradient (283 ms) -[ RUN ] FilterLayerTest/0.TestReshape -[ OK ] FilterLayerTest/0.TestReshape (0 ms) -[----------] 3 tests from FilterLayerTest/0 (283 ms total) +[----------] 6 tests from FlattenLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] FlattenLayerTest/1.TestSetup +[ OK ] FlattenLayerTest/1.TestSetup (0 ms) +[ RUN ] FlattenLayerTest/1.TestSetupWithEndAxis +[ OK ] 
FlattenLayerTest/1.TestSetupWithEndAxis (0 ms) +[ RUN ] FlattenLayerTest/1.TestForward +[ OK ] FlattenLayerTest/1.TestForward (0 ms) +[ RUN ] FlattenLayerTest/1.TestGradient +[ OK ] FlattenLayerTest/1.TestGradient (4 ms) +[ RUN ] FlattenLayerTest/1.TestSetupWithStartAndEndAxis +[ OK ] FlattenLayerTest/1.TestSetupWithStartAndEndAxis (0 ms) +[ RUN ] FlattenLayerTest/1.TestSetupWithAxis +[ OK ] FlattenLayerTest/1.TestSetupWithAxis (0 ms) +[----------] 6 tests from FlattenLayerTest/1 (4 ms total) -[----------] 12 tests from ArgMaxLayerTest/0, where TypeParam = float -[ RUN ] ArgMaxLayerTest/0.TestCPUAxisMaxValTopK -[ OK ] ArgMaxLayerTest/0.TestCPUAxisMaxValTopK (25 ms) -[ RUN ] ArgMaxLayerTest/0.TestSetup -[ OK ] ArgMaxLayerTest/0.TestSetup (0 ms) -[ RUN ] ArgMaxLayerTest/0.TestSetupAxisNegativeIndexing -[ OK ] ArgMaxLayerTest/0.TestSetupAxisNegativeIndexing (1 ms) -[ RUN ] ArgMaxLayerTest/0.TestCPU -[ OK ] ArgMaxLayerTest/0.TestCPU (1 ms) -[ RUN ] ArgMaxLayerTest/0.TestSetupMaxVal -[ OK ] ArgMaxLayerTest/0.TestSetupMaxVal (0 ms) -[ RUN ] ArgMaxLayerTest/0.TestCPUTopK -[ OK ] ArgMaxLayerTest/0.TestCPUTopK (1 ms) -[ RUN ] ArgMaxLayerTest/0.TestCPUAxis -[ OK ] ArgMaxLayerTest/0.TestCPUAxis (8 ms) -[ RUN ] ArgMaxLayerTest/0.TestSetupAxis -[ OK ] ArgMaxLayerTest/0.TestSetupAxis (1 ms) -[ RUN ] ArgMaxLayerTest/0.TestCPUMaxValTopK -[ OK ] ArgMaxLayerTest/0.TestCPUMaxValTopK (1 ms) -[ RUN ] ArgMaxLayerTest/0.TestCPUAxisTopK -[ OK ] ArgMaxLayerTest/0.TestCPUAxisTopK (39 ms) -[ RUN ] ArgMaxLayerTest/0.TestSetupAxisMaxVal -[ OK ] ArgMaxLayerTest/0.TestSetupAxisMaxVal (0 ms) -[ RUN ] ArgMaxLayerTest/0.TestCPUMaxVal -[ OK ] ArgMaxLayerTest/0.TestCPUMaxVal (1 ms) -[----------] 12 tests from ArgMaxLayerTest/0 (79 ms total) +[----------] 3 tests from MaxPoolingDropoutTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] MaxPoolingDropoutTest/0.TestSetup +[ OK ] MaxPoolingDropoutTest/0.TestSetup (0 ms) +[ RUN ] MaxPoolingDropoutTest/0.TestBackward +[ OK ] MaxPoolingDropoutTest/0.TestBackward (0 ms) +[ RUN ] MaxPoolingDropoutTest/0.TestForward +[ OK ] MaxPoolingDropoutTest/0.TestForward (0 ms) +[----------] 3 tests from MaxPoolingDropoutTest/0 (0 ms total) -[----------] 10 tests from EltwiseLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] EltwiseLayerTest/0.TestSetUp -[ OK ] EltwiseLayerTest/0.TestSetUp (0 ms) -[ RUN ] EltwiseLayerTest/0.TestStableProdGradient -[ OK ] EltwiseLayerTest/0.TestStableProdGradient (5 ms) -[ RUN ] EltwiseLayerTest/0.TestSumGradient -[ OK ] EltwiseLayerTest/0.TestSumGradient (5 ms) -[ RUN ] EltwiseLayerTest/0.TestSumCoeffGradient -[ OK ] EltwiseLayerTest/0.TestSumCoeffGradient (5 ms) -[ RUN ] EltwiseLayerTest/0.TestSum -[ OK ] EltwiseLayerTest/0.TestSum (0 ms) -[ RUN ] EltwiseLayerTest/0.TestUnstableProdGradient -[ OK ] EltwiseLayerTest/0.TestUnstableProdGradient (4 ms) -[ RUN ] EltwiseLayerTest/0.TestMaxGradient -[ OK ] EltwiseLayerTest/0.TestMaxGradient (5 ms) -[ RUN ] EltwiseLayerTest/0.TestMax -[ OK ] EltwiseLayerTest/0.TestMax (0 ms) -[ RUN ] EltwiseLayerTest/0.TestSumCoeff -[ OK ] EltwiseLayerTest/0.TestSumCoeff (0 ms) -[ RUN ] EltwiseLayerTest/0.TestProd -[ OK ] EltwiseLayerTest/0.TestProd (0 ms) -[----------] 10 tests from EltwiseLayerTest/0 (24 ms total) +[----------] 5 tests from BenchmarkTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] BenchmarkTest/0.TestTimerStart +[ OK ] BenchmarkTest/0.TestTimerStart (0 ms) +[ RUN ] BenchmarkTest/0.TestTimerSeconds +[ OK ] BenchmarkTest/0.TestTimerSeconds (300 ms) +[ RUN ] BenchmarkTest/0.TestTimerMilliSeconds +[ OK ] 
BenchmarkTest/0.TestTimerMilliSeconds (301 ms) +[ RUN ] BenchmarkTest/0.TestTimerStop +[ OK ] BenchmarkTest/0.TestTimerStop (0 ms) +[ RUN ] BenchmarkTest/0.TestTimerConstructor +[ OK ] BenchmarkTest/0.TestTimerConstructor (0 ms) +[----------] 5 tests from BenchmarkTest/0 (601 ms total) -[----------] 2 tests from BilinearFillerTest/1, where TypeParam = double -[ RUN ] BilinearFillerTest/1.TestFillEven -[ OK ] BilinearFillerTest/1.TestFillEven (9 ms) -[ RUN ] BilinearFillerTest/1.TestFillOdd -[ OK ] BilinearFillerTest/1.TestFillOdd (12 ms) -[----------] 2 tests from BilinearFillerTest/1 (21 ms total) +[----------] 11 tests from RandomNumberGeneratorTest/1, where TypeParam = double +[ RUN ] RandomNumberGeneratorTest/1.TestRngUniformTimesBernoulli +[ OK ] RandomNumberGeneratorTest/1.TestRngUniformTimesBernoulli (0 ms) +[ RUN ] RandomNumberGeneratorTest/1.TestRngBernoulli2 +[ OK ] RandomNumberGeneratorTest/1.TestRngBernoulli2 (0 ms) +[ RUN ] RandomNumberGeneratorTest/1.TestRngGaussianTimesGaussian +[ OK ] RandomNumberGeneratorTest/1.TestRngGaussianTimesGaussian (1 ms) +[ RUN ] RandomNumberGeneratorTest/1.TestRngGaussian +[ OK ] RandomNumberGeneratorTest/1.TestRngGaussian (0 ms) +[ RUN ] RandomNumberGeneratorTest/1.TestRngUniform2 +[ OK ] RandomNumberGeneratorTest/1.TestRngUniform2 (0 ms) +[ RUN ] RandomNumberGeneratorTest/1.TestRngBernoulliTimesBernoulli +[ OK ] RandomNumberGeneratorTest/1.TestRngBernoulliTimesBernoulli (1 ms) +[ RUN ] RandomNumberGeneratorTest/1.TestRngGaussian2 +[ OK ] RandomNumberGeneratorTest/1.TestRngGaussian2 (0 ms) +[ RUN ] RandomNumberGeneratorTest/1.TestRngBernoulli +[ OK ] RandomNumberGeneratorTest/1.TestRngBernoulli (0 ms) +[ RUN ] RandomNumberGeneratorTest/1.TestRngGaussianTimesBernoulli +[ OK ] RandomNumberGeneratorTest/1.TestRngGaussianTimesBernoulli (0 ms) +[ RUN ] RandomNumberGeneratorTest/1.TestRngUniform +[ OK ] RandomNumberGeneratorTest/1.TestRngUniform (0 ms) +[ RUN ] RandomNumberGeneratorTest/1.TestRngUniformTimesUniform +[ OK ] RandomNumberGeneratorTest/1.TestRngUniformTimesUniform (1 ms) +[----------] 11 tests from RandomNumberGeneratorTest/1 (3 ms total) -[----------] 1 test from MultinomialLogisticLossLayerTest/1, where TypeParam = double -[ RUN ] MultinomialLogisticLossLayerTest/1.TestGradientCPU -[ OK ] MultinomialLogisticLossLayerTest/1.TestGradientCPU (1 ms) -[----------] 1 test from MultinomialLogisticLossLayerTest/1 (1 ms total) +[----------] 3 tests from SplitLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] SplitLayerTest/0.Test +[ OK ] SplitLayerTest/0.Test (0 ms) +[ RUN ] SplitLayerTest/0.TestGradient +[ OK ] SplitLayerTest/0.TestGradient (8 ms) +[ RUN ] SplitLayerTest/0.TestSetup +[ OK ] SplitLayerTest/0.TestSetup (0 ms) +[----------] 3 tests from SplitLayerTest/0 (8 ms total) -[----------] 7 tests from TileLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] TileLayerTest/1.TestSetup -[ OK ] TileLayerTest/1.TestSetup (0 ms) -[ RUN ] TileLayerTest/1.TestGradientChannels -[ OK ] TileLayerTest/1.TestGradientChannels (293 ms) -[ RUN ] TileLayerTest/1.TestTrivialSetup -[ OK ] TileLayerTest/1.TestTrivialSetup (0 ms) -[ RUN ] TileLayerTest/1.TestTrivialGradient -[ OK ] TileLayerTest/1.TestTrivialGradient (91 ms) -[ RUN ] TileLayerTest/1.TestGradientNum -[ OK ] TileLayerTest/1.TestGradientNum (258 ms) -[ RUN ] TileLayerTest/1.TestForwardNum -[ OK ] TileLayerTest/1.TestForwardNum (0 ms) -[ RUN ] TileLayerTest/1.TestForwardChannels -[ OK ] TileLayerTest/1.TestForwardChannels (0 ms) -[----------] 7 tests from TileLayerTest/1 (642 ms total) 
+[----------] 12 tests from ArgMaxLayerTest/1, where TypeParam = double +[ RUN ] ArgMaxLayerTest/1.TestCPUMaxVal +[ OK ] ArgMaxLayerTest/1.TestCPUMaxVal (2 ms) +[ RUN ] ArgMaxLayerTest/1.TestCPU +[ OK ] ArgMaxLayerTest/1.TestCPU (1 ms) +[ RUN ] ArgMaxLayerTest/1.TestCPUAxisMaxValTopK +[ OK ] ArgMaxLayerTest/1.TestCPUAxisMaxValTopK (29 ms) +[ RUN ] ArgMaxLayerTest/1.TestSetup +[ OK ] ArgMaxLayerTest/1.TestSetup (1 ms) +[ RUN ] ArgMaxLayerTest/1.TestCPUTopK +[ OK ] ArgMaxLayerTest/1.TestCPUTopK (1 ms) +[ RUN ] ArgMaxLayerTest/1.TestSetupAxis +[ OK ] ArgMaxLayerTest/1.TestSetupAxis (1 ms) +[ RUN ] ArgMaxLayerTest/1.TestSetupMaxVal +[ OK ] ArgMaxLayerTest/1.TestSetupMaxVal (1 ms) +[ RUN ] ArgMaxLayerTest/1.TestCPUAxis +[ OK ] ArgMaxLayerTest/1.TestCPUAxis (13 ms) +[ RUN ] ArgMaxLayerTest/1.TestCPUAxisTopK +[ OK ] ArgMaxLayerTest/1.TestCPUAxisTopK (47 ms) +[ RUN ] ArgMaxLayerTest/1.TestSetupAxisMaxVal +[ OK ] ArgMaxLayerTest/1.TestSetupAxisMaxVal (0 ms) +[ RUN ] ArgMaxLayerTest/1.TestSetupAxisNegativeIndexing +[ OK ] ArgMaxLayerTest/1.TestSetupAxisNegativeIndexing (1 ms) +[ RUN ] ArgMaxLayerTest/1.TestCPUMaxValTopK +[ OK ] ArgMaxLayerTest/1.TestCPUMaxValTopK (2 ms) +[----------] 12 tests from ArgMaxLayerTest/1 (99 ms total) -[----------] 8 tests from SliceLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] SliceLayerTest/0.TestSetupChannels -[ OK ] SliceLayerTest/0.TestSetupChannels (0 ms) -[ RUN ] SliceLayerTest/0.TestSliceAcrossNum -[ OK ] SliceLayerTest/0.TestSliceAcrossNum (0 ms) -[ RUN ] SliceLayerTest/0.TestSetupNum -[ OK ] SliceLayerTest/0.TestSetupNum (0 ms) -[ RUN ] SliceLayerTest/0.TestGradientAcrossChannels -[ OK ] SliceLayerTest/0.TestGradientAcrossChannels (38 ms) -[ RUN ] SliceLayerTest/0.TestSliceAcrossChannels -[ OK ] SliceLayerTest/0.TestSliceAcrossChannels (0 ms) -[ RUN ] SliceLayerTest/0.TestTrivialSlice -[ OK ] SliceLayerTest/0.TestTrivialSlice (0 ms) -[ RUN ] SliceLayerTest/0.TestGradientTrivial -[ OK ] SliceLayerTest/0.TestGradientTrivial (10 ms) -[ RUN ] SliceLayerTest/0.TestGradientAcrossNum -[ OK ] SliceLayerTest/0.TestGradientAcrossNum (33 ms) -[----------] 8 tests from SliceLayerTest/0 (81 ms total) +[----------] 20 tests from FilterNetTest +[ RUN ] FilterNetTest.TestFilterLeNetTrainTest +[ OK ] FilterNetTest.TestFilterLeNetTrainTest (1 ms) +[ RUN ] FilterNetTest.TestFilterInOutByExcludeMultiRule +[ OK ] FilterNetTest.TestFilterInOutByExcludeMultiRule (1 ms) +[ RUN ] FilterNetTest.TestFilterInByIncludeMultiRule +[ OK ] FilterNetTest.TestFilterInByIncludeMultiRule (0 ms) +[ RUN ] FilterNetTest.TestFilterInByMultipleStage +[ OK ] FilterNetTest.TestFilterInByMultipleStage (0 ms) +[ RUN ] FilterNetTest.TestFilterInByMaxLevel2 +[ OK ] FilterNetTest.TestFilterInByMaxLevel2 (0 ms) +[ RUN ] FilterNetTest.TestFilterOutByStage +[ OK ] FilterNetTest.TestFilterOutByStage (0 ms) +[ RUN ] FilterNetTest.TestFilterOutByStage2 +[ OK ] FilterNetTest.TestFilterOutByStage2 (0 ms) +[ RUN ] FilterNetTest.TestFilterInByNotStage +[ OK ] FilterNetTest.TestFilterInByNotStage (0 ms) +[ RUN ] FilterNetTest.TestFilterOutByMinLevel +[ OK ] FilterNetTest.TestFilterOutByMinLevel (0 ms) +[ RUN ] FilterNetTest.TestFilterOutByMaxLevel +[ OK ] FilterNetTest.TestFilterOutByMaxLevel (0 ms) +[ RUN ] FilterNetTest.TestFilterOutByNotStage +[ OK ] FilterNetTest.TestFilterOutByNotStage (0 ms) +[ RUN ] FilterNetTest.TestFilterInByMaxLevel +[ OK ] FilterNetTest.TestFilterInByMaxLevel (1 ms) +[ RUN ] FilterNetTest.TestFilterInByMinLevel2 +[ OK ] FilterNetTest.TestFilterInByMinLevel2 (0 ms) +[ RUN ] 
FilterNetTest.TestFilterOutByMultipleStage +[ OK ] FilterNetTest.TestFilterOutByMultipleStage (0 ms) +[ RUN ] FilterNetTest.TestNoFilter +[ OK ] FilterNetTest.TestNoFilter (0 ms) +[ RUN ] FilterNetTest.TestFilterInOutByIncludeMultiRule +[ OK ] FilterNetTest.TestFilterInOutByIncludeMultiRule (0 ms) +[ RUN ] FilterNetTest.TestFilterInByStage2 +[ OK ] FilterNetTest.TestFilterInByStage2 (1 ms) +[ RUN ] FilterNetTest.TestFilterInByMultipleStage2 +[ OK ] FilterNetTest.TestFilterInByMultipleStage2 (0 ms) +[ RUN ] FilterNetTest.TestFilterInByMinLevel +[ OK ] FilterNetTest.TestFilterInByMinLevel (0 ms) +[ RUN ] FilterNetTest.TestFilterInByStage +[ OK ] FilterNetTest.TestFilterInByStage (0 ms) +[----------] 20 tests from FilterNetTest (5 ms total) + +[----------] 5 tests from DBTest/0, where TypeParam = caffe::TypeLevelDB +[ RUN ] DBTest/0.TestGetDB +[ OK ] DBTest/0.TestGetDB (13 ms) +[ RUN ] DBTest/0.TestSeekToFirst +[ OK ] DBTest/0.TestSeekToFirst (19 ms) +[ RUN ] DBTest/0.TestWrite +[ OK ] DBTest/0.TestWrite (21 ms) +[ RUN ] DBTest/0.TestNext +[ OK ] DBTest/0.TestNext (20 ms) +[ RUN ] DBTest/0.TestKeyValue +[ OK ] DBTest/0.TestKeyValue (21 ms) +[----------] 5 tests from DBTest/0 (95 ms total) + +[----------] 27 tests from ReductionLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] ReductionLayerTest/0.TestSumGradient +[ OK ] ReductionLayerTest/0.TestSumGradient (1 ms) +[ RUN ] ReductionLayerTest/0.TestMean +[ OK ] ReductionLayerTest/0.TestMean (0 ms) +[ RUN ] ReductionLayerTest/0.TestSetUp +[ OK ] ReductionLayerTest/0.TestSetUp (0 ms) +[ RUN ] ReductionLayerTest/0.TestSumCoeffAxis1 +[ OK ] ReductionLayerTest/0.TestSumCoeffAxis1 (0 ms) +[ RUN ] ReductionLayerTest/0.TestMeanCoeffGradientAxis1 +[ OK ] ReductionLayerTest/0.TestMeanCoeffGradientAxis1 (1 ms) +[ RUN ] ReductionLayerTest/0.TestSumCoeffGradient +[ OK ] ReductionLayerTest/0.TestSumCoeffGradient (1 ms) +[ RUN ] ReductionLayerTest/0.TestAbsSumCoeffGradient +[ OK ] ReductionLayerTest/0.TestAbsSumCoeffGradient (1 ms) +[ RUN ] ReductionLayerTest/0.TestSumOfSquares +[ OK ] ReductionLayerTest/0.TestSumOfSquares (0 ms) +[ RUN ] ReductionLayerTest/0.TestSum +[ OK ] ReductionLayerTest/0.TestSum (0 ms) +[ RUN ] ReductionLayerTest/0.TestSumCoeff +[ OK ] ReductionLayerTest/0.TestSumCoeff (0 ms) +[ RUN ] ReductionLayerTest/0.TestSetUpWithAxis1 +[ OK ] ReductionLayerTest/0.TestSetUpWithAxis1 (0 ms) +[ RUN ] ReductionLayerTest/0.TestMeanCoeffGradient +[ OK ] ReductionLayerTest/0.TestMeanCoeffGradient (0 ms) +[ RUN ] ReductionLayerTest/0.TestMeanCoeffAxis1 +[ OK ] ReductionLayerTest/0.TestMeanCoeffAxis1 (1 ms) +[ RUN ] ReductionLayerTest/0.TestSumOfSquaresCoeff +[ OK ] ReductionLayerTest/0.TestSumOfSquaresCoeff (0 ms) +[ RUN ] ReductionLayerTest/0.TestAbsSumCoeffAxis1 +[ OK ] ReductionLayerTest/0.TestAbsSumCoeffAxis1 (0 ms) +[ RUN ] ReductionLayerTest/0.TestSumOfSquaresGradient +[ OK ] ReductionLayerTest/0.TestSumOfSquaresGradient (0 ms) +[ RUN ] ReductionLayerTest/0.TestAbsSum +[ OK ] ReductionLayerTest/0.TestAbsSum (1 ms) +[ RUN ] ReductionLayerTest/0.TestSumOfSquaresCoeffGradient +[ OK ] ReductionLayerTest/0.TestSumOfSquaresCoeffGradient (0 ms) +[ RUN ] ReductionLayerTest/0.TestMeanCoeff +[ OK ] ReductionLayerTest/0.TestMeanCoeff (0 ms) +[ RUN ] ReductionLayerTest/0.TestSumOfSquaresCoeffAxis1Gradient +[ OK ] ReductionLayerTest/0.TestSumOfSquaresCoeffAxis1Gradient (2 ms) +[ RUN ] ReductionLayerTest/0.TestSetUpWithAxis2 +[ OK ] ReductionLayerTest/0.TestSetUpWithAxis2 (0 ms) +[ RUN ] ReductionLayerTest/0.TestMeanGradient +[ OK ] 
ReductionLayerTest/0.TestMeanGradient (1 ms) +[ RUN ] ReductionLayerTest/0.TestAbsSumCoeff +[ OK ] ReductionLayerTest/0.TestAbsSumCoeff (0 ms) +[ RUN ] ReductionLayerTest/0.TestAbsSumGradient +[ OK ] ReductionLayerTest/0.TestAbsSumGradient (1 ms) +[ RUN ] ReductionLayerTest/0.TestAbsSumCoeffAxis1Gradient +[ OK ] ReductionLayerTest/0.TestAbsSumCoeffAxis1Gradient (1 ms) +[ RUN ] ReductionLayerTest/0.TestSumCoeffAxis1Gradient +[ OK ] ReductionLayerTest/0.TestSumCoeffAxis1Gradient (2 ms) +[ RUN ] ReductionLayerTest/0.TestSumOfSquaresCoeffAxis1 +[ OK ] ReductionLayerTest/0.TestSumOfSquaresCoeffAxis1 (0 ms) +[----------] 27 tests from ReductionLayerTest/0 (13 ms total) + +[----------] 8 tests from SliceLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] SliceLayerTest/1.TestGradientTrivial +[ OK ] SliceLayerTest/1.TestGradientTrivial (13 ms) +[ RUN ] SliceLayerTest/1.TestSliceAcrossNum +[ OK ] SliceLayerTest/1.TestSliceAcrossNum (0 ms) +[ RUN ] SliceLayerTest/1.TestGradientAcrossNum +[ OK ] SliceLayerTest/1.TestGradientAcrossNum (46 ms) +[ RUN ] SliceLayerTest/1.TestSetupNum +[ OK ] SliceLayerTest/1.TestSetupNum (0 ms) +[ RUN ] SliceLayerTest/1.TestGradientAcrossChannels +[ OK ] SliceLayerTest/1.TestGradientAcrossChannels (49 ms) +[ RUN ] SliceLayerTest/1.TestTrivialSlice +[ OK ] SliceLayerTest/1.TestTrivialSlice (0 ms) +[ RUN ] SliceLayerTest/1.TestSetupChannels +[ OK ] SliceLayerTest/1.TestSetupChannels (0 ms) +[ RUN ] SliceLayerTest/1.TestSliceAcrossChannels +[ OK ] SliceLayerTest/1.TestSliceAcrossChannels (0 ms) +[----------] 8 tests from SliceLayerTest/1 (108 ms total) + +[----------] 9 tests from AdaGradSolverTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdateWithEverythingShare +[ OK ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdateWithEverythingShare (79 ms) +[ RUN ] AdaGradSolverTest/1.TestSnapshotShare +[ OK ] AdaGradSolverTest/1.TestSnapshotShare (20 ms) +[ RUN ] AdaGradSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum +[ OK ] AdaGradSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum (4 ms) +[ RUN ] AdaGradSolverTest/1.TestSnapshot +[ OK ] AdaGradSolverTest/1.TestSnapshot (19 ms) +[ RUN ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdate +[ OK ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdate (16 ms) +[ RUN ] AdaGradSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare +[ OK ] AdaGradSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare (4 ms) +[ RUN ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdateLROneHundredth +[ OK ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdateLROneHundredth (16 ms) +[ RUN ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdateWithEverything +[ OK ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdateWithEverything (82 ms) +[ RUN ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdateWithWeightDecay +[ OK ] AdaGradSolverTest/1.TestAdaGradLeastSquaresUpdateWithWeightDecay (16 ms) +[----------] 9 tests from AdaGradSolverTest/1 (256 ms total) [----------] 27 tests from ReductionLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] ReductionLayerTest/1.TestSetUpWithAxis2 -[ OK ] ReductionLayerTest/1.TestSetUpWithAxis2 (0 ms) -[ RUN ] ReductionLayerTest/1.TestSumOfSquaresCoeffAxis1 -[ OK ] ReductionLayerTest/1.TestSumOfSquaresCoeffAxis1 (0 ms) -[ RUN ] ReductionLayerTest/1.TestSumOfSquaresCoeff -[ OK ] ReductionLayerTest/1.TestSumOfSquaresCoeff (0 ms) -[ RUN ] ReductionLayerTest/1.TestMeanCoeffGradientAxis1 -[ OK ] ReductionLayerTest/1.TestMeanCoeffGradientAxis1 (2 ms) -[ RUN ] 
ReductionLayerTest/1.TestSumCoeff -[ OK ] ReductionLayerTest/1.TestSumCoeff (0 ms) -[ RUN ] ReductionLayerTest/1.TestSumCoeffGradient -[ OK ] ReductionLayerTest/1.TestSumCoeffGradient (0 ms) -[ RUN ] ReductionLayerTest/1.TestAbsSumGradient -[ OK ] ReductionLayerTest/1.TestAbsSumGradient (1 ms) -[ RUN ] ReductionLayerTest/1.TestMeanCoeffAxis1 -[ OK ] ReductionLayerTest/1.TestMeanCoeffAxis1 (0 ms) -[ RUN ] ReductionLayerTest/1.TestSumCoeffAxis1Gradient -[ OK ] ReductionLayerTest/1.TestSumCoeffAxis1Gradient (1 ms) [ RUN ] ReductionLayerTest/1.TestSetUpWithAxis1 [ OK ] ReductionLayerTest/1.TestSetUpWithAxis1 (0 ms) -[ RUN ] ReductionLayerTest/1.TestAbsSumCoeff -[ OK ] ReductionLayerTest/1.TestAbsSumCoeff (0 ms) -[ RUN ] ReductionLayerTest/1.TestAbsSum -[ OK ] ReductionLayerTest/1.TestAbsSum (0 ms) -[ RUN ] ReductionLayerTest/1.TestSetUp -[ OK ] ReductionLayerTest/1.TestSetUp (0 ms) [ RUN ] ReductionLayerTest/1.TestSum [ OK ] ReductionLayerTest/1.TestSum (0 ms) -[ RUN ] ReductionLayerTest/1.TestSumOfSquares -[ OK ] ReductionLayerTest/1.TestSumOfSquares (0 ms) [ RUN ] ReductionLayerTest/1.TestAbsSumCoeffAxis1Gradient [ OK ] ReductionLayerTest/1.TestAbsSumCoeffAxis1Gradient (2 ms) -[ RUN ] ReductionLayerTest/1.TestMeanCoeff -[ OK ] ReductionLayerTest/1.TestMeanCoeff (0 ms) -[ RUN ] ReductionLayerTest/1.TestSumOfSquaresCoeffGradient -[ OK ] ReductionLayerTest/1.TestSumOfSquaresCoeffGradient (0 ms) -[ RUN ] ReductionLayerTest/1.TestMeanGradient -[ OK ] ReductionLayerTest/1.TestMeanGradient (1 ms) -[ RUN ] ReductionLayerTest/1.TestSumOfSquaresGradient -[ OK ] ReductionLayerTest/1.TestSumOfSquaresGradient (1 ms) -[ RUN ] ReductionLayerTest/1.TestAbsSumCoeffGradient -[ OK ] ReductionLayerTest/1.TestAbsSumCoeffGradient (0 ms) -[ RUN ] ReductionLayerTest/1.TestMean -[ OK ] ReductionLayerTest/1.TestMean (0 ms) -[ RUN ] ReductionLayerTest/1.TestSumGradient -[ OK ] ReductionLayerTest/1.TestSumGradient (1 ms) -[ RUN ] ReductionLayerTest/1.TestSumOfSquaresCoeffAxis1Gradient -[ OK ] ReductionLayerTest/1.TestSumOfSquaresCoeffAxis1Gradient (1 ms) [ RUN ] ReductionLayerTest/1.TestSumCoeffAxis1 [ OK ] ReductionLayerTest/1.TestSumCoeffAxis1 (0 ms) [ RUN ] ReductionLayerTest/1.TestAbsSumCoeffAxis1 [ OK ] ReductionLayerTest/1.TestAbsSumCoeffAxis1 (0 ms) +[ RUN ] ReductionLayerTest/1.TestSumGradient +[ OK ] ReductionLayerTest/1.TestSumGradient (1 ms) [ RUN ] ReductionLayerTest/1.TestMeanCoeffGradient [ OK ] ReductionLayerTest/1.TestMeanCoeffGradient (1 ms) -[----------] 27 tests from ReductionLayerTest/1 (11 ms total) - -[----------] 11 tests from RandomNumberGeneratorTest/0, where TypeParam = float -[ RUN ] RandomNumberGeneratorTest/0.TestRngGaussianTimesBernoulli -[ OK ] RandomNumberGeneratorTest/0.TestRngGaussianTimesBernoulli (0 ms) -[ RUN ] RandomNumberGeneratorTest/0.TestRngBernoulli -[ OK ] RandomNumberGeneratorTest/0.TestRngBernoulli (0 ms) -[ RUN ] RandomNumberGeneratorTest/0.TestRngUniformTimesBernoulli -[ OK ] RandomNumberGeneratorTest/0.TestRngUniformTimesBernoulli (1 ms) -[ RUN ] RandomNumberGeneratorTest/0.TestRngGaussianTimesGaussian -[ OK ] RandomNumberGeneratorTest/0.TestRngGaussianTimesGaussian (0 ms) -[ RUN ] RandomNumberGeneratorTest/0.TestRngBernoulliTimesBernoulli -[ OK ] RandomNumberGeneratorTest/0.TestRngBernoulliTimesBernoulli (0 ms) -[ RUN ] RandomNumberGeneratorTest/0.TestRngUniformTimesUniform -[ OK ] RandomNumberGeneratorTest/0.TestRngUniformTimesUniform (0 ms) -[ RUN ] RandomNumberGeneratorTest/0.TestRngBernoulli2 -[ OK ] RandomNumberGeneratorTest/0.TestRngBernoulli2 (0 ms) -[ RUN ] 
RandomNumberGeneratorTest/0.TestRngUniform2 -[ OK ] RandomNumberGeneratorTest/0.TestRngUniform2 (1 ms) -[ RUN ] RandomNumberGeneratorTest/0.TestRngGaussian -[ OK ] RandomNumberGeneratorTest/0.TestRngGaussian (0 ms) -[ RUN ] RandomNumberGeneratorTest/0.TestRngGaussian2 -[ OK ] RandomNumberGeneratorTest/0.TestRngGaussian2 (0 ms) -[ RUN ] RandomNumberGeneratorTest/0.TestRngUniform -[ OK ] RandomNumberGeneratorTest/0.TestRngUniform (0 ms) -[----------] 11 tests from RandomNumberGeneratorTest/0 (2 ms total) - -[----------] 1 test from CPUStochasticPoolingLayerTest/1, where TypeParam = double -[ RUN ] CPUStochasticPoolingLayerTest/1.TestSetup -[ OK ] CPUStochasticPoolingLayerTest/1.TestSetup (0 ms) -[----------] 1 test from CPUStochasticPoolingLayerTest/1 (0 ms total) - -[----------] 3 tests from PaddingLayerUpgradeTest -[ RUN ] PaddingLayerUpgradeTest.TestTwoTops -[ OK ] PaddingLayerUpgradeTest.TestTwoTops (1 ms) -[ RUN ] PaddingLayerUpgradeTest.TestSimple -[ OK ] PaddingLayerUpgradeTest.TestSimple (0 ms) -[ RUN ] PaddingLayerUpgradeTest.TestImageNet -[ OK ] PaddingLayerUpgradeTest.TestImageNet (1 ms) -[----------] 3 tests from PaddingLayerUpgradeTest (2 ms total) - -[----------] 5 tests from SPPLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] SPPLayerTest/0.TestEqualOutputDims2 -[ OK ] SPPLayerTest/0.TestEqualOutputDims2 (1 ms) -[ RUN ] SPPLayerTest/0.TestForwardBackward -[ OK ] SPPLayerTest/0.TestForwardBackward (0 ms) -[ RUN ] SPPLayerTest/0.TestGradient -[ OK ] SPPLayerTest/0.TestGradient (3016 ms) -[ RUN ] SPPLayerTest/0.TestSetup -[ OK ] SPPLayerTest/0.TestSetup (0 ms) -[ RUN ] SPPLayerTest/0.TestEqualOutputDims -[ OK ] SPPLayerTest/0.TestEqualOutputDims (0 ms) -[----------] 5 tests from SPPLayerTest/0 (3017 ms total) - -[----------] 8 tests from AdamSolverTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] AdamSolverTest/0.TestAdamLeastSquaresUpdateWithEverything -[ OK ] AdamSolverTest/0.TestAdamLeastSquaresUpdateWithEverything (85 ms) -[ RUN ] AdamSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum -[ OK ] AdamSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum (3 ms) -[ RUN ] AdamSolverTest/0.TestAdamLeastSquaresUpdate -[ OK ] AdamSolverTest/0.TestAdamLeastSquaresUpdate (16 ms) -[ RUN ] AdamSolverTest/0.TestAdamLeastSquaresUpdateWithWeightDecay -[ OK ] AdamSolverTest/0.TestAdamLeastSquaresUpdateWithWeightDecay (16 ms) -[ RUN ] AdamSolverTest/0.TestSnapshot -[ OK ] AdamSolverTest/0.TestSnapshot (17 ms) -[ RUN ] AdamSolverTest/0.TestSnapshotShare -[ OK ] AdamSolverTest/0.TestSnapshotShare (23 ms) -[ RUN ] AdamSolverTest/0.TestAdamLeastSquaresUpdateWithEverythingShare -[ OK ] AdamSolverTest/0.TestAdamLeastSquaresUpdateWithEverythingShare (89 ms) -[ RUN ] AdamSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare -[ OK ] AdamSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare (4 ms) -[----------] 8 tests from AdamSolverTest/0 (254 ms total) - -[----------] 3 tests from DummyDataLayerTest/1, where TypeParam = double -[ RUN ] DummyDataLayerTest/1.TestTwoTopConstant -[ OK ] DummyDataLayerTest/1.TestTwoTopConstant (0 ms) -[ RUN ] DummyDataLayerTest/1.TestThreeTopConstantGaussianConstant -[ OK ] DummyDataLayerTest/1.TestThreeTopConstantGaussianConstant (0 ms) -[ RUN ] DummyDataLayerTest/1.TestOneTopConstant -[ OK ] DummyDataLayerTest/1.TestOneTopConstant (0 ms) -[----------] 3 tests from DummyDataLayerTest/1 (0 ms total) - -[----------] 1 test from HDF5OutputLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] HDF5OutputLayerTest/0.TestForward -[ OK ] 
HDF5OutputLayerTest/0.TestForward (2 ms) -[----------] 1 test from HDF5OutputLayerTest/0 (2 ms total) - -[----------] 5 tests from BenchmarkTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] BenchmarkTest/0.TestTimerConstructor -[ OK ] BenchmarkTest/0.TestTimerConstructor (0 ms) -[ RUN ] BenchmarkTest/0.TestTimerStop -[ OK ] BenchmarkTest/0.TestTimerStop (0 ms) -[ RUN ] BenchmarkTest/0.TestTimerStart -[ OK ] BenchmarkTest/0.TestTimerStart (0 ms) -[ RUN ] BenchmarkTest/0.TestTimerMilliSeconds -[ OK ] BenchmarkTest/0.TestTimerMilliSeconds (301 ms) -[ RUN ] BenchmarkTest/0.TestTimerSeconds -[ OK ] BenchmarkTest/0.TestTimerSeconds (304 ms) -[----------] 5 tests from BenchmarkTest/0 (605 ms total) +[ RUN ] ReductionLayerTest/1.TestMean +[ OK ] ReductionLayerTest/1.TestMean (0 ms) +[ RUN ] ReductionLayerTest/1.TestMeanCoeffGradientAxis1 +[ OK ] ReductionLayerTest/1.TestMeanCoeffGradientAxis1 (1 ms) +[ RUN ] ReductionLayerTest/1.TestMeanCoeffAxis1 +[ OK ] ReductionLayerTest/1.TestMeanCoeffAxis1 (0 ms) +[ RUN ] ReductionLayerTest/1.TestMeanGradient +[ OK ] ReductionLayerTest/1.TestMeanGradient (1 ms) +[ RUN ] ReductionLayerTest/1.TestSumOfSquaresCoeffAxis1Gradient +[ OK ] ReductionLayerTest/1.TestSumOfSquaresCoeffAxis1Gradient (2 ms) +[ RUN ] ReductionLayerTest/1.TestAbsSumGradient +[ OK ] ReductionLayerTest/1.TestAbsSumGradient (1 ms) +[ RUN ] ReductionLayerTest/1.TestSumOfSquaresCoeffAxis1 +[ OK ] ReductionLayerTest/1.TestSumOfSquaresCoeffAxis1 (0 ms) +[ RUN ] ReductionLayerTest/1.TestSumCoeffGradient +[ OK ] ReductionLayerTest/1.TestSumCoeffGradient (1 ms) +[ RUN ] ReductionLayerTest/1.TestSumCoeffAxis1Gradient +[ OK ] ReductionLayerTest/1.TestSumCoeffAxis1Gradient (1 ms) +[ RUN ] ReductionLayerTest/1.TestSumOfSquares +[ OK ] ReductionLayerTest/1.TestSumOfSquares (0 ms) +[ RUN ] ReductionLayerTest/1.TestAbsSumCoeffGradient +[ OK ] ReductionLayerTest/1.TestAbsSumCoeffGradient (1 ms) +[ RUN ] ReductionLayerTest/1.TestAbsSum +[ OK ] ReductionLayerTest/1.TestAbsSum (0 ms) +[ RUN ] ReductionLayerTest/1.TestSumCoeff +[ OK ] ReductionLayerTest/1.TestSumCoeff (0 ms) +[ RUN ] ReductionLayerTest/1.TestSumOfSquaresCoeff +[ OK ] ReductionLayerTest/1.TestSumOfSquaresCoeff (0 ms) +[ RUN ] ReductionLayerTest/1.TestSumOfSquaresCoeffGradient +[ OK ] ReductionLayerTest/1.TestSumOfSquaresCoeffGradient (2 ms) +[ RUN ] ReductionLayerTest/1.TestSetUpWithAxis2 +[ OK ] ReductionLayerTest/1.TestSetUpWithAxis2 (0 ms) +[ RUN ] ReductionLayerTest/1.TestAbsSumCoeff +[ OK ] ReductionLayerTest/1.TestAbsSumCoeff (0 ms) +[ RUN ] ReductionLayerTest/1.TestMeanCoeff +[ OK ] ReductionLayerTest/1.TestMeanCoeff (0 ms) +[ RUN ] ReductionLayerTest/1.TestSumOfSquaresGradient +[ OK ] ReductionLayerTest/1.TestSumOfSquaresGradient (1 ms) +[ RUN ] ReductionLayerTest/1.TestSetUp +[ OK ] ReductionLayerTest/1.TestSetUp (0 ms) +[----------] 27 tests from ReductionLayerTest/1 (15 ms total) -[----------] 5 tests from MemoryDataLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] MemoryDataLayerTest/0.TestForward -[ OK ] MemoryDataLayerTest/0.TestForward (80 ms) -[ RUN ] MemoryDataLayerTest/0.AddMatVectorDefaultTransform -[ OK ] MemoryDataLayerTest/0.AddMatVectorDefaultTransform (1 ms) -[ RUN ] MemoryDataLayerTest/0.TestSetup -[ OK ] MemoryDataLayerTest/0.TestSetup (0 ms) -[ RUN ] MemoryDataLayerTest/0.TestSetBatchSize -[ OK ] MemoryDataLayerTest/0.TestSetBatchSize (2 ms) -[ RUN ] MemoryDataLayerTest/0.AddDatumVectorDefaultTransform -[ OK ] MemoryDataLayerTest/0.AddDatumVectorDefaultTransform (9 ms) -[----------] 5 tests from 
MemoryDataLayerTest/0 (92 ms total) +[----------] 3 tests from ThresholdLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] ThresholdLayerTest/1.TestSetup +[ OK ] ThresholdLayerTest/1.TestSetup (0 ms) +[ RUN ] ThresholdLayerTest/1.Test2 +[ OK ] ThresholdLayerTest/1.Test2 (0 ms) +[ RUN ] ThresholdLayerTest/1.Test +[ OK ] ThresholdLayerTest/1.Test (0 ms) +[----------] 3 tests from ThresholdLayerTest/1 (0 ms total) -[----------] 3 tests from MaxPoolingDropoutTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] MaxPoolingDropoutTest/1.TestForward -[ OK ] MaxPoolingDropoutTest/1.TestForward (0 ms) -[ RUN ] MaxPoolingDropoutTest/1.TestSetup -[ OK ] MaxPoolingDropoutTest/1.TestSetup (0 ms) -[ RUN ] MaxPoolingDropoutTest/1.TestBackward -[ OK ] MaxPoolingDropoutTest/1.TestBackward (0 ms) -[----------] 3 tests from MaxPoolingDropoutTest/1 (0 ms total) +[----------] 12 tests from NesterovSolverTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdate +[ OK ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdate (16 ms) +[ RUN ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithWeightDecayMultiIter +[ OK ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithWeightDecayMultiIter (78 ms) +[ RUN ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithWeightDecay +[ OK ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithWeightDecay (16 ms) +[ RUN ] NesterovSolverTest/1.TestSnapshotShare +[ OK ] NesterovSolverTest/1.TestSnapshotShare (24 ms) +[ RUN ] NesterovSolverTest/1.TestSnapshot +[ OK ] NesterovSolverTest/1.TestSnapshot (18 ms) +[ RUN ] NesterovSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum +[ OK ] NesterovSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum (3 ms) +[ RUN ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithMomentum +[ OK ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithMomentum (32 ms) +[ RUN ] NesterovSolverTest/1.TestLeastSquaresUpdateWithMomentumMultiIter +[ OK ] NesterovSolverTest/1.TestLeastSquaresUpdateWithMomentumMultiIter (77 ms) +[ RUN ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateLROneHundredth +[ OK ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateLROneHundredth (16 ms) +[ RUN ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithEverythingShare +[ OK ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithEverythingShare (81 ms) +[ RUN ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithEverything +[ OK ] NesterovSolverTest/1.TestNesterovLeastSquaresUpdateWithEverything (78 ms) +[ RUN ] NesterovSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare +[ OK ] NesterovSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare (3 ms) +[----------] 12 tests from NesterovSolverTest/1 (442 ms total) -[----------] 3 tests from SigmoidCrossEntropyLossLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] SigmoidCrossEntropyLossLayerTest/0.TestSigmoidCrossEntropyLoss -[ OK ] SigmoidCrossEntropyLossLayerTest/0.TestSigmoidCrossEntropyLoss (2 ms) -[ RUN ] SigmoidCrossEntropyLossLayerTest/0.TestGradient -[ OK ] SigmoidCrossEntropyLossLayerTest/0.TestGradient (1 ms) -[ RUN ] SigmoidCrossEntropyLossLayerTest/0.TestIgnoreGradient -[ OK ] SigmoidCrossEntropyLossLayerTest/0.TestIgnoreGradient (0 ms) -[----------] 3 tests from SigmoidCrossEntropyLossLayerTest/0 (3 ms total) +[----------] 2 tests from BatchReindexLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] BatchReindexLayerTest/0.TestForward +[ OK ] BatchReindexLayerTest/0.TestForward (0 ms) +[ RUN ] 
BatchReindexLayerTest/0.TestGradient +[ OK ] BatchReindexLayerTest/0.TestGradient (144 ms) +[----------] 2 tests from BatchReindexLayerTest/0 (144 ms total) [----------] 10 tests from ConcatLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] ConcatLayerTest/1.TestForwardChannels -[ OK ] ConcatLayerTest/1.TestForwardChannels (0 ms) -[ RUN ] ConcatLayerTest/1.TestGradientTrivial -[ OK ] ConcatLayerTest/1.TestGradientTrivial (8 ms) -[ RUN ] ConcatLayerTest/1.TestGradientNum -[ OK ] ConcatLayerTest/1.TestGradientNum (14 ms) [ RUN ] ConcatLayerTest/1.TestSetupNum [ OK ] ConcatLayerTest/1.TestSetupNum (0 ms) +[ RUN ] ConcatLayerTest/1.TestForwardNum +[ OK ] ConcatLayerTest/1.TestForwardNum (1 ms) +[ RUN ] ConcatLayerTest/1.TestForwardChannels +[ OK ] ConcatLayerTest/1.TestForwardChannels (0 ms) +[ RUN ] ConcatLayerTest/1.TestGradientChannels +[ OK ] ConcatLayerTest/1.TestGradientChannels (4 ms) +[ RUN ] ConcatLayerTest/1.TestSetupChannelsNegativeIndexing +[ OK ] ConcatLayerTest/1.TestSetupChannelsNegativeIndexing (0 ms) [ RUN ] ConcatLayerTest/1.TestSetupChannels [ OK ] ConcatLayerTest/1.TestSetupChannels (0 ms) +[ RUN ] ConcatLayerTest/1.TestGradientTrivial +[ OK ] ConcatLayerTest/1.TestGradientTrivial (4 ms) [ RUN ] ConcatLayerTest/1.TestGradientChannelsBottomOneOnly [ OK ] ConcatLayerTest/1.TestGradientChannelsBottomOneOnly (3 ms) +[ RUN ] ConcatLayerTest/1.TestGradientNum +[ OK ] ConcatLayerTest/1.TestGradientNum (6 ms) [ RUN ] ConcatLayerTest/1.TestForwardTrivial [ OK ] ConcatLayerTest/1.TestForwardTrivial (0 ms) -[ RUN ] ConcatLayerTest/1.TestForwardNum -[ OK ] ConcatLayerTest/1.TestForwardNum (0 ms) -[ RUN ] ConcatLayerTest/1.TestSetupChannelsNegativeIndexing -[ OK ] ConcatLayerTest/1.TestSetupChannelsNegativeIndexing (0 ms) -[ RUN ] ConcatLayerTest/1.TestGradientChannels -[ OK ] ConcatLayerTest/1.TestGradientChannels (5 ms) -[----------] 10 tests from ConcatLayerTest/1 (30 ms total) - -[----------] 8 tests from RMSPropSolverTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] RMSPropSolverTest/0.TestRMSPropLeastSquaresUpdateWithWeightDecay -[ OK ] RMSPropSolverTest/0.TestRMSPropLeastSquaresUpdateWithWeightDecay (16 ms) -[ RUN ] RMSPropSolverTest/0.TestRMSPropLeastSquaresUpdateWithRmsDecay -[ OK ] RMSPropSolverTest/0.TestRMSPropLeastSquaresUpdateWithRmsDecay (102 ms) -[ RUN ] RMSPropSolverTest/0.TestSnapshot -[ OK ] RMSPropSolverTest/0.TestSnapshot (17 ms) -[ RUN ] RMSPropSolverTest/0.TestRMSPropLeastSquaresUpdateWithEverythingShare -[ OK ] RMSPropSolverTest/0.TestRMSPropLeastSquaresUpdateWithEverythingShare (87 ms) -[ RUN ] RMSPropSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum -[ OK ] RMSPropSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum (3 ms) -[ RUN ] RMSPropSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare -[ OK ] RMSPropSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare (4 ms) -[ RUN ] RMSPropSolverTest/0.TestRMSPropLeastSquaresUpdateWithEverything -[ OK ] RMSPropSolverTest/0.TestRMSPropLeastSquaresUpdateWithEverything (80 ms) -[ RUN ] RMSPropSolverTest/0.TestSnapshotShare -[ OK ] RMSPropSolverTest/0.TestSnapshotShare (23 ms) -[----------] 8 tests from RMSPropSolverTest/0 (332 ms total) - -[----------] 6 tests from XavierFillerTest/1, where TypeParam = double -[ RUN ] XavierFillerTest/1.TestFill2D -[ OK ] XavierFillerTest/1.TestFill2D (0 ms) -[ RUN ] XavierFillerTest/1.TestFill5D -[ OK ] XavierFillerTest/1.TestFill5D (0 ms) -[ RUN ] XavierFillerTest/1.TestFill1D -[ OK ] XavierFillerTest/1.TestFill1D (0 ms) -[ RUN ] 
XavierFillerTest/1.TestFillFanIn -[ OK ] XavierFillerTest/1.TestFillFanIn (53 ms) -[ RUN ] XavierFillerTest/1.TestFillFanOut -[ OK ] XavierFillerTest/1.TestFillFanOut (48 ms) -[ RUN ] XavierFillerTest/1.TestFillAverage -[ OK ] XavierFillerTest/1.TestFillAverage (47 ms) -[----------] 6 tests from XavierFillerTest/1 (148 ms total) - -[----------] 4 tests from GaussianFillerTest/0, where TypeParam = float -[ RUN ] GaussianFillerTest/0.TestFill2D -[ OK ] GaussianFillerTest/0.TestFill2D (1 ms) -[ RUN ] GaussianFillerTest/0.TestFill1D -[ OK ] GaussianFillerTest/0.TestFill1D (0 ms) -[ RUN ] GaussianFillerTest/0.TestFill -[ OK ] GaussianFillerTest/0.TestFill (0 ms) -[ RUN ] GaussianFillerTest/0.TestFill5D -[ OK ] GaussianFillerTest/0.TestFill5D (0 ms) -[----------] 4 tests from GaussianFillerTest/0 (1 ms total) - -[----------] 2 tests from BilinearFillerTest/0, where TypeParam = float -[ RUN ] BilinearFillerTest/0.TestFillOdd -[ OK ] BilinearFillerTest/0.TestFillOdd (13 ms) -[ RUN ] BilinearFillerTest/0.TestFillEven -[ OK ] BilinearFillerTest/0.TestFillEven (10 ms) -[----------] 2 tests from BilinearFillerTest/0 (23 ms total) - -[----------] 10 tests from PowerLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] PowerLayerTest/1.TestPowerZeroGradient -[ OK ] PowerLayerTest/1.TestPowerZeroGradient (2 ms) -[ RUN ] PowerLayerTest/1.TestPowerZero -[ OK ] PowerLayerTest/1.TestPowerZero (0 ms) -[ RUN ] PowerLayerTest/1.TestPower -[ OK ] PowerLayerTest/1.TestPower (0 ms) -[ RUN ] PowerLayerTest/1.TestPowerTwo -[ OK ] PowerLayerTest/1.TestPowerTwo (0 ms) -[ RUN ] PowerLayerTest/1.TestPowerGradientShiftZero -[ OK ] PowerLayerTest/1.TestPowerGradientShiftZero (8 ms) -[ RUN ] PowerLayerTest/1.TestPowerOneGradient -[ OK ] PowerLayerTest/1.TestPowerOneGradient (3 ms) -[ RUN ] PowerLayerTest/1.TestPowerTwoScaleHalfGradient -[ OK ] PowerLayerTest/1.TestPowerTwoScaleHalfGradient (3 ms) -[ RUN ] PowerLayerTest/1.TestPowerOne -[ OK ] PowerLayerTest/1.TestPowerOne (0 ms) -[ RUN ] PowerLayerTest/1.TestPowerTwoGradient -[ OK ] PowerLayerTest/1.TestPowerTwoGradient (4 ms) -[ RUN ] PowerLayerTest/1.TestPowerGradient -[ OK ] PowerLayerTest/1.TestPowerGradient (9 ms) -[----------] 10 tests from PowerLayerTest/1 (31 ms total) - -[----------] 4 tests from PositiveUnitballFillerTest/1, where TypeParam = double -[ RUN ] PositiveUnitballFillerTest/1.TestFill2D -[ OK ] PositiveUnitballFillerTest/1.TestFill2D (0 ms) -[ RUN ] PositiveUnitballFillerTest/1.TestFill5D -[ OK ] PositiveUnitballFillerTest/1.TestFill5D (0 ms) -[ RUN ] PositiveUnitballFillerTest/1.TestFill1D -[ OK ] PositiveUnitballFillerTest/1.TestFill1D (0 ms) -[ RUN ] PositiveUnitballFillerTest/1.TestFill -[ OK ] PositiveUnitballFillerTest/1.TestFill (0 ms) -[----------] 4 tests from PositiveUnitballFillerTest/1 (0 ms total) - -[----------] 2 tests from HingeLossLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] HingeLossLayerTest/0.TestGradientL1 -[ OK ] HingeLossLayerTest/0.TestGradientL1 (1 ms) -[ RUN ] HingeLossLayerTest/0.TestGradientL2 -[ OK ] HingeLossLayerTest/0.TestGradientL2 (0 ms) -[----------] 2 tests from HingeLossLayerTest/0 (1 ms total) - -[----------] 3 tests from BatchNormLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] BatchNormLayerTest/0.TestForward -[ OK ] BatchNormLayerTest/0.TestForward (0 ms) -[ RUN ] BatchNormLayerTest/0.TestForwardInplace -[ OK ] BatchNormLayerTest/0.TestForwardInplace (0 ms) -[ RUN ] BatchNormLayerTest/0.TestGradient -[ OK ] BatchNormLayerTest/0.TestGradient (213 ms) -[----------] 3 tests from 
BatchNormLayerTest/0 (213 ms total) - -[----------] 4 tests from SoftmaxWithLossLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] SoftmaxWithLossLayerTest/1.TestGradientIgnoreLabel -[ OK ] SoftmaxWithLossLayerTest/1.TestGradientIgnoreLabel (14 ms) -[ RUN ] SoftmaxWithLossLayerTest/1.TestGradientUnnormalized -[ OK ] SoftmaxWithLossLayerTest/1.TestGradientUnnormalized (16 ms) -[ RUN ] SoftmaxWithLossLayerTest/1.TestGradient -[ OK ] SoftmaxWithLossLayerTest/1.TestGradient (16 ms) -[ RUN ] SoftmaxWithLossLayerTest/1.TestForwardIgnoreLabel -[ OK ] SoftmaxWithLossLayerTest/1.TestForwardIgnoreLabel (0 ms) -[----------] 4 tests from SoftmaxWithLossLayerTest/1 (46 ms total) - -[----------] 3 tests from SplitLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] SplitLayerTest/0.TestGradient -[ OK ] SplitLayerTest/0.TestGradient (7 ms) -[ RUN ] SplitLayerTest/0.Test -[ OK ] SplitLayerTest/0.Test (0 ms) -[ RUN ] SplitLayerTest/0.TestSetup -[ OK ] SplitLayerTest/0.TestSetup (0 ms) -[----------] 3 tests from SplitLayerTest/0 (7 ms total) - -[----------] 3 tests from BlobMathTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] BlobMathTest/1.TestSumOfSquares -[ OK ] BlobMathTest/1.TestSumOfSquares (0 ms) -[ RUN ] BlobMathTest/1.TestScaleData -[ OK ] BlobMathTest/1.TestScaleData (1 ms) -[ RUN ] BlobMathTest/1.TestAsum -[ OK ] BlobMathTest/1.TestAsum (0 ms) -[----------] 3 tests from BlobMathTest/1 (1 ms total) - -[----------] 4 tests from UniformFillerTest/1, where TypeParam = double -[ RUN ] UniformFillerTest/1.TestFill1D -[ OK ] UniformFillerTest/1.TestFill1D (0 ms) -[ RUN ] UniformFillerTest/1.TestFill -[ OK ] UniformFillerTest/1.TestFill (0 ms) -[ RUN ] UniformFillerTest/1.TestFill5D -[ OK ] UniformFillerTest/1.TestFill5D (0 ms) -[ RUN ] UniformFillerTest/1.TestFill2D -[ OK ] UniformFillerTest/1.TestFill2D (0 ms) -[----------] 4 tests from UniformFillerTest/1 (0 ms total) - -[----------] 3 tests from ThresholdLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] ThresholdLayerTest/1.Test -[ OK ] ThresholdLayerTest/1.Test (0 ms) -[ RUN ] ThresholdLayerTest/1.TestSetup -[ OK ] ThresholdLayerTest/1.TestSetup (0 ms) -[ RUN ] ThresholdLayerTest/1.Test2 -[ OK ] ThresholdLayerTest/1.Test2 (0 ms) -[----------] 3 tests from ThresholdLayerTest/1 (0 ms total) - -[----------] 8 tests from Im2colLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] Im2colLayerTest/0.TestGradientForceND -[ OK ] Im2colLayerTest/0.TestGradientForceND (741 ms) -[ RUN ] Im2colLayerTest/0.TestGradient -[ OK ] Im2colLayerTest/0.TestGradient (358 ms) -[ RUN ] Im2colLayerTest/0.TestDilatedGradient -[ OK ] Im2colLayerTest/0.TestDilatedGradient (1062 ms) -[ RUN ] Im2colLayerTest/0.TestRect -[ OK ] Im2colLayerTest/0.TestRect (0 ms) -[ RUN ] Im2colLayerTest/0.TestSetup -[ OK ] Im2colLayerTest/0.TestSetup (0 ms) -[ RUN ] Im2colLayerTest/0.TestForward -[ OK ] Im2colLayerTest/0.TestForward (0 ms) -[ RUN ] Im2colLayerTest/0.TestRectGradient -[ OK ] Im2colLayerTest/0.TestRectGradient (291 ms) -[ RUN ] Im2colLayerTest/0.TestDilatedGradientForceND -[ OK ] Im2colLayerTest/0.TestDilatedGradientForceND (2022 ms) -[----------] 8 tests from Im2colLayerTest/0 (4474 ms total) - -[----------] 4 tests from ContrastiveLossLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] ContrastiveLossLayerTest/0.TestForward -[ OK ] ContrastiveLossLayerTest/0.TestForward (0 ms) -[ RUN ] ContrastiveLossLayerTest/0.TestForwardLegacy -[ OK ] ContrastiveLossLayerTest/0.TestForwardLegacy (1 ms) -[ RUN ] ContrastiveLossLayerTest/0.TestGradient -[ OK ] 
ContrastiveLossLayerTest/0.TestGradient (254 ms) -[ RUN ] ContrastiveLossLayerTest/0.TestGradientLegacy -[ OK ] ContrastiveLossLayerTest/0.TestGradientLegacy (236 ms) -[----------] 4 tests from ContrastiveLossLayerTest/0 (491 ms total) - -[----------] 9 tests from LSTMLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] LSTMLayerTest/1.TestLSTMUnitGradientNonZeroCont -[ OK ] LSTMLayerTest/1.TestLSTMUnitGradientNonZeroCont (55 ms) -[ RUN ] LSTMLayerTest/1.TestLSTMUnitGradient -[ OK ] LSTMLayerTest/1.TestLSTMUnitGradient (56 ms) -[ RUN ] LSTMLayerTest/1.TestGradient -[ OK ] LSTMLayerTest/1.TestGradient (335 ms) -[ RUN ] LSTMLayerTest/1.TestGradientNonZeroContBufferSize2 -[ OK ] LSTMLayerTest/1.TestGradientNonZeroContBufferSize2 (680 ms) -[ RUN ] LSTMLayerTest/1.TestGradientNonZeroContBufferSize2WithStaticInput -[ OK ] LSTMLayerTest/1.TestGradientNonZeroContBufferSize2WithStaticInput (4196 ms) -[ RUN ] LSTMLayerTest/1.TestSetUp -[ OK ] LSTMLayerTest/1.TestSetUp (2 ms) -[ RUN ] LSTMLayerTest/1.TestForward -[ OK ] LSTMLayerTest/1.TestForward (5 ms) -[ RUN ] LSTMLayerTest/1.TestLSTMUnitSetUp -[ OK ] LSTMLayerTest/1.TestLSTMUnitSetUp (0 ms) -[ RUN ] LSTMLayerTest/1.TestGradientNonZeroCont -[ OK ] LSTMLayerTest/1.TestGradientNonZeroCont (350 ms) -[----------] 9 tests from LSTMLayerTest/1 (5679 ms total) - -[----------] 2 tests from SoftmaxLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] SoftmaxLayerTest/0.TestGradient -[ OK ] SoftmaxLayerTest/0.TestGradient (195 ms) -[ RUN ] SoftmaxLayerTest/0.TestForward -[ OK ] SoftmaxLayerTest/0.TestForward (0 ms) -[----------] 2 tests from SoftmaxLayerTest/0 (195 ms total) - -[----------] 15 tests from ConvolutionLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] ConvolutionLayerTest/1.TestSimpleConvolution -[ OK ] ConvolutionLayerTest/1.TestSimpleConvolution (0 ms) -[ RUN ] ConvolutionLayerTest/1.Test0DConvolution -[ OK ] ConvolutionLayerTest/1.Test0DConvolution (0 ms) -[ RUN ] ConvolutionLayerTest/1.TestSetup -[ OK ] ConvolutionLayerTest/1.TestSetup (0 ms) -[ RUN ] ConvolutionLayerTest/1.TestSobelConvolution -[ OK ] ConvolutionLayerTest/1.TestSobelConvolution (0 ms) -[ RUN ] ConvolutionLayerTest/1.TestGradient3D -[ OK ] ConvolutionLayerTest/1.TestGradient3D (904 ms) -[ RUN ] ConvolutionLayerTest/1.TestGradientGroup -[ OK ] ConvolutionLayerTest/1.TestGradientGroup (27 ms) -[ RUN ] ConvolutionLayerTest/1.TestNDAgainst2D -[ OK ] ConvolutionLayerTest/1.TestNDAgainst2D (579 ms) -[ RUN ] ConvolutionLayerTest/1.TestDilatedConvolution -[ OK ] ConvolutionLayerTest/1.TestDilatedConvolution (2 ms) -[ RUN ] ConvolutionLayerTest/1.Test1x1Gradient -[ OK ] ConvolutionLayerTest/1.Test1x1Gradient (835 ms) -[ RUN ] ConvolutionLayerTest/1.TestDilatedGradient -[ OK ] ConvolutionLayerTest/1.TestDilatedGradient (24 ms) -[ RUN ] ConvolutionLayerTest/1.TestDilated3DConvolution -[ OK ] ConvolutionLayerTest/1.TestDilated3DConvolution (9 ms) -[ RUN ] ConvolutionLayerTest/1.Test1x1Convolution -[ OK ] ConvolutionLayerTest/1.Test1x1Convolution (0 ms) -[ RUN ] ConvolutionLayerTest/1.TestGradient -[ OK ] ConvolutionLayerTest/1.TestGradient (102 ms) -[ RUN ] ConvolutionLayerTest/1.TestSimple3DConvolution -[ OK ] ConvolutionLayerTest/1.TestSimple3DConvolution (1 ms) -[ RUN ] ConvolutionLayerTest/1.TestSimpleConvolutionGroup -[ OK ] ConvolutionLayerTest/1.TestSimpleConvolutionGroup (0 ms) -[----------] 15 tests from ConvolutionLayerTest/1 (2483 ms total) - -[----------] 5 tests from DeconvolutionLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] 
DeconvolutionLayerTest/0.TestSimpleDeconvolution -[ OK ] DeconvolutionLayerTest/0.TestSimpleDeconvolution (1 ms) -[ RUN ] DeconvolutionLayerTest/0.TestNDAgainst2D -[ OK ] DeconvolutionLayerTest/0.TestNDAgainst2D (1465 ms) -[ RUN ] DeconvolutionLayerTest/0.TestSetup -[ OK ] DeconvolutionLayerTest/0.TestSetup (0 ms) -[ RUN ] DeconvolutionLayerTest/0.TestGradient -[ OK ] DeconvolutionLayerTest/0.TestGradient (961 ms) -[ RUN ] DeconvolutionLayerTest/0.TestGradient3D -[ OK ] DeconvolutionLayerTest/0.TestGradient3D (332 ms) -[----------] 5 tests from DeconvolutionLayerTest/0 (2759 ms total) - -[----------] 58 tests from NeuronLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] NeuronLayerTest/0.TestClipGradient -[ OK ] NeuronLayerTest/0.TestClipGradient (2 ms) -[ RUN ] NeuronLayerTest/0.TestSwishGradient -[ OK ] NeuronLayerTest/0.TestSwishGradient (6 ms) -[ RUN ] NeuronLayerTest/0.TestAbsGradient -[ OK ] NeuronLayerTest/0.TestAbsGradient (1 ms) -[ RUN ] NeuronLayerTest/0.TestReLUGradientWithNegativeSlope -[ OK ] NeuronLayerTest/0.TestReLUGradientWithNegativeSlope (3 ms) -[ RUN ] NeuronLayerTest/0.TestExpLayerBase2Shift1Scale3 -[ OK ] NeuronLayerTest/0.TestExpLayerBase2Shift1Scale3 (0 ms) -[ RUN ] NeuronLayerTest/0.TestBNLL -[ OK ] NeuronLayerTest/0.TestBNLL (0 ms) -[ RUN ] NeuronLayerTest/0.TestSigmoid -[ OK ] NeuronLayerTest/0.TestSigmoid (0 ms) -[ RUN ] NeuronLayerTest/0.TestTanHGradient -[ OK ] NeuronLayerTest/0.TestTanHGradient (6 ms) -[ RUN ] NeuronLayerTest/0.TestExpGradientBase2 -[ OK ] NeuronLayerTest/0.TestExpGradientBase2 (3 ms) -[ RUN ] NeuronLayerTest/0.TestExpGradientWithShift -[ OK ] NeuronLayerTest/0.TestExpGradientWithShift (3 ms) -[ RUN ] NeuronLayerTest/0.TestPReLUConsistencyReLU -[ OK ] NeuronLayerTest/0.TestPReLUConsistencyReLU (0 ms) -[ RUN ] NeuronLayerTest/0.TestSwishWithBeta -[ OK ] NeuronLayerTest/0.TestSwishWithBeta (0 ms) -[ RUN ] NeuronLayerTest/0.TestDropoutGradientTest -[ OK ] NeuronLayerTest/0.TestDropoutGradientTest (2 ms) -[ RUN ] NeuronLayerTest/0.TestReLU -[ OK ] NeuronLayerTest/0.TestReLU (0 ms) -[ RUN ] NeuronLayerTest/0.TestPReLUGradientChannelShared -[ OK ] NeuronLayerTest/0.TestPReLUGradientChannelShared (145 ms) -[ RUN ] NeuronLayerTest/0.TestExpGradient -[ OK ] NeuronLayerTest/0.TestExpGradient (3 ms) -[ RUN ] NeuronLayerTest/0.TestSwishAsLinearGradient -[ OK ] NeuronLayerTest/0.TestSwishAsLinearGradient (3 ms) -[ RUN ] NeuronLayerTest/0.TestReLUWithNegativeSlope -[ OK ] NeuronLayerTest/0.TestReLUWithNegativeSlope (0 ms) -[ RUN ] NeuronLayerTest/0.TestExpLayerWithShift -[ OK ] NeuronLayerTest/0.TestExpLayerWithShift (0 ms) -[ RUN ] NeuronLayerTest/0.TestLogGradientBase2 -[ OK ] NeuronLayerTest/0.TestLogGradientBase2 (3 ms) -[ RUN ] NeuronLayerTest/0.TestReLUGradient -[ OK ] NeuronLayerTest/0.TestReLUGradient (2 ms) -[ RUN ] NeuronLayerTest/0.TestExpGradientBase2Shift1Scale3 -[ OK ] NeuronLayerTest/0.TestExpGradientBase2Shift1Scale3 (3 ms) -[ RUN ] NeuronLayerTest/0.TestELUasReLU -[ OK ] NeuronLayerTest/0.TestELUasReLU (0 ms) -[ RUN ] NeuronLayerTest/0.TestDropoutTestPhase -[ OK ] NeuronLayerTest/0.TestDropoutTestPhase (0 ms) -[ RUN ] NeuronLayerTest/0.TestSwishAsLinear -[ OK ] NeuronLayerTest/0.TestSwishAsLinear (0 ms) -[ RUN ] NeuronLayerTest/0.TestSwish -[ OK ] NeuronLayerTest/0.TestSwish (0 ms) -[ RUN ] NeuronLayerTest/0.TestExpGradientBase2Shift1 -[ OK ] NeuronLayerTest/0.TestExpGradientBase2Shift1 (3 ms) -[ RUN ] NeuronLayerTest/0.TestLogGradientBase2Shift1Scale3 -[ OK ] NeuronLayerTest/0.TestLogGradientBase2Shift1Scale3 (3 ms) -[ RUN ] 
NeuronLayerTest/0.TestExpLayerBase2Shift1 -[ OK ] NeuronLayerTest/0.TestExpLayerBase2Shift1 (0 ms) -[ RUN ] NeuronLayerTest/0.TestPReLUForward -[ OK ] NeuronLayerTest/0.TestPReLUForward (0 ms) -[ RUN ] NeuronLayerTest/0.TestLogLayerBase2 -[ OK ] NeuronLayerTest/0.TestLogLayerBase2 (0 ms) -[ RUN ] NeuronLayerTest/0.TestSwishWithBetaGradient -[ OK ] NeuronLayerTest/0.TestSwishWithBetaGradient (6 ms) -[ RUN ] NeuronLayerTest/0.TestSigmoidGradient -[ OK ] NeuronLayerTest/0.TestSigmoidGradient (5 ms) -[ RUN ] NeuronLayerTest/0.TestLogGradientBase2Scale3 -[ OK ] NeuronLayerTest/0.TestLogGradientBase2Scale3 (3 ms) -[ RUN ] NeuronLayerTest/0.TestExpGradientBase2Scale3 -[ OK ] NeuronLayerTest/0.TestExpGradientBase2Scale3 (3 ms) -[ RUN ] NeuronLayerTest/0.TestClip -[ OK ] NeuronLayerTest/0.TestClip (0 ms) -[ RUN ] NeuronLayerTest/0.TestLogGradient -[ OK ] NeuronLayerTest/0.TestLogGradient (2 ms) -[ RUN ] NeuronLayerTest/0.TestLogLayerBase2Shift1 -[ OK ] NeuronLayerTest/0.TestLogLayerBase2Shift1 (0 ms) -[ RUN ] NeuronLayerTest/0.TestPReLUParam -[ OK ] NeuronLayerTest/0.TestPReLUParam (0 ms) -[ RUN ] NeuronLayerTest/0.TestExpLayerBase2 -[ OK ] NeuronLayerTest/0.TestExpLayerBase2 (0 ms) -[ RUN ] NeuronLayerTest/0.TestDropoutHalf -[ OK ] NeuronLayerTest/0.TestDropoutHalf (0 ms) -[ RUN ] NeuronLayerTest/0.TestELU -[ OK ] NeuronLayerTest/0.TestELU (1 ms) -[ RUN ] NeuronLayerTest/0.TestDropoutThreeQuarters -[ OK ] NeuronLayerTest/0.TestDropoutThreeQuarters (0 ms) -[ RUN ] NeuronLayerTest/0.TestPReLUInPlace -[ OK ] NeuronLayerTest/0.TestPReLUInPlace (0 ms) -[ RUN ] NeuronLayerTest/0.TestPReLUForwardChannelShared -[ OK ] NeuronLayerTest/0.TestPReLUForwardChannelShared (0 ms) -[ RUN ] NeuronLayerTest/0.TestTanH -[ OK ] NeuronLayerTest/0.TestTanH (0 ms) -[ RUN ] NeuronLayerTest/0.TestDropoutGradient -[ OK ] NeuronLayerTest/0.TestDropoutGradient (3 ms) -[ RUN ] NeuronLayerTest/0.TestLogLayer -[ OK ] NeuronLayerTest/0.TestLogLayer (0 ms) -[ RUN ] NeuronLayerTest/0.TestLogLayerBase2Shift1Scale3 -[ OK ] NeuronLayerTest/0.TestLogLayerBase2Shift1Scale3 (0 ms) -[ RUN ] NeuronLayerTest/0.TestLogLayerBase2Scale3 -[ OK ] NeuronLayerTest/0.TestLogLayerBase2Scale3 (0 ms) -[ RUN ] NeuronLayerTest/0.TestELUasReLUGradient -[ OK ] NeuronLayerTest/0.TestELUasReLUGradient (3 ms) -[ RUN ] NeuronLayerTest/0.TestPReLUGradient -[ OK ] NeuronLayerTest/0.TestPReLUGradient (146 ms) -[ RUN ] NeuronLayerTest/0.TestAbsVal -[ OK ] NeuronLayerTest/0.TestAbsVal (0 ms) -[ RUN ] NeuronLayerTest/0.TestELUGradient -[ OK ] NeuronLayerTest/0.TestELUGradient (3 ms) -[ RUN ] NeuronLayerTest/0.TestExpLayerBase2Scale3 -[ OK ] NeuronLayerTest/0.TestExpLayerBase2Scale3 (0 ms) -[ RUN ] NeuronLayerTest/0.TestBNLLGradient -[ OK ] NeuronLayerTest/0.TestBNLLGradient (5 ms) -[ RUN ] NeuronLayerTest/0.TestLogGradientBase2Shift1 -[ OK ] NeuronLayerTest/0.TestLogGradientBase2Shift1 (3 ms) -[ RUN ] NeuronLayerTest/0.TestExpLayer -[ OK ] NeuronLayerTest/0.TestExpLayer (0 ms) -[----------] 58 tests from NeuronLayerTest/0 (374 ms total) - -[----------] 1 test from CPUStochasticPoolingLayerTest/0, where TypeParam = float -[ RUN ] CPUStochasticPoolingLayerTest/0.TestSetup -[ OK ] CPUStochasticPoolingLayerTest/0.TestSetup (0 ms) -[----------] 1 test from CPUStochasticPoolingLayerTest/0 (0 ms total) +[----------] 10 tests from ConcatLayerTest/1 (18 ms total) -[----------] 11 tests from CropLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] CropLayerTest/0.TestSetupShapeDefault -[ OK ] CropLayerTest/0.TestSetupShapeDefault (0 ms) -[ RUN ] 
CropLayerTest/0.TestDimensionsCheck -[ OK ] CropLayerTest/0.TestDimensionsCheck (0 ms) -[ RUN ] CropLayerTest/0.TestCropHW -[ OK ] CropLayerTest/0.TestCropHW (0 ms) -[ RUN ] CropLayerTest/0.TestCropAllGradient -[ OK ] CropLayerTest/0.TestCropAllGradient (318 ms) -[ RUN ] CropLayerTest/0.TestCrop5D -[ OK ] CropLayerTest/0.TestCrop5D (1 ms) -[ RUN ] CropLayerTest/0.TestSetupShapeNegativeIndexing -[ OK ] CropLayerTest/0.TestSetupShapeNegativeIndexing (0 ms) -[ RUN ] CropLayerTest/0.TestCropAllOffset -[ OK ] CropLayerTest/0.TestCropAllOffset (0 ms) -[ RUN ] CropLayerTest/0.TestCropHWGradient -[ OK ] CropLayerTest/0.TestCropHWGradient (599 ms) -[ RUN ] CropLayerTest/0.TestCropAll -[ OK ] CropLayerTest/0.TestCropAll (0 ms) -[ RUN ] CropLayerTest/0.TestSetupShapeAll -[ OK ] CropLayerTest/0.TestSetupShapeAll (0 ms) -[ RUN ] CropLayerTest/0.TestCrop5DGradient -[ OK ] CropLayerTest/0.TestCrop5DGradient (2014 ms) -[----------] 11 tests from CropLayerTest/0 (2932 ms total) - -[----------] 2 tests from BatchReindexLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] BatchReindexLayerTest/1.TestForward -[ OK ] BatchReindexLayerTest/1.TestForward (0 ms) -[ RUN ] BatchReindexLayerTest/1.TestGradient -[ OK ] BatchReindexLayerTest/1.TestGradient (140 ms) -[----------] 2 tests from BatchReindexLayerTest/1 (142 ms total) - -[----------] 4 tests from ConstantFillerTest/0, where TypeParam = float -[ RUN ] ConstantFillerTest/0.TestFill -[ OK ] ConstantFillerTest/0.TestFill (0 ms) -[ RUN ] ConstantFillerTest/0.TestFill5D -[ OK ] ConstantFillerTest/0.TestFill5D (0 ms) -[ RUN ] ConstantFillerTest/0.TestFill2D -[ OK ] ConstantFillerTest/0.TestFill2D (0 ms) -[ RUN ] ConstantFillerTest/0.TestFill1D -[ OK ] ConstantFillerTest/0.TestFill1D (0 ms) -[----------] 4 tests from ConstantFillerTest/0 (0 ms total) +[----------] 4 tests from ConstantFillerTest/1, where TypeParam = double +[ RUN ] ConstantFillerTest/1.TestFill +[ OK ] ConstantFillerTest/1.TestFill (0 ms) +[ RUN ] ConstantFillerTest/1.TestFill1D +[ OK ] ConstantFillerTest/1.TestFill1D (0 ms) +[ RUN ] ConstantFillerTest/1.TestFill2D +[ OK ] ConstantFillerTest/1.TestFill2D (0 ms) +[ RUN ] ConstantFillerTest/1.TestFill5D +[ OK ] ConstantFillerTest/1.TestFill5D (0 ms) +[----------] 4 tests from ConstantFillerTest/1 (0 ms total) [----------] 4 tests from BlobSimpleTest/1, where TypeParam = double +[ RUN ] BlobSimpleTest/1.TestReshapeZero +[ OK ] BlobSimpleTest/1.TestReshapeZero (0 ms) [ RUN ] BlobSimpleTest/1.TestReshape [ OK ] BlobSimpleTest/1.TestReshape (0 ms) [ RUN ] BlobSimpleTest/1.TestLegacyBlobProtoShapeEquals [ OK ] BlobSimpleTest/1.TestLegacyBlobProtoShapeEquals (0 ms) [ RUN ] BlobSimpleTest/1.TestInitialization [ OK ] BlobSimpleTest/1.TestInitialization (0 ms) -[ RUN ] BlobSimpleTest/1.TestReshapeZero -[ OK ] BlobSimpleTest/1.TestReshapeZero (0 ms) [----------] 4 tests from BlobSimpleTest/1 (0 ms total) -[----------] 26 tests from NetTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] NetTest/1.TestGetBlob -[ OK ] NetTest/1.TestGetBlob (4 ms) -[ RUN ] NetTest/1.TestGetLayerByName -[ OK ] NetTest/1.TestGetLayerByName (1 ms) -[ RUN ] NetTest/1.TestSharedWeightsDataNet -[ OK ] NetTest/1.TestSharedWeightsDataNet (0 ms) -[ RUN ] NetTest/1.TestBottomNeedBackwardTricky -[ OK ] NetTest/1.TestBottomNeedBackwardTricky (1 ms) -[ RUN ] NetTest/1.TestLossWeightMidNet -[ OK ] NetTest/1.TestLossWeightMidNet (7 ms) -[ RUN ] NetTest/1.TestHasLayer -[ OK ] NetTest/1.TestHasLayer (1 ms) -[ RUN ] NetTest/1.TestSharedWeightsUpdate -[ OK ] NetTest/1.TestSharedWeightsUpdate (2 
ms) -[ RUN ] NetTest/1.TestAllInOneNetTrain -[ OK ] NetTest/1.TestAllInOneNetTrain (0 ms) -[ RUN ] NetTest/1.TestHasBlob -[ OK ] NetTest/1.TestHasBlob (1 ms) -[ RUN ] NetTest/1.TestSharedWeightsResume -[ OK ] NetTest/1.TestSharedWeightsResume (1 ms) -[ RUN ] NetTest/1.TestLossWeight -[ OK ] NetTest/1.TestLossWeight (13 ms) -[ RUN ] NetTest/1.TestAllInOneNetVal -[ OK ] NetTest/1.TestAllInOneNetVal (5 ms) -[ RUN ] NetTest/1.TestAllInOneNetDeploy -[ OK ] NetTest/1.TestAllInOneNetDeploy (0 ms) -[ RUN ] NetTest/1.TestComboLossWeight -[ OK ] NetTest/1.TestComboLossWeight (8 ms) -[ RUN ] NetTest/1.TestSharedWeightsDiffNet -[ OK ] NetTest/1.TestSharedWeightsDiffNet (0 ms) -[ RUN ] NetTest/1.TestFromTo -[ OK ] NetTest/1.TestFromTo (7 ms) -[ RUN ] NetTest/1.TestBottomNeedBackwardForce -[ OK ] NetTest/1.TestBottomNeedBackwardForce (1 ms) -[ RUN ] NetTest/1.TestReshape -[ OK ] NetTest/1.TestReshape (1 ms) -[ RUN ] NetTest/1.TestUnsharedWeightsDiffNet -[ OK ] NetTest/1.TestUnsharedWeightsDiffNet (0 ms) -[ RUN ] NetTest/1.TestSkipPropagateDown -[ OK ] NetTest/1.TestSkipPropagateDown (5 ms) -[ RUN ] NetTest/1.TestBottomNeedBackward -[ OK ] NetTest/1.TestBottomNeedBackward (1 ms) -[ RUN ] NetTest/1.TestParamPropagateDown -[ OK ] NetTest/1.TestParamPropagateDown (3 ms) -[ RUN ] NetTest/1.TestBackwardWithAccuracyLayer -[ OK ] NetTest/1.TestBackwardWithAccuracyLayer (2 ms) -[ RUN ] NetTest/1.TestForcePropagateDown -[ OK ] NetTest/1.TestForcePropagateDown (1 ms) -[ RUN ] NetTest/1.TestUnsharedWeightsDataNet -[ OK ] NetTest/1.TestUnsharedWeightsDataNet (1 ms) -[ RUN ] NetTest/1.TestBottomNeedBackwardEuclideanForce -[ OK ] NetTest/1.TestBottomNeedBackwardEuclideanForce (0 ms) -[----------] 26 tests from NetTest/1 (76 ms total) +[----------] 1 test from ProtoTest +[ RUN ] ProtoTest.TestSerialization +Printing in binary format. -[----------] 3 tests from TanHLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] TanHLayerTest/0.TestTanHOverflow -[ OK ] TanHLayerTest/0.TestTanHOverflow (0 ms) -[ RUN ] TanHLayerTest/0.TestTanH -[ OK ] TanHLayerTest/0.TestTanH (0 ms) -[ RUN ] TanHLayerTest/0.TestTanHGradient -[ OK ] TanHLayerTest/0.TestTanHGradient (6 ms) -[----------] 3 tests from TanHLayerTest/0 (11 ms total) +testTest +Printing in text format. 
+name: "test" +type: "Test" -[----------] 2 tests from InternalThreadTest -[ RUN ] InternalThreadTest.TestStartAndExit -[ OK ] InternalThreadTest.TestStartAndExit (12 ms) -[ RUN ] InternalThreadTest.TestRandomSeed -[ OK ] InternalThreadTest.TestRandomSeed (20 ms) -[----------] 2 tests from InternalThreadTest (32 ms total) +[ OK ] ProtoTest.TestSerialization (0 ms) +[----------] 1 test from ProtoTest (0 ms total) + +[----------] 2 tests from CommonTest +[ RUN ] CommonTest.TestBrewMode +[ OK ] CommonTest.TestBrewMode (0 ms) +[ RUN ] CommonTest.TestRandSeedCPU +[ OK ] CommonTest.TestRandSeedCPU (0 ms) +[----------] 2 tests from CommonTest (0 ms total) + +[----------] 6 tests from RNNLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] RNNLayerTest/0.TestSetUp +[ OK ] RNNLayerTest/0.TestSetUp (1 ms) +[ RUN ] RNNLayerTest/0.TestForward +[ OK ] RNNLayerTest/0.TestForward (5 ms) +[ RUN ] RNNLayerTest/0.TestGradient +[ OK ] RNNLayerTest/0.TestGradient (119 ms) +[ RUN ] RNNLayerTest/0.TestGradientNonZeroContBufferSize2 +[ OK ] RNNLayerTest/0.TestGradientNonZeroContBufferSize2 (226 ms) +[ RUN ] RNNLayerTest/0.TestGradientNonZeroCont +[ OK ] RNNLayerTest/0.TestGradientNonZeroCont (117 ms) +[ RUN ] RNNLayerTest/0.TestGradientNonZeroContBufferSize2WithStaticInput +[ OK ] RNNLayerTest/0.TestGradientNonZeroContBufferSize2WithStaticInput (949 ms) +[----------] 6 tests from RNNLayerTest/0 (1417 ms total) [----------] 3 tests from ThresholdLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] ThresholdLayerTest/0.Test -[ OK ] ThresholdLayerTest/0.Test (0 ms) [ RUN ] ThresholdLayerTest/0.Test2 [ OK ] ThresholdLayerTest/0.Test2 (0 ms) +[ RUN ] ThresholdLayerTest/0.Test +[ OK ] ThresholdLayerTest/0.Test (0 ms) [ RUN ] ThresholdLayerTest/0.TestSetup [ OK ] ThresholdLayerTest/0.TestSetup (0 ms) [----------] 3 tests from ThresholdLayerTest/0 (0 ms total) -[----------] 2 tests from InfogainLossLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] InfogainLossLayerTest/0.TestInfogainLoss -[ OK ] InfogainLossLayerTest/0.TestInfogainLoss (0 ms) -[ RUN ] InfogainLossLayerTest/0.TestGradient -[ OK ] InfogainLossLayerTest/0.TestGradient (2 ms) -[----------] 2 tests from InfogainLossLayerTest/0 (2 ms total) +[----------] 5 tests from EmbedLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] EmbedLayerTest/1.TestSetUp +[ OK ] EmbedLayerTest/1.TestSetUp (0 ms) +[ RUN ] EmbedLayerTest/1.TestForwardWithBias +[ OK ] EmbedLayerTest/1.TestForwardWithBias (0 ms) +[ RUN ] EmbedLayerTest/1.TestForward +[ OK ] EmbedLayerTest/1.TestForward (0 ms) +[ RUN ] EmbedLayerTest/1.TestGradient +[ OK ] EmbedLayerTest/1.TestGradient (12 ms) +[ RUN ] EmbedLayerTest/1.TestGradientWithBias +[ OK ] EmbedLayerTest/1.TestGradientWithBias (15 ms) +[----------] 5 tests from EmbedLayerTest/1 (27 ms total) -[----------] 1 test from LayerFactoryTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] LayerFactoryTest/0.TestCreateLayer -[ OK ] LayerFactoryTest/0.TestCreateLayer (1 ms) -[----------] 1 test from LayerFactoryTest/0 (1 ms total) +[----------] 4 tests from SoftmaxWithLossLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] SoftmaxWithLossLayerTest/1.TestGradientIgnoreLabel +[ OK ] SoftmaxWithLossLayerTest/1.TestGradientIgnoreLabel (12 ms) +[ RUN ] SoftmaxWithLossLayerTest/1.TestGradientUnnormalized +[ OK ] SoftmaxWithLossLayerTest/1.TestGradientUnnormalized (12 ms) +[ RUN ] SoftmaxWithLossLayerTest/1.TestForwardIgnoreLabel +[ OK ] SoftmaxWithLossLayerTest/1.TestForwardIgnoreLabel (1 ms) +[ RUN ] 
SoftmaxWithLossLayerTest/1.TestGradient +[ OK ] SoftmaxWithLossLayerTest/1.TestGradient (12 ms) +[----------] 4 tests from SoftmaxWithLossLayerTest/1 (38 ms total) -[----------] 5 tests from BenchmarkTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] BenchmarkTest/1.TestTimerStop -[ OK ] BenchmarkTest/1.TestTimerStop (0 ms) -[ RUN ] BenchmarkTest/1.TestTimerStart -[ OK ] BenchmarkTest/1.TestTimerStart (0 ms) -[ RUN ] BenchmarkTest/1.TestTimerConstructor -[ OK ] BenchmarkTest/1.TestTimerConstructor (0 ms) -[ RUN ] BenchmarkTest/1.TestTimerSeconds -[ OK ] BenchmarkTest/1.TestTimerSeconds (301 ms) -[ RUN ] BenchmarkTest/1.TestTimerMilliSeconds -[ OK ] BenchmarkTest/1.TestTimerMilliSeconds (304 ms) -[----------] 5 tests from BenchmarkTest/1 (605 ms total) +[----------] 12 tests from ArgMaxLayerTest/0, where TypeParam = float +[ RUN ] ArgMaxLayerTest/0.TestSetupAxis +[ OK ] ArgMaxLayerTest/0.TestSetupAxis (0 ms) +[ RUN ] ArgMaxLayerTest/0.TestSetupAxisMaxVal +[ OK ] ArgMaxLayerTest/0.TestSetupAxisMaxVal (1 ms) +[ RUN ] ArgMaxLayerTest/0.TestSetup +[ OK ] ArgMaxLayerTest/0.TestSetup (0 ms) +[ RUN ] ArgMaxLayerTest/0.TestCPUAxis +[ OK ] ArgMaxLayerTest/0.TestCPUAxis (8 ms) +[ RUN ] ArgMaxLayerTest/0.TestCPUMaxVal +[ OK ] ArgMaxLayerTest/0.TestCPUMaxVal (1 ms) +[ RUN ] ArgMaxLayerTest/0.TestCPUMaxValTopK +[ OK ] ArgMaxLayerTest/0.TestCPUMaxValTopK (1 ms) +[ RUN ] ArgMaxLayerTest/0.TestCPU +[ OK ] ArgMaxLayerTest/0.TestCPU (1 ms) +[ RUN ] ArgMaxLayerTest/0.TestCPUAxisMaxValTopK +[ OK ] ArgMaxLayerTest/0.TestCPUAxisMaxValTopK (24 ms) +[ RUN ] ArgMaxLayerTest/0.TestSetupAxisNegativeIndexing +[ OK ] ArgMaxLayerTest/0.TestSetupAxisNegativeIndexing (0 ms) +[ RUN ] ArgMaxLayerTest/0.TestCPUAxisTopK +[ OK ] ArgMaxLayerTest/0.TestCPUAxisTopK (37 ms) +[ RUN ] ArgMaxLayerTest/0.TestCPUTopK +[ OK ] ArgMaxLayerTest/0.TestCPUTopK (1 ms) +[ RUN ] ArgMaxLayerTest/0.TestSetupMaxVal +[ OK ] ArgMaxLayerTest/0.TestSetupMaxVal (0 ms) +[----------] 12 tests from ArgMaxLayerTest/0 (74 ms total) + +[----------] 6 tests from MSRAFillerTest/1, where TypeParam = double +[ RUN ] MSRAFillerTest/1.TestFill2D +[ OK ] MSRAFillerTest/1.TestFill2D (0 ms) +[ RUN ] MSRAFillerTest/1.TestFill1D +[ OK ] MSRAFillerTest/1.TestFill1D (0 ms) +[ RUN ] MSRAFillerTest/1.TestFillAverage +[ OK ] MSRAFillerTest/1.TestFillAverage (85 ms) +[ RUN ] MSRAFillerTest/1.TestFillFanOut +[ OK ] MSRAFillerTest/1.TestFillFanOut (84 ms) +[ RUN ] MSRAFillerTest/1.TestFillFanIn +[ OK ] MSRAFillerTest/1.TestFillFanIn (84 ms) +[ RUN ] MSRAFillerTest/1.TestFill5D +[ OK ] MSRAFillerTest/1.TestFill5D (0 ms) +[----------] 6 tests from MSRAFillerTest/1 (254 ms total) + +[----------] 3 tests from MaxPoolingDropoutTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] MaxPoolingDropoutTest/1.TestSetup +[ OK ] MaxPoolingDropoutTest/1.TestSetup (0 ms) +[ RUN ] MaxPoolingDropoutTest/1.TestBackward +[ OK ] MaxPoolingDropoutTest/1.TestBackward (0 ms) +[ RUN ] MaxPoolingDropoutTest/1.TestForward +[ OK ] MaxPoolingDropoutTest/1.TestForward (0 ms) +[----------] 3 tests from MaxPoolingDropoutTest/1 (0 ms total) + +[----------] 5 tests from DeconvolutionLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] DeconvolutionLayerTest/1.TestSetup +[ OK ] DeconvolutionLayerTest/1.TestSetup (0 ms) +[ RUN ] DeconvolutionLayerTest/1.TestNDAgainst2D +[ OK ] DeconvolutionLayerTest/1.TestNDAgainst2D (1419 ms) +[ RUN ] DeconvolutionLayerTest/1.TestGradient3D +[ OK ] DeconvolutionLayerTest/1.TestGradient3D (269 ms) +[ RUN ] DeconvolutionLayerTest/1.TestSimpleDeconvolution +[ OK ] 
DeconvolutionLayerTest/1.TestSimpleDeconvolution (1 ms) +[ RUN ] DeconvolutionLayerTest/1.TestGradient +[ OK ] DeconvolutionLayerTest/1.TestGradient (750 ms) +[----------] 5 tests from DeconvolutionLayerTest/1 (2439 ms total) + +[----------] 1 test from SolverTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] SolverTest/0.TestInitTrainTestNets +[ OK ] SolverTest/0.TestInitTrainTestNets (1 ms) +[----------] 1 test from SolverTest/0 (1 ms total) + +[----------] 4 tests from NetUpgradeTest +[ RUN ] NetUpgradeTest.TestAllParams +[ OK ] NetUpgradeTest.TestAllParams (1 ms) +[ RUN ] NetUpgradeTest.TestUpgradeV1LayerType +[ OK ] NetUpgradeTest.TestUpgradeV1LayerType (1 ms) +[ RUN ] NetUpgradeTest.TestSimple +[ OK ] NetUpgradeTest.TestSimple (1 ms) +[ RUN ] NetUpgradeTest.TestImageNet +[ OK ] NetUpgradeTest.TestImageNet (2 ms) +[----------] 4 tests from NetUpgradeTest (5 ms total) + +[----------] 1 test from CPUStochasticPoolingLayerTest/1, where TypeParam = double +[ RUN ] CPUStochasticPoolingLayerTest/1.TestSetup +[ OK ] CPUStochasticPoolingLayerTest/1.TestSetup (0 ms) +[----------] 1 test from CPUStochasticPoolingLayerTest/1 (0 ms total) [----------] 58 tests from NeuronLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] NeuronLayerTest/1.TestELU -[ OK ] NeuronLayerTest/1.TestELU (0 ms) +[ RUN ] NeuronLayerTest/1.TestSwishAsLinearGradient +[ OK ] NeuronLayerTest/1.TestSwishAsLinearGradient (2 ms) +[ RUN ] NeuronLayerTest/1.TestLogLayer +[ OK ] NeuronLayerTest/1.TestLogLayer (0 ms) +[ RUN ] NeuronLayerTest/1.TestReLUGradientWithNegativeSlope +[ OK ] NeuronLayerTest/1.TestReLUGradientWithNegativeSlope (3 ms) +[ RUN ] NeuronLayerTest/1.TestELUasReLU +[ OK ] NeuronLayerTest/1.TestELUasReLU (0 ms) +[ RUN ] NeuronLayerTest/1.TestBNLL +[ OK ] NeuronLayerTest/1.TestBNLL (0 ms) +[ RUN ] NeuronLayerTest/1.TestDropoutThreeQuarters +[ OK ] NeuronLayerTest/1.TestDropoutThreeQuarters (0 ms) +[ RUN ] NeuronLayerTest/1.TestLogGradientBase2Scale3 +[ OK ] NeuronLayerTest/1.TestLogGradientBase2Scale3 (4 ms) +[ RUN ] NeuronLayerTest/1.TestSwishWithBeta +[ OK ] NeuronLayerTest/1.TestSwishWithBeta (0 ms) [ RUN ] NeuronLayerTest/1.TestLogLayerBase2Scale3 [ OK ] NeuronLayerTest/1.TestLogLayerBase2Scale3 (0 ms) -[ RUN ] NeuronLayerTest/1.TestAbsVal -[ OK ] NeuronLayerTest/1.TestAbsVal (0 ms) -[ RUN ] NeuronLayerTest/1.TestExpLayerBase2 -[ OK ] NeuronLayerTest/1.TestExpLayerBase2 (0 ms) +[ RUN ] NeuronLayerTest/1.TestSwishGradient +[ OK ] NeuronLayerTest/1.TestSwishGradient (5 ms) +[ RUN ] NeuronLayerTest/1.TestTanH +[ OK ] NeuronLayerTest/1.TestTanH (0 ms) +[ RUN ] NeuronLayerTest/1.TestSigmoidGradient +[ OK ] NeuronLayerTest/1.TestSigmoidGradient (4 ms) +[ RUN ] NeuronLayerTest/1.TestSigmoid +[ OK ] NeuronLayerTest/1.TestSigmoid (0 ms) +[ RUN ] NeuronLayerTest/1.TestExpGradientBase2 +[ OK ] NeuronLayerTest/1.TestExpGradientBase2 (4 ms) +[ RUN ] NeuronLayerTest/1.TestELUasReLUGradient +[ OK ] NeuronLayerTest/1.TestELUasReLUGradient (2 ms) [ RUN ] NeuronLayerTest/1.TestDropoutGradientTest [ OK ] NeuronLayerTest/1.TestDropoutGradientTest (2 ms) +[ RUN ] NeuronLayerTest/1.TestExpLayerBase2Shift1 +[ OK ] NeuronLayerTest/1.TestExpLayerBase2Shift1 (0 ms) [ RUN ] NeuronLayerTest/1.TestLogGradientBase2 -[ OK ] NeuronLayerTest/1.TestLogGradientBase2 (5 ms) +[ OK ] NeuronLayerTest/1.TestLogGradientBase2 (4 ms) [ RUN ] NeuronLayerTest/1.TestSwishAsLinear [ OK ] NeuronLayerTest/1.TestSwishAsLinear (0 ms) +[ RUN ] NeuronLayerTest/1.TestPReLUParam +[ OK ] NeuronLayerTest/1.TestPReLUParam (0 ms) +[ RUN ] 
NeuronLayerTest/1.TestAbsGradient +[ OK ] NeuronLayerTest/1.TestAbsGradient (2 ms) +[ RUN ] NeuronLayerTest/1.TestExpGradientWithShift +[ OK ] NeuronLayerTest/1.TestExpGradientWithShift (4 ms) +[ RUN ] NeuronLayerTest/1.TestReLUGradient +[ OK ] NeuronLayerTest/1.TestReLUGradient (2 ms) +[ RUN ] NeuronLayerTest/1.TestDropoutGradient +[ OK ] NeuronLayerTest/1.TestDropoutGradient (2 ms) +[ RUN ] NeuronLayerTest/1.TestTanHGradient +[ OK ] NeuronLayerTest/1.TestTanHGradient (5 ms) +[ RUN ] NeuronLayerTest/1.TestELU +[ OK ] NeuronLayerTest/1.TestELU (0 ms) +[ RUN ] NeuronLayerTest/1.TestExpGradientBase2Shift1 +[ OK ] NeuronLayerTest/1.TestExpGradientBase2Shift1 (4 ms) +[ RUN ] NeuronLayerTest/1.TestLogGradient +[ OK ] NeuronLayerTest/1.TestLogGradient (4 ms) +[ RUN ] NeuronLayerTest/1.TestPReLUGradientChannelShared +[ OK ] NeuronLayerTest/1.TestPReLUGradientChannelShared (129 ms) +[ RUN ] NeuronLayerTest/1.TestExpGradientBase2Scale3 +[ OK ] NeuronLayerTest/1.TestExpGradientBase2Scale3 (4 ms) [ RUN ] NeuronLayerTest/1.TestDropoutTestPhase [ OK ] NeuronLayerTest/1.TestDropoutTestPhase (0 ms) -[ RUN ] NeuronLayerTest/1.TestClip -[ OK ] NeuronLayerTest/1.TestClip (0 ms) -[ RUN ] NeuronLayerTest/1.TestExpLayerBase2Shift1 -[ OK ] NeuronLayerTest/1.TestExpLayerBase2Shift1 (0 ms) -[ RUN ] NeuronLayerTest/1.TestReLU -[ OK ] NeuronLayerTest/1.TestReLU (0 ms) -[ RUN ] NeuronLayerTest/1.TestSwishAsLinearGradient -[ OK ] NeuronLayerTest/1.TestSwishAsLinearGradient (11 ms) -[ RUN ] NeuronLayerTest/1.TestDropoutHalf -[ OK ] NeuronLayerTest/1.TestDropoutHalf (0 ms) -[ RUN ] NeuronLayerTest/1.TestSwishWithBetaGradient -[ OK ] NeuronLayerTest/1.TestSwishWithBetaGradient (6 ms) -[ RUN ] NeuronLayerTest/1.TestLogLayerBase2 -[ OK ] NeuronLayerTest/1.TestLogLayerBase2 (8 ms) [ RUN ] NeuronLayerTest/1.TestExpLayerBase2Shift1Scale3 [ OK ] NeuronLayerTest/1.TestExpLayerBase2Shift1Scale3 (0 ms) -[ RUN ] NeuronLayerTest/1.TestSigmoid -[ OK ] NeuronLayerTest/1.TestSigmoid (0 ms) -[ RUN ] NeuronLayerTest/1.TestLogGradientBase2Shift1Scale3 -[ OK ] NeuronLayerTest/1.TestLogGradientBase2Shift1Scale3 (4 ms) -[ RUN ] NeuronLayerTest/1.TestExpLayerBase2Scale3 -[ OK ] NeuronLayerTest/1.TestExpLayerBase2Scale3 (0 ms) -[ RUN ] NeuronLayerTest/1.TestExpLayerWithShift -[ OK ] NeuronLayerTest/1.TestExpLayerWithShift (0 ms) -[ RUN ] NeuronLayerTest/1.TestTanHGradient -[ OK ] NeuronLayerTest/1.TestTanHGradient (14 ms) -[ RUN ] NeuronLayerTest/1.TestExpGradientBase2Shift1 -[ OK ] NeuronLayerTest/1.TestExpGradientBase2Shift1 (4 ms) -[ RUN ] NeuronLayerTest/1.TestTanH -[ OK ] NeuronLayerTest/1.TestTanH (0 ms) -[ RUN ] NeuronLayerTest/1.TestLogLayer -[ OK ] NeuronLayerTest/1.TestLogLayer (0 ms) [ RUN ] NeuronLayerTest/1.TestExpLayer [ OK ] NeuronLayerTest/1.TestExpLayer (0 ms) -[ RUN ] NeuronLayerTest/1.TestExpGradientWithShift -[ OK ] NeuronLayerTest/1.TestExpGradientWithShift (12 ms) -[ RUN ] NeuronLayerTest/1.TestSwish -[ OK ] NeuronLayerTest/1.TestSwish (0 ms) -[ RUN ] NeuronLayerTest/1.TestPReLUGradientChannelShared -[ OK ] NeuronLayerTest/1.TestPReLUGradientChannelShared (168 ms) -[ RUN ] NeuronLayerTest/1.TestReLUGradient -[ OK ] NeuronLayerTest/1.TestReLUGradient (2 ms) -[ RUN ] NeuronLayerTest/1.TestClipGradient -[ OK ] NeuronLayerTest/1.TestClipGradient (3 ms) -[ RUN ] NeuronLayerTest/1.TestPReLUGradient -[ OK ] NeuronLayerTest/1.TestPReLUGradient (142 ms) -[ RUN ] NeuronLayerTest/1.TestDropoutGradient -[ OK ] NeuronLayerTest/1.TestDropoutGradient (3 ms) -[ RUN ] NeuronLayerTest/1.TestSwishGradient -[ OK ] 
NeuronLayerTest/1.TestSwishGradient (5 ms) -[ RUN ] NeuronLayerTest/1.TestBNLLGradient -[ OK ] NeuronLayerTest/1.TestBNLLGradient (6 ms) -[ RUN ] NeuronLayerTest/1.TestLogGradientBase2Scale3 -[ OK ] NeuronLayerTest/1.TestLogGradientBase2Scale3 (5 ms) -[ RUN ] NeuronLayerTest/1.TestExpGradient -[ OK ] NeuronLayerTest/1.TestExpGradient (3 ms) -[ RUN ] NeuronLayerTest/1.TestReLUGradientWithNegativeSlope -[ OK ] NeuronLayerTest/1.TestReLUGradientWithNegativeSlope (3 ms) +[ RUN ] NeuronLayerTest/1.TestExpLayerBase2 +[ OK ] NeuronLayerTest/1.TestExpLayerBase2 (0 ms) +[ RUN ] NeuronLayerTest/1.TestExpGradientBase2Shift1Scale3 +[ OK ] NeuronLayerTest/1.TestExpGradientBase2Shift1Scale3 (3 ms) +[ RUN ] NeuronLayerTest/1.TestClip +[ OK ] NeuronLayerTest/1.TestClip (0 ms) +[ RUN ] NeuronLayerTest/1.TestLogGradientBase2Shift1 +[ OK ] NeuronLayerTest/1.TestLogGradientBase2Shift1 (5 ms) +[ RUN ] NeuronLayerTest/1.TestSwishWithBetaGradient +[ OK ] NeuronLayerTest/1.TestSwishWithBetaGradient (5 ms) +[ RUN ] NeuronLayerTest/1.TestReLU +[ OK ] NeuronLayerTest/1.TestReLU (0 ms) +[ RUN ] NeuronLayerTest/1.TestLogLayerBase2Shift1 +[ OK ] NeuronLayerTest/1.TestLogLayerBase2Shift1 (0 ms) [ RUN ] NeuronLayerTest/1.TestPReLUForward [ OK ] NeuronLayerTest/1.TestPReLUForward (0 ms) -[ RUN ] NeuronLayerTest/1.TestELUGradient -[ OK ] NeuronLayerTest/1.TestELUGradient (3 ms) -[ RUN ] NeuronLayerTest/1.TestLogLayerBase2Shift1Scale3 -[ OK ] NeuronLayerTest/1.TestLogLayerBase2Shift1Scale3 (0 ms) -[ RUN ] NeuronLayerTest/1.TestSigmoidGradient -[ OK ] NeuronLayerTest/1.TestSigmoidGradient (5 ms) +[ RUN ] NeuronLayerTest/1.TestLogGradientBase2Shift1Scale3 +[ OK ] NeuronLayerTest/1.TestLogGradientBase2Shift1Scale3 (4 ms) +[ RUN ] NeuronLayerTest/1.TestLogLayerBase2 +[ OK ] NeuronLayerTest/1.TestLogLayerBase2 (0 ms) [ RUN ] NeuronLayerTest/1.TestPReLUInPlace [ OK ] NeuronLayerTest/1.TestPReLUInPlace (0 ms) +[ RUN ] NeuronLayerTest/1.TestPReLUConsistencyReLU +[ OK ] NeuronLayerTest/1.TestPReLUConsistencyReLU (1 ms) [ RUN ] NeuronLayerTest/1.TestPReLUForwardChannelShared [ OK ] NeuronLayerTest/1.TestPReLUForwardChannelShared (0 ms) -[ RUN ] NeuronLayerTest/1.TestDropoutThreeQuarters -[ OK ] NeuronLayerTest/1.TestDropoutThreeQuarters (0 ms) -[ RUN ] NeuronLayerTest/1.TestLogLayerBase2Shift1 -[ OK ] NeuronLayerTest/1.TestLogLayerBase2Shift1 (0 ms) -[ RUN ] NeuronLayerTest/1.TestPReLUParam -[ OK ] NeuronLayerTest/1.TestPReLUParam (0 ms) -[ RUN ] NeuronLayerTest/1.TestSwishWithBeta -[ OK ] NeuronLayerTest/1.TestSwishWithBeta (0 ms) -[ RUN ] NeuronLayerTest/1.TestELUasReLUGradient -[ OK ] NeuronLayerTest/1.TestELUasReLUGradient (3 ms) +[ RUN ] NeuronLayerTest/1.TestClipGradient +[ OK ] NeuronLayerTest/1.TestClipGradient (1 ms) [ RUN ] NeuronLayerTest/1.TestReLUWithNegativeSlope -[ OK ] NeuronLayerTest/1.TestReLUWithNegativeSlope (0 ms) -[ RUN ] NeuronLayerTest/1.TestExpGradientBase2Scale3 -[ OK ] NeuronLayerTest/1.TestExpGradientBase2Scale3 (4 ms) -[ RUN ] NeuronLayerTest/1.TestELUasReLU -[ OK ] NeuronLayerTest/1.TestELUasReLU (0 ms) -[ RUN ] NeuronLayerTest/1.TestLogGradientBase2Shift1 -[ OK ] NeuronLayerTest/1.TestLogGradientBase2Shift1 (4 ms) -[ RUN ] NeuronLayerTest/1.TestAbsGradient -[ OK ] NeuronLayerTest/1.TestAbsGradient (3 ms) -[ RUN ] NeuronLayerTest/1.TestPReLUConsistencyReLU -[ OK ] NeuronLayerTest/1.TestPReLUConsistencyReLU (0 ms) -[ RUN ] NeuronLayerTest/1.TestExpGradientBase2Shift1Scale3 -[ OK ] NeuronLayerTest/1.TestExpGradientBase2Shift1Scale3 (4 ms) -[ RUN ] NeuronLayerTest/1.TestLogGradient -[ OK ] 
NeuronLayerTest/1.TestLogGradient (4 ms) -[ RUN ] NeuronLayerTest/1.TestExpGradientBase2 -[ OK ] NeuronLayerTest/1.TestExpGradientBase2 (4 ms) -[ RUN ] NeuronLayerTest/1.TestBNLL -[ OK ] NeuronLayerTest/1.TestBNLL (0 ms) -[----------] 58 tests from NeuronLayerTest/1 (441 ms total) +[ OK ] NeuronLayerTest/1.TestReLUWithNegativeSlope (1 ms) +[ RUN ] NeuronLayerTest/1.TestBNLLGradient +[ OK ] NeuronLayerTest/1.TestBNLLGradient (5 ms) +[ RUN ] NeuronLayerTest/1.TestLogLayerBase2Shift1Scale3 +[ OK ] NeuronLayerTest/1.TestLogLayerBase2Shift1Scale3 (0 ms) +[ RUN ] NeuronLayerTest/1.TestPReLUGradient +[ OK ] NeuronLayerTest/1.TestPReLUGradient (133 ms) +[ RUN ] NeuronLayerTest/1.TestELUGradient +[ OK ] NeuronLayerTest/1.TestELUGradient (3 ms) +[ RUN ] NeuronLayerTest/1.TestAbsVal +[ OK ] NeuronLayerTest/1.TestAbsVal (0 ms) +[ RUN ] NeuronLayerTest/1.TestDropoutHalf +[ OK ] NeuronLayerTest/1.TestDropoutHalf (0 ms) +[ RUN ] NeuronLayerTest/1.TestExpGradient +[ OK ] NeuronLayerTest/1.TestExpGradient (4 ms) +[ RUN ] NeuronLayerTest/1.TestSwish +[ OK ] NeuronLayerTest/1.TestSwish (0 ms) +[ RUN ] NeuronLayerTest/1.TestExpLayerWithShift +[ OK ] NeuronLayerTest/1.TestExpLayerWithShift (0 ms) +[ RUN ] NeuronLayerTest/1.TestExpLayerBase2Scale3 +[ OK ] NeuronLayerTest/1.TestExpLayerBase2Scale3 (0 ms) +[----------] 58 tests from NeuronLayerTest/1 (351 ms total) -[----------] 3 tests from SigmoidCrossEntropyLossLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] SigmoidCrossEntropyLossLayerTest/1.TestSigmoidCrossEntropyLoss -[ OK ] SigmoidCrossEntropyLossLayerTest/1.TestSigmoidCrossEntropyLoss (2 ms) -[ RUN ] SigmoidCrossEntropyLossLayerTest/1.TestIgnoreGradient -[ OK ] SigmoidCrossEntropyLossLayerTest/1.TestIgnoreGradient (0 ms) -[ RUN ] SigmoidCrossEntropyLossLayerTest/1.TestGradient -[ OK ] SigmoidCrossEntropyLossLayerTest/1.TestGradient (2 ms) -[----------] 3 tests from SigmoidCrossEntropyLossLayerTest/1 (4 ms total) +[----------] 22 tests from ScaleLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] ScaleLayerTest/1.TestGradientBroadcastBegin +[ OK ] ScaleLayerTest/1.TestGradientBroadcastBegin (99 ms) +[ RUN ] ScaleLayerTest/1.TestForwardEltwiseWithParam +[ OK ] ScaleLayerTest/1.TestForwardEltwiseWithParam (0 ms) +[ RUN ] ScaleLayerTest/1.TestBackwardBroadcastMiddleInPlace +[ OK ] ScaleLayerTest/1.TestBackwardBroadcastMiddleInPlace (0 ms) +[ RUN ] ScaleLayerTest/1.TestGradientScale +[ OK ] ScaleLayerTest/1.TestGradientScale (92 ms) +[ RUN ] ScaleLayerTest/1.TestForwardEltwiseInPlace +[ OK ] ScaleLayerTest/1.TestForwardEltwiseInPlace (0 ms) +[ RUN ] ScaleLayerTest/1.TestGradientBroadcastEnd +[ OK ] ScaleLayerTest/1.TestGradientBroadcastEnd (178 ms) +[ RUN ] ScaleLayerTest/1.TestForwardEltwise +[ OK ] ScaleLayerTest/1.TestForwardEltwise (0 ms) +[ RUN ] ScaleLayerTest/1.TestForwardScale +[ OK ] ScaleLayerTest/1.TestForwardScale (0 ms) +[ RUN ] ScaleLayerTest/1.TestGradientEltwise +[ OK ] ScaleLayerTest/1.TestGradientEltwise (6 ms) +[ RUN ] ScaleLayerTest/1.TestForwardBroadcastBegin +[ OK ] ScaleLayerTest/1.TestForwardBroadcastBegin (0 ms) +[ RUN ] ScaleLayerTest/1.TestForwardBroadcastMiddleWithParamAndBias +[ OK ] ScaleLayerTest/1.TestForwardBroadcastMiddleWithParamAndBias (0 ms) +[ RUN ] ScaleLayerTest/1.TestForwardBroadcastMiddleInPlace +[ OK ] ScaleLayerTest/1.TestForwardBroadcastMiddleInPlace (0 ms) +[ RUN ] ScaleLayerTest/1.TestForwardBroadcastEnd +[ OK ] ScaleLayerTest/1.TestForwardBroadcastEnd (0 ms) +[ RUN ] ScaleLayerTest/1.TestGradientBroadcastMiddleWithParam +[ OK ] 
ScaleLayerTest/1.TestGradientBroadcastMiddleWithParam (116 ms) +[ RUN ] ScaleLayerTest/1.TestForwardBroadcastMiddle +[ OK ] ScaleLayerTest/1.TestForwardBroadcastMiddle (0 ms) +[ RUN ] ScaleLayerTest/1.TestGradientEltwiseWithParam +[ OK ] ScaleLayerTest/1.TestGradientEltwiseWithParam (305 ms) +[ RUN ] ScaleLayerTest/1.TestGradientScaleAxis2 +[ OK ] ScaleLayerTest/1.TestGradientScaleAxis2 (91 ms) +[ RUN ] ScaleLayerTest/1.TestForwardScaleAxis2 +[ OK ] ScaleLayerTest/1.TestForwardScaleAxis2 (0 ms) +[ RUN ] ScaleLayerTest/1.TestGradientScaleAndBias +[ OK ] ScaleLayerTest/1.TestGradientScaleAndBias (115 ms) +[ RUN ] ScaleLayerTest/1.TestBackwardEltwiseInPlace +[ OK ] ScaleLayerTest/1.TestBackwardEltwiseInPlace (0 ms) +[ RUN ] ScaleLayerTest/1.TestGradientBroadcastMiddle +[ OK ] ScaleLayerTest/1.TestGradientBroadcastMiddle (113 ms) +[ RUN ] ScaleLayerTest/1.TestForwardBroadcastMiddleWithParam +[ OK ] ScaleLayerTest/1.TestForwardBroadcastMiddleWithParam (0 ms) +[----------] 22 tests from ScaleLayerTest/1 (1116 ms total) -[----------] 3 tests from MaxPoolingDropoutTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] MaxPoolingDropoutTest/0.TestBackward -[ OK ] MaxPoolingDropoutTest/0.TestBackward (0 ms) -[ RUN ] MaxPoolingDropoutTest/0.TestSetup -[ OK ] MaxPoolingDropoutTest/0.TestSetup (0 ms) -[ RUN ] MaxPoolingDropoutTest/0.TestForward -[ OK ] MaxPoolingDropoutTest/0.TestForward (0 ms) -[----------] 3 tests from MaxPoolingDropoutTest/0 (0 ms total) +[----------] 3 tests from BatchNormLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] BatchNormLayerTest/1.TestGradient +[ OK ] BatchNormLayerTest/1.TestGradient (187 ms) +[ RUN ] BatchNormLayerTest/1.TestForward +[ OK ] BatchNormLayerTest/1.TestForward (0 ms) +[ RUN ] BatchNormLayerTest/1.TestForwardInplace +[ OK ] BatchNormLayerTest/1.TestForwardInplace (0 ms) +[----------] 3 tests from BatchNormLayerTest/1 (187 ms total) -[----------] 6 tests from RNNLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] RNNLayerTest/1.TestGradientNonZeroContBufferSize2 -[ OK ] RNNLayerTest/1.TestGradientNonZeroContBufferSize2 (230 ms) -[ RUN ] RNNLayerTest/1.TestGradientNonZeroContBufferSize2WithStaticInput -[ OK ] RNNLayerTest/1.TestGradientNonZeroContBufferSize2WithStaticInput (1177 ms) -[ RUN ] RNNLayerTest/1.TestGradient -[ OK ] RNNLayerTest/1.TestGradient (123 ms) -[ RUN ] RNNLayerTest/1.TestForward -[ OK ] RNNLayerTest/1.TestForward (5 ms) -[ RUN ] RNNLayerTest/1.TestSetUp -[ OK ] RNNLayerTest/1.TestSetUp (1 ms) -[ RUN ] RNNLayerTest/1.TestGradientNonZeroCont -[ OK ] RNNLayerTest/1.TestGradientNonZeroCont (129 ms) -[----------] 6 tests from RNNLayerTest/1 (1665 ms total) +[----------] 3 tests from BatchNormLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] BatchNormLayerTest/0.TestForwardInplace +[ OK ] BatchNormLayerTest/0.TestForwardInplace (1 ms) +[ RUN ] BatchNormLayerTest/0.TestGradient +[ OK ] BatchNormLayerTest/0.TestGradient (193 ms) +[ RUN ] BatchNormLayerTest/0.TestForward +[ OK ] BatchNormLayerTest/0.TestForward (0 ms) +[----------] 3 tests from BatchNormLayerTest/0 (194 ms total) -[----------] 14 tests from DataLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] DataLayerTest/0.TestReadCropTestLevelDB -[ OK ] DataLayerTest/0.TestReadCropTestLevelDB (3 ms) -[ RUN ] DataLayerTest/0.TestReshapeLMDB -[ OK ] DataLayerTest/0.TestReshapeLMDB (2 ms) -[ RUN ] DataLayerTest/0.TestReadCropTrainSequenceUnseededLMDB -[ OK ] DataLayerTest/0.TestReadCropTrainSequenceUnseededLMDB (1 ms) -[ RUN ] DataLayerTest/0.TestReadLMDB -[ OK ] 
DataLayerTest/0.TestReadLMDB (51 ms) -[ RUN ] DataLayerTest/0.TestReadCropTrainLMDB -[ OK ] DataLayerTest/0.TestReadCropTrainLMDB (13 ms) -[ RUN ] DataLayerTest/0.TestReadCropTestLMDB -[ OK ] DataLayerTest/0.TestReadCropTestLMDB (3 ms) -[ RUN ] DataLayerTest/0.TestReadLevelDB -[ OK ] DataLayerTest/0.TestReadLevelDB (4 ms) -[ RUN ] DataLayerTest/0.TestReadCropTrainLevelDB -[ OK ] DataLayerTest/0.TestReadCropTrainLevelDB (1 ms) -[ RUN ] DataLayerTest/0.TestReadCropTrainSequenceSeededLevelDB -[ OK ] DataLayerTest/0.TestReadCropTrainSequenceSeededLevelDB (1 ms) -[ RUN ] DataLayerTest/0.TestSkipLevelDB -[ OK ] DataLayerTest/0.TestSkipLevelDB (28 ms) -[ RUN ] DataLayerTest/0.TestReshapeLevelDB -[ OK ] DataLayerTest/0.TestReshapeLevelDB (5 ms) -[ RUN ] DataLayerTest/0.TestReadCropTrainSequenceSeededLMDB -[ OK ] DataLayerTest/0.TestReadCropTrainSequenceSeededLMDB (2 ms) -[ RUN ] DataLayerTest/0.TestSkipLMDB -[ OK ] DataLayerTest/0.TestSkipLMDB (37 ms) -[ RUN ] DataLayerTest/0.TestReadCropTrainSequenceUnseededLevelDB -[ OK ] DataLayerTest/0.TestReadCropTrainSequenceUnseededLevelDB (10 ms) -[----------] 14 tests from DataLayerTest/0 (162 ms total) +[----------] 11 tests from AdaDeltaSolverTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdate +[ OK ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdate (14 ms) +[ RUN ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithHalfMomentum +[ OK ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithHalfMomentum (27 ms) +[ RUN ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithEverything +[ OK ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithEverything (69 ms) +[ RUN ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithEverythingShare +[ OK ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithEverythingShare (73 ms) +[ RUN ] AdaDeltaSolverTest/0.TestSnapshot +[ OK ] AdaDeltaSolverTest/0.TestSnapshot (16 ms) +[ RUN ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithWeightDecay +[ OK ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithWeightDecay (13 ms) +[ RUN ] AdaDeltaSolverTest/0.TestLeastSquaresUpdateWithMomentumMultiIter +[ OK ] AdaDeltaSolverTest/0.TestLeastSquaresUpdateWithMomentumMultiIter (68 ms) +[ RUN ] AdaDeltaSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare +[ OK ] AdaDeltaSolverTest/0.TestLeastSquaresUpdateWithEverythingAccumShare (4 ms) +[ RUN ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithMomentum +[ OK ] AdaDeltaSolverTest/0.TestAdaDeltaLeastSquaresUpdateWithMomentum (27 ms) +[ RUN ] AdaDeltaSolverTest/0.TestSnapshotShare +[ OK ] AdaDeltaSolverTest/0.TestSnapshotShare (23 ms) +[ RUN ] AdaDeltaSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum +[ OK ] AdaDeltaSolverTest/0.TestLeastSquaresUpdateWithEverythingAccum (3 ms) +[----------] 11 tests from AdaDeltaSolverTest/0 (340 ms total) -[----------] 7 tests from TileLayerTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] TileLayerTest/0.TestGradientChannels -[ OK ] TileLayerTest/0.TestGradientChannels (346 ms) -[ RUN ] TileLayerTest/0.TestTrivialGradient -[ OK ] TileLayerTest/0.TestTrivialGradient (146 ms) -[ RUN ] TileLayerTest/0.TestSetup -[ OK ] TileLayerTest/0.TestSetup (0 ms) -[ RUN ] TileLayerTest/0.TestTrivialSetup -[ OK ] TileLayerTest/0.TestTrivialSetup (0 ms) -[ RUN ] TileLayerTest/0.TestForwardChannels -[ OK ] TileLayerTest/0.TestForwardChannels (0 ms) -[ RUN ] TileLayerTest/0.TestForwardNum -[ OK ] TileLayerTest/0.TestForwardNum (0 ms) -[ RUN ] 
TileLayerTest/0.TestGradientNum -[ OK ] TileLayerTest/0.TestGradientNum (309 ms) -[----------] 7 tests from TileLayerTest/0 (801 ms total) +[----------] 1 test from SolverTypeUpgradeTest +[ RUN ] SolverTypeUpgradeTest.TestSimple +[ OK ] SolverTypeUpgradeTest.TestSimple (0 ms) +[----------] 1 test from SolverTypeUpgradeTest (0 ms total) -[----------] 12 tests from ArgMaxLayerTest/1, where TypeParam = double -[ RUN ] ArgMaxLayerTest/1.TestSetupAxis -[ OK ] ArgMaxLayerTest/1.TestSetupAxis (1 ms) -[ RUN ] ArgMaxLayerTest/1.TestCPUMaxValTopK -[ OK ] ArgMaxLayerTest/1.TestCPUMaxValTopK (1 ms) -[ RUN ] ArgMaxLayerTest/1.TestSetup -[ OK ] ArgMaxLayerTest/1.TestSetup (2 ms) -[ RUN ] ArgMaxLayerTest/1.TestCPUTopK -[ OK ] ArgMaxLayerTest/1.TestCPUTopK (1 ms) -[ RUN ] ArgMaxLayerTest/1.TestCPUAxisTopK -[ OK ] ArgMaxLayerTest/1.TestCPUAxisTopK (54 ms) -[ RUN ] ArgMaxLayerTest/1.TestSetupAxisNegativeIndexing -[ OK ] ArgMaxLayerTest/1.TestSetupAxisNegativeIndexing (1 ms) -[ RUN ] ArgMaxLayerTest/1.TestSetupAxisMaxVal -[ OK ] ArgMaxLayerTest/1.TestSetupAxisMaxVal (1 ms) -[ RUN ] ArgMaxLayerTest/1.TestCPUAxisMaxValTopK -[ OK ] ArgMaxLayerTest/1.TestCPUAxisMaxValTopK (35 ms) -[ RUN ] ArgMaxLayerTest/1.TestCPUMaxVal -[ OK ] ArgMaxLayerTest/1.TestCPUMaxVal (1 ms) -[ RUN ] ArgMaxLayerTest/1.TestSetupMaxVal -[ OK ] ArgMaxLayerTest/1.TestSetupMaxVal (0 ms) -[ RUN ] ArgMaxLayerTest/1.TestCPU -[ OK ] ArgMaxLayerTest/1.TestCPU (2 ms) -[ RUN ] ArgMaxLayerTest/1.TestCPUAxis -[ OK ] ArgMaxLayerTest/1.TestCPUAxis (13 ms) -[----------] 12 tests from ArgMaxLayerTest/1 (113 ms total) +[----------] 26 tests from IOTest +[ RUN ] IOTest.TestReadImageToDatumResizedGray +[ OK ] IOTest.TestReadImageToDatumResizedGray (4 ms) +[ RUN ] IOTest.TestReadImageToCVMat +[ OK ] IOTest.TestReadImageToCVMat (4 ms) +[ RUN ] IOTest.TestReadImageToDatum +[ OK ] IOTest.TestReadImageToDatum (6 ms) +[ RUN ] IOTest.TestReadFileToDatum +[ OK ] IOTest.TestReadFileToDatum (0 ms) +[ RUN ] IOTest.TestReadImageToDatumReferenceResized +[ OK ] IOTest.TestReadImageToDatumReferenceResized (9 ms) +[ RUN ] IOTest.TestReadImageToDatumResizedSquare +[ OK ] IOTest.TestReadImageToDatumResizedSquare (6 ms) +[ RUN ] IOTest.TestReadImageToCVMatGray +[ OK ] IOTest.TestReadImageToCVMatGray (3 ms) +[ RUN ] IOTest.TestDecodeDatumToCVMatContent +[ OK ] IOTest.TestDecodeDatumToCVMatContent (13 ms) +[ RUN ] IOTest.TestReadImageToDatumContentGray +[ OK ] IOTest.TestReadImageToDatumContentGray (6 ms) +[ RUN ] IOTest.TestReadImageToDatumReference +[ OK ] IOTest.TestReadImageToDatumReference (11 ms) +[ RUN ] IOTest.TestReadImageToDatumContent +[ OK ] IOTest.TestReadImageToDatumContent (12 ms) +[ RUN ] IOTest.TestCVMatToDatumReference +[ OK ] IOTest.TestCVMatToDatumReference (13 ms) +[ RUN ] IOTest.TestReadImageToCVMatResized +[ OK ] IOTest.TestReadImageToCVMatResized (4 ms) +[ RUN ] IOTest.TestDecodeDatum +[ OK ] IOTest.TestDecodeDatum (13 ms) +[ RUN ] IOTest.TestReadImageToCVMatResizedSquare +[ OK ] IOTest.TestReadImageToCVMatResizedSquare (5 ms) +[ RUN ] IOTest.TestDecodeDatumToCVMatContentNative +[ OK ] IOTest.TestDecodeDatumToCVMatContentNative (13 ms) +[ RUN ] IOTest.TestDecodeDatumToCVMat +[ OK ] IOTest.TestDecodeDatumToCVMat (7 ms) +[ RUN ] IOTest.TestDecodeDatumToCVMatNative +[ OK ] IOTest.TestDecodeDatumToCVMatNative (4 ms) +[ RUN ] IOTest.TestDecodeDatumToCVMatNativeGray +[ OK ] IOTest.TestDecodeDatumToCVMatNativeGray (6 ms) +[ RUN ] IOTest.TestDecodeDatumNativeGray +[ OK ] IOTest.TestDecodeDatumNativeGray (6 ms) +[ RUN ] IOTest.TestCVMatToDatum +[ OK ] 
IOTest.TestCVMatToDatum (5 ms) +[ RUN ] IOTest.TestCVMatToDatumContent +[ OK ] IOTest.TestCVMatToDatumContent (12 ms) +[ RUN ] IOTest.TestReadImageToCVMatResizedGray +[ OK ] IOTest.TestReadImageToCVMatResizedGray (3 ms) +[ RUN ] IOTest.TestReadImageToDatumGray +[ OK ] IOTest.TestReadImageToDatumGray (4 ms) +[ RUN ] IOTest.TestReadImageToDatumResized +[ OK ] IOTest.TestReadImageToDatumResized (4 ms) +[ RUN ] IOTest.TestDecodeDatumNative +[ OK ] IOTest.TestDecodeDatumNative (13 ms) +[----------] 26 tests from IOTest (186 ms total) -[----------] 2 tests from HDF5DataLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] HDF5DataLayerTest/1.TestSkip -[ OK ] HDF5DataLayerTest/1.TestSkip (20 ms) -[ RUN ] HDF5DataLayerTest/1.TestRead -[ OK ] HDF5DataLayerTest/1.TestRead (5 ms) -[----------] 2 tests from HDF5DataLayerTest/1 (25 ms total) +[----------] 9 tests from AccuracyLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] AccuracyLayerTest/0.TestSetupTopK +[ OK ] AccuracyLayerTest/0.TestSetupTopK (0 ms) +[ RUN ] AccuracyLayerTest/0.TestForwardPerClassWithIgnoreLabel +[ OK ] AccuracyLayerTest/0.TestForwardPerClassWithIgnoreLabel (1 ms) +[ RUN ] AccuracyLayerTest/0.TestForward +[ OK ] AccuracyLayerTest/0.TestForward (0 ms) +[ RUN ] AccuracyLayerTest/0.TestForwardPerClass +[ OK ] AccuracyLayerTest/0.TestForwardPerClass (1 ms) +[ RUN ] AccuracyLayerTest/0.TestForwardTopK +[ OK ] AccuracyLayerTest/0.TestForwardTopK (2 ms) +[ RUN ] AccuracyLayerTest/0.TestSetup +[ OK ] AccuracyLayerTest/0.TestSetup (0 ms) +[ RUN ] AccuracyLayerTest/0.TestForwardWithSpatialAxes +[ OK ] AccuracyLayerTest/0.TestForwardWithSpatialAxes (0 ms) +[ RUN ] AccuracyLayerTest/0.TestSetupOutputPerClass +[ OK ] AccuracyLayerTest/0.TestSetupOutputPerClass (0 ms) +[ RUN ] AccuracyLayerTest/0.TestForwardIgnoreLabel +[ OK ] AccuracyLayerTest/0.TestForwardIgnoreLabel (1 ms) +[----------] 9 tests from AccuracyLayerTest/0 (5 ms total) + +[----------] 15 tests from ConvolutionLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] ConvolutionLayerTest/0.TestSimple3DConvolution +[ OK ] ConvolutionLayerTest/0.TestSimple3DConvolution (1 ms) +[ RUN ] ConvolutionLayerTest/0.TestSobelConvolution +[ OK ] ConvolutionLayerTest/0.TestSobelConvolution (0 ms) +[ RUN ] ConvolutionLayerTest/0.TestGradientGroup +[ OK ] ConvolutionLayerTest/0.TestGradientGroup (24 ms) +[ RUN ] ConvolutionLayerTest/0.TestNDAgainst2D +[ OK ] ConvolutionLayerTest/0.TestNDAgainst2D (431 ms) +[ RUN ] ConvolutionLayerTest/0.TestSetup +[ OK ] ConvolutionLayerTest/0.TestSetup (0 ms) +[ RUN ] ConvolutionLayerTest/0.Test1x1Convolution +[ OK ] ConvolutionLayerTest/0.Test1x1Convolution (0 ms) +[ RUN ] ConvolutionLayerTest/0.TestSimpleConvolution +[ OK ] ConvolutionLayerTest/0.TestSimpleConvolution (0 ms) +[ RUN ] ConvolutionLayerTest/0.Test1x1Gradient +[ OK ] ConvolutionLayerTest/0.Test1x1Gradient (682 ms) +[ RUN ] ConvolutionLayerTest/0.TestDilated3DConvolution +[ OK ] ConvolutionLayerTest/0.TestDilated3DConvolution (6 ms) +[ RUN ] ConvolutionLayerTest/0.TestGradient +[ OK ] ConvolutionLayerTest/0.TestGradient (83 ms) +[ RUN ] ConvolutionLayerTest/0.Test0DConvolution +[ OK ] ConvolutionLayerTest/0.Test0DConvolution (0 ms) +[ RUN ] ConvolutionLayerTest/0.TestSimpleConvolutionGroup +[ OK ] ConvolutionLayerTest/0.TestSimpleConvolutionGroup (0 ms) +[ RUN ] ConvolutionLayerTest/0.TestGradient3D +[ OK ] ConvolutionLayerTest/0.TestGradient3D (708 ms) +[ RUN ] ConvolutionLayerTest/0.TestDilatedGradient +[ OK ] ConvolutionLayerTest/0.TestDilatedGradient (22 ms) +[ RUN ] 
ConvolutionLayerTest/0.TestDilatedConvolution +[ OK ] ConvolutionLayerTest/0.TestDilatedConvolution (1 ms) +[----------] 15 tests from ConvolutionLayerTest/0 (1959 ms total) + +[----------] 3 tests from TanHLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] TanHLayerTest/0.TestTanH +[ OK ] TanHLayerTest/0.TestTanH (0 ms) +[ RUN ] TanHLayerTest/0.TestTanHOverflow +[ OK ] TanHLayerTest/0.TestTanHOverflow (0 ms) +[ RUN ] TanHLayerTest/0.TestTanHGradient +[ OK ] TanHLayerTest/0.TestTanHGradient (5 ms) +[----------] 3 tests from TanHLayerTest/0 (5 ms total) + +[----------] 14 tests from DataLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] DataLayerTest/1.TestSkipLMDB +[ OK ] DataLayerTest/1.TestSkipLMDB (5 ms) +[ RUN ] DataLayerTest/1.TestReadCropTrainLMDB +[ OK ] DataLayerTest/1.TestReadCropTrainLMDB (1 ms) +[ RUN ] DataLayerTest/1.TestReadCropTrainSequenceSeededLevelDB +[ OK ] DataLayerTest/1.TestReadCropTrainSequenceSeededLevelDB (2 ms) +[ RUN ] DataLayerTest/1.TestReadCropTestLevelDB +[ OK ] DataLayerTest/1.TestReadCropTestLevelDB (0 ms) +[ RUN ] DataLayerTest/1.TestReadLevelDB +[ OK ] DataLayerTest/1.TestReadLevelDB (4 ms) +[ RUN ] DataLayerTest/1.TestReadCropTrainSequenceUnseededLevelDB +[ OK ] DataLayerTest/1.TestReadCropTrainSequenceUnseededLevelDB (2 ms) +[ RUN ] DataLayerTest/1.TestReshapeLMDB +[ OK ] DataLayerTest/1.TestReshapeLMDB (1 ms) +[ RUN ] DataLayerTest/1.TestReadCropTrainLevelDB +[ OK ] DataLayerTest/1.TestReadCropTrainLevelDB (1 ms) +[ RUN ] DataLayerTest/1.TestReadCropTrainSequenceUnseededLMDB +[ OK ] DataLayerTest/1.TestReadCropTrainSequenceUnseededLMDB (1 ms) +[ RUN ] DataLayerTest/1.TestReadCropTrainSequenceSeededLMDB +[ OK ] DataLayerTest/1.TestReadCropTrainSequenceSeededLMDB (1 ms) +[ RUN ] DataLayerTest/1.TestSkipLevelDB +[ OK ] DataLayerTest/1.TestSkipLevelDB (9 ms) +[ RUN ] DataLayerTest/1.TestReadLMDB +[ OK ] DataLayerTest/1.TestReadLMDB (4 ms) +[ RUN ] DataLayerTest/1.TestReshapeLevelDB +[ OK ] DataLayerTest/1.TestReshapeLevelDB (1 ms) +[ RUN ] DataLayerTest/1.TestReadCropTestLMDB +[ OK ] DataLayerTest/1.TestReadCropTestLMDB (1 ms) +[----------] 14 tests from DataLayerTest/1 (33 ms total) + +[----------] 4 tests from PositiveUnitballFillerTest/1, where TypeParam = double +[ RUN ] PositiveUnitballFillerTest/1.TestFill +[ OK ] PositiveUnitballFillerTest/1.TestFill (0 ms) +[ RUN ] PositiveUnitballFillerTest/1.TestFill1D +[ OK ] PositiveUnitballFillerTest/1.TestFill1D (0 ms) +[ RUN ] PositiveUnitballFillerTest/1.TestFill2D +[ OK ] PositiveUnitballFillerTest/1.TestFill2D (0 ms) +[ RUN ] PositiveUnitballFillerTest/1.TestFill5D +[ OK ] PositiveUnitballFillerTest/1.TestFill5D (0 ms) +[----------] 4 tests from PositiveUnitballFillerTest/1 (0 ms total) + +[----------] 3 tests from SyncedMemoryTest +[ RUN ] SyncedMemoryTest.TestCPUWrite +[ OK ] SyncedMemoryTest.TestCPUWrite (0 ms) +[ RUN ] SyncedMemoryTest.TestAllocationCPU +[ OK ] SyncedMemoryTest.TestAllocationCPU (0 ms) +[ RUN ] SyncedMemoryTest.TestInitialization +[ OK ] SyncedMemoryTest.TestInitialization (0 ms) +[----------] 3 tests from SyncedMemoryTest (0 ms total) + +[----------] 58 tests from NeuronLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] NeuronLayerTest/0.TestSigmoid +[ OK ] NeuronLayerTest/0.TestSigmoid (0 ms) +[ RUN ] NeuronLayerTest/0.TestClip +[ OK ] NeuronLayerTest/0.TestClip (1 ms) +[ RUN ] NeuronLayerTest/0.TestSwishAsLinear +[ OK ] NeuronLayerTest/0.TestSwishAsLinear (0 ms) +[ RUN ] NeuronLayerTest/0.TestLogGradientBase2 +[ OK ] NeuronLayerTest/0.TestLogGradientBase2 (2 ms) 
+[ RUN ] NeuronLayerTest/0.TestReLU +[ OK ] NeuronLayerTest/0.TestReLU (0 ms) +[ RUN ] NeuronLayerTest/0.TestELU +[ OK ] NeuronLayerTest/0.TestELU (0 ms) +[ RUN ] NeuronLayerTest/0.TestSwish +[ OK ] NeuronLayerTest/0.TestSwish (0 ms) +[ RUN ] NeuronLayerTest/0.TestPReLUForwardChannelShared +[ OK ] NeuronLayerTest/0.TestPReLUForwardChannelShared (0 ms) +[ RUN ] NeuronLayerTest/0.TestELUasReLUGradient +[ OK ] NeuronLayerTest/0.TestELUasReLUGradient (3 ms) +[ RUN ] NeuronLayerTest/0.TestExpLayerBase2 +[ OK ] NeuronLayerTest/0.TestExpLayerBase2 (0 ms) +[ RUN ] NeuronLayerTest/0.TestExpLayer +[ OK ] NeuronLayerTest/0.TestExpLayer (0 ms) +[ RUN ] NeuronLayerTest/0.TestExpGradientWithShift +[ OK ] NeuronLayerTest/0.TestExpGradientWithShift (2 ms) +[ RUN ] NeuronLayerTest/0.TestSwishWithBetaGradient +[ OK ] NeuronLayerTest/0.TestSwishWithBetaGradient (5 ms) +[ RUN ] NeuronLayerTest/0.TestTanH +[ OK ] NeuronLayerTest/0.TestTanH (0 ms) +[ RUN ] NeuronLayerTest/0.TestELUGradient +[ OK ] NeuronLayerTest/0.TestELUGradient (3 ms) +[ RUN ] NeuronLayerTest/0.TestLogLayerBase2Shift1Scale3 +[ OK ] NeuronLayerTest/0.TestLogLayerBase2Shift1Scale3 (0 ms) +[ RUN ] NeuronLayerTest/0.TestPReLUGradient +[ OK ] NeuronLayerTest/0.TestPReLUGradient (135 ms) +[ RUN ] NeuronLayerTest/0.TestSwishWithBeta +[ OK ] NeuronLayerTest/0.TestSwishWithBeta (0 ms) +[ RUN ] NeuronLayerTest/0.TestExpLayerBase2Shift1Scale3 +[ OK ] NeuronLayerTest/0.TestExpLayerBase2Shift1Scale3 (0 ms) +[ RUN ] NeuronLayerTest/0.TestExpLayerBase2Shift1 +[ OK ] NeuronLayerTest/0.TestExpLayerBase2Shift1 (0 ms) +[ RUN ] NeuronLayerTest/0.TestDropoutTestPhase +[ OK ] NeuronLayerTest/0.TestDropoutTestPhase (0 ms) +[ RUN ] NeuronLayerTest/0.TestLogLayerBase2Shift1 +[ OK ] NeuronLayerTest/0.TestLogLayerBase2Shift1 (0 ms) +[ RUN ] NeuronLayerTest/0.TestLogGradientBase2Shift1 +[ OK ] NeuronLayerTest/0.TestLogGradientBase2Shift1 (3 ms) +[ RUN ] NeuronLayerTest/0.TestExpLayerWithShift +[ OK ] NeuronLayerTest/0.TestExpLayerWithShift (0 ms) +[ RUN ] NeuronLayerTest/0.TestSwishGradient +[ OK ] NeuronLayerTest/0.TestSwishGradient (4 ms) +[ RUN ] NeuronLayerTest/0.TestSigmoidGradient +[ OK ] NeuronLayerTest/0.TestSigmoidGradient (5 ms) +[ RUN ] NeuronLayerTest/0.TestBNLLGradient +[ OK ] NeuronLayerTest/0.TestBNLLGradient (4 ms) +[ RUN ] NeuronLayerTest/0.TestPReLUInPlace +[ OK ] NeuronLayerTest/0.TestPReLUInPlace (0 ms) +[ RUN ] NeuronLayerTest/0.TestDropoutThreeQuarters +[ OK ] NeuronLayerTest/0.TestDropoutThreeQuarters (0 ms) +[ RUN ] NeuronLayerTest/0.TestSwishAsLinearGradient +[ OK ] NeuronLayerTest/0.TestSwishAsLinearGradient (2 ms) +[ RUN ] NeuronLayerTest/0.TestAbsVal +[ OK ] NeuronLayerTest/0.TestAbsVal (0 ms) +[ RUN ] NeuronLayerTest/0.TestDropoutGradient +[ OK ] NeuronLayerTest/0.TestDropoutGradient (2 ms) +[ RUN ] NeuronLayerTest/0.TestLogLayer +[ OK ] NeuronLayerTest/0.TestLogLayer (0 ms) +[ RUN ] NeuronLayerTest/0.TestClipGradient +[ OK ] NeuronLayerTest/0.TestClipGradient (2 ms) +[ RUN ] NeuronLayerTest/0.TestExpLayerBase2Scale3 +[ OK ] NeuronLayerTest/0.TestExpLayerBase2Scale3 (0 ms) +[ RUN ] NeuronLayerTest/0.TestExpGradientBase2Shift1 +[ OK ] NeuronLayerTest/0.TestExpGradientBase2Shift1 (2 ms) +[ RUN ] NeuronLayerTest/0.TestLogLayerBase2Scale3 +[ OK ] NeuronLayerTest/0.TestLogLayerBase2Scale3 (0 ms) +[ RUN ] NeuronLayerTest/0.TestExpGradient +[ OK ] NeuronLayerTest/0.TestExpGradient (3 ms) +[ RUN ] NeuronLayerTest/0.TestLogGradient +[ OK ] NeuronLayerTest/0.TestLogGradient (2 ms) +[ RUN ] NeuronLayerTest/0.TestPReLUConsistencyReLU +[ OK ] 
NeuronLayerTest/0.TestPReLUConsistencyReLU (0 ms) +[ RUN ] NeuronLayerTest/0.TestBNLL +[ OK ] NeuronLayerTest/0.TestBNLL (0 ms) +[ RUN ] NeuronLayerTest/0.TestELUasReLU +[ OK ] NeuronLayerTest/0.TestELUasReLU (0 ms) +[ RUN ] NeuronLayerTest/0.TestDropoutHalf +[ OK ] NeuronLayerTest/0.TestDropoutHalf (0 ms) +[ RUN ] NeuronLayerTest/0.TestAbsGradient +[ OK ] NeuronLayerTest/0.TestAbsGradient (2 ms) +[ RUN ] NeuronLayerTest/0.TestLogLayerBase2 +[ OK ] NeuronLayerTest/0.TestLogLayerBase2 (0 ms) +[ RUN ] NeuronLayerTest/0.TestPReLUForward +[ OK ] NeuronLayerTest/0.TestPReLUForward (0 ms) +[ RUN ] NeuronLayerTest/0.TestDropoutGradientTest +[ OK ] NeuronLayerTest/0.TestDropoutGradientTest (2 ms) +[ RUN ] NeuronLayerTest/0.TestPReLUGradientChannelShared +[ OK ] NeuronLayerTest/0.TestPReLUGradientChannelShared (134 ms) +[ RUN ] NeuronLayerTest/0.TestReLUGradient +[ OK ] NeuronLayerTest/0.TestReLUGradient (1 ms) +[ RUN ] NeuronLayerTest/0.TestExpGradientBase2Scale3 +[ OK ] NeuronLayerTest/0.TestExpGradientBase2Scale3 (3 ms) +[ RUN ] NeuronLayerTest/0.TestReLUGradientWithNegativeSlope +[ OK ] NeuronLayerTest/0.TestReLUGradientWithNegativeSlope (1 ms) +[ RUN ] NeuronLayerTest/0.TestTanHGradient +[ OK ] NeuronLayerTest/0.TestTanHGradient (6 ms) +[ RUN ] NeuronLayerTest/0.TestLogGradientBase2Shift1Scale3 +[ OK ] NeuronLayerTest/0.TestLogGradientBase2Shift1Scale3 (3 ms) +[ RUN ] NeuronLayerTest/0.TestExpGradientBase2 +[ OK ] NeuronLayerTest/0.TestExpGradientBase2 (2 ms) +[ RUN ] NeuronLayerTest/0.TestReLUWithNegativeSlope +[ OK ] NeuronLayerTest/0.TestReLUWithNegativeSlope (0 ms) +[ RUN ] NeuronLayerTest/0.TestLogGradientBase2Scale3 +[ OK ] NeuronLayerTest/0.TestLogGradientBase2Scale3 (3 ms) +[ RUN ] NeuronLayerTest/0.TestExpGradientBase2Shift1Scale3 +[ OK ] NeuronLayerTest/0.TestExpGradientBase2Shift1Scale3 (2 ms) +[ RUN ] NeuronLayerTest/0.TestPReLUParam +[ OK ] NeuronLayerTest/0.TestPReLUParam (0 ms) +[----------] 58 tests from NeuronLayerTest/0 (340 ms total) [----------] 12 tests from DataTransformTest/0, where TypeParam = float +[ RUN ] DataTransformTest/0.TestMeanFile +[ OK ] DataTransformTest/0.TestMeanFile (0 ms) +[ RUN ] DataTransformTest/0.TestCropMirrorTest +[ OK ] DataTransformTest/0.TestCropMirrorTest (0 ms) +[ RUN ] DataTransformTest/0.TestMirrorTrain +[ OK ] DataTransformTest/0.TestMirrorTrain (1 ms) [ RUN ] DataTransformTest/0.TestCropTest [ OK ] DataTransformTest/0.TestCropTest (0 ms) -[ RUN ] DataTransformTest/0.TestEmptyTransformUniquePixels -[ OK ] DataTransformTest/0.TestEmptyTransformUniquePixels (0 ms) [ RUN ] DataTransformTest/0.TestCropSize [ OK ] DataTransformTest/0.TestCropSize (0 ms) -[ RUN ] DataTransformTest/0.TestMirrorTrain -[ OK ] DataTransformTest/0.TestMirrorTrain (0 ms) +[ RUN ] DataTransformTest/0.TestCropMirrorTrain +[ OK ] DataTransformTest/0.TestCropMirrorTrain (0 ms) [ RUN ] DataTransformTest/0.TestEmptyTransform [ OK ] DataTransformTest/0.TestEmptyTransform (0 ms) -[ RUN ] DataTransformTest/0.TestCropMirrorTest -[ OK ] DataTransformTest/0.TestCropMirrorTest (0 ms) [ RUN ] DataTransformTest/0.TestMeanValue [ OK ] DataTransformTest/0.TestMeanValue (0 ms) -[ RUN ] DataTransformTest/0.TestMeanFile -[ OK ] DataTransformTest/0.TestMeanFile (1 ms) +[ RUN ] DataTransformTest/0.TestCropTrain +[ OK ] DataTransformTest/0.TestCropTrain (0 ms) [ RUN ] DataTransformTest/0.TestMirrorTest [ OK ] DataTransformTest/0.TestMirrorTest (0 ms) -[ RUN ] DataTransformTest/0.TestCropMirrorTrain -[ OK ] DataTransformTest/0.TestCropMirrorTrain (0 ms) +[ RUN ] 
DataTransformTest/0.TestEmptyTransformUniquePixels +[ OK ] DataTransformTest/0.TestEmptyTransformUniquePixels (0 ms) [ RUN ] DataTransformTest/0.TestMeanValues [ OK ] DataTransformTest/0.TestMeanValues (0 ms) -[ RUN ] DataTransformTest/0.TestCropTrain -[ OK ] DataTransformTest/0.TestCropTrain (0 ms) [----------] 12 tests from DataTransformTest/0 (1 ms total) -[----------] 3 tests from FilterLayerTest/1, where TypeParam = caffe::CPUDevice -[ RUN ] FilterLayerTest/1.TestForward -[ OK ] FilterLayerTest/1.TestForward (0 ms) -[ RUN ] FilterLayerTest/1.TestGradient -[ OK ] FilterLayerTest/1.TestGradient (328 ms) -[ RUN ] FilterLayerTest/1.TestReshape -[ OK ] FilterLayerTest/1.TestReshape (0 ms) -[----------] 3 tests from FilterLayerTest/1 (328 ms total) +[----------] 7 tests from CPUMathFunctionsTest/1, where TypeParam = double +[ RUN ] CPUMathFunctionsTest/1.TestFabs +[ OK ] CPUMathFunctionsTest/1.TestFabs (5 ms) +[ RUN ] CPUMathFunctionsTest/1.TestSign +[ OK ] CPUMathFunctionsTest/1.TestSign (5 ms) +[ RUN ] CPUMathFunctionsTest/1.TestCopy +[ OK ] CPUMathFunctionsTest/1.TestCopy (4 ms) +[ RUN ] CPUMathFunctionsTest/1.TestAsum +[ OK ] CPUMathFunctionsTest/1.TestAsum (4 ms) +[ RUN ] CPUMathFunctionsTest/1.TestNothing +[ OK ] CPUMathFunctionsTest/1.TestNothing (3 ms) +[ RUN ] CPUMathFunctionsTest/1.TestSgnbit +[ OK ] CPUMathFunctionsTest/1.TestSgnbit (4 ms) +[ RUN ] CPUMathFunctionsTest/1.TestScale +[ OK ] CPUMathFunctionsTest/1.TestScale (4 ms) +[----------] 7 tests from CPUMathFunctionsTest/1 (29 ms total) -[----------] 26 tests from NetTest/0, where TypeParam = caffe::CPUDevice -[ RUN ] NetTest/0.TestAllInOneNetVal -[ OK ] NetTest/0.TestAllInOneNetVal (1 ms) -[ RUN ] NetTest/0.TestLossWeightMidNet -[ OK ] NetTest/0.TestLossWeightMidNet (5 ms) -[ RUN ] NetTest/0.TestForcePropagateDown -[ OK ] NetTest/0.TestForcePropagateDown (0 ms) -[ RUN ] NetTest/0.TestBottomNeedBackward -[ OK ] NetTest/0.TestBottomNeedBackward (1 ms) -[ RUN ] NetTest/0.TestUnsharedWeightsDataNet -[ OK ] NetTest/0.TestUnsharedWeightsDataNet (1 ms) -[ RUN ] NetTest/0.TestBottomNeedBackwardEuclideanForce -[ OK ] NetTest/0.TestBottomNeedBackwardEuclideanForce (0 ms) -[ RUN ] NetTest/0.TestUnsharedWeightsDiffNet -[ OK ] NetTest/0.TestUnsharedWeightsDiffNet (1 ms) -[ RUN ] NetTest/0.TestSharedWeightsDiffNet -[ OK ] NetTest/0.TestSharedWeightsDiffNet (0 ms) -[ RUN ] NetTest/0.TestHasBlob -[ OK ] NetTest/0.TestHasBlob (1 ms) -[ RUN ] NetTest/0.TestAllInOneNetTrain -[ OK ] NetTest/0.TestAllInOneNetTrain (1 ms) -[ RUN ] NetTest/0.TestBottomNeedBackwardTricky -[ OK ] NetTest/0.TestBottomNeedBackwardTricky (1 ms) -[ RUN ] NetTest/0.TestAllInOneNetDeploy -[ OK ] NetTest/0.TestAllInOneNetDeploy (0 ms) -[ RUN ] NetTest/0.TestComboLossWeight -[ OK ] NetTest/0.TestComboLossWeight (4 ms) -[ RUN ] NetTest/0.TestBackwardWithAccuracyLayer -[ OK ] NetTest/0.TestBackwardWithAccuracyLayer (2 ms) -[ RUN ] NetTest/0.TestParamPropagateDown -[ OK ] NetTest/0.TestParamPropagateDown (2 ms) -[ RUN ] NetTest/0.TestGetBlob -[ OK ] NetTest/0.TestGetBlob (1 ms) -[ RUN ] NetTest/0.TestLossWeight -[ OK ] NetTest/0.TestLossWeight (4 ms) -[ RUN ] NetTest/0.TestReshape -[ OK ] NetTest/0.TestReshape (1 ms) -[ RUN ] NetTest/0.TestSkipPropagateDown -[ OK ] NetTest/0.TestSkipPropagateDown (2 ms) -[ RUN ] NetTest/0.TestSharedWeightsDataNet -[ OK ] NetTest/0.TestSharedWeightsDataNet (1 ms) -[ RUN ] NetTest/0.TestGetLayerByName -[ OK ] NetTest/0.TestGetLayerByName (0 ms) -[ RUN ] NetTest/0.TestFromTo -[ OK ] NetTest/0.TestFromTo (3 ms) -[ RUN ] NetTest/0.TestHasLayer -[ 
OK ] NetTest/0.TestHasLayer (1 ms) -[ RUN ] NetTest/0.TestSharedWeightsResume -[ OK ] NetTest/0.TestSharedWeightsResume (1 ms) -[ RUN ] NetTest/0.TestSharedWeightsUpdate -[ OK ] NetTest/0.TestSharedWeightsUpdate (1 ms) -[ RUN ] NetTest/0.TestBottomNeedBackwardForce -[ OK ] NetTest/0.TestBottomNeedBackwardForce (3 ms) -[----------] 26 tests from NetTest/0 (38 ms total) +[----------] 7 tests from CPUMathFunctionsTest/0, where TypeParam = float +[ RUN ] CPUMathFunctionsTest/0.TestSign +[ OK ] CPUMathFunctionsTest/0.TestSign (3 ms) +[ RUN ] CPUMathFunctionsTest/0.TestAsum +[ OK ] CPUMathFunctionsTest/0.TestAsum (3 ms) +[ RUN ] CPUMathFunctionsTest/0.TestNothing +[ OK ] CPUMathFunctionsTest/0.TestNothing (2 ms) +[ RUN ] CPUMathFunctionsTest/0.TestCopy +[ OK ] CPUMathFunctionsTest/0.TestCopy (3 ms) +[ RUN ] CPUMathFunctionsTest/0.TestSgnbit +[ OK ] CPUMathFunctionsTest/0.TestSgnbit (2 ms) +[ RUN ] CPUMathFunctionsTest/0.TestScale +[ OK ] CPUMathFunctionsTest/0.TestScale (3 ms) +[ RUN ] CPUMathFunctionsTest/0.TestFabs +[ OK ] CPUMathFunctionsTest/0.TestFabs (3 ms) +[----------] 7 tests from CPUMathFunctionsTest/0 (19 ms total) + +[----------] 20 tests from BiasLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] BiasLayerTest/0.TestBackwardEltwiseInPlace +[ OK ] BiasLayerTest/0.TestBackwardEltwiseInPlace (0 ms) +[ RUN ] BiasLayerTest/0.TestForwardBroadcastMiddleWithParam +[ OK ] BiasLayerTest/0.TestForwardBroadcastMiddleWithParam (0 ms) +[ RUN ] BiasLayerTest/0.TestForwardEltwise +[ OK ] BiasLayerTest/0.TestForwardEltwise (0 ms) +[ RUN ] BiasLayerTest/0.TestForwardEltwiseInPlace +[ OK ] BiasLayerTest/0.TestForwardEltwiseInPlace (0 ms) +[ RUN ] BiasLayerTest/0.TestGradientEltwise +[ OK ] BiasLayerTest/0.TestGradientEltwise (4 ms) +[ RUN ] BiasLayerTest/0.TestForwardEltwiseWithParam +[ OK ] BiasLayerTest/0.TestForwardEltwiseWithParam (0 ms) +[ RUN ] BiasLayerTest/0.TestForwardBroadcastBegin +[ OK ] BiasLayerTest/0.TestForwardBroadcastBegin (0 ms) +[ RUN ] BiasLayerTest/0.TestGradientBroadcastBegin +[ OK ] BiasLayerTest/0.TestGradientBroadcastBegin (99 ms) +[ RUN ] BiasLayerTest/0.TestGradientBroadcastMiddle +[ OK ] BiasLayerTest/0.TestGradientBroadcastMiddle (110 ms) +[ RUN ] BiasLayerTest/0.TestBackwardBroadcastMiddleInPlace +[ OK ] BiasLayerTest/0.TestBackwardBroadcastMiddleInPlace (0 ms) +[ RUN ] BiasLayerTest/0.TestForwardBias +[ OK ] BiasLayerTest/0.TestForwardBias (0 ms) +[ RUN ] BiasLayerTest/0.TestGradientBias +[ OK ] BiasLayerTest/0.TestGradientBias (92 ms) +[ RUN ] BiasLayerTest/0.TestForwardBroadcastMiddleInPlace +[ OK ] BiasLayerTest/0.TestForwardBroadcastMiddleInPlace (0 ms) +[ RUN ] BiasLayerTest/0.TestForwardBiasAxis2 +[ OK ] BiasLayerTest/0.TestForwardBiasAxis2 (0 ms) +[ RUN ] BiasLayerTest/0.TestForwardBroadcastMiddle +[ OK ] BiasLayerTest/0.TestForwardBroadcastMiddle (0 ms) +[ RUN ] BiasLayerTest/0.TestGradientEltwiseWithParam +[ OK ] BiasLayerTest/0.TestGradientEltwiseWithParam (218 ms) +[ RUN ] BiasLayerTest/0.TestGradientBroadcastEnd +[ OK ] BiasLayerTest/0.TestGradientBroadcastEnd (142 ms) +[ RUN ] BiasLayerTest/0.TestGradientBiasAxis2 +[ OK ] BiasLayerTest/0.TestGradientBiasAxis2 (94 ms) +[ RUN ] BiasLayerTest/0.TestGradientBroadcastMiddleWithParam +[ OK ] BiasLayerTest/0.TestGradientBroadcastMiddleWithParam (109 ms) +[ RUN ] BiasLayerTest/0.TestForwardBroadcastEnd +[ OK ] BiasLayerTest/0.TestForwardBroadcastEnd (0 ms) +[----------] 20 tests from BiasLayerTest/0 (868 ms total) + +[----------] 5 tests from EmbedLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] 
EmbedLayerTest/0.TestSetUp +[ OK ] EmbedLayerTest/0.TestSetUp (0 ms) +[ RUN ] EmbedLayerTest/0.TestForward +[ OK ] EmbedLayerTest/0.TestForward (0 ms) +[ RUN ] EmbedLayerTest/0.TestForwardWithBias +[ OK ] EmbedLayerTest/0.TestForwardWithBias (0 ms) +[ RUN ] EmbedLayerTest/0.TestGradient +[ OK ] EmbedLayerTest/0.TestGradient (13 ms) +[ RUN ] EmbedLayerTest/0.TestGradientWithBias +[ OK ] EmbedLayerTest/0.TestGradientWithBias (17 ms) +[----------] 5 tests from EmbedLayerTest/0 (30 ms total) + +[----------] 15 tests from ConvolutionLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] ConvolutionLayerTest/1.TestSobelConvolution +[ OK ] ConvolutionLayerTest/1.TestSobelConvolution (0 ms) +[ RUN ] ConvolutionLayerTest/1.Test0DConvolution +[ OK ] ConvolutionLayerTest/1.Test0DConvolution (0 ms) +[ RUN ] ConvolutionLayerTest/1.Test1x1Convolution +[ OK ] ConvolutionLayerTest/1.Test1x1Convolution (1 ms) +[ RUN ] ConvolutionLayerTest/1.Test1x1Gradient +[ OK ] ConvolutionLayerTest/1.Test1x1Gradient (716 ms) +[ RUN ] ConvolutionLayerTest/1.TestDilatedConvolution +[ OK ] ConvolutionLayerTest/1.TestDilatedConvolution (1 ms) +[ RUN ] ConvolutionLayerTest/1.TestNDAgainst2D +[ OK ] ConvolutionLayerTest/1.TestNDAgainst2D (502 ms) +[ RUN ] ConvolutionLayerTest/1.TestSetup +[ OK ] ConvolutionLayerTest/1.TestSetup (1 ms) +[ RUN ] ConvolutionLayerTest/1.TestGradient3D +[ OK ] ConvolutionLayerTest/1.TestGradient3D (707 ms) +[ RUN ] ConvolutionLayerTest/1.TestSimple3DConvolution +[ OK ] ConvolutionLayerTest/1.TestSimple3DConvolution (1 ms) +[ RUN ] ConvolutionLayerTest/1.TestDilated3DConvolution +[ OK ] ConvolutionLayerTest/1.TestDilated3DConvolution (7 ms) +[ RUN ] ConvolutionLayerTest/1.TestGradient +[ OK ] ConvolutionLayerTest/1.TestGradient (80 ms) +[ RUN ] ConvolutionLayerTest/1.TestSimpleConvolution +[ OK ] ConvolutionLayerTest/1.TestSimpleConvolution (0 ms) +[ RUN ] ConvolutionLayerTest/1.TestDilatedGradient +[ OK ] ConvolutionLayerTest/1.TestDilatedGradient (20 ms) +[ RUN ] ConvolutionLayerTest/1.TestSimpleConvolutionGroup +[ OK ] ConvolutionLayerTest/1.TestSimpleConvolutionGroup (1 ms) +[ RUN ] ConvolutionLayerTest/1.TestGradientGroup +[ OK ] ConvolutionLayerTest/1.TestGradientGroup (23 ms) +[----------] 15 tests from ConvolutionLayerTest/1 (2060 ms total) + +[----------] 8 tests from RMSPropSolverTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] RMSPropSolverTest/1.TestSnapshot +[ OK ] RMSPropSolverTest/1.TestSnapshot (18 ms) +[ RUN ] RMSPropSolverTest/1.TestRMSPropLeastSquaresUpdateWithEverythingShare +[ OK ] RMSPropSolverTest/1.TestRMSPropLeastSquaresUpdateWithEverythingShare (81 ms) +[ RUN ] RMSPropSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum +[ OK ] RMSPropSolverTest/1.TestLeastSquaresUpdateWithEverythingAccum (3 ms) +[ RUN ] RMSPropSolverTest/1.TestSnapshotShare +[ OK ] RMSPropSolverTest/1.TestSnapshotShare (20 ms) +[ RUN ] RMSPropSolverTest/1.TestRMSPropLeastSquaresUpdateWithEverything +[ OK ] RMSPropSolverTest/1.TestRMSPropLeastSquaresUpdateWithEverything (73 ms) +[ RUN ] RMSPropSolverTest/1.TestRMSPropLeastSquaresUpdateWithRmsDecay +[ OK ] RMSPropSolverTest/1.TestRMSPropLeastSquaresUpdateWithRmsDecay (72 ms) +[ RUN ] RMSPropSolverTest/1.TestRMSPropLeastSquaresUpdateWithWeightDecay +[ OK ] RMSPropSolverTest/1.TestRMSPropLeastSquaresUpdateWithWeightDecay (14 ms) +[ RUN ] RMSPropSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare +[ OK ] RMSPropSolverTest/1.TestLeastSquaresUpdateWithEverythingAccumShare (3 ms) +[----------] 8 tests from RMSPropSolverTest/1 (285 ms total) + 
+[----------] 8 tests from LRNLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] LRNLayerTest/1.TestGradientAcrossChannelsLargeRegion +[ OK ] LRNLayerTest/1.TestGradientAcrossChannelsLargeRegion (1321 ms) +[ RUN ] LRNLayerTest/1.TestGradientAcrossChannels +[ OK ] LRNLayerTest/1.TestGradientAcrossChannels (1148 ms) +[ RUN ] LRNLayerTest/1.TestForwardWithinChannel +[ OK ] LRNLayerTest/1.TestForwardWithinChannel (1 ms) +[ RUN ] LRNLayerTest/1.TestSetupWithinChannel +[ OK ] LRNLayerTest/1.TestSetupWithinChannel (0 ms) +[ RUN ] LRNLayerTest/1.TestForwardAcrossChannelsLargeRegion +[ OK ] LRNLayerTest/1.TestForwardAcrossChannelsLargeRegion (0 ms) +[ RUN ] LRNLayerTest/1.TestGradientWithinChannel +[ OK ] LRNLayerTest/1.TestGradientWithinChannel (1002 ms) +[ RUN ] LRNLayerTest/1.TestSetupAcrossChannels +[ OK ] LRNLayerTest/1.TestSetupAcrossChannels (0 ms) +[ RUN ] LRNLayerTest/1.TestForwardAcrossChannels +[ OK ] LRNLayerTest/1.TestForwardAcrossChannels (1 ms) +[----------] 8 tests from LRNLayerTest/1 (3473 ms total) + +[----------] 4 tests from GaussianFillerTest/1, where TypeParam = double +[ RUN ] GaussianFillerTest/1.TestFill2D +[ OK ] GaussianFillerTest/1.TestFill2D (0 ms) +[ RUN ] GaussianFillerTest/1.TestFill5D +[ OK ] GaussianFillerTest/1.TestFill5D (0 ms) +[ RUN ] GaussianFillerTest/1.TestFill +[ OK ] GaussianFillerTest/1.TestFill (1 ms) +[ RUN ] GaussianFillerTest/1.TestFill1D +[ OK ] GaussianFillerTest/1.TestFill1D (0 ms) +[----------] 4 tests from GaussianFillerTest/1 (1 ms total) + +[----------] 11 tests from PoolingLayerTest/0, where TypeParam = caffe::CPUDevice +[ RUN ] PoolingLayerTest/0.TestForwardAve +[ OK ] PoolingLayerTest/0.TestForwardAve (0 ms) +[ RUN ] PoolingLayerTest/0.TestForwardMaxPadded +[ OK ] PoolingLayerTest/0.TestForwardMaxPadded (0 ms) +[ RUN ] PoolingLayerTest/0.TestForwardMax +[ OK ] PoolingLayerTest/0.TestForwardMax (0 ms) +[ RUN ] PoolingLayerTest/0.TestSetup +[ OK ] PoolingLayerTest/0.TestSetup (0 ms) +[ RUN ] PoolingLayerTest/0.TestGradientAvePadded +[ OK ] PoolingLayerTest/0.TestGradientAvePadded (938 ms) +[ RUN ] PoolingLayerTest/0.TestGradientMaxTopMask +[ OK ] PoolingLayerTest/0.TestGradientMaxTopMask (690 ms) +[ RUN ] PoolingLayerTest/0.TestGradientMax +[ OK ] PoolingLayerTest/0.TestGradientMax (903 ms) +[ RUN ] PoolingLayerTest/0.TestGradientAve +[ OK ] PoolingLayerTest/0.TestGradientAve (247 ms) +[ RUN ] PoolingLayerTest/0.TestSetupPadded +[ OK ] PoolingLayerTest/0.TestSetupPadded (0 ms) +[ RUN ] PoolingLayerTest/0.TestForwardMaxTopMask +[ OK ] PoolingLayerTest/0.TestForwardMaxTopMask (0 ms) +[ RUN ] PoolingLayerTest/0.TestSetupGlobalPooling +[ OK ] PoolingLayerTest/0.TestSetupGlobalPooling (0 ms) +[----------] 11 tests from PoolingLayerTest/0 (2778 ms total) + +[----------] 2 tests from HDF5DataLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] HDF5DataLayerTest/1.TestRead +[ OK ] HDF5DataLayerTest/1.TestRead (5 ms) +[ RUN ] HDF5DataLayerTest/1.TestSkip +[ OK ] HDF5DataLayerTest/1.TestSkip (18 ms) +[----------] 2 tests from HDF5DataLayerTest/1 (23 ms total) + +[----------] 5 tests from MemoryDataLayerTest/1, where TypeParam = caffe::CPUDevice +[ RUN ] MemoryDataLayerTest/1.TestSetup +[ OK ] MemoryDataLayerTest/1.TestSetup (0 ms) +[ RUN ] MemoryDataLayerTest/1.AddDatumVectorDefaultTransform +[ OK ] MemoryDataLayerTest/1.AddDatumVectorDefaultTransform (1 ms) +[ RUN ] MemoryDataLayerTest/1.TestSetBatchSize +[ OK ] MemoryDataLayerTest/1.TestSetBatchSize (3 ms) +[ RUN ] MemoryDataLayerTest/1.TestForward +[ OK ] MemoryDataLayerTest/1.TestForward 
(42 ms) +[ RUN ] MemoryDataLayerTest/1.AddMatVectorDefaultTransform +[ OK ] MemoryDataLayerTest/1.AddMatVectorDefaultTransform (1 ms) +[----------] 5 tests from MemoryDataLayerTest/1 (47 ms total) [----------] Global test environment tear-down -[==========] 1162 tests from 152 test cases ran. (81028 ms total) +[==========] 1162 tests from 152 test cases ran. (73205 ms total) [ PASSED ] 1162 tests. make[5]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' [100%] Built target runtest @@ -14349,11 +14384,11 @@ .../build/caffe-1.0.0+git20180821.99bd997/python/caffe/test/test_coord_map.py:45: DeprecationWarning: Please use assertEqual instead. self.assertEquals(ax, 1) ................WARNING: Logging before InitGoogleLogging() is written to STDERR -I1128 20:30:24.133457 2042758 net.cpp:296] The NetState phase (1) differed from the phase (0) specified by a rule in layer train_data -I1128 20:30:24.133495 2042758 net.cpp:332] The NetState did not contain stage 'val' specified by a rule in layer val_data -I1128 20:30:24.133504 2042758 net.cpp:296] The NetState phase (1) differed from the phase (0) specified by a rule in layer loss -I1128 20:30:24.133510 2042758 net.cpp:332] The NetState did not contain stage 'val' specified by a rule in layer loss -I1128 20:30:24.133519 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.248502 3038403 net.cpp:296] The NetState phase (1) differed from the phase (0) specified by a rule in layer train_data +I0102 05:14:55.248554 3038403 net.cpp:332] The NetState did not contain stage 'val' specified by a rule in layer val_data +I0102 05:14:55.248596 3038403 net.cpp:296] The NetState phase (1) differed from the phase (0) specified by a rule in layer loss +I0102 05:14:55.248603 3038403 net.cpp:332] The NetState did not contain stage 'val' specified by a rule in layer loss +I0102 05:14:55.248611 3038403 net.cpp:53] Initializing net from parameters: state { phase: TEST level: 0 @@ -14395,35 +14430,35 @@ stage: "deploy" } } -I1128 20:30:24.133599 2042758 layer_factory.hpp:77] Creating layer deploy_data -I1128 20:30:24.133627 2042758 net.cpp:86] Creating Layer deploy_data -I1128 20:30:24.133635 2042758 net.cpp:382] deploy_data -> data -I1128 20:30:24.133675 2042758 net.cpp:124] Setting up deploy_data -I1128 20:30:24.133692 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.133702 2042758 net.cpp:139] Memory required for data: 400 -I1128 20:30:24.133708 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.133716 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.133721 2042758 net.cpp:408] ip <- data -I1128 20:30:24.133730 2042758 net.cpp:382] ip -> ip -I1128 20:30:24.133770 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.133776 2042758 net.cpp:131] Top shape: 1 2 (2) -I1128 20:30:24.133781 2042758 net.cpp:139] Memory required for data: 408 -I1128 20:30:24.133814 2042758 layer_factory.hpp:77] Creating layer pred -I1128 20:30:24.133826 2042758 net.cpp:86] Creating Layer pred -I1128 20:30:24.133834 2042758 net.cpp:408] pred <- ip -I1128 20:30:24.133848 2042758 net.cpp:382] pred -> pred -I1128 20:30:24.133875 2042758 net.cpp:124] Setting up pred -I1128 20:30:24.133879 2042758 net.cpp:131] Top shape: 1 2 (2) -I1128 20:30:24.133885 2042758 net.cpp:139] Memory required for data: 416 -I1128 20:30:24.133890 2042758 net.cpp:202] pred does not need backward computation. -I1128 20:30:24.133898 2042758 net.cpp:202] ip does not need backward computation. 
-I1128 20:30:24.133915 2042758 net.cpp:202] deploy_data does not need backward computation. -I1128 20:30:24.133919 2042758 net.cpp:244] This network produces output pred -I1128 20:30:24.133927 2042758 net.cpp:257] Network initialization done. -.I1128 20:30:24.135457 2042758 net.cpp:296] The NetState phase (0) differed from the phase (1) specified by a rule in layer val_data -I1128 20:30:24.135473 2042758 net.cpp:296] The NetState phase (0) differed from the phase (1) specified by a rule in layer deploy_data -I1128 20:30:24.135484 2042758 net.cpp:296] The NetState phase (0) differed from the phase (1) specified by a rule in layer pred -I1128 20:30:24.135493 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.248710 3038403 layer_factory.hpp:77] Creating layer deploy_data +I0102 05:14:55.248739 3038403 net.cpp:86] Creating Layer deploy_data +I0102 05:14:55.248749 3038403 net.cpp:382] deploy_data -> data +I0102 05:14:55.248781 3038403 net.cpp:124] Setting up deploy_data +I0102 05:14:55.248788 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.248800 3038403 net.cpp:139] Memory required for data: 400 +I0102 05:14:55.248806 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.248816 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.248823 3038403 net.cpp:408] ip <- data +I0102 05:14:55.248831 3038403 net.cpp:382] ip -> ip +I0102 05:14:55.248869 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.248878 3038403 net.cpp:131] Top shape: 1 2 (2) +I0102 05:14:55.248884 3038403 net.cpp:139] Memory required for data: 408 +I0102 05:14:55.248910 3038403 layer_factory.hpp:77] Creating layer pred +I0102 05:14:55.248921 3038403 net.cpp:86] Creating Layer pred +I0102 05:14:55.248927 3038403 net.cpp:408] pred <- ip +I0102 05:14:55.248936 3038403 net.cpp:382] pred -> pred +I0102 05:14:55.248960 3038403 net.cpp:124] Setting up pred +I0102 05:14:55.248979 3038403 net.cpp:131] Top shape: 1 2 (2) +I0102 05:14:55.248986 3038403 net.cpp:139] Memory required for data: 416 +I0102 05:14:55.248992 3038403 net.cpp:202] pred does not need backward computation. +I0102 05:14:55.248999 3038403 net.cpp:202] ip does not need backward computation. +I0102 05:14:55.249006 3038403 net.cpp:202] deploy_data does not need backward computation. +I0102 05:14:55.249011 3038403 net.cpp:244] This network produces output pred +I0102 05:14:55.249019 3038403 net.cpp:257] Network initialization done. 
+.I0102 05:14:55.252499 3038403 net.cpp:296] The NetState phase (0) differed from the phase (1) specified by a rule in layer val_data +I0102 05:14:55.252521 3038403 net.cpp:296] The NetState phase (0) differed from the phase (1) specified by a rule in layer deploy_data +I0102 05:14:55.252530 3038403 net.cpp:296] The NetState phase (0) differed from the phase (1) specified by a rule in layer pred +I0102 05:14:55.252537 3038403 net.cpp:53] Initializing net from parameters: state { phase: TRAIN level: 0 @@ -14477,41 +14512,41 @@ stage: "val" } } -I1128 20:30:24.135581 2042758 layer_factory.hpp:77] Creating layer train_data -I1128 20:30:24.135607 2042758 net.cpp:86] Creating Layer train_data -I1128 20:30:24.135617 2042758 net.cpp:382] train_data -> data -I1128 20:30:24.135633 2042758 net.cpp:382] train_data -> label -I1128 20:30:24.135653 2042758 net.cpp:124] Setting up train_data -I1128 20:30:24.135659 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.135668 2042758 net.cpp:131] Top shape: 1 1 1 1 (1) -I1128 20:30:24.135677 2042758 net.cpp:139] Memory required for data: 404 -I1128 20:30:24.135682 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.135691 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.135697 2042758 net.cpp:408] ip <- data -I1128 20:30:24.135707 2042758 net.cpp:382] ip -> ip -I1128 20:30:24.135725 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.135733 2042758 net.cpp:131] Top shape: 1 2 (2) -I1128 20:30:24.135740 2042758 net.cpp:139] Memory required for data: 412 -I1128 20:30:24.135754 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.135774 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.135779 2042758 net.cpp:408] loss <- ip -I1128 20:30:24.135785 2042758 net.cpp:408] loss <- label -I1128 20:30:24.135794 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.135818 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.135838 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.135843 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.135849 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.135881 2042758 net.cpp:139] Memory required for data: 416 -I1128 20:30:24.135887 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.135895 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.135901 2042758 net.cpp:202] train_data does not need backward computation. -I1128 20:30:24.135913 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.135926 2042758 net.cpp:257] Network initialization done. 
-.I1128 20:30:24.136659 2042758 net.cpp:296] The NetState phase (1) differed from the phase (0) specified by a rule in layer train_data -I1128 20:30:24.136672 2042758 net.cpp:332] The NetState did not contain stage 'deploy' specified by a rule in layer deploy_data -I1128 20:30:24.136679 2042758 net.cpp:296] The NetState phase (1) differed from the phase (0) specified by a rule in layer loss -I1128 20:30:24.136687 2042758 net.cpp:332] The NetState did not contain stage 'deploy' specified by a rule in layer pred -I1128 20:30:24.136693 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.252671 3038403 layer_factory.hpp:77] Creating layer train_data +I0102 05:14:55.252701 3038403 net.cpp:86] Creating Layer train_data +I0102 05:14:55.252712 3038403 net.cpp:382] train_data -> data +I0102 05:14:55.252732 3038403 net.cpp:382] train_data -> label +I0102 05:14:55.252753 3038403 net.cpp:124] Setting up train_data +I0102 05:14:55.252760 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.252770 3038403 net.cpp:131] Top shape: 1 1 1 1 (1) +I0102 05:14:55.252777 3038403 net.cpp:139] Memory required for data: 404 +I0102 05:14:55.252784 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.252795 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.252801 3038403 net.cpp:408] ip <- data +I0102 05:14:55.252811 3038403 net.cpp:382] ip -> ip +I0102 05:14:55.252830 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.252836 3038403 net.cpp:131] Top shape: 1 2 (2) +I0102 05:14:55.252843 3038403 net.cpp:139] Memory required for data: 412 +I0102 05:14:55.252858 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.252871 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.252876 3038403 net.cpp:408] loss <- ip +I0102 05:14:55.252882 3038403 net.cpp:408] loss <- label +I0102 05:14:55.252892 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.252924 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.252947 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.252954 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.252961 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.253001 3038403 net.cpp:139] Memory required for data: 416 +I0102 05:14:55.253007 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.253015 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.253022 3038403 net.cpp:202] train_data does not need backward computation. +I0102 05:14:55.253027 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.253036 3038403 net.cpp:257] Network initialization done. 
+.I0102 05:14:55.253777 3038403 net.cpp:296] The NetState phase (1) differed from the phase (0) specified by a rule in layer train_data +I0102 05:14:55.253789 3038403 net.cpp:332] The NetState did not contain stage 'deploy' specified by a rule in layer deploy_data +I0102 05:14:55.253796 3038403 net.cpp:296] The NetState phase (1) differed from the phase (0) specified by a rule in layer loss +I0102 05:14:55.253803 3038403 net.cpp:332] The NetState did not contain stage 'deploy' specified by a rule in layer pred +I0102 05:14:55.253809 3038403 net.cpp:53] Initializing net from parameters: state { phase: TEST level: 0 @@ -14565,39 +14600,39 @@ stage: "val" } } -I1128 20:30:24.136762 2042758 layer_factory.hpp:77] Creating layer val_data -I1128 20:30:24.136775 2042758 net.cpp:86] Creating Layer val_data -I1128 20:30:24.136783 2042758 net.cpp:382] val_data -> data -I1128 20:30:24.136795 2042758 net.cpp:382] val_data -> label -I1128 20:30:24.136811 2042758 net.cpp:124] Setting up val_data -I1128 20:30:24.136816 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.136823 2042758 net.cpp:131] Top shape: 1 1 1 1 (1) -I1128 20:30:24.136829 2042758 net.cpp:139] Memory required for data: 404 -I1128 20:30:24.136834 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.136844 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.136855 2042758 net.cpp:408] ip <- data -I1128 20:30:24.136864 2042758 net.cpp:382] ip -> ip -I1128 20:30:24.136884 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.136891 2042758 net.cpp:131] Top shape: 1 2 (2) -I1128 20:30:24.136901 2042758 net.cpp:139] Memory required for data: 412 -I1128 20:30:24.136916 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.136924 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.136929 2042758 net.cpp:408] loss <- ip -I1128 20:30:24.136934 2042758 net.cpp:408] loss <- label -I1128 20:30:24.136940 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.136950 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.136965 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.136970 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.136973 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.136984 2042758 net.cpp:139] Memory required for data: 416 -I1128 20:30:24.136988 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.136993 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.137001 2042758 net.cpp:202] val_data does not need backward computation. -I1128 20:30:24.137007 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.137015 2042758 net.cpp:257] Network initialization done. 
-.I1128 20:30:24.138283 2042758 net.cpp:306] The NetState level (0) is above the min_level (1) specified by a rule in layer Level1Only -I1128 20:30:24.138305 2042758 net.cpp:306] The NetState level (0) is above the min_level (1) specified by a rule in layer Level>=1 -I1128 20:30:24.138314 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.253885 3038403 layer_factory.hpp:77] Creating layer val_data +I0102 05:14:55.253899 3038403 net.cpp:86] Creating Layer val_data +I0102 05:14:55.253907 3038403 net.cpp:382] val_data -> data +I0102 05:14:55.253921 3038403 net.cpp:382] val_data -> label +I0102 05:14:55.253937 3038403 net.cpp:124] Setting up val_data +I0102 05:14:55.253943 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.253950 3038403 net.cpp:131] Top shape: 1 1 1 1 (1) +I0102 05:14:55.253957 3038403 net.cpp:139] Memory required for data: 404 +I0102 05:14:55.253962 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.253973 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.253978 3038403 net.cpp:408] ip <- data +I0102 05:14:55.253986 3038403 net.cpp:382] ip -> ip +I0102 05:14:55.254004 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.254010 3038403 net.cpp:131] Top shape: 1 2 (2) +I0102 05:14:55.254016 3038403 net.cpp:139] Memory required for data: 412 +I0102 05:14:55.254029 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.254040 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.254045 3038403 net.cpp:408] loss <- ip +I0102 05:14:55.254051 3038403 net.cpp:408] loss <- label +I0102 05:14:55.254060 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.254070 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.254091 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.254098 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.254104 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.254117 3038403 net.cpp:139] Memory required for data: 416 +I0102 05:14:55.254122 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.254129 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.254137 3038403 net.cpp:202] val_data does not need backward computation. +I0102 05:14:55.254141 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.254148 3038403 net.cpp:257] Network initialization done. 
+.I0102 05:14:55.254845 3038403 net.cpp:306] The NetState level (0) is above the min_level (1) specified by a rule in layer Level1Only +I0102 05:14:55.254858 3038403 net.cpp:306] The NetState level (0) is above the min_level (1) specified by a rule in layer Level>=1 +I0102 05:14:55.254864 3038403 net.cpp:53] Initializing net from parameters: state { phase: TEST level: 0 @@ -14649,55 +14684,55 @@ num_output: 1 } } -I1128 20:30:24.138430 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.138442 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.138448 2042758 net.cpp:382] data -> data -I1128 20:30:24.138466 2042758 net.cpp:124] Setting up data -I1128 20:30:24.139189 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.139200 2042758 net.cpp:139] Memory required for data: 400 -I1128 20:30:24.139205 2042758 layer_factory.hpp:77] Creating layer data_data_0_split -I1128 20:30:24.139217 2042758 net.cpp:86] Creating Layer data_data_0_split -I1128 20:30:24.139223 2042758 net.cpp:408] data_data_0_split <- data -I1128 20:30:24.139235 2042758 net.cpp:382] data_data_0_split -> data_data_0_split_0 -I1128 20:30:24.139250 2042758 net.cpp:382] data_data_0_split -> data_data_0_split_1 -I1128 20:30:24.139261 2042758 net.cpp:382] data_data_0_split -> data_data_0_split_2 -I1128 20:30:24.139274 2042758 net.cpp:124] Setting up data_data_0_split -I1128 20:30:24.139281 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.139288 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.139295 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.139302 2042758 net.cpp:139] Memory required for data: 1600 -I1128 20:30:24.139305 2042758 layer_factory.hpp:77] Creating layer NoLevel -I1128 20:30:24.139314 2042758 net.cpp:86] Creating Layer NoLevel -I1128 20:30:24.139319 2042758 net.cpp:408] NoLevel <- data_data_0_split_0 -I1128 20:30:24.139328 2042758 net.cpp:382] NoLevel -> NoLevel -I1128 20:30:24.139351 2042758 net.cpp:124] Setting up NoLevel -I1128 20:30:24.139358 2042758 net.cpp:131] Top shape: 1 1 (1) -I1128 20:30:24.139366 2042758 net.cpp:139] Memory required for data: 1604 -I1128 20:30:24.139382 2042758 layer_factory.hpp:77] Creating layer Level0Only -I1128 20:30:24.139391 2042758 net.cpp:86] Creating Layer Level0Only -I1128 20:30:24.139397 2042758 net.cpp:408] Level0Only <- data_data_0_split_1 -I1128 20:30:24.139410 2042758 net.cpp:382] Level0Only -> Level0Only -I1128 20:30:24.139436 2042758 net.cpp:124] Setting up Level0Only -I1128 20:30:24.139441 2042758 net.cpp:131] Top shape: 1 1 (1) -I1128 20:30:24.139451 2042758 net.cpp:139] Memory required for data: 1608 -I1128 20:30:24.139459 2042758 layer_factory.hpp:77] Creating layer Level>=0 -I1128 20:30:24.139473 2042758 net.cpp:86] Creating Layer Level>=0 -I1128 20:30:24.139478 2042758 net.cpp:408] Level>=0 <- data_data_0_split_2 -I1128 20:30:24.139487 2042758 net.cpp:382] Level>=0 -> Level>=0 -I1128 20:30:24.139501 2042758 net.cpp:124] Setting up Level>=0 -I1128 20:30:24.139509 2042758 net.cpp:131] Top shape: 1 1 (1) -I1128 20:30:24.139515 2042758 net.cpp:139] Memory required for data: 1612 -I1128 20:30:24.139525 2042758 net.cpp:202] Level>=0 does not need backward computation. -I1128 20:30:24.139530 2042758 net.cpp:202] Level0Only does not need backward computation. -I1128 20:30:24.139535 2042758 net.cpp:202] NoLevel does not need backward computation. -I1128 20:30:24.139541 2042758 net.cpp:202] data_data_0_split does not need backward computation. 
-I1128 20:30:24.139547 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.139551 2042758 net.cpp:244] This network produces output Level0Only -I1128 20:30:24.139557 2042758 net.cpp:244] This network produces output Level>=0 -I1128 20:30:24.139561 2042758 net.cpp:244] This network produces output NoLevel -I1128 20:30:24.139575 2042758 net.cpp:257] Network initialization done. -.I1128 20:30:24.140472 2042758 net.cpp:316] The NetState level (1) is above the max_level (0) specified by a rule in layer Level0Only -I1128 20:30:24.140491 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.254966 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.254978 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.254987 3038403 net.cpp:382] data -> data +I0102 05:14:55.255003 3038403 net.cpp:124] Setting up data +I0102 05:14:55.255010 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.255017 3038403 net.cpp:139] Memory required for data: 400 +I0102 05:14:55.255025 3038403 layer_factory.hpp:77] Creating layer data_data_0_split +I0102 05:14:55.255035 3038403 net.cpp:86] Creating Layer data_data_0_split +I0102 05:14:55.255040 3038403 net.cpp:408] data_data_0_split <- data +I0102 05:14:55.255048 3038403 net.cpp:382] data_data_0_split -> data_data_0_split_0 +I0102 05:14:55.255056 3038403 net.cpp:382] data_data_0_split -> data_data_0_split_1 +I0102 05:14:55.255065 3038403 net.cpp:382] data_data_0_split -> data_data_0_split_2 +I0102 05:14:55.255075 3038403 net.cpp:124] Setting up data_data_0_split +I0102 05:14:55.255082 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.255088 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.255095 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.255101 3038403 net.cpp:139] Memory required for data: 1600 +I0102 05:14:55.255105 3038403 layer_factory.hpp:77] Creating layer NoLevel +I0102 05:14:55.255115 3038403 net.cpp:86] Creating Layer NoLevel +I0102 05:14:55.255120 3038403 net.cpp:408] NoLevel <- data_data_0_split_0 +I0102 05:14:55.255128 3038403 net.cpp:382] NoLevel -> NoLevel +I0102 05:14:55.255146 3038403 net.cpp:124] Setting up NoLevel +I0102 05:14:55.255151 3038403 net.cpp:131] Top shape: 1 1 (1) +I0102 05:14:55.255157 3038403 net.cpp:139] Memory required for data: 1604 +I0102 05:14:55.255170 3038403 layer_factory.hpp:77] Creating layer Level0Only +I0102 05:14:55.255180 3038403 net.cpp:86] Creating Layer Level0Only +I0102 05:14:55.255185 3038403 net.cpp:408] Level0Only <- data_data_0_split_1 +I0102 05:14:55.255195 3038403 net.cpp:382] Level0Only -> Level0Only +I0102 05:14:55.255214 3038403 net.cpp:124] Setting up Level0Only +I0102 05:14:55.255220 3038403 net.cpp:131] Top shape: 1 1 (1) +I0102 05:14:55.255228 3038403 net.cpp:139] Memory required for data: 1608 +I0102 05:14:55.255239 3038403 layer_factory.hpp:77] Creating layer Level>=0 +I0102 05:14:55.255250 3038403 net.cpp:86] Creating Layer Level>=0 +I0102 05:14:55.255256 3038403 net.cpp:408] Level>=0 <- data_data_0_split_2 +I0102 05:14:55.255264 3038403 net.cpp:382] Level>=0 -> Level>=0 +I0102 05:14:55.255281 3038403 net.cpp:124] Setting up Level>=0 +I0102 05:14:55.255287 3038403 net.cpp:131] Top shape: 1 1 (1) +I0102 05:14:55.255295 3038403 net.cpp:139] Memory required for data: 1612 +I0102 05:14:55.255311 3038403 net.cpp:202] Level>=0 does not need backward computation. +I0102 05:14:55.255316 3038403 net.cpp:202] Level0Only does not need backward computation. 
+I0102 05:14:55.255322 3038403 net.cpp:202] NoLevel does not need backward computation. +I0102 05:14:55.255328 3038403 net.cpp:202] data_data_0_split does not need backward computation. +I0102 05:14:55.255334 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.255342 3038403 net.cpp:244] This network produces output Level0Only +I0102 05:14:55.255345 3038403 net.cpp:244] This network produces output Level>=0 +I0102 05:14:55.255352 3038403 net.cpp:244] This network produces output NoLevel +I0102 05:14:55.255371 3038403 net.cpp:257] Network initialization done. +.I0102 05:14:55.256088 3038403 net.cpp:316] The NetState level (1) is above the max_level (0) specified by a rule in layer Level0Only +I0102 05:14:55.256104 3038403 net.cpp:53] Initializing net from parameters: state { phase: TEST level: 1 @@ -14761,65 +14796,65 @@ num_output: 1 } } -I1128 20:30:24.140637 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.140655 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.140663 2042758 net.cpp:382] data -> data -I1128 20:30:24.140684 2042758 net.cpp:124] Setting up data -I1128 20:30:24.140698 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.140709 2042758 net.cpp:139] Memory required for data: 400 -I1128 20:30:24.140717 2042758 layer_factory.hpp:77] Creating layer data_data_0_split -I1128 20:30:24.140728 2042758 net.cpp:86] Creating Layer data_data_0_split -I1128 20:30:24.140734 2042758 net.cpp:408] data_data_0_split <- data -I1128 20:30:24.140745 2042758 net.cpp:382] data_data_0_split -> data_data_0_split_0 -I1128 20:30:24.140758 2042758 net.cpp:382] data_data_0_split -> data_data_0_split_1 -I1128 20:30:24.140767 2042758 net.cpp:382] data_data_0_split -> data_data_0_split_2 -I1128 20:30:24.140779 2042758 net.cpp:382] data_data_0_split -> data_data_0_split_3 -I1128 20:30:24.140794 2042758 net.cpp:124] Setting up data_data_0_split -I1128 20:30:24.140805 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.140812 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.140820 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.140828 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.140834 2042758 net.cpp:139] Memory required for data: 2000 -I1128 20:30:24.140838 2042758 layer_factory.hpp:77] Creating layer NoLevel -I1128 20:30:24.140846 2042758 net.cpp:86] Creating Layer NoLevel -I1128 20:30:24.140851 2042758 net.cpp:408] NoLevel <- data_data_0_split_0 -I1128 20:30:24.140858 2042758 net.cpp:382] NoLevel -> NoLevel -I1128 20:30:24.140875 2042758 net.cpp:124] Setting up NoLevel -I1128 20:30:24.140880 2042758 net.cpp:131] Top shape: 1 1 (1) -I1128 20:30:24.140885 2042758 net.cpp:139] Memory required for data: 2004 -I1128 20:30:24.140897 2042758 layer_factory.hpp:77] Creating layer Level1Only -I1128 20:30:24.140903 2042758 net.cpp:86] Creating Layer Level1Only -I1128 20:30:24.140908 2042758 net.cpp:408] Level1Only <- data_data_0_split_1 -I1128 20:30:24.140915 2042758 net.cpp:382] Level1Only -> Level1Only -I1128 20:30:24.140929 2042758 net.cpp:124] Setting up Level1Only -I1128 20:30:24.140933 2042758 net.cpp:131] Top shape: 1 1 (1) -I1128 20:30:24.140939 2042758 net.cpp:139] Memory required for data: 2008 -I1128 20:30:24.140947 2042758 layer_factory.hpp:77] Creating layer Level>=0 -I1128 20:30:24.140956 2042758 net.cpp:86] Creating Layer Level>=0 -I1128 20:30:24.140960 2042758 net.cpp:408] Level>=0 <- data_data_0_split_2 -I1128 20:30:24.140969 2042758 net.cpp:382] Level>=0 -> Level>=0 -I1128 
20:30:24.140981 2042758 net.cpp:124] Setting up Level>=0 -I1128 20:30:24.140986 2042758 net.cpp:131] Top shape: 1 1 (1) -I1128 20:30:24.140991 2042758 net.cpp:139] Memory required for data: 2012 -I1128 20:30:24.141000 2042758 layer_factory.hpp:77] Creating layer Level>=1 -I1128 20:30:24.141007 2042758 net.cpp:86] Creating Layer Level>=1 -I1128 20:30:24.141011 2042758 net.cpp:408] Level>=1 <- data_data_0_split_3 -I1128 20:30:24.141018 2042758 net.cpp:382] Level>=1 -> Level>=1 -I1128 20:30:24.141036 2042758 net.cpp:124] Setting up Level>=1 -I1128 20:30:24.141039 2042758 net.cpp:131] Top shape: 1 1 (1) -I1128 20:30:24.141045 2042758 net.cpp:139] Memory required for data: 2016 -I1128 20:30:24.141052 2042758 net.cpp:202] Level>=1 does not need backward computation. -I1128 20:30:24.141057 2042758 net.cpp:202] Level>=0 does not need backward computation. -I1128 20:30:24.141062 2042758 net.cpp:202] Level1Only does not need backward computation. -I1128 20:30:24.141067 2042758 net.cpp:202] NoLevel does not need backward computation. -I1128 20:30:24.141072 2042758 net.cpp:202] data_data_0_split does not need backward computation. -I1128 20:30:24.141077 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.141080 2042758 net.cpp:244] This network produces output Level1Only -I1128 20:30:24.141085 2042758 net.cpp:244] This network produces output Level>=0 -I1128 20:30:24.141090 2042758 net.cpp:244] This network produces output Level>=1 -I1128 20:30:24.141094 2042758 net.cpp:244] This network produces output NoLevel -I1128 20:30:24.141105 2042758 net.cpp:257] Network initialization done. -.I1128 20:30:24.141988 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.256296 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.256312 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.256321 3038403 net.cpp:382] data -> data +I0102 05:14:55.256340 3038403 net.cpp:124] Setting up data +I0102 05:14:55.256347 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.256354 3038403 net.cpp:139] Memory required for data: 400 +I0102 05:14:55.256361 3038403 layer_factory.hpp:77] Creating layer data_data_0_split +I0102 05:14:55.256372 3038403 net.cpp:86] Creating Layer data_data_0_split +I0102 05:14:55.256378 3038403 net.cpp:408] data_data_0_split <- data +I0102 05:14:55.256388 3038403 net.cpp:382] data_data_0_split -> data_data_0_split_0 +I0102 05:14:55.256399 3038403 net.cpp:382] data_data_0_split -> data_data_0_split_1 +I0102 05:14:55.256412 3038403 net.cpp:382] data_data_0_split -> data_data_0_split_2 +I0102 05:14:55.256422 3038403 net.cpp:382] data_data_0_split -> data_data_0_split_3 +I0102 05:14:55.256438 3038403 net.cpp:124] Setting up data_data_0_split +I0102 05:14:55.256445 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.256454 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.256464 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.256470 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.256477 3038403 net.cpp:139] Memory required for data: 2000 +I0102 05:14:55.256484 3038403 layer_factory.hpp:77] Creating layer NoLevel +I0102 05:14:55.256495 3038403 net.cpp:86] Creating Layer NoLevel +I0102 05:14:55.256502 3038403 net.cpp:408] NoLevel <- data_data_0_split_0 +I0102 05:14:55.256512 3038403 net.cpp:382] NoLevel -> NoLevel +I0102 05:14:55.256536 3038403 net.cpp:124] Setting up NoLevel +I0102 05:14:55.256542 3038403 net.cpp:131] Top shape: 1 1 (1) +I0102 05:14:55.256549 3038403 net.cpp:139] 
Memory required for data: 2004 +I0102 05:14:55.257001 3038403 layer_factory.hpp:77] Creating layer Level1Only +I0102 05:14:55.257030 3038403 net.cpp:86] Creating Layer Level1Only +I0102 05:14:55.257037 3038403 net.cpp:408] Level1Only <- data_data_0_split_1 +I0102 05:14:55.257048 3038403 net.cpp:382] Level1Only -> Level1Only +I0102 05:14:55.257073 3038403 net.cpp:124] Setting up Level1Only +I0102 05:14:55.257081 3038403 net.cpp:131] Top shape: 1 1 (1) +I0102 05:14:55.257089 3038403 net.cpp:139] Memory required for data: 2008 +I0102 05:14:55.257102 3038403 layer_factory.hpp:77] Creating layer Level>=0 +I0102 05:14:55.257115 3038403 net.cpp:86] Creating Layer Level>=0 +I0102 05:14:55.257122 3038403 net.cpp:408] Level>=0 <- data_data_0_split_2 +I0102 05:14:55.257133 3038403 net.cpp:382] Level>=0 -> Level>=0 +I0102 05:14:55.257153 3038403 net.cpp:124] Setting up Level>=0 +I0102 05:14:55.257160 3038403 net.cpp:131] Top shape: 1 1 (1) +I0102 05:14:55.257167 3038403 net.cpp:139] Memory required for data: 2012 +I0102 05:14:55.257180 3038403 layer_factory.hpp:77] Creating layer Level>=1 +I0102 05:14:55.257189 3038403 net.cpp:86] Creating Layer Level>=1 +I0102 05:14:55.257195 3038403 net.cpp:408] Level>=1 <- data_data_0_split_3 +I0102 05:14:55.257205 3038403 net.cpp:382] Level>=1 -> Level>=1 +I0102 05:14:55.257231 3038403 net.cpp:124] Setting up Level>=1 +I0102 05:14:55.257238 3038403 net.cpp:131] Top shape: 1 1 (1) +I0102 05:14:55.257246 3038403 net.cpp:139] Memory required for data: 2016 +I0102 05:14:55.257256 3038403 net.cpp:202] Level>=1 does not need backward computation. +I0102 05:14:55.257262 3038403 net.cpp:202] Level>=0 does not need backward computation. +I0102 05:14:55.257268 3038403 net.cpp:202] Level1Only does not need backward computation. +I0102 05:14:55.257275 3038403 net.cpp:202] NoLevel does not need backward computation. +I0102 05:14:55.257282 3038403 net.cpp:202] data_data_0_split does not need backward computation. +I0102 05:14:55.257288 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.257293 3038403 net.cpp:244] This network produces output Level1Only +I0102 05:14:55.257301 3038403 net.cpp:244] This network produces output Level>=0 +I0102 05:14:55.257308 3038403 net.cpp:244] This network produces output Level>=1 +I0102 05:14:55.257314 3038403 net.cpp:244] This network produces output NoLevel +I0102 05:14:55.257328 3038403 net.cpp:257] Network initialization done. 
+.I0102 05:14:55.258246 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -14898,45 +14933,45 @@ bottom: "label" top: "loss" } -I1128 20:30:24.142146 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.142161 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.142171 2042758 net.cpp:382] data -> data -I1128 20:30:24.142186 2042758 net.cpp:382] data -> label -I1128 20:30:24.142206 2042758 net.cpp:124] Setting up data -I1128 20:30:24.142212 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.142225 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.142231 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.142238 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.142271 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.142275 2042758 net.cpp:408] conv <- data -I1128 20:30:24.142292 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.142397 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.142403 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.142410 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.142424 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.142436 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.142443 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.142454 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.142623 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.142633 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.142645 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.142659 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.142668 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.142673 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.142678 2042758 net.cpp:408] loss <- label -I1128 20:30:24.142686 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.142700 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.142721 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.142726 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.142733 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.142745 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.142750 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.142756 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.142761 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.142766 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.142772 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.142781 2042758 net.cpp:257] Network initialization done. 
-.I1128 20:30:24.145946 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.258400 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.258415 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.258425 3038403 net.cpp:382] data -> data +I0102 05:14:55.258440 3038403 net.cpp:382] data -> label +I0102 05:14:55.258455 3038403 net.cpp:124] Setting up data +I0102 05:14:55.258462 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.258471 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.258476 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.258483 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.258500 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.258507 3038403 net.cpp:408] conv <- data +I0102 05:14:55.258518 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.258615 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.258622 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.258630 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.258643 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.258656 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.258661 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.258671 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.258837 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.258847 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.258855 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.258867 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.258877 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.258882 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.258889 3038403 net.cpp:408] loss <- label +I0102 05:14:55.258898 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.258911 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.258934 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.258940 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.258946 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.258960 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.258965 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.258971 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.258976 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.258982 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.258988 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.258997 3038403 net.cpp:257] Network initialization done. 
+.I0102 05:14:55.262363 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -15015,45 +15050,45 @@ bottom: "label" top: "loss" } -I1128 20:30:24.146136 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.146154 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.146167 2042758 net.cpp:382] data -> data -I1128 20:30:24.146188 2042758 net.cpp:382] data -> label -I1128 20:30:24.146219 2042758 net.cpp:124] Setting up data -I1128 20:30:24.146230 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.146243 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.146252 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.146260 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.146272 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.146277 2042758 net.cpp:408] conv <- data -I1128 20:30:24.146287 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.146330 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.146337 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.146344 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.146363 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.146379 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.146389 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.146400 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.146570 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.146577 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.146584 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.146596 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.146607 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.146615 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.146621 2042758 net.cpp:408] loss <- label -I1128 20:30:24.146631 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.146646 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.146672 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.146682 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.146692 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.146708 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.146718 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.146729 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.146739 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.146746 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.146754 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.146764 2042758 net.cpp:257] Network initialization done. 
-.I1128 20:30:24.148820 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.262554 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.262573 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.262585 3038403 net.cpp:382] data -> data +I0102 05:14:55.262605 3038403 net.cpp:382] data -> label +I0102 05:14:55.262629 3038403 net.cpp:124] Setting up data +I0102 05:14:55.262637 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.262648 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.262655 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.262662 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.262679 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.262686 3038403 net.cpp:408] conv <- data +I0102 05:14:55.262699 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.262743 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.262751 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.262759 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.262778 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.262792 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.262799 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.262809 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.262984 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.262995 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.263003 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.263016 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.263026 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.263033 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.263041 3038403 net.cpp:408] loss <- label +I0102 05:14:55.263051 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.263065 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.263092 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.263099 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.263108 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.263123 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.263130 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.263139 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.263145 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.263152 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.263159 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.263170 3038403 net.cpp:257] Network initialization done. 
+.I0102 05:14:55.267030 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -15132,45 +15167,45 @@ bottom: "label" top: "loss" } -I1128 20:30:24.148960 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.148973 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.148981 2042758 net.cpp:382] data -> data -I1128 20:30:24.148994 2042758 net.cpp:382] data -> label -I1128 20:30:24.149009 2042758 net.cpp:124] Setting up data -I1128 20:30:24.149014 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.149021 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.149026 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.149031 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.149044 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.149049 2042758 net.cpp:408] conv <- data -I1128 20:30:24.149060 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.149098 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.149103 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.149113 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.149125 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.149137 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.149140 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.149149 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.149842 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.149858 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.149868 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.149881 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.149895 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.149904 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.149911 2042758 net.cpp:408] loss <- label -I1128 20:30:24.149919 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.149935 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.149963 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.149969 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.149976 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.149989 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.149994 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.150003 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.150008 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.150015 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.150020 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.150034 2042758 net.cpp:257] Network initialization done. 
-.I1128 20:30:24.153182 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.267206 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.267226 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.267238 3038403 net.cpp:382] data -> data +I0102 05:14:55.267256 3038403 net.cpp:382] data -> label +I0102 05:14:55.267278 3038403 net.cpp:124] Setting up data +I0102 05:14:55.267287 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.267297 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.267304 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.267311 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.267328 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.267334 3038403 net.cpp:408] conv <- data +I0102 05:14:55.267345 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.267392 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.267400 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.267408 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.267424 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.267438 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.267444 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.267457 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.267627 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.267635 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.267643 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.267657 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.267668 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.267674 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.267683 3038403 net.cpp:408] loss <- label +I0102 05:14:55.267691 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.267707 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.267731 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.267738 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.267745 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.267760 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.267765 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.267773 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.267781 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.267789 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.267796 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.267807 3038403 net.cpp:257] Network initialization done. 
+.I0102 05:14:55.269368 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -15249,45 +15284,45 @@ bottom: "label" top: "loss" } -I1128 20:30:24.154603 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.154621 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.154629 2042758 net.cpp:382] data -> data -I1128 20:30:24.154646 2042758 net.cpp:382] data -> label -I1128 20:30:24.154668 2042758 net.cpp:124] Setting up data -I1128 20:30:24.154675 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.154685 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.154690 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.154695 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.154708 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.154714 2042758 net.cpp:408] conv <- data -I1128 20:30:24.154724 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.154765 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.154772 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.154780 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.154798 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.154810 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.154816 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.154825 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.155025 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.155035 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.155045 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.155056 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.155066 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.155071 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.155076 2042758 net.cpp:408] loss <- label -I1128 20:30:24.155083 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.155097 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.155117 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.155122 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.155128 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.155140 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.155145 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.155153 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.155158 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.155164 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.155171 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.155181 2042758 net.cpp:257] Network initialization done. 
-.I1128 20:30:24.157265 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.269522 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.269537 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.269547 3038403 net.cpp:382] data -> data +I0102 05:14:55.269562 3038403 net.cpp:382] data -> label +I0102 05:14:55.269580 3038403 net.cpp:124] Setting up data +I0102 05:14:55.269587 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.269595 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.269603 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.269608 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.269624 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.269634 3038403 net.cpp:408] conv <- data +I0102 05:14:55.269645 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.269681 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.269690 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.269695 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.269709 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.269721 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.269726 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.269735 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.269884 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.269893 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.269901 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.269912 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.269923 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.269929 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.269937 3038403 net.cpp:408] loss <- label +I0102 05:14:55.269943 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.269956 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.269979 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.269984 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.269990 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.270002 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.270009 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.270015 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.270020 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.270026 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.270032 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.270041 3038403 net.cpp:257] Network initialization done. 
+.I0102 05:14:55.271952 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -15366,45 +15401,45 @@ bottom: "label" top: "loss" } -I1128 20:30:24.157416 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.157430 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.157446 2042758 net.cpp:382] data -> data -I1128 20:30:24.157470 2042758 net.cpp:382] data -> label -I1128 20:30:24.157691 2042758 net.cpp:124] Setting up data -I1128 20:30:24.157706 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.157714 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.157721 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.157725 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.157740 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.157747 2042758 net.cpp:408] conv <- data -I1128 20:30:24.157757 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.157794 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.157800 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.157806 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.157820 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.157830 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.157835 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.157843 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.158003 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.158012 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.158021 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.158035 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.158046 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.158052 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.158064 2042758 net.cpp:408] loss <- label -I1128 20:30:24.158077 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.158093 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.158118 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.158124 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.158130 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.158145 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.158151 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.158159 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.158165 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.158172 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.158179 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.158188 2042758 net.cpp:257] Network initialization done. 
-.I1128 20:30:24.159276 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.272104 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.272121 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.272131 3038403 net.cpp:382] data -> data +I0102 05:14:55.272147 3038403 net.cpp:382] data -> label +I0102 05:14:55.272166 3038403 net.cpp:124] Setting up data +I0102 05:14:55.272173 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.272181 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.272188 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.272194 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.272209 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.272215 3038403 net.cpp:408] conv <- data +I0102 05:14:55.272228 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.272266 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.272274 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.272280 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.272295 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.272306 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.272311 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.272320 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.272480 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.272488 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.272495 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.272506 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.272516 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.272521 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.272528 3038403 net.cpp:408] loss <- label +I0102 05:14:55.272536 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.272549 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.272835 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.272848 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.272857 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.272871 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.272876 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.272883 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.272889 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.272897 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.272903 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.272917 3038403 net.cpp:257] Network initialization done. 
+.I0102 05:14:55.273916 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -15483,45 +15518,45 @@ bottom: "label" top: "loss" } -I1128 20:30:24.159412 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.159426 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.159435 2042758 net.cpp:382] data -> data -I1128 20:30:24.159451 2042758 net.cpp:382] data -> label -I1128 20:30:24.159468 2042758 net.cpp:124] Setting up data -I1128 20:30:24.159476 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.159483 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.159489 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.159495 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.159508 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.159514 2042758 net.cpp:408] conv <- data -I1128 20:30:24.159528 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.159566 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.159572 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.159579 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.159593 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.159603 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.159610 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.159618 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.159786 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.159792 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.159799 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.159808 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.159817 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.159823 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.159832 2042758 net.cpp:408] loss <- label -I1128 20:30:24.159842 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.159857 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.159878 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.159883 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.159889 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.159901 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.159906 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.159914 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.159920 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.159926 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.159932 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.159942 2042758 net.cpp:257] Network initialization done. 
-.I1128 20:30:24.160892 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.274071 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.274087 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.274098 3038403 net.cpp:382] data -> data +I0102 05:14:55.274113 3038403 net.cpp:382] data -> label +I0102 05:14:55.274132 3038403 net.cpp:124] Setting up data +I0102 05:14:55.274140 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.274150 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.274159 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.274165 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.274181 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.274188 3038403 net.cpp:408] conv <- data +I0102 05:14:55.274199 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.274236 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.274245 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.274253 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.274267 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.274281 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.274287 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.274298 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.274469 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.274479 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.274487 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.274500 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.274511 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.274518 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.274525 3038403 net.cpp:408] loss <- label +I0102 05:14:55.274534 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.274549 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.274570 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.274577 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.274585 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.274596 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.274602 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.274610 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.274616 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.274628 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.274636 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.274646 3038403 net.cpp:257] Network initialization done. 
+.I0102 05:14:55.275610 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -15600,45 +15635,45 @@ bottom: "label" top: "loss" } -I1128 20:30:24.160984 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.160993 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.161000 2042758 net.cpp:382] data -> data -I1128 20:30:24.161010 2042758 net.cpp:382] data -> label -I1128 20:30:24.161023 2042758 net.cpp:124] Setting up data -I1128 20:30:24.161028 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.161034 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.161039 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.161043 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.161052 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.161057 2042758 net.cpp:408] conv <- data -I1128 20:30:24.161064 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.161092 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.161095 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.161100 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.161110 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.161118 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.161124 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.161131 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.161907 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.161922 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.161931 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.161942 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.161952 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.161958 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.161965 2042758 net.cpp:408] loss <- label -I1128 20:30:24.161974 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.161991 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.162014 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.162019 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.162024 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.162036 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.162041 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.162048 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.162055 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.162060 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.162068 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.162077 2042758 net.cpp:257] Network initialization done. 
-.I1128 20:30:24.163362 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.275735 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.275749 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.275758 3038403 net.cpp:382] data -> data +I0102 05:14:55.275770 3038403 net.cpp:382] data -> label +I0102 05:14:55.275785 3038403 net.cpp:124] Setting up data +I0102 05:14:55.275791 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.275799 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.275805 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.275810 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.275823 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.275830 3038403 net.cpp:408] conv <- data +I0102 05:14:55.275840 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.275871 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.275877 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.275883 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.275897 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.275907 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.275913 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.275921 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.276070 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.276079 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.276086 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.276098 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.276105 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.276113 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.276118 3038403 net.cpp:408] loss <- label +I0102 05:14:55.276127 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.276139 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.276158 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.276165 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.276170 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.276180 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.276186 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.276192 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.276197 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.276276 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.276283 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.276291 3038403 net.cpp:257] Network initialization done. 
+.I0102 05:14:55.277858 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -15717,48 +15752,48 @@ bottom: "label" top: "loss" } -I1128 20:30:24.163523 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.163537 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.163547 2042758 net.cpp:382] data -> data -I1128 20:30:24.163561 2042758 net.cpp:382] data -> label -I1128 20:30:24.163578 2042758 net.cpp:124] Setting up data -I1128 20:30:24.163583 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.163590 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.163595 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.163599 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.163610 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.163614 2042758 net.cpp:408] conv <- data -I1128 20:30:24.163622 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.163652 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.163656 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.163661 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.163671 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.163679 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.163683 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.163691 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.163836 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.163843 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.163849 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.163858 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.163866 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.163872 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.163879 2042758 net.cpp:408] loss <- label -I1128 20:30:24.163897 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.163911 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.163929 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.163935 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.163941 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.163952 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.163957 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.163965 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.163972 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.163981 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.163988 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.164000 2042758 net.cpp:257] Network initialization done. 
-W1128 20:30:24.164777 2042758 _caffe.cpp:139] DEPRECATION WARNING - deprecated use of Python interface -W1128 20:30:24.164786 2042758 _caffe.cpp:140] Use this instead (with the named "weights" parameter): -W1128 20:30:24.164790 2042758 _caffe.cpp:142] Net('/tmp/tmpycjbcxh7', 0, weights='/tmp/tmpa2fk2b9q') -I1128 20:30:24.165036 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.277995 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.278010 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.278019 3038403 net.cpp:382] data -> data +I0102 05:14:55.278033 3038403 net.cpp:382] data -> label +I0102 05:14:55.278052 3038403 net.cpp:124] Setting up data +I0102 05:14:55.278059 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.278072 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.278079 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.278084 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.278098 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.278105 3038403 net.cpp:408] conv <- data +I0102 05:14:55.278115 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.278148 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.278154 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.278162 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.278177 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.278187 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.278192 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.278200 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.278352 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.278362 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.278368 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.278378 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.278388 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.278393 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.278400 3038403 net.cpp:408] loss <- label +I0102 05:14:55.278409 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.278422 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.278442 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.278448 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.278456 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.278468 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.278474 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.278481 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.278486 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.278492 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.278498 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.278507 3038403 net.cpp:257] Network initialization done. 
+W0102 05:14:55.279249 3038403 _caffe.cpp:139] DEPRECATION WARNING - deprecated use of Python interface +W0102 05:14:55.279263 3038403 _caffe.cpp:140] Use this instead (with the named "weights" parameter): +W0102 05:14:55.279268 3038403 _caffe.cpp:142] Net('/tmp/tmprdrbm597', 0, weights='/tmp/tmp5c3yo9zt') +I0102 05:14:55.279461 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -15837,45 +15872,45 @@ bottom: "label" top: "loss" } -I1128 20:30:24.165163 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.165175 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.165184 2042758 net.cpp:382] data -> data -I1128 20:30:24.165488 2042758 net.cpp:382] data -> label -I1128 20:30:24.165509 2042758 net.cpp:124] Setting up data -I1128 20:30:24.165516 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.165524 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.165531 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.165536 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.165549 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.165555 2042758 net.cpp:408] conv <- data -I1128 20:30:24.165565 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.165602 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.165608 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.165616 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.165629 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.165640 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.165647 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.165655 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.165822 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.165829 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.165836 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.165846 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.165859 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.165864 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.165871 2042758 net.cpp:408] loss <- label -I1128 20:30:24.165880 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.165894 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.165915 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.165920 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.165926 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.165938 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.165944 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.165951 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.165957 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.165963 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.165969 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.165979 2042758 net.cpp:257] Network initialization done. 
-I1128 20:30:24.167834 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.279610 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.279628 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.279637 3038403 net.cpp:382] data -> data +I0102 05:14:55.279654 3038403 net.cpp:382] data -> label +I0102 05:14:55.279671 3038403 net.cpp:124] Setting up data +I0102 05:14:55.279678 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.279688 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.279696 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.279703 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.279718 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.279726 3038403 net.cpp:408] conv <- data +I0102 05:14:55.279736 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.279776 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.279785 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.279793 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.279808 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.279822 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.279829 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.279840 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.280009 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.280017 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.280025 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.280037 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.280048 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.280055 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.280063 3038403 net.cpp:408] loss <- label +I0102 05:14:55.280073 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.280086 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.280110 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.280117 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.280124 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.280136 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.280143 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.280150 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.280158 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.280164 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.280171 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.280180 3038403 net.cpp:257] Network initialization done. 
+I0102 05:14:55.282227 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -15954,45 +15989,45 @@ bottom: "label" top: "loss" } -I1128 20:30:24.167958 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.167970 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.167977 2042758 net.cpp:382] data -> data -I1128 20:30:24.167989 2042758 net.cpp:382] data -> label -I1128 20:30:24.168004 2042758 net.cpp:124] Setting up data -I1128 20:30:24.168009 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.168017 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.168023 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.168028 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.168042 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.168048 2042758 net.cpp:408] conv <- data -I1128 20:30:24.168056 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.168090 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.168097 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.168104 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.168120 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.168131 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.168138 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.168146 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.168292 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.168300 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.168306 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.168316 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.168324 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.168329 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.168334 2042758 net.cpp:408] loss <- label -I1128 20:30:24.168339 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.168349 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.168362 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.168367 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.168372 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.168381 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.168386 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.168392 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.168396 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.168401 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.168406 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.168413 2042758 net.cpp:257] Network initialization done. 
-.I1128 20:30:24.170122 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.282366 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.282387 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.282395 3038403 net.cpp:382] data -> data +I0102 05:14:55.282408 3038403 net.cpp:382] data -> label +I0102 05:14:55.282424 3038403 net.cpp:124] Setting up data +I0102 05:14:55.282430 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.282439 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.282445 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.282451 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.282464 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.282469 3038403 net.cpp:408] conv <- data +I0102 05:14:55.282480 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.282513 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.282519 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.282527 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.282541 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.282550 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.282557 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.282565 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.282713 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.282721 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.282728 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.282739 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.282748 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.282754 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.282760 3038403 net.cpp:408] loss <- label +I0102 05:14:55.282768 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.282779 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.282797 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.282802 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.282809 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.282819 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.282824 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.282831 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.282836 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.282842 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.282848 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.282857 3038403 net.cpp:257] Network initialization done. 
+.I0102 05:14:55.284319 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -16071,45 +16106,45 @@ bottom: "label" top: "loss" } -I1128 20:30:24.170300 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.170317 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.170336 2042758 net.cpp:382] data -> data -I1128 20:30:24.170352 2042758 net.cpp:382] data -> label -I1128 20:30:24.170375 2042758 net.cpp:124] Setting up data -I1128 20:30:24.170382 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.170392 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.170400 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.170406 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.170420 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.170428 2042758 net.cpp:408] conv <- data -I1128 20:30:24.170440 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.170477 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.170485 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.170492 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.170508 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.170521 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.170527 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.170536 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.170719 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.170727 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.170734 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.170743 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.170753 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.170758 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.170764 2042758 net.cpp:408] loss <- label -I1128 20:30:24.170773 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.170783 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.170804 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.170809 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.170816 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.170830 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.170835 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.170842 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.170847 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.170854 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.170859 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.170869 2042758 net.cpp:257] Network initialization done. 
-I1128 20:30:24.173483 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.284472 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.284488 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.284498 3038403 net.cpp:382] data -> data +I0102 05:14:55.284513 3038403 net.cpp:382] data -> label +I0102 05:14:55.284529 3038403 net.cpp:124] Setting up data +I0102 05:14:55.284538 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.284547 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.284554 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.284560 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.284893 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.284904 3038403 net.cpp:408] conv <- data +I0102 05:14:55.284915 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.284957 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.284972 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.284981 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.284996 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.285009 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.285017 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.285028 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.285202 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.285212 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.285220 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.285233 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.285243 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.285250 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.285257 3038403 net.cpp:408] loss <- label +I0102 05:14:55.285266 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.285279 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.285302 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.285310 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.285316 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.285331 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.285336 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.285344 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.285351 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.285357 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.285364 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.285375 3038403 net.cpp:257] Network initialization done. 
+I0102 05:14:55.288866 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -16188,46 +16223,46 @@ bottom: "label" top: "loss" } -I1128 20:30:24.173642 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.173661 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.173671 2042758 net.cpp:382] data -> data -I1128 20:30:24.173688 2042758 net.cpp:382] data -> label -I1128 20:30:24.173707 2042758 net.cpp:124] Setting up data -I1128 20:30:24.173714 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.173722 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.173732 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.173738 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.173749 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.173755 2042758 net.cpp:408] conv <- data -I1128 20:30:24.173764 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.173797 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.173804 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.173810 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.173825 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.173835 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.173840 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.173848 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.174001 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.174008 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.174015 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.174024 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.174032 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.174038 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.174043 2042758 net.cpp:408] loss <- label -I1128 20:30:24.174050 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.174062 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.174078 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.174083 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.174089 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.174100 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.174105 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.174111 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.174118 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.174124 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.174131 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.174141 2042758 net.cpp:257] Network initialization done. 
-I1128 20:30:24.175036 2042758 hdf5.cpp:33] Datatype class: H5T_FLOAT -.I1128 20:30:24.177390 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.289021 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.289038 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.289048 3038403 net.cpp:382] data -> data +I0102 05:14:55.289065 3038403 net.cpp:382] data -> label +I0102 05:14:55.289086 3038403 net.cpp:124] Setting up data +I0102 05:14:55.289095 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.289106 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.289112 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.289119 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.289132 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.289145 3038403 net.cpp:408] conv <- data +I0102 05:14:55.289155 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.289192 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.289199 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.289206 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.289222 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.289232 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.289237 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.289244 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.289394 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.289402 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.289409 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.289420 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.289429 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.289435 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.289441 3038403 net.cpp:408] loss <- label +I0102 05:14:55.289448 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.289460 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.289477 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.289484 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.289490 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.289502 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.289507 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.289515 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.289520 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.289527 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.289532 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.289541 3038403 net.cpp:257] Network initialization done. 
+I0102 05:14:55.290542 3038403 hdf5.cpp:33] Datatype class: H5T_FLOAT +.I0102 05:14:55.292723 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -16306,47 +16341,47 @@ bottom: "label" top: "loss" } -I1128 20:30:24.177595 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.177613 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.177625 2042758 net.cpp:382] data -> data -I1128 20:30:24.177641 2042758 net.cpp:382] data -> label -I1128 20:30:24.177664 2042758 net.cpp:124] Setting up data -I1128 20:30:24.177670 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.177681 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.177692 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.177700 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.177713 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.177721 2042758 net.cpp:408] conv <- data -I1128 20:30:24.177734 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.177776 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.177783 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.177793 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.177814 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.177824 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.177831 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.177842 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.178045 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.178052 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.178061 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.178073 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.178082 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.178088 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.178093 2042758 net.cpp:408] loss <- label -I1128 20:30:24.178100 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.178115 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.178144 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.178149 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.178154 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.178166 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.178170 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.178176 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.178181 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.178186 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.178191 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.178200 2042758 net.cpp:257] Network initialization done. 
-.I1128 20:30:24.179294 2042758 net.cpp:332] The NetState did not contain stage 'B' specified by a rule in layer B -I1128 20:30:24.179309 2042758 net.cpp:332] The NetState did not contain stage 'B' specified by a rule in layer AandB -I1128 20:30:24.179317 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.292879 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.292896 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.292904 3038403 net.cpp:382] data -> data +I0102 05:14:55.292923 3038403 net.cpp:382] data -> label +I0102 05:14:55.292943 3038403 net.cpp:124] Setting up data +I0102 05:14:55.292949 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.292958 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.292964 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.292970 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.292984 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.292989 3038403 net.cpp:408] conv <- data +I0102 05:14:55.293000 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.293035 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.293044 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.293051 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.293067 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.293078 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.293084 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.293093 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.293253 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.293262 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.293268 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.293278 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.293288 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.293295 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.293303 3038403 net.cpp:408] loss <- label +I0102 05:14:55.293310 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.293323 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.293341 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.293349 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.293354 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.293367 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.293375 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.293381 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.293387 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.293393 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.293399 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.293411 3038403 net.cpp:257] Network initialization done. 
+.I0102 05:14:55.294523 3038403 net.cpp:332] The NetState did not contain stage 'B' specified by a rule in layer B +I0102 05:14:55.294538 3038403 net.cpp:332] The NetState did not contain stage 'B' specified by a rule in layer AandB +I0102 05:14:55.294544 3038403 net.cpp:53] Initializing net from parameters: state { phase: TEST level: 0 @@ -16392,43 +16427,43 @@ num_output: 1 } } -I1128 20:30:24.179416 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.179428 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.179435 2042758 net.cpp:382] data -> data -I1128 20:30:24.179453 2042758 net.cpp:124] Setting up data -I1128 20:30:24.179459 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.179467 2042758 net.cpp:139] Memory required for data: 400 -I1128 20:30:24.179472 2042758 layer_factory.hpp:77] Creating layer data_data_0_split -I1128 20:30:24.179481 2042758 net.cpp:86] Creating Layer data_data_0_split -I1128 20:30:24.179486 2042758 net.cpp:408] data_data_0_split <- data -I1128 20:30:24.179494 2042758 net.cpp:382] data_data_0_split -> data_data_0_split_0 -I1128 20:30:24.179504 2042758 net.cpp:382] data_data_0_split -> data_data_0_split_1 -I1128 20:30:24.179514 2042758 net.cpp:124] Setting up data_data_0_split -I1128 20:30:24.179522 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.179528 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.179534 2042758 net.cpp:139] Memory required for data: 1200 -I1128 20:30:24.179539 2042758 layer_factory.hpp:77] Creating layer A -I1128 20:30:24.179548 2042758 net.cpp:86] Creating Layer A -I1128 20:30:24.179553 2042758 net.cpp:408] A <- data_data_0_split_0 -I1128 20:30:24.179561 2042758 net.cpp:382] A -> A -I1128 20:30:24.179577 2042758 net.cpp:124] Setting up A -I1128 20:30:24.179582 2042758 net.cpp:131] Top shape: 1 1 (1) -I1128 20:30:24.179589 2042758 net.cpp:139] Memory required for data: 1204 -I1128 20:30:24.179602 2042758 layer_factory.hpp:77] Creating layer AorB -I1128 20:30:24.179611 2042758 net.cpp:86] Creating Layer AorB -I1128 20:30:24.179616 2042758 net.cpp:408] AorB <- data_data_0_split_1 -I1128 20:30:24.179625 2042758 net.cpp:382] AorB -> AorB -I1128 20:30:24.179639 2042758 net.cpp:124] Setting up AorB -I1128 20:30:24.179646 2042758 net.cpp:131] Top shape: 1 1 (1) -I1128 20:30:24.179651 2042758 net.cpp:139] Memory required for data: 1208 -I1128 20:30:24.179659 2042758 net.cpp:202] AorB does not need backward computation. -I1128 20:30:24.179666 2042758 net.cpp:202] A does not need backward computation. -I1128 20:30:24.179672 2042758 net.cpp:202] data_data_0_split does not need backward computation. -I1128 20:30:24.179677 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.179682 2042758 net.cpp:244] This network produces output A -I1128 20:30:24.179687 2042758 net.cpp:244] This network produces output AorB -I1128 20:30:24.179697 2042758 net.cpp:257] Network initialization done. 
-.I1128 20:30:24.180282 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.294641 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.294656 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.294663 3038403 net.cpp:382] data -> data +I0102 05:14:55.294687 3038403 net.cpp:124] Setting up data +I0102 05:14:55.294694 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.294703 3038403 net.cpp:139] Memory required for data: 400 +I0102 05:14:55.294708 3038403 layer_factory.hpp:77] Creating layer data_data_0_split +I0102 05:14:55.294718 3038403 net.cpp:86] Creating Layer data_data_0_split +I0102 05:14:55.294735 3038403 net.cpp:408] data_data_0_split <- data +I0102 05:14:55.294744 3038403 net.cpp:382] data_data_0_split -> data_data_0_split_0 +I0102 05:14:55.294757 3038403 net.cpp:382] data_data_0_split -> data_data_0_split_1 +I0102 05:14:55.294768 3038403 net.cpp:124] Setting up data_data_0_split +I0102 05:14:55.294775 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.294781 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.294787 3038403 net.cpp:139] Memory required for data: 1200 +I0102 05:14:55.294793 3038403 layer_factory.hpp:77] Creating layer A +I0102 05:14:55.294803 3038403 net.cpp:86] Creating Layer A +I0102 05:14:55.294808 3038403 net.cpp:408] A <- data_data_0_split_0 +I0102 05:14:55.294816 3038403 net.cpp:382] A -> A +I0102 05:14:55.294833 3038403 net.cpp:124] Setting up A +I0102 05:14:55.294839 3038403 net.cpp:131] Top shape: 1 1 (1) +I0102 05:14:55.294847 3038403 net.cpp:139] Memory required for data: 1204 +I0102 05:14:55.294859 3038403 layer_factory.hpp:77] Creating layer AorB +I0102 05:14:55.294873 3038403 net.cpp:86] Creating Layer AorB +I0102 05:14:55.294879 3038403 net.cpp:408] AorB <- data_data_0_split_1 +I0102 05:14:55.294888 3038403 net.cpp:382] AorB -> AorB +I0102 05:14:55.294905 3038403 net.cpp:124] Setting up AorB +I0102 05:14:55.294911 3038403 net.cpp:131] Top shape: 1 1 (1) +I0102 05:14:55.294919 3038403 net.cpp:139] Memory required for data: 1208 +I0102 05:14:55.294929 3038403 net.cpp:202] AorB does not need backward computation. +I0102 05:14:55.294934 3038403 net.cpp:202] A does not need backward computation. +I0102 05:14:55.294939 3038403 net.cpp:202] data_data_0_split does not need backward computation. +I0102 05:14:55.294945 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.294950 3038403 net.cpp:244] This network produces output A +I0102 05:14:55.294955 3038403 net.cpp:244] This network produces output AorB +I0102 05:14:55.294965 3038403 net.cpp:257] Network initialization done. 
+.I0102 05:14:55.295630 3038403 net.cpp:53] Initializing net from parameters: state { phase: TEST level: 0 @@ -16500,68 +16535,68 @@ num_output: 1 } } -I1128 20:30:24.180387 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.180395 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.180402 2042758 net.cpp:382] data -> data -I1128 20:30:24.180418 2042758 net.cpp:124] Setting up data -I1128 20:30:24.180424 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.180431 2042758 net.cpp:139] Memory required for data: 400 -I1128 20:30:24.180436 2042758 layer_factory.hpp:77] Creating layer data_data_0_split -I1128 20:30:24.180444 2042758 net.cpp:86] Creating Layer data_data_0_split -I1128 20:30:24.180450 2042758 net.cpp:408] data_data_0_split <- data -I1128 20:30:24.180459 2042758 net.cpp:382] data_data_0_split -> data_data_0_split_0 -I1128 20:30:24.180469 2042758 net.cpp:382] data_data_0_split -> data_data_0_split_1 -I1128 20:30:24.180477 2042758 net.cpp:382] data_data_0_split -> data_data_0_split_2 -I1128 20:30:24.180486 2042758 net.cpp:382] data_data_0_split -> data_data_0_split_3 -I1128 20:30:24.180496 2042758 net.cpp:124] Setting up data_data_0_split -I1128 20:30:24.180502 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.180508 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.180514 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.180521 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.180528 2042758 net.cpp:139] Memory required for data: 2000 -I1128 20:30:24.180533 2042758 layer_factory.hpp:77] Creating layer A -I1128 20:30:24.180541 2042758 net.cpp:86] Creating Layer A -I1128 20:30:24.180546 2042758 net.cpp:408] A <- data_data_0_split_0 -I1128 20:30:24.180555 2042758 net.cpp:382] A -> A -I1128 20:30:24.180570 2042758 net.cpp:124] Setting up A -I1128 20:30:24.180577 2042758 net.cpp:131] Top shape: 1 1 (1) -I1128 20:30:24.180583 2042758 net.cpp:139] Memory required for data: 2004 -I1128 20:30:24.180594 2042758 layer_factory.hpp:77] Creating layer B -I1128 20:30:24.180601 2042758 net.cpp:86] Creating Layer B -I1128 20:30:24.180606 2042758 net.cpp:408] B <- data_data_0_split_1 -I1128 20:30:24.180613 2042758 net.cpp:382] B -> B -I1128 20:30:24.180627 2042758 net.cpp:124] Setting up B -I1128 20:30:24.180632 2042758 net.cpp:131] Top shape: 1 1 (1) -I1128 20:30:24.180639 2042758 net.cpp:139] Memory required for data: 2008 -I1128 20:30:24.180647 2042758 layer_factory.hpp:77] Creating layer AorB -I1128 20:30:24.180656 2042758 net.cpp:86] Creating Layer AorB -I1128 20:30:24.180661 2042758 net.cpp:408] AorB <- data_data_0_split_2 -I1128 20:30:24.180671 2042758 net.cpp:382] AorB -> AorB -I1128 20:30:24.180685 2042758 net.cpp:124] Setting up AorB -I1128 20:30:24.180691 2042758 net.cpp:131] Top shape: 1 1 (1) -I1128 20:30:24.180696 2042758 net.cpp:139] Memory required for data: 2012 -I1128 20:30:24.180706 2042758 layer_factory.hpp:77] Creating layer AandB -I1128 20:30:24.180718 2042758 net.cpp:86] Creating Layer AandB -I1128 20:30:24.180723 2042758 net.cpp:408] AandB <- data_data_0_split_3 -I1128 20:30:24.180732 2042758 net.cpp:382] AandB -> AandB -I1128 20:30:24.180747 2042758 net.cpp:124] Setting up AandB -I1128 20:30:24.180752 2042758 net.cpp:131] Top shape: 1 1 (1) -I1128 20:30:24.180758 2042758 net.cpp:139] Memory required for data: 2016 -I1128 20:30:24.180765 2042758 net.cpp:202] AandB does not need backward computation. -I1128 20:30:24.180771 2042758 net.cpp:202] AorB does not need backward computation. 
-I1128 20:30:24.180776 2042758 net.cpp:202] B does not need backward computation. -I1128 20:30:24.180781 2042758 net.cpp:202] A does not need backward computation. -I1128 20:30:24.180788 2042758 net.cpp:202] data_data_0_split does not need backward computation. -I1128 20:30:24.180794 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.180799 2042758 net.cpp:244] This network produces output A -I1128 20:30:24.180804 2042758 net.cpp:244] This network produces output AandB -I1128 20:30:24.180810 2042758 net.cpp:244] This network produces output AorB -I1128 20:30:24.180816 2042758 net.cpp:244] This network produces output B -I1128 20:30:24.180840 2042758 net.cpp:257] Network initialization done. -.I1128 20:30:24.181464 2042758 net.cpp:332] The NetState did not contain stage 'A' specified by a rule in layer A -I1128 20:30:24.181481 2042758 net.cpp:332] The NetState did not contain stage 'A' specified by a rule in layer AorB -I1128 20:30:24.181489 2042758 net.cpp:332] The NetState did not contain stage 'A' specified by a rule in layer AandB -I1128 20:30:24.181495 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.295758 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.295774 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.295783 3038403 net.cpp:382] data -> data +I0102 05:14:55.295801 3038403 net.cpp:124] Setting up data +I0102 05:14:55.295812 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.295821 3038403 net.cpp:139] Memory required for data: 400 +I0102 05:14:55.295828 3038403 layer_factory.hpp:77] Creating layer data_data_0_split +I0102 05:14:55.295838 3038403 net.cpp:86] Creating Layer data_data_0_split +I0102 05:14:55.295845 3038403 net.cpp:408] data_data_0_split <- data +I0102 05:14:55.295853 3038403 net.cpp:382] data_data_0_split -> data_data_0_split_0 +I0102 05:14:55.295864 3038403 net.cpp:382] data_data_0_split -> data_data_0_split_1 +I0102 05:14:55.295876 3038403 net.cpp:382] data_data_0_split -> data_data_0_split_2 +I0102 05:14:55.295886 3038403 net.cpp:382] data_data_0_split -> data_data_0_split_3 +I0102 05:14:55.295909 3038403 net.cpp:124] Setting up data_data_0_split +I0102 05:14:55.295917 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.295925 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.295931 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.295938 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.295946 3038403 net.cpp:139] Memory required for data: 2000 +I0102 05:14:55.295953 3038403 layer_factory.hpp:77] Creating layer A +I0102 05:14:55.295964 3038403 net.cpp:86] Creating Layer A +I0102 05:14:55.295980 3038403 net.cpp:408] A <- data_data_0_split_0 +I0102 05:14:55.295989 3038403 net.cpp:382] A -> A +I0102 05:14:55.296013 3038403 net.cpp:124] Setting up A +I0102 05:14:55.296028 3038403 net.cpp:131] Top shape: 1 1 (1) +I0102 05:14:55.296038 3038403 net.cpp:139] Memory required for data: 2004 +I0102 05:14:55.296058 3038403 layer_factory.hpp:77] Creating layer B +I0102 05:14:55.296073 3038403 net.cpp:86] Creating Layer B +I0102 05:14:55.296079 3038403 net.cpp:408] B <- data_data_0_split_1 +I0102 05:14:55.296089 3038403 net.cpp:382] B -> B +I0102 05:14:55.296106 3038403 net.cpp:124] Setting up B +I0102 05:14:55.296121 3038403 net.cpp:131] Top shape: 1 1 (1) +I0102 05:14:55.296131 3038403 net.cpp:139] Memory required for data: 2008 +I0102 05:14:55.296146 3038403 layer_factory.hpp:77] Creating layer AorB +I0102 05:14:55.296159 3038403 
net.cpp:86] Creating Layer AorB +I0102 05:14:55.296164 3038403 net.cpp:408] AorB <- data_data_0_split_2 +I0102 05:14:55.296175 3038403 net.cpp:382] AorB -> AorB +I0102 05:14:55.296197 3038403 net.cpp:124] Setting up AorB +I0102 05:14:55.296268 3038403 net.cpp:131] Top shape: 1 1 (1) +I0102 05:14:55.296276 3038403 net.cpp:139] Memory required for data: 2012 +I0102 05:14:55.296289 3038403 layer_factory.hpp:77] Creating layer AandB +I0102 05:14:55.296306 3038403 net.cpp:86] Creating Layer AandB +I0102 05:14:55.296314 3038403 net.cpp:408] AandB <- data_data_0_split_3 +I0102 05:14:55.296324 3038403 net.cpp:382] AandB -> AandB +I0102 05:14:55.296342 3038403 net.cpp:124] Setting up AandB +I0102 05:14:55.296350 3038403 net.cpp:131] Top shape: 1 1 (1) +I0102 05:14:55.296357 3038403 net.cpp:139] Memory required for data: 2016 +I0102 05:14:55.296367 3038403 net.cpp:202] AandB does not need backward computation. +I0102 05:14:55.296380 3038403 net.cpp:202] AorB does not need backward computation. +I0102 05:14:55.296386 3038403 net.cpp:202] B does not need backward computation. +I0102 05:14:55.296392 3038403 net.cpp:202] A does not need backward computation. +I0102 05:14:55.296399 3038403 net.cpp:202] data_data_0_split does not need backward computation. +I0102 05:14:55.296406 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.296412 3038403 net.cpp:244] This network produces output A +I0102 05:14:55.296419 3038403 net.cpp:244] This network produces output AandB +I0102 05:14:55.296427 3038403 net.cpp:244] This network produces output AorB +I0102 05:14:55.296433 3038403 net.cpp:244] This network produces output B +I0102 05:14:55.296445 3038403 net.cpp:257] Network initialization done. +.I0102 05:14:55.297264 3038403 net.cpp:332] The NetState did not contain stage 'A' specified by a rule in layer A +I0102 05:14:55.297286 3038403 net.cpp:332] The NetState did not contain stage 'A' specified by a rule in layer AorB +I0102 05:14:55.297294 3038403 net.cpp:332] The NetState did not contain stage 'A' specified by a rule in layer AandB +I0102 05:14:55.297302 3038403 net.cpp:53] Initializing net from parameters: state { phase: TEST level: 0 @@ -16607,43 +16642,43 @@ num_output: 1 } } -I1128 20:30:24.181576 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.181587 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.181596 2042758 net.cpp:382] data -> data -I1128 20:30:24.181612 2042758 net.cpp:124] Setting up data -I1128 20:30:24.181617 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.181625 2042758 net.cpp:139] Memory required for data: 400 -I1128 20:30:24.181632 2042758 layer_factory.hpp:77] Creating layer data_data_0_split -I1128 20:30:24.181641 2042758 net.cpp:86] Creating Layer data_data_0_split -I1128 20:30:24.181646 2042758 net.cpp:408] data_data_0_split <- data -I1128 20:30:24.181658 2042758 net.cpp:382] data_data_0_split -> data_data_0_split_0 -I1128 20:30:24.181680 2042758 net.cpp:382] data_data_0_split -> data_data_0_split_1 -I1128 20:30:24.181691 2042758 net.cpp:124] Setting up data_data_0_split -I1128 20:30:24.181697 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.181704 2042758 net.cpp:131] Top shape: 1 1 10 10 (100) -I1128 20:30:24.181711 2042758 net.cpp:139] Memory required for data: 1200 -I1128 20:30:24.181717 2042758 layer_factory.hpp:77] Creating layer B -I1128 20:30:24.181726 2042758 net.cpp:86] Creating Layer B -I1128 20:30:24.181731 2042758 net.cpp:408] B <- data_data_0_split_0 -I1128 20:30:24.181740 2042758 
net.cpp:382] B -> B -I1128 20:30:24.181757 2042758 net.cpp:124] Setting up B -I1128 20:30:24.181762 2042758 net.cpp:131] Top shape: 1 1 (1) -I1128 20:30:24.181769 2042758 net.cpp:139] Memory required for data: 1204 -I1128 20:30:24.181784 2042758 layer_factory.hpp:77] Creating layer AorB -I1128 20:30:24.181793 2042758 net.cpp:86] Creating Layer AorB -I1128 20:30:24.181799 2042758 net.cpp:408] AorB <- data_data_0_split_1 -I1128 20:30:24.181810 2042758 net.cpp:382] AorB -> AorB -I1128 20:30:24.181838 2042758 net.cpp:124] Setting up AorB -I1128 20:30:24.181843 2042758 net.cpp:131] Top shape: 1 1 (1) -I1128 20:30:24.181851 2042758 net.cpp:139] Memory required for data: 1208 -I1128 20:30:24.181862 2042758 net.cpp:202] AorB does not need backward computation. -I1128 20:30:24.181869 2042758 net.cpp:202] B does not need backward computation. -I1128 20:30:24.181874 2042758 net.cpp:202] data_data_0_split does not need backward computation. -I1128 20:30:24.181880 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.181885 2042758 net.cpp:244] This network produces output AorB -I1128 20:30:24.181891 2042758 net.cpp:244] This network produces output B -I1128 20:30:24.181900 2042758 net.cpp:257] Network initialization done. -.I1128 20:30:24.183801 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.297399 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.297412 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.297421 3038403 net.cpp:382] data -> data +I0102 05:14:55.297441 3038403 net.cpp:124] Setting up data +I0102 05:14:55.297447 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.297456 3038403 net.cpp:139] Memory required for data: 400 +I0102 05:14:55.297461 3038403 layer_factory.hpp:77] Creating layer data_data_0_split +I0102 05:14:55.297472 3038403 net.cpp:86] Creating Layer data_data_0_split +I0102 05:14:55.297482 3038403 net.cpp:408] data_data_0_split <- data +I0102 05:14:55.297494 3038403 net.cpp:382] data_data_0_split -> data_data_0_split_0 +I0102 05:14:55.297508 3038403 net.cpp:382] data_data_0_split -> data_data_0_split_1 +I0102 05:14:55.297519 3038403 net.cpp:124] Setting up data_data_0_split +I0102 05:14:55.297526 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.297534 3038403 net.cpp:131] Top shape: 1 1 10 10 (100) +I0102 05:14:55.297542 3038403 net.cpp:139] Memory required for data: 1200 +I0102 05:14:55.297547 3038403 layer_factory.hpp:77] Creating layer B +I0102 05:14:55.297562 3038403 net.cpp:86] Creating Layer B +I0102 05:14:55.297569 3038403 net.cpp:408] B <- data_data_0_split_0 +I0102 05:14:55.297580 3038403 net.cpp:382] B -> B +I0102 05:14:55.297597 3038403 net.cpp:124] Setting up B +I0102 05:14:55.297603 3038403 net.cpp:131] Top shape: 1 1 (1) +I0102 05:14:55.297611 3038403 net.cpp:139] Memory required for data: 1204 +I0102 05:14:55.297626 3038403 layer_factory.hpp:77] Creating layer AorB +I0102 05:14:55.297636 3038403 net.cpp:86] Creating Layer AorB +I0102 05:14:55.297642 3038403 net.cpp:408] AorB <- data_data_0_split_1 +I0102 05:14:55.297652 3038403 net.cpp:382] AorB -> AorB +I0102 05:14:55.297673 3038403 net.cpp:124] Setting up AorB +I0102 05:14:55.297680 3038403 net.cpp:131] Top shape: 1 1 (1) +I0102 05:14:55.297688 3038403 net.cpp:139] Memory required for data: 1208 +I0102 05:14:55.297698 3038403 net.cpp:202] AorB does not need backward computation. +I0102 05:14:55.297704 3038403 net.cpp:202] B does not need backward computation. 
+I0102 05:14:55.297710 3038403 net.cpp:202] data_data_0_split does not need backward computation. +I0102 05:14:55.297716 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.297721 3038403 net.cpp:244] This network produces output AorB +I0102 05:14:55.297727 3038403 net.cpp:244] This network produces output B +I0102 05:14:55.297737 3038403 net.cpp:257] Network initialization done. +.I0102 05:14:55.299732 3038403 net.cpp:53] Initializing net from parameters: state { phase: TEST level: 0 @@ -16756,85 +16791,85 @@ bottom: "label" top: "loss" } -I1128 20:30:24.183961 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.183974 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.183981 2042758 net.cpp:382] data -> data -I1128 20:30:24.183995 2042758 net.cpp:382] data -> label -I1128 20:30:24.184063 2042758 net.cpp:124] Setting up data -I1128 20:30:24.184072 2042758 net.cpp:131] Top shape: 50 1 28 28 (39200) -I1128 20:30:24.184082 2042758 net.cpp:131] Top shape: 50 1 1 1 (50) -I1128 20:30:24.184093 2042758 net.cpp:139] Memory required for data: 157000 -I1128 20:30:24.184100 2042758 layer_factory.hpp:77] Creating layer conv1 -I1128 20:30:24.184113 2042758 net.cpp:86] Creating Layer conv1 -I1128 20:30:24.184119 2042758 net.cpp:408] conv1 <- data -I1128 20:30:24.184128 2042758 net.cpp:382] conv1 -> conv1 -I1128 20:30:24.184177 2042758 net.cpp:124] Setting up conv1 -I1128 20:30:24.184182 2042758 net.cpp:131] Top shape: 50 20 24 24 (576000) -I1128 20:30:24.184193 2042758 net.cpp:139] Memory required for data: 2461000 -I1128 20:30:24.184209 2042758 layer_factory.hpp:77] Creating layer pool1 -I1128 20:30:24.184219 2042758 net.cpp:86] Creating Layer pool1 -I1128 20:30:24.184224 2042758 net.cpp:408] pool1 <- conv1 -I1128 20:30:24.184234 2042758 net.cpp:382] pool1 -> pool1 -I1128 20:30:24.184263 2042758 net.cpp:124] Setting up pool1 -I1128 20:30:24.184268 2042758 net.cpp:131] Top shape: 50 20 12 12 (144000) -I1128 20:30:24.184275 2042758 net.cpp:139] Memory required for data: 3037000 -I1128 20:30:24.184280 2042758 layer_factory.hpp:77] Creating layer conv2 -I1128 20:30:24.184288 2042758 net.cpp:86] Creating Layer conv2 -I1128 20:30:24.184293 2042758 net.cpp:408] conv2 <- pool1 -I1128 20:30:24.184301 2042758 net.cpp:382] conv2 -> conv2 -I1128 20:30:24.184567 2042758 net.cpp:124] Setting up conv2 -I1128 20:30:24.184573 2042758 net.cpp:131] Top shape: 50 50 8 8 (160000) -I1128 20:30:24.184581 2042758 net.cpp:139] Memory required for data: 3677000 -I1128 20:30:24.184588 2042758 layer_factory.hpp:77] Creating layer pool2 -I1128 20:30:24.184597 2042758 net.cpp:86] Creating Layer pool2 -I1128 20:30:24.184602 2042758 net.cpp:408] pool2 <- conv2 -I1128 20:30:24.184608 2042758 net.cpp:382] pool2 -> pool2 -I1128 20:30:24.184619 2042758 net.cpp:124] Setting up pool2 -I1128 20:30:24.184635 2042758 net.cpp:131] Top shape: 50 50 4 4 (40000) -I1128 20:30:24.184641 2042758 net.cpp:139] Memory required for data: 3837000 -I1128 20:30:24.184650 2042758 layer_factory.hpp:77] Creating layer ip1 -I1128 20:30:24.184662 2042758 net.cpp:86] Creating Layer ip1 -I1128 20:30:24.184667 2042758 net.cpp:408] ip1 <- pool2 -I1128 20:30:24.184679 2042758 net.cpp:382] ip1 -> ip1 -I1128 20:30:24.189538 2042758 net.cpp:124] Setting up ip1 -I1128 20:30:24.189577 2042758 net.cpp:131] Top shape: 50 500 (25000) -I1128 20:30:24.189589 2042758 net.cpp:139] Memory required for data: 3937000 -I1128 20:30:24.189612 2042758 layer_factory.hpp:77] Creating layer relu1 -I1128 20:30:24.189635 2042758 net.cpp:86] 
Creating Layer relu1 -I1128 20:30:24.189641 2042758 net.cpp:408] relu1 <- ip1 -I1128 20:30:24.189651 2042758 net.cpp:369] relu1 -> ip1 (in-place) -I1128 20:30:24.189663 2042758 net.cpp:124] Setting up relu1 -I1128 20:30:24.189668 2042758 net.cpp:131] Top shape: 50 500 (25000) -I1128 20:30:24.189674 2042758 net.cpp:139] Memory required for data: 4037000 -I1128 20:30:24.189679 2042758 layer_factory.hpp:77] Creating layer ip2 -I1128 20:30:24.189688 2042758 net.cpp:86] Creating Layer ip2 -I1128 20:30:24.189692 2042758 net.cpp:408] ip2 <- ip1 -I1128 20:30:24.189700 2042758 net.cpp:382] ip2 -> ip2 -I1128 20:30:24.189771 2042758 net.cpp:124] Setting up ip2 -I1128 20:30:24.189777 2042758 net.cpp:131] Top shape: 50 10 (500) -I1128 20:30:24.189783 2042758 net.cpp:139] Memory required for data: 4039000 -I1128 20:30:24.189792 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.189803 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.189808 2042758 net.cpp:408] loss <- ip2 -I1128 20:30:24.189815 2042758 net.cpp:408] loss <- label -I1128 20:30:24.189822 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.189834 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.189857 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.189862 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.189867 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.189889 2042758 net.cpp:139] Memory required for data: 4039004 -I1128 20:30:24.189894 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.189901 2042758 net.cpp:200] ip2 needs backward computation. -I1128 20:30:24.189906 2042758 net.cpp:200] relu1 needs backward computation. -I1128 20:30:24.189911 2042758 net.cpp:200] ip1 needs backward computation. -I1128 20:30:24.189916 2042758 net.cpp:200] pool2 needs backward computation. -I1128 20:30:24.189922 2042758 net.cpp:200] conv2 needs backward computation. -I1128 20:30:24.189929 2042758 net.cpp:200] pool1 needs backward computation. -I1128 20:30:24.189934 2042758 net.cpp:200] conv1 needs backward computation. -I1128 20:30:24.189941 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.189946 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.189958 2042758 net.cpp:257] Network initialization done. 
-I1128 20:30:24.191813 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.299906 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.299922 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.299930 3038403 net.cpp:382] data -> data +I0102 05:14:55.299947 3038403 net.cpp:382] data -> label +I0102 05:14:55.300015 3038403 net.cpp:124] Setting up data +I0102 05:14:55.300024 3038403 net.cpp:131] Top shape: 50 1 28 28 (39200) +I0102 05:14:55.300034 3038403 net.cpp:131] Top shape: 50 1 1 1 (50) +I0102 05:14:55.300041 3038403 net.cpp:139] Memory required for data: 157000 +I0102 05:14:55.300046 3038403 layer_factory.hpp:77] Creating layer conv1 +I0102 05:14:55.300060 3038403 net.cpp:86] Creating Layer conv1 +I0102 05:14:55.300065 3038403 net.cpp:408] conv1 <- data +I0102 05:14:55.300073 3038403 net.cpp:382] conv1 -> conv1 +I0102 05:14:55.300117 3038403 net.cpp:124] Setting up conv1 +I0102 05:14:55.300124 3038403 net.cpp:131] Top shape: 50 20 24 24 (576000) +I0102 05:14:55.300132 3038403 net.cpp:139] Memory required for data: 2461000 +I0102 05:14:55.300148 3038403 layer_factory.hpp:77] Creating layer pool1 +I0102 05:14:55.300158 3038403 net.cpp:86] Creating Layer pool1 +I0102 05:14:55.300163 3038403 net.cpp:408] pool1 <- conv1 +I0102 05:14:55.300174 3038403 net.cpp:382] pool1 -> pool1 +I0102 05:14:55.300204 3038403 net.cpp:124] Setting up pool1 +I0102 05:14:55.300210 3038403 net.cpp:131] Top shape: 50 20 12 12 (144000) +I0102 05:14:55.300217 3038403 net.cpp:139] Memory required for data: 3037000 +I0102 05:14:55.300222 3038403 layer_factory.hpp:77] Creating layer conv2 +I0102 05:14:55.300231 3038403 net.cpp:86] Creating Layer conv2 +I0102 05:14:55.300237 3038403 net.cpp:408] conv2 <- pool1 +I0102 05:14:55.300246 3038403 net.cpp:382] conv2 -> conv2 +I0102 05:14:55.300519 3038403 net.cpp:124] Setting up conv2 +I0102 05:14:55.300530 3038403 net.cpp:131] Top shape: 50 50 8 8 (160000) +I0102 05:14:55.300540 3038403 net.cpp:139] Memory required for data: 3677000 +I0102 05:14:55.300551 3038403 layer_factory.hpp:77] Creating layer pool2 +I0102 05:14:55.300561 3038403 net.cpp:86] Creating Layer pool2 +I0102 05:14:55.301260 3038403 net.cpp:408] pool2 <- conv2 +I0102 05:14:55.301277 3038403 net.cpp:382] pool2 -> pool2 +I0102 05:14:55.301295 3038403 net.cpp:124] Setting up pool2 +I0102 05:14:55.301301 3038403 net.cpp:131] Top shape: 50 50 4 4 (40000) +I0102 05:14:55.301311 3038403 net.cpp:139] Memory required for data: 3837000 +I0102 05:14:55.301317 3038403 layer_factory.hpp:77] Creating layer ip1 +I0102 05:14:55.301328 3038403 net.cpp:86] Creating Layer ip1 +I0102 05:14:55.301333 3038403 net.cpp:408] ip1 <- pool2 +I0102 05:14:55.301340 3038403 net.cpp:382] ip1 -> ip1 +I0102 05:14:55.306532 3038403 net.cpp:124] Setting up ip1 +I0102 05:14:55.306572 3038403 net.cpp:131] Top shape: 50 500 (25000) +I0102 05:14:55.306584 3038403 net.cpp:139] Memory required for data: 3937000 +I0102 05:14:55.306610 3038403 layer_factory.hpp:77] Creating layer relu1 +I0102 05:14:55.306629 3038403 net.cpp:86] Creating Layer relu1 +I0102 05:14:55.306638 3038403 net.cpp:408] relu1 <- ip1 +I0102 05:14:55.306649 3038403 net.cpp:369] relu1 -> ip1 (in-place) +I0102 05:14:55.306664 3038403 net.cpp:124] Setting up relu1 +I0102 05:14:55.306671 3038403 net.cpp:131] Top shape: 50 500 (25000) +I0102 05:14:55.306679 3038403 net.cpp:139] Memory required for data: 4037000 +I0102 05:14:55.306684 3038403 layer_factory.hpp:77] Creating layer ip2 +I0102 05:14:55.306694 3038403 net.cpp:86] Creating Layer ip2 +I0102 
05:14:55.306700 3038403 net.cpp:408] ip2 <- ip1 +I0102 05:14:55.306715 3038403 net.cpp:382] ip2 -> ip2 +I0102 05:14:55.306789 3038403 net.cpp:124] Setting up ip2 +I0102 05:14:55.306797 3038403 net.cpp:131] Top shape: 50 10 (500) +I0102 05:14:55.306804 3038403 net.cpp:139] Memory required for data: 4039000 +I0102 05:14:55.306813 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.306826 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.306831 3038403 net.cpp:408] loss <- ip2 +I0102 05:14:55.306838 3038403 net.cpp:408] loss <- label +I0102 05:14:55.306846 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.306860 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.306882 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.306890 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.306896 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.306921 3038403 net.cpp:139] Memory required for data: 4039004 +I0102 05:14:55.306926 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.306933 3038403 net.cpp:200] ip2 needs backward computation. +I0102 05:14:55.306938 3038403 net.cpp:200] relu1 needs backward computation. +I0102 05:14:55.306943 3038403 net.cpp:200] ip1 needs backward computation. +I0102 05:14:55.306949 3038403 net.cpp:200] pool2 needs backward computation. +I0102 05:14:55.306955 3038403 net.cpp:200] conv2 needs backward computation. +I0102 05:14:55.306962 3038403 net.cpp:200] pool1 needs backward computation. +I0102 05:14:55.306968 3038403 net.cpp:200] conv1 needs backward computation. +I0102 05:14:55.306977 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.306982 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.306995 3038403 net.cpp:257] Network initialization done. 
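(Aside: the two LeNet-style initializations logged just above and below come from pycaffe's net-spec tests, and the logged shapes — data 50 1 28 28, 5x5 convolutions with 20 and 50 outputs, 2x2/stride-2 max pooling, 500- and 10-unit inner products, and a softmax loss — pin the network down. The following is a minimal caffe.NetSpec sketch reconstructed from those logged shapes, not copied from the package's test sources, so fillers and other details may differ; it is included only as a reference for reading the "Creating layer ... / Top shape ..." sequences in this diff.)

    import caffe
    from caffe import layers as L, params as P

    def lenet_like(batch_size=50):
        """Rebuild a LeNet-style net whose shapes match the log above (sketch)."""
        n = caffe.NetSpec()
        # DummyData stands in for a real data source: 50x1x28x28 images plus 50 labels.
        n.data, n.label = L.DummyData(
            shape=[dict(dim=[batch_size, 1, 28, 28]),
                   dict(dim=[batch_size, 1, 1, 1])],
            ntop=2)
        n.conv1 = L.Convolution(n.data, kernel_size=5, num_output=20)    # Top shape: 50 20 24 24
        n.pool1 = L.Pooling(n.conv1, kernel_size=2, stride=2,
                            pool=P.Pooling.MAX)                          # Top shape: 50 20 12 12
        n.conv2 = L.Convolution(n.pool1, kernel_size=5, num_output=50)   # Top shape: 50 50 8 8
        n.pool2 = L.Pooling(n.conv2, kernel_size=2, stride=2,
                            pool=P.Pooling.MAX)                          # Top shape: 50 50 4 4
        n.ip1 = L.InnerProduct(n.pool2, num_output=500)
        n.relu1 = L.ReLU(n.ip1, in_place=True)
        n.ip2 = L.InnerProduct(n.ip1, num_output=10)
        n.loss = L.SoftmaxWithLoss(n.ip2, n.label)
        return n.to_proto()

    # Writing str(lenet_like()) to a prototxt and loading it with caffe.Net would
    # emit a "Creating layer ... / Setting up ... / Top shape ..." sequence of the
    # kind shown in this log.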
+I0102 05:14:55.309027 3038403 net.cpp:53] Initializing net from parameters: state { phase: TEST level: 0 @@ -16947,87 +16982,87 @@ bottom: "DummyData2" top: "SoftmaxWithLoss1" } -I1128 20:30:24.191974 2042758 layer_factory.hpp:77] Creating layer DummyData1 -I1128 20:30:24.191989 2042758 net.cpp:86] Creating Layer DummyData1 -I1128 20:30:24.191998 2042758 net.cpp:382] DummyData1 -> DummyData1 -I1128 20:30:24.192019 2042758 net.cpp:382] DummyData1 -> DummyData2 -I1128 20:30:24.192093 2042758 net.cpp:124] Setting up DummyData1 -I1128 20:30:24.192107 2042758 net.cpp:131] Top shape: 50 1 28 28 (39200) -I1128 20:30:24.192118 2042758 net.cpp:131] Top shape: 50 1 1 1 (50) -I1128 20:30:24.192124 2042758 net.cpp:139] Memory required for data: 157000 -I1128 20:30:24.192129 2042758 layer_factory.hpp:77] Creating layer Convolution1 -I1128 20:30:24.192142 2042758 net.cpp:86] Creating Layer Convolution1 -I1128 20:30:24.192147 2042758 net.cpp:408] Convolution1 <- DummyData1 -I1128 20:30:24.192157 2042758 net.cpp:382] Convolution1 -> Convolution1 -I1128 20:30:24.192198 2042758 net.cpp:124] Setting up Convolution1 -I1128 20:30:24.192204 2042758 net.cpp:131] Top shape: 50 20 24 24 (576000) -I1128 20:30:24.192216 2042758 net.cpp:139] Memory required for data: 2461000 -I1128 20:30:24.192234 2042758 layer_factory.hpp:77] Creating layer Pooling1 -I1128 20:30:24.192245 2042758 net.cpp:86] Creating Layer Pooling1 -I1128 20:30:24.192252 2042758 net.cpp:408] Pooling1 <- Convolution1 -I1128 20:30:24.192261 2042758 net.cpp:382] Pooling1 -> Pooling1 -I1128 20:30:24.192274 2042758 net.cpp:124] Setting up Pooling1 -I1128 20:30:24.192292 2042758 net.cpp:131] Top shape: 50 20 12 12 (144000) -I1128 20:30:24.192301 2042758 net.cpp:139] Memory required for data: 3037000 -I1128 20:30:24.192307 2042758 layer_factory.hpp:77] Creating layer Convolution2 -I1128 20:30:24.192318 2042758 net.cpp:86] Creating Layer Convolution2 -I1128 20:30:24.192324 2042758 net.cpp:408] Convolution2 <- Pooling1 -I1128 20:30:24.192332 2042758 net.cpp:382] Convolution2 -> Convolution2 -I1128 20:30:24.192678 2042758 net.cpp:124] Setting up Convolution2 -I1128 20:30:24.192687 2042758 net.cpp:131] Top shape: 50 50 8 8 (160000) -I1128 20:30:24.192694 2042758 net.cpp:139] Memory required for data: 3677000 -I1128 20:30:24.192705 2042758 layer_factory.hpp:77] Creating layer Pooling2 -I1128 20:30:24.192716 2042758 net.cpp:86] Creating Layer Pooling2 -I1128 20:30:24.192721 2042758 net.cpp:408] Pooling2 <- Convolution2 -I1128 20:30:24.192729 2042758 net.cpp:382] Pooling2 -> Pooling2 -I1128 20:30:24.192740 2042758 net.cpp:124] Setting up Pooling2 -I1128 20:30:24.192745 2042758 net.cpp:131] Top shape: 50 50 4 4 (40000) -I1128 20:30:24.192751 2042758 net.cpp:139] Memory required for data: 3837000 -I1128 20:30:24.192756 2042758 layer_factory.hpp:77] Creating layer InnerProduct1 -I1128 20:30:24.192764 2042758 net.cpp:86] Creating Layer InnerProduct1 -I1128 20:30:24.192768 2042758 net.cpp:408] InnerProduct1 <- Pooling2 -I1128 20:30:24.192775 2042758 net.cpp:382] InnerProduct1 -> InnerProduct1 -I1128 20:30:24.198350 2042758 net.cpp:124] Setting up InnerProduct1 -I1128 20:30:24.198387 2042758 net.cpp:131] Top shape: 50 500 (25000) -I1128 20:30:24.198398 2042758 net.cpp:139] Memory required for data: 3937000 -I1128 20:30:24.198421 2042758 layer_factory.hpp:77] Creating layer ReLU1 -I1128 20:30:24.198436 2042758 net.cpp:86] Creating Layer ReLU1 -I1128 20:30:24.198443 2042758 net.cpp:408] ReLU1 <- InnerProduct1 -I1128 20:30:24.198454 2042758 net.cpp:369] ReLU1 -> 
InnerProduct1 (in-place) -I1128 20:30:24.198478 2042758 net.cpp:124] Setting up ReLU1 -I1128 20:30:24.198490 2042758 net.cpp:131] Top shape: 50 500 (25000) -I1128 20:30:24.198498 2042758 net.cpp:139] Memory required for data: 4037000 -I1128 20:30:24.198503 2042758 layer_factory.hpp:77] Creating layer InnerProduct2 -I1128 20:30:24.198513 2042758 net.cpp:86] Creating Layer InnerProduct2 -I1128 20:30:24.198518 2042758 net.cpp:408] InnerProduct2 <- InnerProduct1 -I1128 20:30:24.198526 2042758 net.cpp:382] InnerProduct2 -> InnerProduct2 -I1128 20:30:24.198619 2042758 net.cpp:124] Setting up InnerProduct2 -I1128 20:30:24.198630 2042758 net.cpp:131] Top shape: 50 10 (500) -I1128 20:30:24.198643 2042758 net.cpp:139] Memory required for data: 4039000 -I1128 20:30:24.198654 2042758 layer_factory.hpp:77] Creating layer SoftmaxWithLoss1 -I1128 20:30:24.198674 2042758 net.cpp:86] Creating Layer SoftmaxWithLoss1 -I1128 20:30:24.198683 2042758 net.cpp:408] SoftmaxWithLoss1 <- InnerProduct2 -I1128 20:30:24.198693 2042758 net.cpp:408] SoftmaxWithLoss1 <- DummyData2 -I1128 20:30:24.198706 2042758 net.cpp:382] SoftmaxWithLoss1 -> SoftmaxWithLoss1 -I1128 20:30:24.198719 2042758 layer_factory.hpp:77] Creating layer SoftmaxWithLoss1 -I1128 20:30:24.198738 2042758 net.cpp:124] Setting up SoftmaxWithLoss1 -I1128 20:30:24.198743 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.198748 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.198773 2042758 net.cpp:139] Memory required for data: 4039004 -I1128 20:30:24.198779 2042758 net.cpp:200] SoftmaxWithLoss1 needs backward computation. -I1128 20:30:24.198798 2042758 net.cpp:200] InnerProduct2 needs backward computation. -I1128 20:30:24.198804 2042758 net.cpp:200] ReLU1 needs backward computation. -I1128 20:30:24.198812 2042758 net.cpp:200] InnerProduct1 needs backward computation. -I1128 20:30:24.198817 2042758 net.cpp:200] Pooling2 needs backward computation. -I1128 20:30:24.198824 2042758 net.cpp:200] Convolution2 needs backward computation. -I1128 20:30:24.198833 2042758 net.cpp:200] Pooling1 needs backward computation. -I1128 20:30:24.198839 2042758 net.cpp:200] Convolution1 needs backward computation. -I1128 20:30:24.198848 2042758 net.cpp:202] DummyData1 does not need backward computation. -I1128 20:30:24.198856 2042758 net.cpp:244] This network produces output SoftmaxWithLoss1 -I1128 20:30:24.198873 2042758 net.cpp:257] Network initialization done. 
+I0102 05:14:55.309237 3038403 layer_factory.hpp:77] Creating layer DummyData1 +I0102 05:14:55.309257 3038403 net.cpp:86] Creating Layer DummyData1 +I0102 05:14:55.309276 3038403 net.cpp:382] DummyData1 -> DummyData1 +I0102 05:14:55.309295 3038403 net.cpp:382] DummyData1 -> DummyData2 +I0102 05:14:55.309370 3038403 net.cpp:124] Setting up DummyData1 +I0102 05:14:55.309379 3038403 net.cpp:131] Top shape: 50 1 28 28 (39200) +I0102 05:14:55.309393 3038403 net.cpp:131] Top shape: 50 1 1 1 (50) +I0102 05:14:55.309401 3038403 net.cpp:139] Memory required for data: 157000 +I0102 05:14:55.309407 3038403 layer_factory.hpp:77] Creating layer Convolution1 +I0102 05:14:55.309422 3038403 net.cpp:86] Creating Layer Convolution1 +I0102 05:14:55.309437 3038403 net.cpp:408] Convolution1 <- DummyData1 +I0102 05:14:55.309448 3038403 net.cpp:382] Convolution1 -> Convolution1 +I0102 05:14:55.309494 3038403 net.cpp:124] Setting up Convolution1 +I0102 05:14:55.309509 3038403 net.cpp:131] Top shape: 50 20 24 24 (576000) +I0102 05:14:55.309518 3038403 net.cpp:139] Memory required for data: 2461000 +I0102 05:14:55.309535 3038403 layer_factory.hpp:77] Creating layer Pooling1 +I0102 05:14:55.309551 3038403 net.cpp:86] Creating Layer Pooling1 +I0102 05:14:55.309562 3038403 net.cpp:408] Pooling1 <- Convolution1 +I0102 05:14:55.309571 3038403 net.cpp:382] Pooling1 -> Pooling1 +I0102 05:14:55.309587 3038403 net.cpp:124] Setting up Pooling1 +I0102 05:14:55.309593 3038403 net.cpp:131] Top shape: 50 20 12 12 (144000) +I0102 05:14:55.309603 3038403 net.cpp:139] Memory required for data: 3037000 +I0102 05:14:55.309609 3038403 layer_factory.hpp:77] Creating layer Convolution2 +I0102 05:14:55.309619 3038403 net.cpp:86] Creating Layer Convolution2 +I0102 05:14:55.309633 3038403 net.cpp:408] Convolution2 <- Pooling1 +I0102 05:14:55.309643 3038403 net.cpp:382] Convolution2 -> Convolution2 +I0102 05:14:55.309952 3038403 net.cpp:124] Setting up Convolution2 +I0102 05:14:55.309967 3038403 net.cpp:131] Top shape: 50 50 8 8 (160000) +I0102 05:14:55.309978 3038403 net.cpp:139] Memory required for data: 3677000 +I0102 05:14:55.309989 3038403 layer_factory.hpp:77] Creating layer Pooling2 +I0102 05:14:55.310003 3038403 net.cpp:86] Creating Layer Pooling2 +I0102 05:14:55.310009 3038403 net.cpp:408] Pooling2 <- Convolution2 +I0102 05:14:55.310024 3038403 net.cpp:382] Pooling2 -> Pooling2 +I0102 05:14:55.310034 3038403 net.cpp:124] Setting up Pooling2 +I0102 05:14:55.310040 3038403 net.cpp:131] Top shape: 50 50 4 4 (40000) +I0102 05:14:55.310047 3038403 net.cpp:139] Memory required for data: 3837000 +I0102 05:14:55.310052 3038403 layer_factory.hpp:77] Creating layer InnerProduct1 +I0102 05:14:55.310060 3038403 net.cpp:86] Creating Layer InnerProduct1 +I0102 05:14:55.310065 3038403 net.cpp:408] InnerProduct1 <- Pooling2 +I0102 05:14:55.310073 3038403 net.cpp:382] InnerProduct1 -> InnerProduct1 +I0102 05:14:55.315107 3038403 net.cpp:124] Setting up InnerProduct1 +I0102 05:14:55.315148 3038403 net.cpp:131] Top shape: 50 500 (25000) +I0102 05:14:55.315160 3038403 net.cpp:139] Memory required for data: 3937000 +I0102 05:14:55.315184 3038403 layer_factory.hpp:77] Creating layer ReLU1 +I0102 05:14:55.315201 3038403 net.cpp:86] Creating Layer ReLU1 +I0102 05:14:55.315209 3038403 net.cpp:408] ReLU1 <- InnerProduct1 +I0102 05:14:55.315222 3038403 net.cpp:369] ReLU1 -> InnerProduct1 (in-place) +I0102 05:14:55.315235 3038403 net.cpp:124] Setting up ReLU1 +I0102 05:14:55.315241 3038403 net.cpp:131] Top shape: 50 500 (25000) +I0102 05:14:55.315248 3038403 
net.cpp:139] Memory required for data: 4037000 +I0102 05:14:55.315254 3038403 layer_factory.hpp:77] Creating layer InnerProduct2 +I0102 05:14:55.315264 3038403 net.cpp:86] Creating Layer InnerProduct2 +I0102 05:14:55.315271 3038403 net.cpp:408] InnerProduct2 <- InnerProduct1 +I0102 05:14:55.315280 3038403 net.cpp:382] InnerProduct2 -> InnerProduct2 +I0102 05:14:55.315361 3038403 net.cpp:124] Setting up InnerProduct2 +I0102 05:14:55.315369 3038403 net.cpp:131] Top shape: 50 10 (500) +I0102 05:14:55.315377 3038403 net.cpp:139] Memory required for data: 4039000 +I0102 05:14:55.315387 3038403 layer_factory.hpp:77] Creating layer SoftmaxWithLoss1 +I0102 05:14:55.315402 3038403 net.cpp:86] Creating Layer SoftmaxWithLoss1 +I0102 05:14:55.315409 3038403 net.cpp:408] SoftmaxWithLoss1 <- InnerProduct2 +I0102 05:14:55.315416 3038403 net.cpp:408] SoftmaxWithLoss1 <- DummyData2 +I0102 05:14:55.315424 3038403 net.cpp:382] SoftmaxWithLoss1 -> SoftmaxWithLoss1 +I0102 05:14:55.315438 3038403 layer_factory.hpp:77] Creating layer SoftmaxWithLoss1 +I0102 05:14:55.315460 3038403 net.cpp:124] Setting up SoftmaxWithLoss1 +I0102 05:14:55.315472 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.315479 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.315502 3038403 net.cpp:139] Memory required for data: 4039004 +I0102 05:14:55.315510 3038403 net.cpp:200] SoftmaxWithLoss1 needs backward computation. +I0102 05:14:55.315519 3038403 net.cpp:200] InnerProduct2 needs backward computation. +I0102 05:14:55.315526 3038403 net.cpp:200] ReLU1 needs backward computation. +I0102 05:14:55.315531 3038403 net.cpp:200] InnerProduct1 needs backward computation. +I0102 05:14:55.315538 3038403 net.cpp:200] Pooling2 needs backward computation. +I0102 05:14:55.315546 3038403 net.cpp:200] Convolution2 needs backward computation. +I0102 05:14:55.315563 3038403 net.cpp:200] Pooling1 needs backward computation. +I0102 05:14:55.315570 3038403 net.cpp:200] Convolution1 needs backward computation. +I0102 05:14:55.315579 3038403 net.cpp:202] DummyData1 does not need backward computation. +I0102 05:14:55.315585 3038403 net.cpp:244] This network produces output SoftmaxWithLoss1 +I0102 05:14:55.315600 3038403 net.cpp:257] Network initialization done. ./build/caffe-1.0.0+git20180821.99bd997/python/caffe/test/test_net_spec.py:87: DeprecationWarning: Please use assertRaisesRegex instead. 
with self.assertRaisesRegexp(TypeError, r): -.I1128 20:30:24.201539 2042758 net.cpp:53] Initializing net from parameters: +.I0102 05:14:55.318581 3038403 net.cpp:53] Initializing net from parameters: state { phase: TEST level: 0 @@ -17053,32 +17088,32 @@ type: "Silence" bottom: "data2" } -I1128 20:30:24.201608 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.201622 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.201629 2042758 net.cpp:382] data -> data -I1128 20:30:24.201644 2042758 net.cpp:382] data -> data2 -I1128 20:30:24.201664 2042758 net.cpp:124] Setting up data -I1128 20:30:24.201670 2042758 net.cpp:131] Top shape: 3 (3) -I1128 20:30:24.201678 2042758 net.cpp:131] Top shape: 3 (3) -I1128 20:30:24.201683 2042758 net.cpp:139] Memory required for data: 24 -I1128 20:30:24.201689 2042758 layer_factory.hpp:77] Creating layer silence_data -I1128 20:30:24.201710 2042758 net.cpp:86] Creating Layer silence_data -I1128 20:30:24.201715 2042758 net.cpp:408] silence_data <- data -I1128 20:30:24.201722 2042758 net.cpp:124] Setting up silence_data -I1128 20:30:24.201727 2042758 net.cpp:139] Memory required for data: 24 -I1128 20:30:24.201732 2042758 layer_factory.hpp:77] Creating layer silence_data2 -I1128 20:30:24.201740 2042758 net.cpp:86] Creating Layer silence_data2 -I1128 20:30:24.201745 2042758 net.cpp:408] silence_data2 <- data2 -I1128 20:30:24.201751 2042758 net.cpp:124] Setting up silence_data2 -I1128 20:30:24.201756 2042758 net.cpp:139] Memory required for data: 24 -I1128 20:30:24.201761 2042758 net.cpp:202] silence_data2 does not need backward computation. -I1128 20:30:24.201766 2042758 net.cpp:202] silence_data does not need backward computation. -I1128 20:30:24.201771 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.201778 2042758 net.cpp:257] Network initialization done. -.I1128 20:30:24.202419 2042758 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmp68ziiq3l -I1128 20:30:24.202435 2042758 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. -W1128 20:30:24.202440 2042758 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
-I1128 20:30:24.202462 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.318653 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.318670 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.318678 3038403 net.cpp:382] data -> data +I0102 05:14:55.318696 3038403 net.cpp:382] data -> data2 +I0102 05:14:55.318717 3038403 net.cpp:124] Setting up data +I0102 05:14:55.318724 3038403 net.cpp:131] Top shape: 3 (3) +I0102 05:14:55.318733 3038403 net.cpp:131] Top shape: 3 (3) +I0102 05:14:55.318739 3038403 net.cpp:139] Memory required for data: 24 +I0102 05:14:55.318745 3038403 layer_factory.hpp:77] Creating layer silence_data +I0102 05:14:55.318768 3038403 net.cpp:86] Creating Layer silence_data +I0102 05:14:55.318774 3038403 net.cpp:408] silence_data <- data +I0102 05:14:55.318783 3038403 net.cpp:124] Setting up silence_data +I0102 05:14:55.318789 3038403 net.cpp:139] Memory required for data: 24 +I0102 05:14:55.318792 3038403 layer_factory.hpp:77] Creating layer silence_data2 +I0102 05:14:55.318801 3038403 net.cpp:86] Creating Layer silence_data2 +I0102 05:14:55.318806 3038403 net.cpp:408] silence_data2 <- data2 +I0102 05:14:55.318814 3038403 net.cpp:124] Setting up silence_data2 +I0102 05:14:55.318818 3038403 net.cpp:139] Memory required for data: 24 +I0102 05:14:55.318825 3038403 net.cpp:202] silence_data2 does not need backward computation. +I0102 05:14:55.318830 3038403 net.cpp:202] silence_data does not need backward computation. +I0102 05:14:55.318836 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.318843 3038403 net.cpp:257] Network initialization done. +.I0102 05:14:55.319486 3038403 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmpagbyqhwd +I0102 05:14:55.319504 3038403 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. +W0102 05:14:55.319509 3038403 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
+I0102 05:14:55.319532 3038403 net.cpp:53] Initializing net from parameters: name: "pythonnet" force_backward: true state { @@ -17127,43 +17162,43 @@ layer: "SimpleLayer" } } -I1128 20:30:24.202530 2042758 layer_factory.hpp:77] Creating layer input -I1128 20:30:24.202543 2042758 net.cpp:86] Creating Layer input -I1128 20:30:24.202550 2042758 net.cpp:382] input -> data -I1128 20:30:24.202567 2042758 net.cpp:124] Setting up input -I1128 20:30:24.202572 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.202579 2042758 net.cpp:139] Memory required for data: 2880 -I1128 20:30:24.202585 2042758 layer_factory.hpp:77] Creating layer one -I1128 20:30:24.202625 2042758 net.cpp:86] Creating Layer one -I1128 20:30:24.202631 2042758 net.cpp:408] one <- data -I1128 20:30:24.202641 2042758 net.cpp:382] one -> one -I1128 20:30:24.202697 2042758 net.cpp:124] Setting up one -I1128 20:30:24.202703 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.202710 2042758 net.cpp:139] Memory required for data: 5760 -I1128 20:30:24.202715 2042758 layer_factory.hpp:77] Creating layer two -I1128 20:30:24.202735 2042758 net.cpp:86] Creating Layer two -I1128 20:30:24.202741 2042758 net.cpp:408] two <- one -I1128 20:30:24.202747 2042758 net.cpp:382] two -> two -I1128 20:30:24.202773 2042758 net.cpp:124] Setting up two -I1128 20:30:24.202781 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.202787 2042758 net.cpp:139] Memory required for data: 8640 -I1128 20:30:24.202793 2042758 layer_factory.hpp:77] Creating layer three -I1128 20:30:24.202813 2042758 net.cpp:86] Creating Layer three -I1128 20:30:24.202818 2042758 net.cpp:408] three <- two -I1128 20:30:24.202826 2042758 net.cpp:382] three -> three -I1128 20:30:24.202849 2042758 net.cpp:124] Setting up three -I1128 20:30:24.202855 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.202862 2042758 net.cpp:139] Memory required for data: 11520 -I1128 20:30:24.202867 2042758 net.cpp:202] three does not need backward computation. -I1128 20:30:24.202872 2042758 net.cpp:202] two does not need backward computation. -I1128 20:30:24.202877 2042758 net.cpp:202] one does not need backward computation. -I1128 20:30:24.202883 2042758 net.cpp:202] input does not need backward computation. -I1128 20:30:24.202888 2042758 net.cpp:244] This network produces output three -I1128 20:30:24.202898 2042758 net.cpp:257] Network initialization done. -.I1128 20:30:24.208988 2042758 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmp9wcw9ipf -I1128 20:30:24.209013 2042758 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. -W1128 20:30:24.209018 2042758 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
-I1128 20:30:24.209048 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.319603 3038403 layer_factory.hpp:77] Creating layer input +I0102 05:14:55.319614 3038403 net.cpp:86] Creating Layer input +I0102 05:14:55.319622 3038403 net.cpp:382] input -> data +I0102 05:14:55.319633 3038403 net.cpp:124] Setting up input +I0102 05:14:55.319639 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.319646 3038403 net.cpp:139] Memory required for data: 2880 +I0102 05:14:55.319651 3038403 layer_factory.hpp:77] Creating layer one +I0102 05:14:55.319695 3038403 net.cpp:86] Creating Layer one +I0102 05:14:55.319702 3038403 net.cpp:408] one <- data +I0102 05:14:55.319710 3038403 net.cpp:382] one -> one +I0102 05:14:55.319778 3038403 net.cpp:124] Setting up one +I0102 05:14:55.319787 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.319795 3038403 net.cpp:139] Memory required for data: 5760 +I0102 05:14:55.319802 3038403 layer_factory.hpp:77] Creating layer two +I0102 05:14:55.319831 3038403 net.cpp:86] Creating Layer two +I0102 05:14:55.319839 3038403 net.cpp:408] two <- one +I0102 05:14:55.319846 3038403 net.cpp:382] two -> two +I0102 05:14:55.319882 3038403 net.cpp:124] Setting up two +I0102 05:14:55.319891 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.319900 3038403 net.cpp:139] Memory required for data: 8640 +I0102 05:14:55.319905 3038403 layer_factory.hpp:77] Creating layer three +I0102 05:14:55.319931 3038403 net.cpp:86] Creating Layer three +I0102 05:14:55.319939 3038403 net.cpp:408] three <- two +I0102 05:14:55.319947 3038403 net.cpp:382] three -> three +I0102 05:14:55.319983 3038403 net.cpp:124] Setting up three +I0102 05:14:55.319991 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.319999 3038403 net.cpp:139] Memory required for data: 11520 +I0102 05:14:55.320005 3038403 net.cpp:202] three does not need backward computation. +I0102 05:14:55.320012 3038403 net.cpp:202] two does not need backward computation. +I0102 05:14:55.320019 3038403 net.cpp:202] one does not need backward computation. +I0102 05:14:55.320024 3038403 net.cpp:202] input does not need backward computation. +I0102 05:14:55.320030 3038403 net.cpp:244] This network produces output three +I0102 05:14:55.320039 3038403 net.cpp:257] Network initialization done. +.I0102 05:14:55.326858 3038403 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmpi4lkqbhb +I0102 05:14:55.326897 3038403 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. +W0102 05:14:55.326905 3038403 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
+I0102 05:14:55.326944 3038403 net.cpp:53] Initializing net from parameters: name: "pythonnet" force_backward: true state { @@ -17212,43 +17247,43 @@ layer: "SimpleLayer" } } -I1128 20:30:24.209127 2042758 layer_factory.hpp:77] Creating layer input -I1128 20:30:24.209139 2042758 net.cpp:86] Creating Layer input -I1128 20:30:24.209146 2042758 net.cpp:382] input -> data -I1128 20:30:24.209164 2042758 net.cpp:124] Setting up input -I1128 20:30:24.209169 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.209177 2042758 net.cpp:139] Memory required for data: 2880 -I1128 20:30:24.209183 2042758 layer_factory.hpp:77] Creating layer one -I1128 20:30:24.209465 2042758 net.cpp:86] Creating Layer one -I1128 20:30:24.209483 2042758 net.cpp:408] one <- data -I1128 20:30:24.209497 2042758 net.cpp:382] one -> one -I1128 20:30:24.209551 2042758 net.cpp:124] Setting up one -I1128 20:30:24.209558 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.209563 2042758 net.cpp:139] Memory required for data: 5760 -I1128 20:30:24.209568 2042758 layer_factory.hpp:77] Creating layer two -I1128 20:30:24.209586 2042758 net.cpp:86] Creating Layer two -I1128 20:30:24.209591 2042758 net.cpp:408] two <- one -I1128 20:30:24.209597 2042758 net.cpp:382] two -> two -I1128 20:30:24.209619 2042758 net.cpp:124] Setting up two -I1128 20:30:24.209625 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.209630 2042758 net.cpp:139] Memory required for data: 8640 -I1128 20:30:24.209635 2042758 layer_factory.hpp:77] Creating layer three -I1128 20:30:24.209651 2042758 net.cpp:86] Creating Layer three -I1128 20:30:24.209656 2042758 net.cpp:408] three <- two -I1128 20:30:24.209663 2042758 net.cpp:382] three -> three -I1128 20:30:24.209700 2042758 net.cpp:124] Setting up three -I1128 20:30:24.209717 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.209726 2042758 net.cpp:139] Memory required for data: 11520 -I1128 20:30:24.209734 2042758 net.cpp:202] three does not need backward computation. -I1128 20:30:24.209741 2042758 net.cpp:202] two does not need backward computation. -I1128 20:30:24.209748 2042758 net.cpp:202] one does not need backward computation. -I1128 20:30:24.209755 2042758 net.cpp:202] input does not need backward computation. -I1128 20:30:24.209760 2042758 net.cpp:244] This network produces output three -I1128 20:30:24.209769 2042758 net.cpp:257] Network initialization done. -I1128 20:30:24.210142 2042758 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmptylew547 -I1128 20:30:24.210160 2042758 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. -W1128 20:30:24.210165 2042758 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
-I1128 20:30:24.210180 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.327054 3038403 layer_factory.hpp:77] Creating layer input +I0102 05:14:55.327072 3038403 net.cpp:86] Creating Layer input +I0102 05:14:55.327082 3038403 net.cpp:382] input -> data +I0102 05:14:55.327111 3038403 net.cpp:124] Setting up input +I0102 05:14:55.327118 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.327129 3038403 net.cpp:139] Memory required for data: 2880 +I0102 05:14:55.327137 3038403 layer_factory.hpp:77] Creating layer one +I0102 05:14:55.327188 3038403 net.cpp:86] Creating Layer one +I0102 05:14:55.327203 3038403 net.cpp:408] one <- data +I0102 05:14:55.327214 3038403 net.cpp:382] one -> one +I0102 05:14:55.327273 3038403 net.cpp:124] Setting up one +I0102 05:14:55.327287 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.327296 3038403 net.cpp:139] Memory required for data: 5760 +I0102 05:14:55.327304 3038403 layer_factory.hpp:77] Creating layer two +I0102 05:14:55.327338 3038403 net.cpp:86] Creating Layer two +I0102 05:14:55.327353 3038403 net.cpp:408] two <- one +I0102 05:14:55.327361 3038403 net.cpp:382] two -> two +I0102 05:14:55.327397 3038403 net.cpp:124] Setting up two +I0102 05:14:55.327411 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.327420 3038403 net.cpp:139] Memory required for data: 8640 +I0102 05:14:55.327426 3038403 layer_factory.hpp:77] Creating layer three +I0102 05:14:55.327464 3038403 net.cpp:86] Creating Layer three +I0102 05:14:55.327472 3038403 net.cpp:408] three <- two +I0102 05:14:55.327481 3038403 net.cpp:382] three -> three +I0102 05:14:55.327518 3038403 net.cpp:124] Setting up three +I0102 05:14:55.327526 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.327533 3038403 net.cpp:139] Memory required for data: 11520 +I0102 05:14:55.327539 3038403 net.cpp:202] three does not need backward computation. +I0102 05:14:55.327548 3038403 net.cpp:202] two does not need backward computation. +I0102 05:14:55.327553 3038403 net.cpp:202] one does not need backward computation. +I0102 05:14:55.327564 3038403 net.cpp:202] input does not need backward computation. +I0102 05:14:55.327570 3038403 net.cpp:244] This network produces output three +I0102 05:14:55.327580 3038403 net.cpp:257] Network initialization done. +I0102 05:14:55.328006 3038403 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmpsqp6hpjg +I0102 05:14:55.328022 3038403 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. +W0102 05:14:55.328028 3038403 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
+I0102 05:14:55.328047 3038403 net.cpp:53] Initializing net from parameters: name: "pythonnet" force_backward: true state { @@ -17277,20 +17312,20 @@ layer: "ExceptionLayer" } } -I1128 20:30:24.210224 2042758 layer_factory.hpp:77] Creating layer input -I1128 20:30:24.210234 2042758 net.cpp:86] Creating Layer input -I1128 20:30:24.210240 2042758 net.cpp:382] input -> data -I1128 20:30:24.210255 2042758 net.cpp:124] Setting up input -I1128 20:30:24.210260 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.210271 2042758 net.cpp:139] Memory required for data: 2880 -I1128 20:30:24.210275 2042758 layer_factory.hpp:77] Creating layer layer -I1128 20:30:24.210304 2042758 net.cpp:86] Creating Layer layer -I1128 20:30:24.210309 2042758 net.cpp:408] layer <- data -I1128 20:30:24.210315 2042758 net.cpp:382] layer -> top -.I1128 20:30:24.210927 2042758 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmpmpdk2rka -I1128 20:30:24.210952 2042758 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. -W1128 20:30:24.210958 2042758 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. -I1128 20:30:24.210988 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.328101 3038403 layer_factory.hpp:77] Creating layer input +I0102 05:14:55.328114 3038403 net.cpp:86] Creating Layer input +I0102 05:14:55.328135 3038403 net.cpp:382] input -> data +I0102 05:14:55.328146 3038403 net.cpp:124] Setting up input +I0102 05:14:55.328152 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.328161 3038403 net.cpp:139] Memory required for data: 2880 +I0102 05:14:55.328166 3038403 layer_factory.hpp:77] Creating layer layer +I0102 05:14:55.328198 3038403 net.cpp:86] Creating Layer layer +I0102 05:14:55.328205 3038403 net.cpp:408] layer <- data +I0102 05:14:55.328214 3038403 net.cpp:382] layer -> top +.I0102 05:14:55.328940 3038403 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmp47dvmqkb +I0102 05:14:55.328965 3038403 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. +W0102 05:14:55.328971 3038403 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
+I0102 05:14:55.328994 3038403 net.cpp:53] Initializing net from parameters: name: "pythonnet" force_backward: true state { @@ -17339,43 +17374,43 @@ layer: "SimpleLayer" } } -I1128 20:30:24.211067 2042758 layer_factory.hpp:77] Creating layer input -I1128 20:30:24.211076 2042758 net.cpp:86] Creating Layer input -I1128 20:30:24.211082 2042758 net.cpp:382] input -> data -I1128 20:30:24.211094 2042758 net.cpp:124] Setting up input -I1128 20:30:24.211099 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.211107 2042758 net.cpp:139] Memory required for data: 2880 -I1128 20:30:24.211118 2042758 layer_factory.hpp:77] Creating layer one -I1128 20:30:24.211153 2042758 net.cpp:86] Creating Layer one -I1128 20:30:24.211160 2042758 net.cpp:408] one <- data -I1128 20:30:24.211169 2042758 net.cpp:382] one -> one -I1128 20:30:24.211211 2042758 net.cpp:124] Setting up one -I1128 20:30:24.211222 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.211232 2042758 net.cpp:139] Memory required for data: 5760 -I1128 20:30:24.211242 2042758 layer_factory.hpp:77] Creating layer two -I1128 20:30:24.211275 2042758 net.cpp:86] Creating Layer two -I1128 20:30:24.211287 2042758 net.cpp:408] two <- one -I1128 20:30:24.211297 2042758 net.cpp:382] two -> two -I1128 20:30:24.211339 2042758 net.cpp:124] Setting up two -I1128 20:30:24.211351 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.211361 2042758 net.cpp:139] Memory required for data: 8640 -I1128 20:30:24.211367 2042758 layer_factory.hpp:77] Creating layer three -I1128 20:30:24.211390 2042758 net.cpp:86] Creating Layer three -I1128 20:30:24.211395 2042758 net.cpp:408] three <- two -I1128 20:30:24.211402 2042758 net.cpp:382] three -> three -I1128 20:30:24.211439 2042758 net.cpp:124] Setting up three -I1128 20:30:24.211447 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.211454 2042758 net.cpp:139] Memory required for data: 11520 -I1128 20:30:24.211462 2042758 net.cpp:202] three does not need backward computation. -I1128 20:30:24.211468 2042758 net.cpp:202] two does not need backward computation. -I1128 20:30:24.211474 2042758 net.cpp:202] one does not need backward computation. -I1128 20:30:24.211481 2042758 net.cpp:202] input does not need backward computation. -I1128 20:30:24.211488 2042758 net.cpp:244] This network produces output three -I1128 20:30:24.211500 2042758 net.cpp:257] Network initialization done. -.I1128 20:30:24.217628 2042758 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmpbtw_r96d -I1128 20:30:24.217669 2042758 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. -W1128 20:30:24.217674 2042758 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
-I1128 20:30:24.217708 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.329066 3038403 layer_factory.hpp:77] Creating layer input +I0102 05:14:55.329079 3038403 net.cpp:86] Creating Layer input +I0102 05:14:55.329087 3038403 net.cpp:382] input -> data +I0102 05:14:55.329100 3038403 net.cpp:124] Setting up input +I0102 05:14:55.329106 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.329113 3038403 net.cpp:139] Memory required for data: 2880 +I0102 05:14:55.329119 3038403 layer_factory.hpp:77] Creating layer one +I0102 05:14:55.329152 3038403 net.cpp:86] Creating Layer one +I0102 05:14:55.329160 3038403 net.cpp:408] one <- data +I0102 05:14:55.329169 3038403 net.cpp:382] one -> one +I0102 05:14:55.329210 3038403 net.cpp:124] Setting up one +I0102 05:14:55.329218 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.329224 3038403 net.cpp:139] Memory required for data: 5760 +I0102 05:14:55.329231 3038403 layer_factory.hpp:77] Creating layer two +I0102 05:14:55.329254 3038403 net.cpp:86] Creating Layer two +I0102 05:14:55.329262 3038403 net.cpp:408] two <- one +I0102 05:14:55.329270 3038403 net.cpp:382] two -> two +I0102 05:14:55.329303 3038403 net.cpp:124] Setting up two +I0102 05:14:55.329309 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.329316 3038403 net.cpp:139] Memory required for data: 8640 +I0102 05:14:55.329321 3038403 layer_factory.hpp:77] Creating layer three +I0102 05:14:55.329345 3038403 net.cpp:86] Creating Layer three +I0102 05:14:55.329352 3038403 net.cpp:408] three <- two +I0102 05:14:55.329360 3038403 net.cpp:382] three -> three +I0102 05:14:55.329391 3038403 net.cpp:124] Setting up three +I0102 05:14:55.329399 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.329406 3038403 net.cpp:139] Memory required for data: 11520 +I0102 05:14:55.329412 3038403 net.cpp:202] three does not need backward computation. +I0102 05:14:55.329418 3038403 net.cpp:202] two does not need backward computation. +I0102 05:14:55.329423 3038403 net.cpp:202] one does not need backward computation. +I0102 05:14:55.329428 3038403 net.cpp:202] input does not need backward computation. +I0102 05:14:55.329433 3038403 net.cpp:244] This network produces output three +I0102 05:14:55.329442 3038403 net.cpp:257] Network initialization done. +.I0102 05:14:55.335852 3038403 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmpbkjrzp3u +I0102 05:14:55.335886 3038403 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. +W0102 05:14:55.335892 3038403 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
+I0102 05:14:55.335927 3038403 net.cpp:53] Initializing net from parameters: name: "pythonnet" force_backward: true state { @@ -17424,43 +17459,43 @@ layer: "SimpleLayer" } } -I1128 20:30:24.217806 2042758 layer_factory.hpp:77] Creating layer input -I1128 20:30:24.217820 2042758 net.cpp:86] Creating Layer input -I1128 20:30:24.217828 2042758 net.cpp:382] input -> data -I1128 20:30:24.217847 2042758 net.cpp:124] Setting up input -I1128 20:30:24.217854 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.217862 2042758 net.cpp:139] Memory required for data: 2880 -I1128 20:30:24.217869 2042758 layer_factory.hpp:77] Creating layer one -I1128 20:30:24.217913 2042758 net.cpp:86] Creating Layer one -I1128 20:30:24.217922 2042758 net.cpp:408] one <- data -I1128 20:30:24.217931 2042758 net.cpp:382] one -> one -I1128 20:30:24.217978 2042758 net.cpp:124] Setting up one -I1128 20:30:24.217984 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.217991 2042758 net.cpp:139] Memory required for data: 5760 -I1128 20:30:24.217996 2042758 layer_factory.hpp:77] Creating layer two -I1128 20:30:24.218019 2042758 net.cpp:86] Creating Layer two -I1128 20:30:24.218025 2042758 net.cpp:408] two <- one -I1128 20:30:24.218032 2042758 net.cpp:382] two -> two -I1128 20:30:24.218060 2042758 net.cpp:124] Setting up two -I1128 20:30:24.218066 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.218072 2042758 net.cpp:139] Memory required for data: 8640 -I1128 20:30:24.218077 2042758 layer_factory.hpp:77] Creating layer three -I1128 20:30:24.218101 2042758 net.cpp:86] Creating Layer three -I1128 20:30:24.218106 2042758 net.cpp:408] three <- two -I1128 20:30:24.218113 2042758 net.cpp:382] three -> three -I1128 20:30:24.218142 2042758 net.cpp:124] Setting up three -I1128 20:30:24.218148 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.218154 2042758 net.cpp:139] Memory required for data: 11520 -I1128 20:30:24.218159 2042758 net.cpp:202] three does not need backward computation. -I1128 20:30:24.218165 2042758 net.cpp:202] two does not need backward computation. -I1128 20:30:24.218170 2042758 net.cpp:202] one does not need backward computation. -I1128 20:30:24.218175 2042758 net.cpp:202] input does not need backward computation. -I1128 20:30:24.218180 2042758 net.cpp:244] This network produces output three -I1128 20:30:24.218189 2042758 net.cpp:257] Network initialization done. -I1128 20:30:24.218533 2042758 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmpdn5n1b81 -I1128 20:30:24.218552 2042758 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. -W1128 20:30:24.218557 2042758 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
-I1128 20:30:24.218571 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.336027 3038403 layer_factory.hpp:77] Creating layer input +I0102 05:14:55.336045 3038403 net.cpp:86] Creating Layer input +I0102 05:14:55.336055 3038403 net.cpp:382] input -> data +I0102 05:14:55.336076 3038403 net.cpp:124] Setting up input +I0102 05:14:55.336082 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.336102 3038403 net.cpp:139] Memory required for data: 2880 +I0102 05:14:55.336109 3038403 layer_factory.hpp:77] Creating layer one +I0102 05:14:55.336155 3038403 net.cpp:86] Creating Layer one +I0102 05:14:55.336164 3038403 net.cpp:408] one <- data +I0102 05:14:55.336174 3038403 net.cpp:382] one -> one +I0102 05:14:55.336270 3038403 net.cpp:124] Setting up one +I0102 05:14:55.336284 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.336292 3038403 net.cpp:139] Memory required for data: 5760 +I0102 05:14:55.336297 3038403 layer_factory.hpp:77] Creating layer two +I0102 05:14:55.336324 3038403 net.cpp:86] Creating Layer two +I0102 05:14:55.336331 3038403 net.cpp:408] two <- one +I0102 05:14:55.336339 3038403 net.cpp:382] two -> two +I0102 05:14:55.336371 3038403 net.cpp:124] Setting up two +I0102 05:14:55.336378 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.336385 3038403 net.cpp:139] Memory required for data: 8640 +I0102 05:14:55.336390 3038403 layer_factory.hpp:77] Creating layer three +I0102 05:14:55.336413 3038403 net.cpp:86] Creating Layer three +I0102 05:14:55.336421 3038403 net.cpp:408] three <- two +I0102 05:14:55.336431 3038403 net.cpp:382] three -> three +I0102 05:14:55.336460 3038403 net.cpp:124] Setting up three +I0102 05:14:55.336467 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.336474 3038403 net.cpp:139] Memory required for data: 11520 +I0102 05:14:55.336479 3038403 net.cpp:202] three does not need backward computation. +I0102 05:14:55.336486 3038403 net.cpp:202] two does not need backward computation. +I0102 05:14:55.336491 3038403 net.cpp:202] one does not need backward computation. +I0102 05:14:55.336498 3038403 net.cpp:202] input does not need backward computation. +I0102 05:14:55.336503 3038403 net.cpp:244] This network produces output three +I0102 05:14:55.336513 3038403 net.cpp:257] Network initialization done. +I0102 05:14:55.336877 3038403 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmp3b30h6a8 +I0102 05:14:55.336897 3038403 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. +W0102 05:14:55.336905 3038403 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
+I0102 05:14:55.336920 3038403 net.cpp:53] Initializing net from parameters: name: "pythonnet" force_backward: true state { @@ -17489,27 +17524,27 @@ layer: "ParameterLayer" } } -I1128 20:30:24.218617 2042758 layer_factory.hpp:77] Creating layer input -I1128 20:30:24.218626 2042758 net.cpp:86] Creating Layer input -I1128 20:30:24.218632 2042758 net.cpp:382] input -> data -I1128 20:30:24.218644 2042758 net.cpp:124] Setting up input -I1128 20:30:24.218649 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.218657 2042758 net.cpp:139] Memory required for data: 2880 -I1128 20:30:24.218662 2042758 layer_factory.hpp:77] Creating layer layer -I1128 20:30:24.218693 2042758 net.cpp:86] Creating Layer layer -I1128 20:30:24.218698 2042758 net.cpp:408] layer <- data -I1128 20:30:24.218705 2042758 net.cpp:382] layer -> top -I1128 20:30:24.218787 2042758 net.cpp:124] Setting up layer -I1128 20:30:24.218796 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.218802 2042758 net.cpp:139] Memory required for data: 5760 -I1128 20:30:24.218816 2042758 net.cpp:202] layer does not need backward computation. -I1128 20:30:24.218822 2042758 net.cpp:202] input does not need backward computation. -I1128 20:30:24.218827 2042758 net.cpp:244] This network produces output top -I1128 20:30:24.218839 2042758 net.cpp:257] Network initialization done. -I1128 20:30:24.219524 2042758 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmpdn5n1b81 -I1128 20:30:24.219537 2042758 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. -W1128 20:30:24.219542 2042758 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. -I1128 20:30:24.219556 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.336972 3038403 layer_factory.hpp:77] Creating layer input +I0102 05:14:55.336983 3038403 net.cpp:86] Creating Layer input +I0102 05:14:55.336992 3038403 net.cpp:382] input -> data +I0102 05:14:55.337004 3038403 net.cpp:124] Setting up input +I0102 05:14:55.337010 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.337018 3038403 net.cpp:139] Memory required for data: 2880 +I0102 05:14:55.337023 3038403 layer_factory.hpp:77] Creating layer layer +I0102 05:14:55.337055 3038403 net.cpp:86] Creating Layer layer +I0102 05:14:55.337064 3038403 net.cpp:408] layer <- data +I0102 05:14:55.337071 3038403 net.cpp:382] layer -> top +I0102 05:14:55.337138 3038403 net.cpp:124] Setting up layer +I0102 05:14:55.337157 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.337167 3038403 net.cpp:139] Memory required for data: 5760 +I0102 05:14:55.337181 3038403 net.cpp:202] layer does not need backward computation. +I0102 05:14:55.337194 3038403 net.cpp:202] input does not need backward computation. +I0102 05:14:55.337200 3038403 net.cpp:244] This network produces output top +I0102 05:14:55.337208 3038403 net.cpp:257] Network initialization done. +I0102 05:14:55.337940 3038403 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmp3b30h6a8 +I0102 05:14:55.337961 3038403 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. +W0102 05:14:55.337968 3038403 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
+I0102 05:14:55.337982 3038403 net.cpp:53] Initializing net from parameters: name: "pythonnet" force_backward: true state { @@ -17538,27 +17573,27 @@ layer: "ParameterLayer" } } -I1128 20:30:24.219604 2042758 layer_factory.hpp:77] Creating layer input -I1128 20:30:24.219614 2042758 net.cpp:86] Creating Layer input -I1128 20:30:24.219621 2042758 net.cpp:382] input -> data -I1128 20:30:24.219633 2042758 net.cpp:124] Setting up input -I1128 20:30:24.219640 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.219646 2042758 net.cpp:139] Memory required for data: 2880 -I1128 20:30:24.219652 2042758 layer_factory.hpp:77] Creating layer layer -I1128 20:30:24.219682 2042758 net.cpp:86] Creating Layer layer -I1128 20:30:24.219689 2042758 net.cpp:408] layer <- data -I1128 20:30:24.219697 2042758 net.cpp:382] layer -> top -I1128 20:30:24.219745 2042758 net.cpp:124] Setting up layer -I1128 20:30:24.219753 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.219759 2042758 net.cpp:139] Memory required for data: 5760 -I1128 20:30:24.219770 2042758 net.cpp:202] layer does not need backward computation. -I1128 20:30:24.219777 2042758 net.cpp:202] input does not need backward computation. -I1128 20:30:24.219782 2042758 net.cpp:244] This network produces output top -I1128 20:30:24.219789 2042758 net.cpp:257] Network initialization done. -.I1128 20:30:24.220322 2042758 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmp0b0pieon -I1128 20:30:24.220336 2042758 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. -W1128 20:30:24.220342 2042758 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. -I1128 20:30:24.220360 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.338035 3038403 layer_factory.hpp:77] Creating layer input +I0102 05:14:55.338047 3038403 net.cpp:86] Creating Layer input +I0102 05:14:55.338057 3038403 net.cpp:382] input -> data +I0102 05:14:55.338070 3038403 net.cpp:124] Setting up input +I0102 05:14:55.338076 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.338084 3038403 net.cpp:139] Memory required for data: 2880 +I0102 05:14:55.338090 3038403 layer_factory.hpp:77] Creating layer layer +I0102 05:14:55.338124 3038403 net.cpp:86] Creating Layer layer +I0102 05:14:55.338132 3038403 net.cpp:408] layer <- data +I0102 05:14:55.338140 3038403 net.cpp:382] layer -> top +I0102 05:14:55.338192 3038403 net.cpp:124] Setting up layer +I0102 05:14:55.338199 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.338207 3038403 net.cpp:139] Memory required for data: 5760 +I0102 05:14:55.338217 3038403 net.cpp:202] layer does not need backward computation. +I0102 05:14:55.338224 3038403 net.cpp:202] input does not need backward computation. +I0102 05:14:55.338229 3038403 net.cpp:244] This network produces output top +I0102 05:14:55.338238 3038403 net.cpp:257] Network initialization done. +.I0102 05:14:55.338814 3038403 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmp9kjjdujh +I0102 05:14:55.338829 3038403 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. +W0102 05:14:55.338835 3038403 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
+I0102 05:14:55.338858 3038403 net.cpp:53] Initializing net from parameters: name: "pythonnet" force_backward: true state { @@ -17607,40 +17642,40 @@ layer: "SimpleLayer" } } -I1128 20:30:24.220427 2042758 layer_factory.hpp:77] Creating layer input -I1128 20:30:24.220436 2042758 net.cpp:86] Creating Layer input -I1128 20:30:24.220444 2042758 net.cpp:382] input -> data -I1128 20:30:24.220455 2042758 net.cpp:124] Setting up input -I1128 20:30:24.220461 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.220468 2042758 net.cpp:139] Memory required for data: 2880 -I1128 20:30:24.220474 2042758 layer_factory.hpp:77] Creating layer one -I1128 20:30:24.220504 2042758 net.cpp:86] Creating Layer one -I1128 20:30:24.220511 2042758 net.cpp:408] one <- data -I1128 20:30:24.220520 2042758 net.cpp:382] one -> one -I1128 20:30:24.220556 2042758 net.cpp:124] Setting up one -I1128 20:30:24.220562 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.220568 2042758 net.cpp:139] Memory required for data: 5760 -I1128 20:30:24.220573 2042758 layer_factory.hpp:77] Creating layer two -I1128 20:30:24.220597 2042758 net.cpp:86] Creating Layer two -I1128 20:30:24.220602 2042758 net.cpp:408] two <- one -I1128 20:30:24.220609 2042758 net.cpp:382] two -> two -I1128 20:30:24.220638 2042758 net.cpp:124] Setting up two -I1128 20:30:24.220643 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.220649 2042758 net.cpp:139] Memory required for data: 8640 -I1128 20:30:24.220654 2042758 layer_factory.hpp:77] Creating layer three -I1128 20:30:24.220672 2042758 net.cpp:86] Creating Layer three -I1128 20:30:24.220679 2042758 net.cpp:408] three <- two -I1128 20:30:24.220686 2042758 net.cpp:382] three -> three -I1128 20:30:24.220713 2042758 net.cpp:124] Setting up three -I1128 20:30:24.220719 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.220726 2042758 net.cpp:139] Memory required for data: 11520 -I1128 20:30:24.220731 2042758 net.cpp:202] three does not need backward computation. -I1128 20:30:24.220737 2042758 net.cpp:202] two does not need backward computation. -I1128 20:30:24.220742 2042758 net.cpp:202] one does not need backward computation. -I1128 20:30:24.220748 2042758 net.cpp:202] input does not need backward computation. -I1128 20:30:24.220755 2042758 net.cpp:244] This network produces output three -I1128 20:30:24.220763 2042758 net.cpp:257] Network initialization done. 
-I1128 20:30:24.221093 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.338930 3038403 layer_factory.hpp:77] Creating layer input +I0102 05:14:55.338943 3038403 net.cpp:86] Creating Layer input +I0102 05:14:55.338950 3038403 net.cpp:382] input -> data +I0102 05:14:55.338964 3038403 net.cpp:124] Setting up input +I0102 05:14:55.338970 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.338979 3038403 net.cpp:139] Memory required for data: 2880 +I0102 05:14:55.338984 3038403 layer_factory.hpp:77] Creating layer one +I0102 05:14:55.339017 3038403 net.cpp:86] Creating Layer one +I0102 05:14:55.339025 3038403 net.cpp:408] one <- data +I0102 05:14:55.339032 3038403 net.cpp:382] one -> one +I0102 05:14:55.339074 3038403 net.cpp:124] Setting up one +I0102 05:14:55.339082 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.339095 3038403 net.cpp:139] Memory required for data: 5760 +I0102 05:14:55.339104 3038403 layer_factory.hpp:77] Creating layer two +I0102 05:14:55.339154 3038403 net.cpp:86] Creating Layer two +I0102 05:14:55.339167 3038403 net.cpp:408] two <- one +I0102 05:14:55.339179 3038403 net.cpp:382] two -> two +I0102 05:14:55.339223 3038403 net.cpp:124] Setting up two +I0102 05:14:55.339231 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.339237 3038403 net.cpp:139] Memory required for data: 8640 +I0102 05:14:55.339241 3038403 layer_factory.hpp:77] Creating layer three +I0102 05:14:55.339259 3038403 net.cpp:86] Creating Layer three +I0102 05:14:55.339264 3038403 net.cpp:408] three <- two +I0102 05:14:55.339272 3038403 net.cpp:382] three -> three +I0102 05:14:55.339294 3038403 net.cpp:124] Setting up three +I0102 05:14:55.339303 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.339309 3038403 net.cpp:139] Memory required for data: 11520 +I0102 05:14:55.339314 3038403 net.cpp:202] three does not need backward computation. +I0102 05:14:55.339318 3038403 net.cpp:202] two does not need backward computation. +I0102 05:14:55.339323 3038403 net.cpp:202] one does not need backward computation. +I0102 05:14:55.339329 3038403 net.cpp:202] input does not need backward computation. +I0102 05:14:55.339334 3038403 net.cpp:244] This network produces output three +I0102 05:14:55.339340 3038403 net.cpp:257] Network initialization done. +I0102 05:14:55.339815 3038403 net.cpp:53] Initializing net from parameters: name: "pythonnet" force_backward: true state { @@ -17656,16 +17691,16 @@ layer: "PhaseLayer" } } -I1128 20:30:24.221134 2042758 layer_factory.hpp:77] Creating layer layer -I1128 20:30:24.221168 2042758 net.cpp:86] Creating Layer layer -I1128 20:30:24.221176 2042758 net.cpp:382] layer -> phase -I1128 20:30:24.221522 2042758 net.cpp:124] Setting up layer -I1128 20:30:24.221536 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.221544 2042758 net.cpp:139] Memory required for data: 4 -I1128 20:30:24.221551 2042758 net.cpp:202] layer does not need backward computation. -I1128 20:30:24.221556 2042758 net.cpp:244] This network produces output phase -I1128 20:30:24.221568 2042758 net.cpp:257] Network initialization done. 
-I1128 20:30:24.221820 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.339886 3038403 layer_factory.hpp:77] Creating layer layer +I0102 05:14:55.339936 3038403 net.cpp:86] Creating Layer layer +I0102 05:14:55.339947 3038403 net.cpp:382] layer -> phase +I0102 05:14:55.339998 3038403 net.cpp:124] Setting up layer +I0102 05:14:55.340008 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.340021 3038403 net.cpp:139] Memory required for data: 4 +I0102 05:14:55.340029 3038403 net.cpp:202] layer does not need backward computation. +I0102 05:14:55.340032 3038403 net.cpp:244] This network produces output phase +I0102 05:14:55.340045 3038403 net.cpp:257] Network initialization done. +I0102 05:14:55.340400 3038403 net.cpp:53] Initializing net from parameters: name: "pythonnet" force_backward: true state { @@ -17681,19 +17716,19 @@ layer: "PhaseLayer" } } -I1128 20:30:24.221860 2042758 layer_factory.hpp:77] Creating layer layer -I1128 20:30:24.221899 2042758 net.cpp:86] Creating Layer layer -I1128 20:30:24.221908 2042758 net.cpp:382] layer -> phase -I1128 20:30:24.221940 2042758 net.cpp:124] Setting up layer -I1128 20:30:24.221946 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.221954 2042758 net.cpp:139] Memory required for data: 4 -I1128 20:30:24.221962 2042758 net.cpp:202] layer does not need backward computation. -I1128 20:30:24.221969 2042758 net.cpp:244] This network produces output phase -I1128 20:30:24.221978 2042758 net.cpp:257] Network initialization done. -.I1128 20:30:24.222604 2042758 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmpxaxitra0 -I1128 20:30:24.222618 2042758 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. -W1128 20:30:24.222625 2042758 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. -I1128 20:30:24.222645 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.340500 3038403 layer_factory.hpp:77] Creating layer layer +I0102 05:14:55.340920 3038403 net.cpp:86] Creating Layer layer +I0102 05:14:55.340946 3038403 net.cpp:382] layer -> phase +I0102 05:14:55.341012 3038403 net.cpp:124] Setting up layer +I0102 05:14:55.341039 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.341075 3038403 net.cpp:139] Memory required for data: 4 +I0102 05:14:55.341106 3038403 net.cpp:202] layer does not need backward computation. +I0102 05:14:55.341118 3038403 net.cpp:244] This network produces output phase +I0102 05:14:55.341127 3038403 net.cpp:257] Network initialization done. +.I0102 05:14:55.342151 3038403 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmpmzneyetq +I0102 05:14:55.342172 3038403 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. +W0102 05:14:55.342178 3038403 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
+I0102 05:14:55.342209 3038403 net.cpp:53] Initializing net from parameters: name: "pythonnet" force_backward: true state { @@ -17742,43 +17777,43 @@ layer: "SimpleLayer" } } -I1128 20:30:24.222713 2042758 layer_factory.hpp:77] Creating layer input -I1128 20:30:24.222724 2042758 net.cpp:86] Creating Layer input -I1128 20:30:24.222731 2042758 net.cpp:382] input -> data -I1128 20:30:24.222745 2042758 net.cpp:124] Setting up input -I1128 20:30:24.222751 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.222761 2042758 net.cpp:139] Memory required for data: 2880 -I1128 20:30:24.222767 2042758 layer_factory.hpp:77] Creating layer one -I1128 20:30:24.222802 2042758 net.cpp:86] Creating Layer one -I1128 20:30:24.222811 2042758 net.cpp:408] one <- data -I1128 20:30:24.222820 2042758 net.cpp:382] one -> one -I1128 20:30:24.222860 2042758 net.cpp:124] Setting up one -I1128 20:30:24.222867 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.222875 2042758 net.cpp:139] Memory required for data: 5760 -I1128 20:30:24.222880 2042758 layer_factory.hpp:77] Creating layer two -I1128 20:30:24.222901 2042758 net.cpp:86] Creating Layer two -I1128 20:30:24.222908 2042758 net.cpp:408] two <- one -I1128 20:30:24.222916 2042758 net.cpp:382] two -> two -I1128 20:30:24.222955 2042758 net.cpp:124] Setting up two -I1128 20:30:24.222962 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.222970 2042758 net.cpp:139] Memory required for data: 8640 -I1128 20:30:24.222975 2042758 layer_factory.hpp:77] Creating layer three -I1128 20:30:24.222996 2042758 net.cpp:86] Creating Layer three -I1128 20:30:24.223003 2042758 net.cpp:408] three <- two -I1128 20:30:24.223011 2042758 net.cpp:382] three -> three -I1128 20:30:24.223039 2042758 net.cpp:124] Setting up three -I1128 20:30:24.223045 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.223057 2042758 net.cpp:139] Memory required for data: 11520 -I1128 20:30:24.223062 2042758 net.cpp:202] three does not need backward computation. -I1128 20:30:24.223067 2042758 net.cpp:202] two does not need backward computation. -I1128 20:30:24.223073 2042758 net.cpp:202] one does not need backward computation. -I1128 20:30:24.223078 2042758 net.cpp:202] input does not need backward computation. -I1128 20:30:24.223083 2042758 net.cpp:244] This network produces output three -I1128 20:30:24.223093 2042758 net.cpp:257] Network initialization done. -.I1128 20:30:24.224210 2042758 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmpnij7caln -I1128 20:30:24.224226 2042758 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. -W1128 20:30:24.224231 2042758 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
-I1128 20:30:24.224249 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.342300 3038403 layer_factory.hpp:77] Creating layer input +I0102 05:14:55.342314 3038403 net.cpp:86] Creating Layer input +I0102 05:14:55.342322 3038403 net.cpp:382] input -> data +I0102 05:14:55.342340 3038403 net.cpp:124] Setting up input +I0102 05:14:55.342347 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.342356 3038403 net.cpp:139] Memory required for data: 2880 +I0102 05:14:55.342362 3038403 layer_factory.hpp:77] Creating layer one +I0102 05:14:55.342407 3038403 net.cpp:86] Creating Layer one +I0102 05:14:55.342415 3038403 net.cpp:408] one <- data +I0102 05:14:55.342422 3038403 net.cpp:382] one -> one +I0102 05:14:55.342471 3038403 net.cpp:124] Setting up one +I0102 05:14:55.342478 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.342485 3038403 net.cpp:139] Memory required for data: 5760 +I0102 05:14:55.342491 3038403 layer_factory.hpp:77] Creating layer two +I0102 05:14:55.342514 3038403 net.cpp:86] Creating Layer two +I0102 05:14:55.342519 3038403 net.cpp:408] two <- one +I0102 05:14:55.342526 3038403 net.cpp:382] two -> two +I0102 05:14:55.342553 3038403 net.cpp:124] Setting up two +I0102 05:14:55.342561 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.342567 3038403 net.cpp:139] Memory required for data: 8640 +I0102 05:14:55.342572 3038403 layer_factory.hpp:77] Creating layer three +I0102 05:14:55.342592 3038403 net.cpp:86] Creating Layer three +I0102 05:14:55.342597 3038403 net.cpp:408] three <- two +I0102 05:14:55.342605 3038403 net.cpp:382] three -> three +I0102 05:14:55.342631 3038403 net.cpp:124] Setting up three +I0102 05:14:55.342638 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.342643 3038403 net.cpp:139] Memory required for data: 11520 +I0102 05:14:55.342648 3038403 net.cpp:202] three does not need backward computation. +I0102 05:14:55.342655 3038403 net.cpp:202] two does not need backward computation. +I0102 05:14:55.342660 3038403 net.cpp:202] one does not need backward computation. +I0102 05:14:55.342666 3038403 net.cpp:202] input does not need backward computation. +I0102 05:14:55.342672 3038403 net.cpp:244] This network produces output three +I0102 05:14:55.342682 3038403 net.cpp:257] Network initialization done. +.I0102 05:14:55.343741 3038403 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmp1gnqhu9i +I0102 05:14:55.343768 3038403 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. +W0102 05:14:55.343775 3038403 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
+I0102 05:14:55.343802 3038403 net.cpp:53] Initializing net from parameters: name: "pythonnet" force_backward: true state { @@ -17819,35 +17854,35 @@ param_str: "2" } } -I1128 20:30:24.224304 2042758 layer_factory.hpp:77] Creating layer input -I1128 20:30:24.224314 2042758 net.cpp:86] Creating Layer input -I1128 20:30:24.224319 2042758 net.cpp:382] input -> data -I1128 20:30:24.224331 2042758 net.cpp:124] Setting up input -I1128 20:30:24.224336 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.224342 2042758 net.cpp:139] Memory required for data: 2880 -I1128 20:30:24.224347 2042758 layer_factory.hpp:77] Creating layer mul10 -I1128 20:30:24.224375 2042758 net.cpp:86] Creating Layer mul10 -I1128 20:30:24.224380 2042758 net.cpp:408] mul10 <- data -I1128 20:30:24.224386 2042758 net.cpp:382] mul10 -> mul10 -I1128 20:30:24.224431 2042758 net.cpp:124] Setting up mul10 -I1128 20:30:24.224436 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.224440 2042758 net.cpp:139] Memory required for data: 5760 -I1128 20:30:24.224444 2042758 layer_factory.hpp:77] Creating layer mul2 -I1128 20:30:24.224462 2042758 net.cpp:86] Creating Layer mul2 -I1128 20:30:24.224575 2042758 net.cpp:408] mul2 <- mul10 -I1128 20:30:24.224582 2042758 net.cpp:382] mul2 -> mul2 -I1128 20:30:24.224613 2042758 net.cpp:124] Setting up mul2 -I1128 20:30:24.224618 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.224623 2042758 net.cpp:139] Memory required for data: 8640 -I1128 20:30:24.224627 2042758 net.cpp:202] mul2 does not need backward computation. -I1128 20:30:24.224632 2042758 net.cpp:202] mul10 does not need backward computation. -I1128 20:30:24.224637 2042758 net.cpp:202] input does not need backward computation. -I1128 20:30:24.224640 2042758 net.cpp:244] This network produces output mul2 -I1128 20:30:24.224645 2042758 net.cpp:257] Network initialization done. -.I1128 20:30:24.230506 2042758 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmpsw346vsn -I1128 20:30:24.230548 2042758 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. -W1128 20:30:24.230557 2042758 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
-I1128 20:30:24.230587 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.343888 3038403 layer_factory.hpp:77] Creating layer input +I0102 05:14:55.343902 3038403 net.cpp:86] Creating Layer input +I0102 05:14:55.343910 3038403 net.cpp:382] input -> data +I0102 05:14:55.343925 3038403 net.cpp:124] Setting up input +I0102 05:14:55.343932 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.343941 3038403 net.cpp:139] Memory required for data: 2880 +I0102 05:14:55.343946 3038403 layer_factory.hpp:77] Creating layer mul10 +I0102 05:14:55.343988 3038403 net.cpp:86] Creating Layer mul10 +I0102 05:14:55.343995 3038403 net.cpp:408] mul10 <- data +I0102 05:14:55.344005 3038403 net.cpp:382] mul10 -> mul10 +I0102 05:14:55.344065 3038403 net.cpp:124] Setting up mul10 +I0102 05:14:55.344084 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.344092 3038403 net.cpp:139] Memory required for data: 5760 +I0102 05:14:55.344097 3038403 layer_factory.hpp:77] Creating layer mul2 +I0102 05:14:55.344120 3038403 net.cpp:86] Creating Layer mul2 +I0102 05:14:55.344126 3038403 net.cpp:408] mul2 <- mul10 +I0102 05:14:55.344133 3038403 net.cpp:382] mul2 -> mul2 +I0102 05:14:55.344164 3038403 net.cpp:124] Setting up mul2 +I0102 05:14:55.344172 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.344178 3038403 net.cpp:139] Memory required for data: 8640 +I0102 05:14:55.344183 3038403 net.cpp:202] mul2 does not need backward computation. +I0102 05:14:55.344189 3038403 net.cpp:202] mul10 does not need backward computation. +I0102 05:14:55.344195 3038403 net.cpp:202] input does not need backward computation. +I0102 05:14:55.344202 3038403 net.cpp:244] This network produces output mul2 +I0102 05:14:55.344209 3038403 net.cpp:257] Network initialization done. +.I0102 05:14:55.350539 3038403 upgrade_proto.cpp:69] Attempting to upgrade input file specified using deprecated input fields: /tmp/tmpp9g2tjho +I0102 05:14:55.350574 3038403 upgrade_proto.cpp:72] Successfully upgraded file specified using deprecated input fields. +W0102 05:14:55.350579 3038403 upgrade_proto.cpp:74] Note that future Caffe releases will only support input layers and not input fields. 
+I0102 05:14:55.350610 3038403 net.cpp:53] Initializing net from parameters: name: "pythonnet" force_backward: true state { @@ -17888,32 +17923,32 @@ param_str: "2" } } -I1128 20:30:24.230669 2042758 layer_factory.hpp:77] Creating layer input -I1128 20:30:24.230682 2042758 net.cpp:86] Creating Layer input -I1128 20:30:24.230690 2042758 net.cpp:382] input -> data -I1128 20:30:24.230707 2042758 net.cpp:124] Setting up input -I1128 20:30:24.230715 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.230723 2042758 net.cpp:139] Memory required for data: 2880 -I1128 20:30:24.230729 2042758 layer_factory.hpp:77] Creating layer mul10 -I1128 20:30:24.230772 2042758 net.cpp:86] Creating Layer mul10 -I1128 20:30:24.230780 2042758 net.cpp:408] mul10 <- data -I1128 20:30:24.230787 2042758 net.cpp:382] mul10 -> mul10 -I1128 20:30:24.230840 2042758 net.cpp:124] Setting up mul10 -I1128 20:30:24.230851 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.230859 2042758 net.cpp:139] Memory required for data: 5760 -I1128 20:30:24.230865 2042758 layer_factory.hpp:77] Creating layer mul2 -I1128 20:30:24.230886 2042758 net.cpp:86] Creating Layer mul2 -I1128 20:30:24.230893 2042758 net.cpp:408] mul2 <- mul10 -I1128 20:30:24.230901 2042758 net.cpp:382] mul2 -> mul2 -I1128 20:30:24.231060 2042758 net.cpp:124] Setting up mul2 -I1128 20:30:24.231079 2042758 net.cpp:131] Top shape: 10 9 8 (720) -I1128 20:30:24.231088 2042758 net.cpp:139] Memory required for data: 8640 -I1128 20:30:24.231094 2042758 net.cpp:202] mul2 does not need backward computation. -I1128 20:30:24.231101 2042758 net.cpp:202] mul10 does not need backward computation. -I1128 20:30:24.231185 2042758 net.cpp:202] input does not need backward computation. -I1128 20:30:24.231258 2042758 net.cpp:244] This network produces output mul2 -I1128 20:30:24.231277 2042758 net.cpp:257] Network initialization done. -.I1128 20:30:24.237663 2042758 solver.cpp:45] Initializing solver from parameters: +I0102 05:14:55.350705 3038403 layer_factory.hpp:77] Creating layer input +I0102 05:14:55.350723 3038403 net.cpp:86] Creating Layer input +I0102 05:14:55.350733 3038403 net.cpp:382] input -> data +I0102 05:14:55.350764 3038403 net.cpp:124] Setting up input +I0102 05:14:55.350772 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.350781 3038403 net.cpp:139] Memory required for data: 2880 +I0102 05:14:55.350788 3038403 layer_factory.hpp:77] Creating layer mul10 +I0102 05:14:55.350837 3038403 net.cpp:86] Creating Layer mul10 +I0102 05:14:55.350845 3038403 net.cpp:408] mul10 <- data +I0102 05:14:55.350855 3038403 net.cpp:382] mul10 -> mul10 +I0102 05:14:55.350910 3038403 net.cpp:124] Setting up mul10 +I0102 05:14:55.350925 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.350934 3038403 net.cpp:139] Memory required for data: 5760 +I0102 05:14:55.350939 3038403 layer_factory.hpp:77] Creating layer mul2 +I0102 05:14:55.350965 3038403 net.cpp:86] Creating Layer mul2 +I0102 05:14:55.350975 3038403 net.cpp:408] mul2 <- mul10 +I0102 05:14:55.350991 3038403 net.cpp:382] mul2 -> mul2 +I0102 05:14:55.351028 3038403 net.cpp:124] Setting up mul2 +I0102 05:14:55.351037 3038403 net.cpp:131] Top shape: 10 9 8 (720) +I0102 05:14:55.351043 3038403 net.cpp:139] Memory required for data: 8640 +I0102 05:14:55.351049 3038403 net.cpp:202] mul2 does not need backward computation. +I0102 05:14:55.351055 3038403 net.cpp:202] mul10 does not need backward computation. +I0102 05:14:55.351064 3038403 net.cpp:202] input does not need backward computation. 
+I0102 05:14:55.351069 3038403 net.cpp:244] This network produces output mul2 +I0102 05:14:55.351078 3038403 net.cpp:257] Network initialization done. +.I0102 05:14:55.357622 3038403 solver.cpp:45] Initializing solver from parameters: test_iter: 10 test_interval: 10 base_lr: 0.01 @@ -17925,10 +17960,10 @@ momentum: 0.9 weight_decay: 0.0005 snapshot_prefix: "model" -net: "/tmp/tmp47bp4zpb" +net: "/tmp/tmp1n51ln18" snapshot_after_train: false -I1128 20:30:24.237763 2042758 solver.cpp:102] Creating training net from net file: /tmp/tmp47bp4zpb -I1128 20:30:24.237985 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.357723 3038403 solver.cpp:102] Creating training net from net file: /tmp/tmp1n51ln18 +I0102 05:14:55.357913 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -18006,46 +18041,46 @@ bottom: "label" top: "loss" } -I1128 20:30:24.238122 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.238139 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.238148 2042758 net.cpp:382] data -> data -I1128 20:30:24.238165 2042758 net.cpp:382] data -> label -I1128 20:30:24.238184 2042758 net.cpp:124] Setting up data -I1128 20:30:24.238190 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.238199 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.238205 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.238210 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.238224 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.238229 2042758 net.cpp:408] conv <- data -I1128 20:30:24.238240 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.238276 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.238281 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.238287 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.238302 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.238312 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.238319 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.238330 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.238474 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.238479 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.238484 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.238492 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.238499 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.238503 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.238507 2042758 net.cpp:408] loss <- label -I1128 20:30:24.238513 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.238523 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.238538 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.238541 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.238546 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.238559 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.238564 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.238571 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.238579 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.238585 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.238595 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.238605 2042758 net.cpp:257] Network initialization done. 
-I1128 20:30:24.238960 2042758 solver.cpp:190] Creating test net (#0) specified by net file: /tmp/tmp47bp4zpb -I1128 20:30:24.238988 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.358040 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.358057 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.358067 3038403 net.cpp:382] data -> data +I0102 05:14:55.358083 3038403 net.cpp:382] data -> label +I0102 05:14:55.358101 3038403 net.cpp:124] Setting up data +I0102 05:14:55.358107 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.358117 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.358124 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.358130 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.358145 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.358152 3038403 net.cpp:408] conv <- data +I0102 05:14:55.358162 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.358199 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.358207 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.358214 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.358230 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.358242 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.358247 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.358256 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.358430 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.358440 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.358458 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.358470 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.358480 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.358486 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.358494 3038403 net.cpp:408] loss <- label +I0102 05:14:55.358502 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.358515 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.358538 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.358546 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.358552 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.358567 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.358574 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.358582 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.358588 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.358597 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.358608 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.358618 3038403 net.cpp:257] Network initialization done. 
+I0102 05:14:55.358791 3038403 solver.cpp:190] Creating test net (#0) specified by net file: /tmp/tmp1n51ln18 +I0102 05:14:55.358820 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -18123,46 +18158,46 @@ bottom: "label" top: "loss" } -I1128 20:30:24.239305 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.239341 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.239434 2042758 net.cpp:382] data -> data -I1128 20:30:24.239524 2042758 net.cpp:382] data -> label -I1128 20:30:24.239557 2042758 net.cpp:124] Setting up data -I1128 20:30:24.239642 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.239696 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.239707 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.239712 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.239766 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.239778 2042758 net.cpp:408] conv <- data -I1128 20:30:24.239789 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.239868 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.239879 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.239885 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.239895 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.239902 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.239907 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.239964 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.240103 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.240108 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.240113 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.240119 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.240126 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.240130 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.240134 2042758 net.cpp:408] loss <- label -I1128 20:30:24.240140 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.240151 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.240164 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.240167 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.240172 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.240181 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.240185 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.240190 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.240193 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.240198 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.240202 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.240208 2042758 net.cpp:257] Network initialization done. -I1128 20:30:24.240229 2042758 solver.cpp:57] Solver scaffolding done. 
-I1128 20:30:24.240342 2042758 solver.cpp:45] Initializing solver from parameters: +I0102 05:14:55.358947 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.358961 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.358971 3038403 net.cpp:382] data -> data +I0102 05:14:55.358983 3038403 net.cpp:382] data -> label +I0102 05:14:55.358999 3038403 net.cpp:124] Setting up data +I0102 05:14:55.359005 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.359014 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.359021 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.359027 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.359040 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.359046 3038403 net.cpp:408] conv <- data +I0102 05:14:55.359056 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.359086 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.359093 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.359102 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.359115 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.359125 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.359133 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.359141 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.359311 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.359320 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.359328 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.359340 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.359350 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.359356 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.359364 3038403 net.cpp:408] loss <- label +I0102 05:14:55.359373 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.359385 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.359402 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.359408 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.359416 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.359428 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.359434 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.359441 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.359447 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.359454 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.359460 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.359469 3038403 net.cpp:257] Network initialization done. +I0102 05:14:55.359498 3038403 solver.cpp:57] Solver scaffolding done. 
+I0102 05:14:55.359616 3038403 solver.cpp:45] Initializing solver from parameters: test_iter: 10 test_interval: 10 base_lr: 0.01 @@ -18174,10 +18209,10 @@ momentum: 0.9 weight_decay: 0.0005 snapshot_prefix: "model" -net: "/tmp/tmp47bp4zpb" +net: "/tmp/tmp1n51ln18" snapshot_after_train: false -I1128 20:30:24.240373 2042758 solver.cpp:102] Creating training net from net file: /tmp/tmp47bp4zpb -I1128 20:30:24.240465 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.359668 3038403 solver.cpp:102] Creating training net from net file: /tmp/tmp1n51ln18 +I0102 05:14:55.359830 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -18255,46 +18290,46 @@ bottom: "label" top: "loss" } -I1128 20:30:24.240550 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.240558 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.240566 2042758 net.cpp:382] data -> data -I1128 20:30:24.240576 2042758 net.cpp:382] data -> label -I1128 20:30:24.240590 2042758 net.cpp:124] Setting up data -I1128 20:30:24.240598 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.240607 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.240615 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.240622 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.240633 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.240639 2042758 net.cpp:408] conv <- data -I1128 20:30:24.240648 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.240681 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.240687 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.240700 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.240711 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.240720 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.240725 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.240734 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.240897 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.240904 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.240911 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.240921 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.240928 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.240933 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.240940 2042758 net.cpp:408] loss <- label -I1128 20:30:24.240947 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.240957 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.240972 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.240978 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.240983 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.240991 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.240996 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.241003 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.241008 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.241015 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.241026 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.241036 2042758 net.cpp:257] Network initialization done. 
-I1128 20:30:24.241165 2042758 solver.cpp:190] Creating test net (#0) specified by net file: /tmp/tmp47bp4zpb -I1128 20:30:24.241223 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.359953 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.359968 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.359977 3038403 net.cpp:382] data -> data +I0102 05:14:55.359992 3038403 net.cpp:382] data -> label +I0102 05:14:55.360006 3038403 net.cpp:124] Setting up data +I0102 05:14:55.360013 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.360023 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.360030 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.360038 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.360049 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.360056 3038403 net.cpp:408] conv <- data +I0102 05:14:55.360066 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.360097 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.360105 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.360113 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.360127 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.360139 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.360146 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.360155 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.360327 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.360343 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.360352 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.360365 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.360375 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.360381 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.360388 3038403 net.cpp:408] loss <- label +I0102 05:14:55.360397 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.360409 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.360433 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.360440 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.360448 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.360460 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.360466 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.360472 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.360478 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.360486 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.360491 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.360502 3038403 net.cpp:257] Network initialization done. 
+I0102 05:14:55.360750 3038403 solver.cpp:190] Creating test net (#0) specified by net file: /tmp/tmp1n51ln18 +I0102 05:14:55.360787 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -18372,47 +18407,47 @@ bottom: "label" top: "loss" } -I1128 20:30:24.241565 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.241595 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.241602 2042758 net.cpp:382] data -> data -I1128 20:30:24.241662 2042758 net.cpp:382] data -> label -I1128 20:30:24.241684 2042758 net.cpp:124] Setting up data -I1128 20:30:24.241691 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.241755 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.241760 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.241828 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.241899 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.241912 2042758 net.cpp:408] conv <- data -I1128 20:30:24.241921 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.241994 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.242007 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.242012 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.242080 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.242096 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.242101 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.242108 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.242267 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.242274 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.242285 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.242295 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.242305 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.242316 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.242323 2042758 net.cpp:408] loss <- label -I1128 20:30:24.242332 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.242344 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.242359 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.242365 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.242372 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.242383 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.242390 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.242396 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.242404 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.242415 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.242425 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.242436 2042758 net.cpp:257] Network initialization done. -I1128 20:30:24.242467 2042758 solver.cpp:57] Solver scaffolding done. 
-I1128 20:30:24.243115 2042758 sgd_solver.cpp:112] Iteration 0, lr = 0.01 -.I1128 20:30:24.244304 2042758 solver.cpp:45] Initializing solver from parameters: +I0102 05:14:55.360888 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.360903 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.360911 3038403 net.cpp:382] data -> data +I0102 05:14:55.360924 3038403 net.cpp:382] data -> label +I0102 05:14:55.360939 3038403 net.cpp:124] Setting up data +I0102 05:14:55.360944 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.360951 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.360957 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.360963 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.360975 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.360980 3038403 net.cpp:408] conv <- data +I0102 05:14:55.360988 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.361014 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.361021 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.361027 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.361038 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.361048 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.361054 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.361063 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.361214 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.361223 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.361230 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.361241 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.361249 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.361255 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.361261 3038403 net.cpp:408] loss <- label +I0102 05:14:55.361268 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.361279 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.361294 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.361300 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.361307 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.361317 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.361322 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.361328 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.361335 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.361340 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.361346 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.361354 3038403 net.cpp:257] Network initialization done. +I0102 05:14:55.361377 3038403 solver.cpp:57] Solver scaffolding done. 
+I0102 05:14:55.361953 3038403 sgd_solver.cpp:112] Iteration 0, lr = 0.01 +.I0102 05:14:55.362895 3038403 solver.cpp:45] Initializing solver from parameters: test_iter: 10 test_interval: 10 base_lr: 0.01 @@ -18424,10 +18459,10 @@ momentum: 0.9 weight_decay: 0.0005 snapshot_prefix: "model" -net: "/tmp/tmpbwxkcs7x" +net: "/tmp/tmpwyaqic9_" snapshot_after_train: false -I1128 20:30:24.244380 2042758 solver.cpp:102] Creating training net from net file: /tmp/tmpbwxkcs7x -I1128 20:30:24.244629 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.362953 3038403 solver.cpp:102] Creating training net from net file: /tmp/tmpwyaqic9_ +I0102 05:14:55.363113 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -18505,46 +18540,46 @@ bottom: "label" top: "loss" } -I1128 20:30:24.244788 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.244809 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.244906 2042758 net.cpp:382] data -> data -I1128 20:30:24.244933 2042758 net.cpp:382] data -> label -I1128 20:30:24.245003 2042758 net.cpp:124] Setting up data -I1128 20:30:24.245018 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.245025 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.245079 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.245085 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.245213 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.245227 2042758 net.cpp:408] conv <- data -I1128 20:30:24.245288 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.245389 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.245400 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.245406 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.245419 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.245427 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.245431 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.245440 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.245577 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.245582 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.245587 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.245594 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.245601 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.245612 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.245620 2042758 net.cpp:408] loss <- label -I1128 20:30:24.245630 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.245641 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.245658 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.245664 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.245671 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.245684 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.245692 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.245699 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.245705 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.245712 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.245718 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.245726 2042758 net.cpp:257] Network initialization done. 
-I1128 20:30:24.245852 2042758 solver.cpp:190] Creating test net (#0) specified by net file: /tmp/tmpbwxkcs7x -I1128 20:30:24.245872 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.363241 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.363256 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.363266 3038403 net.cpp:382] data -> data +I0102 05:14:55.363282 3038403 net.cpp:382] data -> label +I0102 05:14:55.363301 3038403 net.cpp:124] Setting up data +I0102 05:14:55.363308 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.363317 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.363324 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.363330 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.363345 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.363358 3038403 net.cpp:408] conv <- data +I0102 05:14:55.363368 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.363402 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.363409 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.363416 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.363431 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.363442 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.363447 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.363456 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.363607 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.363616 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.363623 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.363633 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.363642 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.363648 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.363654 3038403 net.cpp:408] loss <- label +I0102 05:14:55.363662 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.363673 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.363694 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.363700 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.363708 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.363718 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.363724 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.363731 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.363737 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.363792 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.363799 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.363809 3038403 net.cpp:257] Network initialization done. 
+I0102 05:14:55.363981 3038403 solver.cpp:190] Creating test net (#0) specified by net file: /tmp/tmpwyaqic9_ +I0102 05:14:55.364015 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -18622,46 +18657,46 @@ bottom: "label" top: "loss" } -I1128 20:30:24.245957 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.245967 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.245976 2042758 net.cpp:382] data -> data -I1128 20:30:24.245990 2042758 net.cpp:382] data -> label -I1128 20:30:24.246006 2042758 net.cpp:124] Setting up data -I1128 20:30:24.246013 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.246145 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.246151 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.246156 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.246167 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.246171 2042758 net.cpp:408] conv <- data -I1128 20:30:24.246179 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.246207 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.246213 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.246219 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.246229 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.246237 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.246241 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.246248 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.246393 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.246399 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.246410 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.246419 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.246428 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.246431 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.246438 2042758 net.cpp:408] loss <- label -I1128 20:30:24.246445 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.246457 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.246472 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.246479 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.246486 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.246503 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.246508 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.246513 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.246518 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.246527 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.246536 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.246548 2042758 net.cpp:257] Network initialization done. -I1128 20:30:24.246573 2042758 solver.cpp:57] Solver scaffolding done. 
-I1128 20:30:24.246666 2042758 solver.cpp:45] Initializing solver from parameters: +I0102 05:14:55.364127 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.364142 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.364151 3038403 net.cpp:382] data -> data +I0102 05:14:55.364164 3038403 net.cpp:382] data -> label +I0102 05:14:55.364179 3038403 net.cpp:124] Setting up data +I0102 05:14:55.364187 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.364195 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.364207 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.364212 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.364226 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.364233 3038403 net.cpp:408] conv <- data +I0102 05:14:55.364243 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.364274 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.364280 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.364289 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.364302 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.364315 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.364321 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.364331 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.364499 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.364508 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.364516 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.364528 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.364538 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.364544 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.364552 3038403 net.cpp:408] loss <- label +I0102 05:14:55.364562 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.364701 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.364722 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.364728 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.364737 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.364748 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.364755 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.364763 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.364769 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.364778 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.364784 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.364794 3038403 net.cpp:257] Network initialization done. +I0102 05:14:55.364823 3038403 solver.cpp:57] Solver scaffolding done. 
+I0102 05:14:55.364934 3038403 solver.cpp:45] Initializing solver from parameters: test_iter: 10 test_interval: 10 base_lr: 0.01 @@ -18673,10 +18708,10 @@ momentum: 0.9 weight_decay: 0.0005 snapshot_prefix: "model" -net: "/tmp/tmpbwxkcs7x" +net: "/tmp/tmpwyaqic9_" snapshot_after_train: false -I1128 20:30:24.246703 2042758 solver.cpp:102] Creating training net from net file: /tmp/tmpbwxkcs7x -I1128 20:30:24.246918 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.364984 3038403 solver.cpp:102] Creating training net from net file: /tmp/tmpwyaqic9_ +I0102 05:14:55.365139 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -18754,46 +18789,46 @@ bottom: "label" top: "loss" } -I1128 20:30:24.247020 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.247097 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.247104 2042758 net.cpp:382] data -> data -I1128 20:30:24.247175 2042758 net.cpp:382] data -> label -I1128 20:30:24.247195 2042758 net.cpp:124] Setting up data -I1128 20:30:24.247200 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.247208 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.247215 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.247220 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.247229 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.247238 2042758 net.cpp:408] conv <- data -I1128 20:30:24.247246 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.247277 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.247287 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.247294 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.247308 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.247318 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.247325 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.247334 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.247478 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.247483 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.247488 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.247495 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.247503 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.247506 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.247510 2042758 net.cpp:408] loss <- label -I1128 20:30:24.247515 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.247524 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.247535 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.247539 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.247545 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.247551 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.247555 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.247560 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.247565 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.247568 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.247572 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.247579 2042758 net.cpp:257] Network initialization done. 
-I1128 20:30:24.247678 2042758 solver.cpp:190] Creating test net (#0) specified by net file: /tmp/tmpbwxkcs7x -I1128 20:30:24.247695 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.365254 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.365268 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.365278 3038403 net.cpp:382] data -> data +I0102 05:14:55.365291 3038403 net.cpp:382] data -> label +I0102 05:14:55.365306 3038403 net.cpp:124] Setting up data +I0102 05:14:55.365314 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.365322 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.365329 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.365335 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.365347 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.365355 3038403 net.cpp:408] conv <- data +I0102 05:14:55.365365 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.365396 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.365403 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.365411 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.365424 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.365434 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.365440 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.365449 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.365622 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.365644 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.365653 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.365665 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.365676 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.365684 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.365690 3038403 net.cpp:408] loss <- label +I0102 05:14:55.365710 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.365725 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.365743 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.365751 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.365758 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.365775 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.365782 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.365789 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.365795 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.365803 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.365809 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.365819 3038403 net.cpp:257] Network initialization done. 
+I0102 05:14:55.365988 3038403 solver.cpp:190] Creating test net (#0) specified by net file: /tmp/tmpwyaqic9_ +I0102 05:14:55.366024 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -18871,46 +18906,46 @@ bottom: "label" top: "loss" } -I1128 20:30:24.247931 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.247946 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.247956 2042758 net.cpp:382] data -> data -I1128 20:30:24.247970 2042758 net.cpp:382] data -> label -I1128 20:30:24.247985 2042758 net.cpp:124] Setting up data -I1128 20:30:24.247992 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.248000 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.248008 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.248013 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.248026 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.248032 2042758 net.cpp:408] conv <- data -I1128 20:30:24.248042 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.248073 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.248080 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.248090 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.248104 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.248114 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.248121 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.248131 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.248374 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.248392 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.248401 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.248476 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.248495 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.248503 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.248586 2042758 net.cpp:408] loss <- label -I1128 20:30:24.248661 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.248685 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.248764 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.248777 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.248785 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.248863 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.248931 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.248945 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.249007 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.249022 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.249083 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.249100 2042758 net.cpp:257] Network initialization done. -I1128 20:30:24.249301 2042758 solver.cpp:57] Solver scaffolding done. 
-.I1128 20:30:24.251492 2042758 solver.cpp:45] Initializing solver from parameters: +I0102 05:14:55.366150 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.366163 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.366170 3038403 net.cpp:382] data -> data +I0102 05:14:55.366183 3038403 net.cpp:382] data -> label +I0102 05:14:55.366204 3038403 net.cpp:124] Setting up data +I0102 05:14:55.366271 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.366278 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.366286 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.366291 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.366303 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.366309 3038403 net.cpp:408] conv <- data +I0102 05:14:55.366318 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.366355 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.366362 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.366369 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.366384 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.366394 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.366405 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.366415 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.366564 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.366572 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.366580 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.366590 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.366598 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.366605 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.366611 3038403 net.cpp:408] loss <- label +I0102 05:14:55.366618 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.366628 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.366643 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.366650 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.366657 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.366667 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.366672 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.366676 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.366683 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.366688 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.366693 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.366701 3038403 net.cpp:257] Network initialization done. +I0102 05:14:55.366724 3038403 solver.cpp:57] Solver scaffolding done. 
+.I0102 05:14:55.369308 3038403 solver.cpp:45] Initializing solver from parameters: test_iter: 10 test_interval: 10 base_lr: 0.01 @@ -18922,10 +18957,10 @@ momentum: 0.9 weight_decay: 0.0005 snapshot_prefix: "model" -net: "/tmp/tmpdxn_ppu_" +net: "/tmp/tmp_r1g42l7" snapshot_after_train: false -I1128 20:30:24.251560 2042758 solver.cpp:102] Creating training net from net file: /tmp/tmpdxn_ppu_ -I1128 20:30:24.251724 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.369385 3038403 solver.cpp:102] Creating training net from net file: /tmp/tmp_r1g42l7 +I0102 05:14:55.369558 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -19003,46 +19038,46 @@ bottom: "label" top: "loss" } -I1128 20:30:24.251822 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.251837 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.251843 2042758 net.cpp:382] data -> data -I1128 20:30:24.251857 2042758 net.cpp:382] data -> label -I1128 20:30:24.251871 2042758 net.cpp:124] Setting up data -I1128 20:30:24.251876 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.251883 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.251889 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.251895 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.251906 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.251911 2042758 net.cpp:408] conv <- data -I1128 20:30:24.251920 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.251956 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.251962 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.251968 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.251986 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.251996 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.252002 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.252009 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.252153 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.252159 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.252166 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.252175 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.252184 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.252189 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.252194 2042758 net.cpp:408] loss <- label -I1128 20:30:24.252200 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.252211 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.252228 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.252233 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.252240 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.252252 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.252257 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.252264 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.252269 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.252275 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.252282 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.252296 2042758 net.cpp:257] Network initialization done. 
-I1128 20:30:24.252490 2042758 solver.cpp:190] Creating test net (#0) specified by net file: /tmp/tmpdxn_ppu_ -I1128 20:30:24.252528 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.369699 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.369715 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.369726 3038403 net.cpp:382] data -> data +I0102 05:14:55.369745 3038403 net.cpp:382] data -> label +I0102 05:14:55.369765 3038403 net.cpp:124] Setting up data +I0102 05:14:55.369771 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.369781 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.369789 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.369796 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.369810 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.369817 3038403 net.cpp:408] conv <- data +I0102 05:14:55.369828 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.369870 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.369879 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.369885 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.369902 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.369913 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.369920 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.369930 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.370106 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.370115 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.370123 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.370136 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.370147 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.370153 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.370162 3038403 net.cpp:408] loss <- label +I0102 05:14:55.370169 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.370182 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.370203 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.370210 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.370218 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.370231 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.370239 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.370246 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.370254 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.370261 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.370268 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.370278 3038403 net.cpp:257] Network initialization done. 
+I0102 05:14:55.370447 3038403 solver.cpp:190] Creating test net (#0) specified by net file: /tmp/tmp_r1g42l7 +I0102 05:14:55.370478 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -19120,46 +19155,46 @@ bottom: "label" top: "loss" } -I1128 20:30:24.252643 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.252655 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.252663 2042758 net.cpp:382] data -> data -I1128 20:30:24.252676 2042758 net.cpp:382] data -> label -I1128 20:30:24.252687 2042758 net.cpp:124] Setting up data -I1128 20:30:24.252692 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.252698 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.252704 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.252709 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.252718 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.252722 2042758 net.cpp:408] conv <- data -I1128 20:30:24.252729 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.252756 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.252761 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.252768 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.252777 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.252785 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.252790 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.252796 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.253113 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.253130 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.253139 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.253151 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.253268 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.253278 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.253288 2042758 net.cpp:408] loss <- label -I1128 20:30:24.253295 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.253309 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.253327 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.253334 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.253412 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.253423 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.253429 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.253435 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.253440 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.253446 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.253453 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.253463 2042758 net.cpp:257] Network initialization done. -I1128 20:30:24.253492 2042758 solver.cpp:57] Solver scaffolding done. 
-I1128 20:30:24.253604 2042758 solver.cpp:45] Initializing solver from parameters: +I0102 05:14:55.370601 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.370616 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.370627 3038403 net.cpp:382] data -> data +I0102 05:14:55.370640 3038403 net.cpp:382] data -> label +I0102 05:14:55.370656 3038403 net.cpp:124] Setting up data +I0102 05:14:55.370661 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.370671 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.370678 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.370683 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.370697 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.370703 3038403 net.cpp:408] conv <- data +I0102 05:14:55.370713 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.370744 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.370752 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.370760 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.370774 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.370785 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.370792 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.370801 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.370970 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.370980 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.370988 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.371001 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.371011 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.371016 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.371023 3038403 net.cpp:408] loss <- label +I0102 05:14:55.371032 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.371044 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.371063 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.371069 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.371076 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.371088 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.371093 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.371100 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.371106 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.371114 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.371119 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.371129 3038403 net.cpp:257] Network initialization done. +I0102 05:14:55.371156 3038403 solver.cpp:57] Solver scaffolding done. 
+I0102 05:14:55.371260 3038403 solver.cpp:45] Initializing solver from parameters: test_iter: 10 test_interval: 10 base_lr: 0.01 @@ -19171,10 +19206,10 @@ momentum: 0.9 weight_decay: 0.0005 snapshot_prefix: "model" -net: "/tmp/tmpdxn_ppu_" +net: "/tmp/tmp_r1g42l7" snapshot_after_train: false -I1128 20:30:24.253643 2042758 solver.cpp:102] Creating training net from net file: /tmp/tmpdxn_ppu_ -I1128 20:30:24.253767 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.371306 3038403 solver.cpp:102] Creating training net from net file: /tmp/tmp_r1g42l7 +I0102 05:14:55.371465 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -19252,46 +19287,46 @@ bottom: "label" top: "loss" } -I1128 20:30:24.254027 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.254048 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.254110 2042758 net.cpp:382] data -> data -I1128 20:30:24.254130 2042758 net.cpp:382] data -> label -I1128 20:30:24.254251 2042758 net.cpp:124] Setting up data -I1128 20:30:24.254261 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.254318 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.254330 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.254338 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.254451 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.254462 2042758 net.cpp:408] conv <- data -I1128 20:30:24.254523 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.254626 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.254639 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.254648 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.254781 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.254798 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.254926 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.254997 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.255228 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.255245 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.255255 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.255370 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.255380 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.255386 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.255393 2042758 net.cpp:408] loss <- label -I1128 20:30:24.255405 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.255416 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.255442 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.255447 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.255453 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.255465 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.255470 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.255475 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.255481 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.255487 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.255493 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.255501 2042758 net.cpp:257] Network initialization done. 
-I1128 20:30:24.255653 2042758 solver.cpp:190] Creating test net (#0) specified by net file: /tmp/tmpdxn_ppu_ -I1128 20:30:24.255676 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.371583 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.371598 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.371608 3038403 net.cpp:382] data -> data +I0102 05:14:55.371621 3038403 net.cpp:382] data -> label +I0102 05:14:55.371636 3038403 net.cpp:124] Setting up data +I0102 05:14:55.371644 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.371652 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.371659 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.371667 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.371680 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.371686 3038403 net.cpp:408] conv <- data +I0102 05:14:55.371696 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.371728 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.371735 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.371743 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.371757 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.371767 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.371773 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.371781 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.371953 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.371963 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.371971 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.371984 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.371994 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.372001 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.372009 3038403 net.cpp:408] loss <- label +I0102 05:14:55.372023 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.372033 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.372049 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.372056 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.372061 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.372071 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.372077 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.372083 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.372089 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.372095 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.372100 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.372108 3038403 net.cpp:257] Network initialization done. 
+I0102 05:14:55.372259 3038403 solver.cpp:190] Creating test net (#0) specified by net file: /tmp/tmp_r1g42l7 +I0102 05:14:55.372292 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -19369,48 +19404,48 @@ bottom: "label" top: "loss" } -I1128 20:30:24.255946 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.255968 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.256037 2042758 net.cpp:382] data -> data -I1128 20:30:24.256062 2042758 net.cpp:382] data -> label -I1128 20:30:24.256213 2042758 net.cpp:124] Setting up data -I1128 20:30:24.256232 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.256240 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.256247 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.256253 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.256268 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.256273 2042758 net.cpp:408] conv <- data -I1128 20:30:24.256283 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.256318 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.256325 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.256333 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.256345 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.256354 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.256359 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.256367 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.256536 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.256543 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.256549 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.256565 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.256573 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.256587 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.256593 2042758 net.cpp:408] loss <- label -I1128 20:30:24.256604 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.256644 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.256661 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.256666 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.256678 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.256691 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.256695 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.256701 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.256707 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.256713 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.256723 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.256732 2042758 net.cpp:257] Network initialization done. -I1128 20:30:24.256759 2042758 solver.cpp:57] Solver scaffolding done. 
-I1128 20:30:24.257447 2042758 solver.cpp:464] Snapshotting to binary proto file model_iter_0.caffemodel -I1128 20:30:24.257755 2042758 sgd_solver.cpp:284] Snapshotting solver state to binary proto file model_iter_0.solverstate -.I1128 20:30:24.258677 2042758 solver.cpp:45] Initializing solver from parameters: +I0102 05:14:55.372396 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.372428 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.372437 3038403 net.cpp:382] data -> data +I0102 05:14:55.372447 3038403 net.cpp:382] data -> label +I0102 05:14:55.372462 3038403 net.cpp:124] Setting up data +I0102 05:14:55.372467 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.372474 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.372480 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.372485 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.372503 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.372509 3038403 net.cpp:408] conv <- data +I0102 05:14:55.372519 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.372545 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.372562 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.372606 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.372618 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.372628 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.372633 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.372642 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.372793 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.372802 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.372810 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.372822 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.372833 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.372838 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.372853 3038403 net.cpp:408] loss <- label +I0102 05:14:55.372862 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.372877 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.372893 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.372905 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.372915 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.372923 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.372928 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.372937 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.372942 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.372948 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.372954 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.372963 3038403 net.cpp:257] Network initialization done. +I0102 05:14:55.372989 3038403 solver.cpp:57] Solver scaffolding done. 
+I0102 05:14:55.373541 3038403 solver.cpp:464] Snapshotting to binary proto file model_iter_0.caffemodel +I0102 05:14:55.373798 3038403 sgd_solver.cpp:284] Snapshotting solver state to binary proto file model_iter_0.solverstate +.I0102 05:14:55.374730 3038403 solver.cpp:45] Initializing solver from parameters: test_iter: 10 test_interval: 10 base_lr: 0.01 @@ -19422,10 +19457,10 @@ momentum: 0.9 weight_decay: 0.0005 snapshot_prefix: "model" -net: "/tmp/tmps8ynhm28" +net: "/tmp/tmpxzopbysw" snapshot_after_train: false -I1128 20:30:24.258741 2042758 solver.cpp:102] Creating training net from net file: /tmp/tmps8ynhm28 -I1128 20:30:24.258934 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.374804 3038403 solver.cpp:102] Creating training net from net file: /tmp/tmpxzopbysw +I0102 05:14:55.374969 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -19503,46 +19538,46 @@ bottom: "label" top: "loss" } -I1128 20:30:24.259096 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.259117 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.259181 2042758 net.cpp:382] data -> data -I1128 20:30:24.259204 2042758 net.cpp:382] data -> label -I1128 20:30:24.259343 2042758 net.cpp:124] Setting up data -I1128 20:30:24.259359 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.259368 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.259377 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.259382 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.259395 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.259402 2042758 net.cpp:408] conv <- data -I1128 20:30:24.259411 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.259447 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.259454 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.259459 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.259472 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.259483 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.259487 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.259496 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.259646 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.259653 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.259660 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.259668 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.259676 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.259681 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.259687 2042758 net.cpp:408] loss <- label -I1128 20:30:24.259693 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.259701 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.259718 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.259722 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.259728 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.259744 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.259750 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.259757 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.259763 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.259768 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.259776 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.259891 2042758 net.cpp:257] Network initialization done. 
-I1128 20:30:24.260063 2042758 solver.cpp:190] Creating test net (#0) specified by net file: /tmp/tmps8ynhm28 -I1128 20:30:24.260149 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.375094 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.375108 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.375118 3038403 net.cpp:382] data -> data +I0102 05:14:55.375133 3038403 net.cpp:382] data -> label +I0102 05:14:55.375150 3038403 net.cpp:124] Setting up data +I0102 05:14:55.375156 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.375165 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.375172 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.375178 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.375192 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.375198 3038403 net.cpp:408] conv <- data +I0102 05:14:55.375209 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.375243 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.375250 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.375257 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.375272 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.375283 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.375289 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.375298 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.375463 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.375479 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.375488 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.375497 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.375506 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.375512 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.375519 3038403 net.cpp:408] loss <- label +I0102 05:14:55.375526 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.375538 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.375558 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.375564 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.375572 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.375592 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.375598 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.375604 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.375610 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.375618 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.375624 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.375634 3038403 net.cpp:257] Network initialization done. 
+I0102 05:14:55.375799 3038403 solver.cpp:190] Creating test net (#0) specified by net file: /tmp/tmpxzopbysw +I0102 05:14:55.375828 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -19620,46 +19655,46 @@ bottom: "label" top: "loss" } -I1128 20:30:24.260304 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.260321 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.260408 2042758 net.cpp:382] data -> data -I1128 20:30:24.260461 2042758 net.cpp:382] data -> label -I1128 20:30:24.260515 2042758 net.cpp:124] Setting up data -I1128 20:30:24.260526 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.260587 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.260599 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.260607 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.260731 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.260744 2042758 net.cpp:408] conv <- data -I1128 20:30:24.260808 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.260891 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.260902 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.260959 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.261027 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.261044 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.261050 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.261058 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.261338 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.261345 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.261354 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.261365 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.261374 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.261379 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.261387 2042758 net.cpp:408] loss <- label -I1128 20:30:24.261394 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.261405 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.261422 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.261427 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.261440 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.261449 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.261454 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.261461 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.261466 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.261472 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.261479 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.261488 2042758 net.cpp:257] Network initialization done. -I1128 20:30:24.261518 2042758 solver.cpp:57] Solver scaffolding done. 
-I1128 20:30:24.261698 2042758 solver.cpp:45] Initializing solver from parameters: +I0102 05:14:55.375943 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.375957 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.375967 3038403 net.cpp:382] data -> data +I0102 05:14:55.375982 3038403 net.cpp:382] data -> label +I0102 05:14:55.375999 3038403 net.cpp:124] Setting up data +I0102 05:14:55.376008 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.376016 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.376025 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.376030 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.376044 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.376049 3038403 net.cpp:408] conv <- data +I0102 05:14:55.376058 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.376091 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.376101 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.376108 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.376121 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.376132 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.376137 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.376147 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.376387 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.376402 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.376410 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.376422 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.376432 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.376438 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.376446 3038403 net.cpp:408] loss <- label +I0102 05:14:55.376454 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.376466 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.376483 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.376489 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.376497 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.376508 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.376514 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.376520 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.376526 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.376533 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.376539 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.376549 3038403 net.cpp:257] Network initialization done. +I0102 05:14:55.376639 3038403 solver.cpp:57] Solver scaffolding done. 
+I0102 05:14:55.376756 3038403 solver.cpp:45] Initializing solver from parameters: test_iter: 10 test_interval: 10 base_lr: 0.01 @@ -19671,10 +19706,10 @@ momentum: 0.9 weight_decay: 0.0005 snapshot_prefix: "model" -net: "/tmp/tmps8ynhm28" +net: "/tmp/tmpxzopbysw" snapshot_after_train: false -I1128 20:30:24.261742 2042758 solver.cpp:102] Creating training net from net file: /tmp/tmps8ynhm28 -I1128 20:30:24.261943 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.376809 3038403 solver.cpp:102] Creating training net from net file: /tmp/tmpxzopbysw +I0102 05:14:55.376976 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -19752,46 +19787,46 @@ bottom: "label" top: "loss" } -I1128 20:30:24.262109 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.262130 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.262185 2042758 net.cpp:382] data -> data -I1128 20:30:24.262209 2042758 net.cpp:382] data -> label -I1128 20:30:24.262347 2042758 net.cpp:124] Setting up data -I1128 20:30:24.262367 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.262375 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.262451 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.262516 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.262591 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.262604 2042758 net.cpp:408] conv <- data -I1128 20:30:24.262670 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.262766 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.262778 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.262845 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.262926 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.262944 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.263011 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.263082 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.263366 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.263381 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.263389 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.263450 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.263465 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.263473 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.263478 2042758 net.cpp:408] loss <- label -I1128 20:30:24.263546 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.263566 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.263582 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.263588 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.263600 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.263612 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.263617 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.263621 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.263633 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.263638 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.263643 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.263649 2042758 net.cpp:257] Network initialization done. 
-I1128 20:30:24.263777 2042758 solver.cpp:190] Creating test net (#0) specified by net file: /tmp/tmps8ynhm28 -I1128 20:30:24.263798 2042758 net.cpp:53] Initializing net from parameters: +I0102 05:14:55.377094 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.377106 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.377116 3038403 net.cpp:382] data -> data +I0102 05:14:55.377130 3038403 net.cpp:382] data -> label +I0102 05:14:55.377144 3038403 net.cpp:124] Setting up data +I0102 05:14:55.377151 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.377161 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.377168 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.377175 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.377188 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.377194 3038403 net.cpp:408] conv <- data +I0102 05:14:55.377203 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.377235 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.377243 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.377250 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.377264 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.377275 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.377281 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.377290 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.377465 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.377475 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.377481 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.377492 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.377502 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.377509 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.377516 3038403 net.cpp:408] loss <- label +I0102 05:14:55.377524 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.377537 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.377557 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.377563 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.377570 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.377581 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.377588 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.377594 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.377600 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.377609 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.377614 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.377625 3038403 net.cpp:257] Network initialization done. 
+I0102 05:14:55.377790 3038403 solver.cpp:190] Creating test net (#0) specified by net file: /tmp/tmpxzopbysw +I0102 05:14:55.377820 3038403 net.cpp:53] Initializing net from parameters: name: "testnet" force_backward: true state { @@ -19869,77 +19904,77 @@ bottom: "label" top: "loss" } -I1128 20:30:24.263974 2042758 layer_factory.hpp:77] Creating layer data -I1128 20:30:24.263994 2042758 net.cpp:86] Creating Layer data -I1128 20:30:24.264003 2042758 net.cpp:382] data -> data -I1128 20:30:24.264079 2042758 net.cpp:382] data -> label -I1128 20:30:24.264093 2042758 net.cpp:124] Setting up data -I1128 20:30:24.264101 2042758 net.cpp:131] Top shape: 5 2 3 4 (120) -I1128 20:30:24.264168 2042758 net.cpp:131] Top shape: 5 1 1 1 (5) -I1128 20:30:24.264176 2042758 net.cpp:139] Memory required for data: 500 -I1128 20:30:24.264242 2042758 layer_factory.hpp:77] Creating layer conv -I1128 20:30:24.264304 2042758 net.cpp:86] Creating Layer conv -I1128 20:30:24.264318 2042758 net.cpp:408] conv <- data -I1128 20:30:24.264329 2042758 net.cpp:382] conv -> conv -I1128 20:30:24.264461 2042758 net.cpp:124] Setting up conv -I1128 20:30:24.264472 2042758 net.cpp:131] Top shape: 5 11 8 9 (3960) -I1128 20:30:24.264528 2042758 net.cpp:139] Memory required for data: 16340 -I1128 20:30:24.264603 2042758 layer_factory.hpp:77] Creating layer ip -I1128 20:30:24.264621 2042758 net.cpp:86] Creating Layer ip -I1128 20:30:24.264680 2042758 net.cpp:408] ip <- conv -I1128 20:30:24.264772 2042758 net.cpp:382] ip -> ip_blob -I1128 20:30:24.265033 2042758 net.cpp:124] Setting up ip -I1128 20:30:24.265040 2042758 net.cpp:131] Top shape: 5 13 (65) -I1128 20:30:24.265048 2042758 net.cpp:139] Memory required for data: 16600 -I1128 20:30:24.265059 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.265067 2042758 net.cpp:86] Creating Layer loss -I1128 20:30:24.265072 2042758 net.cpp:408] loss <- ip_blob -I1128 20:30:24.265079 2042758 net.cpp:408] loss <- label -I1128 20:30:24.265085 2042758 net.cpp:382] loss -> loss -I1128 20:30:24.265095 2042758 layer_factory.hpp:77] Creating layer loss -I1128 20:30:24.265112 2042758 net.cpp:124] Setting up loss -I1128 20:30:24.265118 2042758 net.cpp:131] Top shape: (1) -I1128 20:30:24.265125 2042758 net.cpp:134] with loss weight 1 -I1128 20:30:24.265134 2042758 net.cpp:139] Memory required for data: 16604 -I1128 20:30:24.265141 2042758 net.cpp:200] loss needs backward computation. -I1128 20:30:24.265147 2042758 net.cpp:200] ip needs backward computation. -I1128 20:30:24.265153 2042758 net.cpp:200] conv needs backward computation. -I1128 20:30:24.265161 2042758 net.cpp:202] data does not need backward computation. -I1128 20:30:24.265166 2042758 net.cpp:244] This network produces output loss -I1128 20:30:24.265174 2042758 net.cpp:257] Network initialization done. -I1128 20:30:24.265216 2042758 solver.cpp:57] Solver scaffolding done. 
-I1128 20:30:24.265776 2042758 solver.cpp:289] Solving testnet -I1128 20:30:24.265786 2042758 solver.cpp:290] Learning Rate Policy: inv -I1128 20:30:24.265899 2042758 solver.cpp:347] Iteration 0, Testing net (#0) -I1128 20:30:24.267552 2042758 solver.cpp:414] Test net output #0: loss = 87.0649 (* 1 = 87.0649 loss) -I1128 20:30:24.268040 2042758 solver.cpp:239] Iteration 0 (0 iter/s, 0.002s/100 iters), loss = 87.3365 -I1128 20:30:24.268070 2042758 solver.cpp:258] Train net output #0: loss = 87.3365 (* 1 = 87.3365 loss) -I1128 20:30:24.268146 2042758 sgd_solver.cpp:112] Iteration 0, lr = 0.01 -I1128 20:30:24.278916 2042758 solver.cpp:347] Iteration 10, Testing net (#0) -I1128 20:30:24.280683 2042758 solver.cpp:414] Test net output #0: loss = 72.4531 (* 1 = 72.4531 loss) -I1128 20:30:24.290028 2042758 solver.cpp:347] Iteration 20, Testing net (#0) -I1128 20:30:24.291792 2042758 solver.cpp:414] Test net output #0: loss = 87.3365 (* 1 = 87.3365 loss) -I1128 20:30:24.308894 2042758 solver.cpp:347] Iteration 30, Testing net (#0) -I1128 20:30:24.310616 2042758 solver.cpp:414] Test net output #0: loss = 87.3365 (* 1 = 87.3365 loss) -I1128 20:30:24.325353 2042758 solver.cpp:347] Iteration 40, Testing net (#0) -I1128 20:30:24.327210 2042758 solver.cpp:414] Test net output #0: loss = 87.3365 (* 1 = 87.3365 loss) -I1128 20:30:24.337836 2042758 solver.cpp:347] Iteration 50, Testing net (#0) -I1128 20:30:24.339730 2042758 solver.cpp:414] Test net output #0: loss = 85.2998 (* 1 = 85.2998 loss) -I1128 20:30:24.358696 2042758 solver.cpp:347] Iteration 60, Testing net (#0) -I1128 20:30:24.360590 2042758 solver.cpp:414] Test net output #0: loss = 87.3365 (* 1 = 87.3365 loss) -I1128 20:30:24.372735 2042758 solver.cpp:347] Iteration 70, Testing net (#0) -I1128 20:30:24.374702 2042758 solver.cpp:414] Test net output #0: loss = 87.3365 (* 1 = 87.3365 loss) -I1128 20:30:24.387136 2042758 solver.cpp:347] Iteration 80, Testing net (#0) -I1128 20:30:24.388800 2042758 solver.cpp:414] Test net output #0: loss = 87.3365 (* 1 = 87.3365 loss) -I1128 20:30:24.395992 2042758 solver.cpp:347] Iteration 90, Testing net (#0) -I1128 20:30:24.397715 2042758 solver.cpp:414] Test net output #0: loss = 86.0843 (* 1 = 86.0843 loss) -I1128 20:30:24.412814 2042758 solver.cpp:327] Iteration 100, loss = 51.5943 -I1128 20:30:24.412891 2042758 solver.cpp:347] Iteration 100, Testing net (#0) -I1128 20:30:24.414717 2042758 solver.cpp:414] Test net output #0: loss = 87.1567 (* 1 = 87.1567 loss) -I1128 20:30:24.414738 2042758 solver.cpp:332] Optimization Done. 
+I0102 05:14:55.377943 3038403 layer_factory.hpp:77] Creating layer data +I0102 05:14:55.377954 3038403 net.cpp:86] Creating Layer data +I0102 05:14:55.377962 3038403 net.cpp:382] data -> data +I0102 05:14:55.377974 3038403 net.cpp:382] data -> label +I0102 05:14:55.377987 3038403 net.cpp:124] Setting up data +I0102 05:14:55.377993 3038403 net.cpp:131] Top shape: 5 2 3 4 (120) +I0102 05:14:55.378001 3038403 net.cpp:131] Top shape: 5 1 1 1 (5) +I0102 05:14:55.378007 3038403 net.cpp:139] Memory required for data: 500 +I0102 05:14:55.378013 3038403 layer_factory.hpp:77] Creating layer conv +I0102 05:14:55.378023 3038403 net.cpp:86] Creating Layer conv +I0102 05:14:55.378029 3038403 net.cpp:408] conv <- data +I0102 05:14:55.378042 3038403 net.cpp:382] conv -> conv +I0102 05:14:55.378073 3038403 net.cpp:124] Setting up conv +I0102 05:14:55.378078 3038403 net.cpp:131] Top shape: 5 11 8 9 (3960) +I0102 05:14:55.378085 3038403 net.cpp:139] Memory required for data: 16340 +I0102 05:14:55.378098 3038403 layer_factory.hpp:77] Creating layer ip +I0102 05:14:55.378108 3038403 net.cpp:86] Creating Layer ip +I0102 05:14:55.378113 3038403 net.cpp:408] ip <- conv +I0102 05:14:55.378120 3038403 net.cpp:382] ip -> ip_blob +I0102 05:14:55.378270 3038403 net.cpp:124] Setting up ip +I0102 05:14:55.378278 3038403 net.cpp:131] Top shape: 5 13 (65) +I0102 05:14:55.378285 3038403 net.cpp:139] Memory required for data: 16600 +I0102 05:14:55.378296 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.378306 3038403 net.cpp:86] Creating Layer loss +I0102 05:14:55.378311 3038403 net.cpp:408] loss <- ip_blob +I0102 05:14:55.378319 3038403 net.cpp:408] loss <- label +I0102 05:14:55.378325 3038403 net.cpp:382] loss -> loss +I0102 05:14:55.378336 3038403 layer_factory.hpp:77] Creating layer loss +I0102 05:14:55.378351 3038403 net.cpp:124] Setting up loss +I0102 05:14:55.378357 3038403 net.cpp:131] Top shape: (1) +I0102 05:14:55.378365 3038403 net.cpp:134] with loss weight 1 +I0102 05:14:55.378374 3038403 net.cpp:139] Memory required for data: 16604 +I0102 05:14:55.378379 3038403 net.cpp:200] loss needs backward computation. +I0102 05:14:55.378386 3038403 net.cpp:200] ip needs backward computation. +I0102 05:14:55.378391 3038403 net.cpp:200] conv needs backward computation. +I0102 05:14:55.378397 3038403 net.cpp:202] data does not need backward computation. +I0102 05:14:55.378402 3038403 net.cpp:244] This network produces output loss +I0102 05:14:55.378412 3038403 net.cpp:257] Network initialization done. +I0102 05:14:55.378433 3038403 solver.cpp:57] Solver scaffolding done. 
+I0102 05:14:55.378986 3038403 solver.cpp:289] Solving testnet +I0102 05:14:55.379006 3038403 solver.cpp:290] Learning Rate Policy: inv +I0102 05:14:55.379029 3038403 solver.cpp:347] Iteration 0, Testing net (#0) +I0102 05:14:55.380685 3038403 solver.cpp:414] Test net output #0: loss = 82.2748 (* 1 = 82.2748 loss) +I0102 05:14:55.383117 3038403 solver.cpp:239] Iteration 0 (-0.00199566 iter/s, 0.004s/100 iters), loss = 87.3365 +I0102 05:14:55.383177 3038403 solver.cpp:258] Train net output #0: loss = 87.3365 (* 1 = 87.3365 loss) +I0102 05:14:55.383191 3038403 sgd_solver.cpp:112] Iteration 0, lr = 0.01 +I0102 05:14:55.396688 3038403 solver.cpp:347] Iteration 10, Testing net (#0) +I0102 05:14:55.398443 3038403 solver.cpp:414] Test net output #0: loss = 79.2608 (* 1 = 79.2608 loss) +I0102 05:14:55.406282 3038403 solver.cpp:347] Iteration 20, Testing net (#0) +I0102 05:14:55.407900 3038403 solver.cpp:414] Test net output #0: loss = 87.3365 (* 1 = 87.3365 loss) +I0102 05:14:55.413710 3038403 solver.cpp:347] Iteration 30, Testing net (#0) +I0102 05:14:55.415333 3038403 solver.cpp:414] Test net output #0: loss = 87.3365 (* 1 = 87.3365 loss) +I0102 05:14:55.431182 3038403 solver.cpp:347] Iteration 40, Testing net (#0) +I0102 05:14:55.432875 3038403 solver.cpp:414] Test net output #0: loss = 85.5094 (* 1 = 85.5094 loss) +I0102 05:14:55.452396 3038403 solver.cpp:347] Iteration 50, Testing net (#0) +I0102 05:14:55.454315 3038403 solver.cpp:414] Test net output #0: loss = 86.3175 (* 1 = 86.3175 loss) +I0102 05:14:55.464682 3038403 solver.cpp:347] Iteration 60, Testing net (#0) +I0102 05:14:55.466418 3038403 solver.cpp:414] Test net output #0: loss = 87.3365 (* 1 = 87.3365 loss) +I0102 05:14:55.476182 3038403 solver.cpp:347] Iteration 70, Testing net (#0) +I0102 05:14:55.478013 3038403 solver.cpp:414] Test net output #0: loss = 87.3365 (* 1 = 87.3365 loss) +I0102 05:14:55.490283 3038403 solver.cpp:347] Iteration 80, Testing net (#0) +I0102 05:14:55.491930 3038403 solver.cpp:414] Test net output #0: loss = 87.3365 (* 1 = 87.3365 loss) +I0102 05:14:55.503432 3038403 solver.cpp:347] Iteration 90, Testing net (#0) +I0102 05:14:55.505100 3038403 solver.cpp:414] Test net output #0: loss = 87.3365 (* 1 = 87.3365 loss) +I0102 05:14:55.520737 3038403 solver.cpp:327] Iteration 100, loss = 69.8692 +I0102 05:14:55.520798 3038403 solver.cpp:347] Iteration 100, Testing net (#0) +I0102 05:14:55.522401 3038403 solver.cpp:414] Test net output #0: loss = 86.8133 (* 1 = 86.8133 loss) +I0102 05:14:55.522428 3038403 solver.cpp:332] Optimization Done. . 
---------------------------------------------------------------------- -Ran 52 tests in 22.885s +Ran 52 tests in 16.820s OK make[5]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' @@ -19959,7 +19994,7 @@ debian/rules override_dh_auto_install-arch make[1]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997' dh_auto_install --builddirectory="caffe_cpu_build" -- install - cd caffe_cpu_build && make -j15 install DESTDIR=/build/caffe-1.0.0\+git20180821.99bd997/debian/tmp AM_UPDATE_INFO_DIR=no "INSTALL=install --strip-program=true" install + cd caffe_cpu_build && make -j16 install DESTDIR=/build/caffe-1.0.0\+git20180821.99bd997/debian/tmp AM_UPDATE_INFO_DIR=no "INSTALL=install --strip-program=true" install make[2]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' /usr/bin/cmake -S/build/caffe-1.0.0+git20180821.99bd997 -B/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build --check-build-system CMakeFiles/Makefile.cmake 0 /usr/bin/cmake -E cmake_progress_start /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/CMakeFiles /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/CMakeFiles/progress.marks @@ -20014,79 +20049,79 @@ make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/CMakeFiles/convert_mnist_data.dir/DependInfo.cmake --color= make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/CMakeFiles/convert_mnist_siamese_data.dir/DependInfo.cmake --color= -make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/CMakeFiles/convert_cifar_data.dir/DependInfo.cmake --color= make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/examples/CMakeFiles/convert_mnist_siamese_data.dir/DependInfo.cmake --color= +make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' cd /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build && /usr/bin/cmake -E cmake_depends "Unix Makefiles" 
/build/caffe-1.0.0+git20180821.99bd997 /build/caffe-1.0.0+git20180821.99bd997/python /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/python /build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build/python/CMakeFiles/pycaffe.dir/DependInfo.cmake --color= make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make -f tools/CMakeFiles/compute_image_mean.dir/build.make tools/CMakeFiles/compute_image_mean.dir/build make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make -f examples/CMakeFiles/convert_mnist_data.dir/build.make examples/CMakeFiles/convert_mnist_data.dir/build make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +make -f examples/CMakeFiles/convert_mnist_data.dir/build.make examples/CMakeFiles/convert_mnist_data.dir/build make -f examples/CMakeFiles/convert_cifar_data.dir/build.make examples/CMakeFiles/convert_cifar_data.dir/build make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make -f tools/CMakeFiles/convert_imageset.dir/build.make tools/CMakeFiles/convert_imageset.dir/build +make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make -f examples/CMakeFiles/convert_mnist_siamese_data.dir/build.make examples/CMakeFiles/convert_mnist_siamese_data.dir/build make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make -f tools/CMakeFiles/extract_features.dir/build.make tools/CMakeFiles/extract_features.dir/build make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make -f tools/CMakeFiles/caffe.bin.dir/build.make tools/CMakeFiles/caffe.bin.dir/build -make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make -f tools/CMakeFiles/upgrade_net_proto_text.dir/build.make tools/CMakeFiles/upgrade_net_proto_text.dir/build +make -f tools/CMakeFiles/upgrade_solver_proto_text.dir/build.make tools/CMakeFiles/upgrade_solver_proto_text.dir/build make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make -f tools/CMakeFiles/upgrade_net_proto_binary.dir/build.make tools/CMakeFiles/upgrade_net_proto_binary.dir/build make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make -f tools/CMakeFiles/upgrade_solver_proto_text.dir/build.make tools/CMakeFiles/upgrade_solver_proto_text.dir/build +make -f tools/CMakeFiles/caffe.bin.dir/build.make tools/CMakeFiles/caffe.bin.dir/build +make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +make -f tools/CMakeFiles/upgrade_net_proto_text.dir/build.make tools/CMakeFiles/upgrade_net_proto_text.dir/build make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make -f examples/CMakeFiles/classification.dir/build.make examples/CMakeFiles/classification.dir/build make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +make[4]: Nothing to be done for 'tools/CMakeFiles/compute_image_mean.dir/build'. +make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Nothing to be done for 'examples/CMakeFiles/convert_mnist_data.dir/build'. 
make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Nothing to be done for 'examples/CMakeFiles/convert_cifar_data.dir/build'. make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make[4]: Nothing to be done for 'tools/CMakeFiles/compute_image_mean.dir/build'. +make[4]: Nothing to be done for 'tools/CMakeFiles/convert_imageset.dir/build'. make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Nothing to be done for 'examples/CMakeFiles/convert_mnist_siamese_data.dir/build'. make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' +[ 82%] Built target compute_image_mean +[ 84%] Built target convert_mnist_data +[ 85%] Built target convert_cifar_data +[ 87%] Built target convert_imageset +[ 89%] Built target convert_mnist_siamese_data make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make[4]: Nothing to be done for 'tools/CMakeFiles/convert_imageset.dir/build'. +make[4]: Nothing to be done for 'tools/CMakeFiles/extract_features.dir/build'. make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -[ 82%] Built target convert_mnist_data -[ 84%] Built target convert_cifar_data -[ 85%] Built target compute_image_mean -[ 87%] Built target convert_mnist_siamese_data -[ 89%] Built target convert_imageset +[ 90%] Built target extract_features make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make[4]: Nothing to be done for 'tools/CMakeFiles/extract_features.dir/build'. +make[4]: Nothing to be done for 'tools/CMakeFiles/upgrade_solver_proto_text.dir/build'. make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Nothing to be done for 'tools/CMakeFiles/upgrade_net_proto_binary.dir/build'. make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make[4]: Nothing to be done for 'tools/CMakeFiles/upgrade_net_proto_text.dir/build'. +make[4]: Nothing to be done for 'tools/CMakeFiles/caffe.bin.dir/build'. make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make[4]: Nothing to be done for 'tools/CMakeFiles/upgrade_solver_proto_text.dir/build'. +make[4]: Nothing to be done for 'tools/CMakeFiles/upgrade_net_proto_text.dir/build'. make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Nothing to be done for 'examples/CMakeFiles/classification.dir/build'. make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -make[4]: Nothing to be done for 'tools/CMakeFiles/caffe.bin.dir/build'. 
-make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' -[ 90%] Built target extract_features -[ 92%] Built target upgrade_net_proto_binary -[ 93%] Built target upgrade_net_proto_text -[ 95%] Built target classification make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make -f python/CMakeFiles/pycaffe.dir/build.make python/CMakeFiles/pycaffe.dir/build -[ 96%] Built target upgrade_solver_proto_text -[ 98%] Built target caffe.bin +[ 92%] Built target upgrade_solver_proto_text +[ 93%] Built target upgrade_net_proto_binary +[ 95%] Built target caffe.bin +[ 96%] Built target upgrade_net_proto_text +[ 98%] Built target classification make[4]: Entering directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' make[4]: Nothing to be done for 'python/CMakeFiles/pycaffe.dir/build'. make[4]: Leaving directory '/build/caffe-1.0.0+git20180821.99bd997/caffe_cpu_build' @@ -20320,19 +20355,19 @@ dpkg-gencontrol: warning: package caffe-tools-cpu: substitution variable ${python3:Depends} unused, but is defined dpkg-gencontrol: warning: package libcaffe-cpu-dev: substitution variable ${python3:Depends} unused, but is defined dpkg-gencontrol: warning: package libcaffe-cpu1: substitution variable ${python3:Depends} unused, but is defined -dpkg-gencontrol: warning: package caffe-tools-cpu: substitution variable ${python3:Depends} unused, but is defined dpkg-gencontrol: warning: package python3-caffe-cpu: substitution variable ${python3:Provides} unused, but is defined dpkg-gencontrol: warning: package python3-caffe-cpu: substitution variable ${python3:Versions} unused, but is defined +dpkg-gencontrol: warning: package caffe-tools-cpu: substitution variable ${python3:Depends} unused, but is defined dpkg-gencontrol: warning: package caffe-doc: substitution variable ${python3:Depends} unused, but is defined dh_md5sums -O--buildsystem=cmake dh_builddeb -O--buildsystem=cmake dpkg-deb: building package 'caffe-cpu' in '../caffe-cpu_1.0.0+git20180821.99bd997-2_amd64.deb'. +dpkg-deb: building package 'caffe-tools-cpu' in '../caffe-tools-cpu_1.0.0+git20180821.99bd997-2_amd64.deb'. dpkg-deb: building package 'libcaffe-cpu1' in '../libcaffe-cpu1_1.0.0+git20180821.99bd997-2_amd64.deb'. +dpkg-deb: building package 'caffe-tools-cpu-dbgsym' in '../caffe-tools-cpu-dbgsym_1.0.0+git20180821.99bd997-2_amd64.deb'. dpkg-deb: building package 'python3-caffe-cpu-dbgsym' in '../python3-caffe-cpu-dbgsym_1.0.0+git20180821.99bd997-2_amd64.deb'. dpkg-deb: building package 'libcaffe-cpu-dev' in '../libcaffe-cpu-dev_1.0.0+git20180821.99bd997-2_amd64.deb'. dpkg-deb: building package 'libcaffe-cpu1-dbgsym' in '../libcaffe-cpu1-dbgsym_1.0.0+git20180821.99bd997-2_amd64.deb'. -dpkg-deb: building package 'caffe-tools-cpu' in '../caffe-tools-cpu_1.0.0+git20180821.99bd997-2_amd64.deb'. -dpkg-deb: building package 'caffe-tools-cpu-dbgsym' in '../caffe-tools-cpu-dbgsym_1.0.0+git20180821.99bd997-2_amd64.deb'. dpkg-deb: building package 'caffe-doc' in '../caffe-doc_1.0.0+git20180821.99bd997-2_all.deb'. dpkg-deb: building package 'python3-caffe-cpu' in '../python3-caffe-cpu_1.0.0+git20180821.99bd997-2_amd64.deb'. 
dpkg-genbuildinfo --build=binary @@ -20342,12 +20377,14 @@ dpkg-buildpackage: info: binary-only upload (no source included) dpkg-genchanges: info: not including original source code in upload I: copying local configuration +I: user script /srv/workspace/pbuilder/2636591/tmp/hooks/B01_cleanup starting +I: user script /srv/workspace/pbuilder/2636591/tmp/hooks/B01_cleanup finished I: unmounting dev/ptmx filesystem I: unmounting dev/pts filesystem I: unmounting dev/shm filesystem I: unmounting proc filesystem I: unmounting sys filesystem I: cleaning the build env -I: removing directory /srv/workspace/pbuilder/1832359 and its subdirectories -I: Current time: Sun Nov 28 20:34:15 -12 2021 -I: pbuilder-time-stamp: 1638174855 +I: removing directory /srv/workspace/pbuilder/2636591 and its subdirectories +I: Current time: Mon Jan 2 05:16:43 +14 2023 +I: pbuilder-time-stamp: 1672586203